+ ./ya make -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --add-peerdirs-tests all --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -A --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.l7ZiKJHGk7 --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest ydb --build-custom-json=/home/runner/actions_runner/_work/ydb/ydb/graph.json --custom-context=/home/runner/actions_runner/_work/ydb/ydb/context.json --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring local and dist store caches Configuration done. Preparing for execution |33.3%| CLEANING SYMRES | 5.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a | 0.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a | 0.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a | 4.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut | 4.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a | 4.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a | 4.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a | 4.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a | 4.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/libcore-tx-locks.a | 3.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blob_depot/ut/unittest | 3.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tracing/libydb-core-tracing.a | 3.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} | 3.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a | 3.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a | 3.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a | 3.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} | 3.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/manager.cpp | 3.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} | 3.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} | 3.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a | 4.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... 
results_accumulator.log} | 4.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a | 4.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a | 4.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a | 4.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} | 4.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_vacuum/unittest | 4.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/transfer/libydb-core-transfer.a | 4.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a | 4.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} | 4.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} | 4.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_replication/unittest | 4.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a | 4.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/host/libcore-kqp-host.a | 4.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/backup/async_jobs/libcolumnshard-backup-async_jobs.a | 4.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a | 4.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a | 5.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a | 5.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.a | 5.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a | 5.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a | 5.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a | 6.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a | 6.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a | 6.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a | 8.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a | 9.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a | 9.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a | 9.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a | 9.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a | 9.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a | 9.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a | 9.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |10.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |10.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |10.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |10.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |10.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/retry/liblibrary-cpp-retry.a |10.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |10.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |10.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |10.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |10.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |10.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |10.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |10.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |10.8%| [AR] {BAZEL_DOWNLOAD} $(B)/build/cow/on/libbuild-cow-on.a |10.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |10.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |10.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/http-parser/libcontrib-deprecated-http-parser.a |10.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |10.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |10.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |11.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |11.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |11.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |11.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |11.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |11.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |11.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |11.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |11.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |11.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |11.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |11.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |11.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |11.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |11.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |11.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |12.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |12.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |12.1%| PREPARE $(YMAKE_PYTHON3-212672652) - 6.22 MB |12.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |12.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |12.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |12.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |12.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |12.4%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/stress/oltp_workload/oltp_workload |12.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |12.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |12.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |12.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |13.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |13.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |13.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |13.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |13.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |14.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |15.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |15.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |15.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |15.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |16.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/priorities/usage/events.cpp |16.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |16.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/priorities/usage/abstract.cpp |16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |16.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/import/session.h_serialized.cpp |16.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |16.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |16.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |17.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |17.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |17.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |17.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |17.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/common/libpy3tests-stress-common.global.a |17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |17.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |17.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |17.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |17.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |17.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |18.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |18.1%| [CP] {default-linux-x86_64, release, asan} $(B)/library/cpp/sanitizer/plugin/sanitizer.py.pyplugin |18.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohappyeyeballs/libpy3contrib-python-aiohappyeyeballs.global.a |18.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |18.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |18.9%| PREPARE $(LLD_ROOT-3107549726) - 25.18 MB |18.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |19.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |18.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |19.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |19.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |19.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service.cpp |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |19.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |20.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/lwtrace_probes.cpp |20.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |20.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |20.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |21.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |21.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |21.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/priorities/service/counters.cpp |21.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |22.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |22.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |22.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |23.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |23.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |23.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |23.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |23.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |24.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |24.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |24.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |24.9%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |25.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |25.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |25.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |25.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |25.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |25.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |26.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |27.4%| PREPARE $(YMAKE_PYTHON3) |27.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |27.1%| PREPARE $(CLANG_FORMAT-3815817643) |28.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |28.3%| PREPARE $(VCS) |28.7%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context |28.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tracing/trace_collection.cpp |29.0%| PREPARE $(BLACK-3355069439) - 8.40 MB |29.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |30.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |30.0%| PREPARE $(FLAKE8_PY3-3596799299) - 16.79 MB |30.1%| PREPARE $(RUFF-3583455953) - 12.10 MB |30.3%| PREPARE $(PYTHON) - 44.57 MB |30.3%| PREPARE $(TEST_TOOL_HOST-sbr:10658668958) - 25.44 MB |30.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/replication.cpp |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |33.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |36.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |36.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/libydb-services-fq.a |37.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/kesus/libydb-services-kesus.a |37.6%| PREPARE $(CLANG-1922233694) - 192.97 MB |38.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/node_service/kqp_node_service.cpp |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |38.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/node_service/kqp_node_state.cpp |39.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/local_rpc/helper.cpp |39.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |39.6%| PREPARE $(CLANG16-1380963495) - 293.63 MB |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/value_helpers/libimpl-internal-value_helpers.a |39.8%| PREPARE $(WITH_JDK17-sbr:9470949154) - 176.18 MB |40.1%| PREPARE $(JDK17-2548586558) |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |40.2%| PREPARE $(WITH_JDK-sbr:9470949154) |40.2%| PREPARE $(JDK_DEFAULT-2548586558) |40.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/state_server_interface.cpp |40.4%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/service/service.cpp |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |40.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/usage/config.cpp |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |40.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |40.8%| PREPARE $(CLANG18-1866954364) - 302.02 MB |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/plain_status/libimpl-internal-plain_status.a |41.6%| PREPARE $(CLANG-2403293607) - 293.63 MB |41.9%| PREPARE $(CLANG20-882982600) |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |41.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/backup/import/import_actor.cpp |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |42.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |42.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |42.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |42.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |43.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |43.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |43.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |43.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |43.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/service/manager.cpp |42.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |43.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_transform.cpp |43.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |43.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_index.cpp |43.7%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |44.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |44.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/column_table.cpp |44.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.global.a |43.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |43.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_delete.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |43.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_column.cpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/executor/libclient-types-executor.a |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/s3_backups/s3_backups |44.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_column.cpp |44.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |44.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |44.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/scheme.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |44.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/transfer_writer.cpp |44.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/libydb-services-config.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/audit_helpers/audit_helper.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |44.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/auth/libydb-services-auth.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |44.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |44.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |44.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_mon.cpp |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/purecalc_input.cpp |45.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/libydb-core-mon.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |43.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |44.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_host.cpp |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |44.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/purecalc_output.cpp |44.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/bridge/libydb-services-bridge.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/time/libsrc-library-time.a |44.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/row_table.cpp |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/backup/libydb-services-backup.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/codes/libservices-datastreams-codes.a |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_opt.cpp |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |44.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/libydb-services-cms.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |44.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/abstract.cpp |44.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |44.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/acquire_snapshot_impl.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |44.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_read_validate.cpp |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/discovery/libydb-services-discovery.a |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/libydb-services-metadata.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/tablet/libydb-services-tablet.a |45.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/view/libydb-services-view.a |45.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/view/behaviour.cpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_write.cpp |45.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |44.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/protos/receipt/libsqs_topic-protos-receipt.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/optimization/libservices-metadata-optimization.a |44.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |44.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |45.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |45.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/queue_url/libservices-sqs_topic-queue_url.a |45.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |45.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |45.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/add_column.cpp |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |44.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/test_shard_mon.cpp |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/replication/libydb-services-replication.a |44.7%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/services/monitoring/libydb-services-monitoring.a |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |45.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/sqs_topic/libydb-services-sqs_topic.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |45.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |45.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ymq/libydb-services-ymq.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |45.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_impl.cpp |44.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/tx_initialize.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |45.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |45.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/tx_load_everything.cpp |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/test_shard/libydb-services-test_shard.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |45.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_index.cpp |45.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/libydb-services-ydb.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/rows/libformats-arrow-rows.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/events/liblibs-checkpointing-events.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/long_tx_service_impl.cpp |45.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_translate.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/load_actor_state.cpp |45.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |45.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |45.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/common/liblibs-row_dispatcher-common.a |45.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |45.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |45.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/commit_impl.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |45.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |45.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/services.cpp |45.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/queue_url/holder/libsqs_topic-queue_url-holder.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |45.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/utils/metadata_helpers.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |46.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/priorities/usage/service.cpp |45.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |45.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/helpers.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |46.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/test_tablet.cpp |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |46.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/utils/scheme_helpers.cpp |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/layers/libessentials-core-layers.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/utils/liblibs-control_plane_proxy-utils.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |46.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/block_events.cpp |46.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/fulltext_udf.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/math_udf.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/kqp_benches.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/digest_udf.cpp |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/json2_udf.cpp |46.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/libydb-core-health_check.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a |46.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/re2_udf.cpp |46.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/host/kqp_runner.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |46.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |46.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/backup/async_jobs/import_downloader.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |46.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |46.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |46.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |46.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/test_shard_context.cpp |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |46.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/arrow_builders.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/unicode_udf.cpp |46.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/backup/import/task.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/url_lister/libcore-qplayer-url_lister.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |46.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |46.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |46.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |46.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |46.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/compile_service/helpers/libkqp-compile_service-helpers.a |46.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/string_udf.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/common/datetime2_udf.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |45.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |45.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/result_set_format/libkqp-common-result_set_format.a |45.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |46.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |46.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |46.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |46.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/test_tablet/tx_init_scheme.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |45.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |45.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |45.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |45.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |45.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_sharding.cpp |45.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |45.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_drop_stream_result.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/common/libbehaviour-streaming_query-common.a |45.9%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |45.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a |46.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |46.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |45.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |45.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |45.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |45.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |45.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_heartbeat.cpp |45.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |45.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |45.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/transfer/purecalc.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |45.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |45.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |45.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/partcheck/partcheck |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |44.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |44.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/script_executions_utils/libkqp-proxy_service-script_executions_utils.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/libydb-core-load_test.a |45.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |45.1%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |45.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |45.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |45.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |45.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/libydb-core-mind.a |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |44.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/libessentials-parser-common.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |44.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |44.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/proxy/libpersqueue-common-proxy.a |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/log_backend/json_envelope.cpp |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_actor_settings.cpp |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |45.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/net_classifier.h_serialized.cpp |45.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/test_runtime.cpp |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/blob/libpersqueue-pqtablet-blob.a |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/local_pgwire/local_pgwire_util.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/actors/pool_handlers_actors.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/local_pgwire/sql_parser.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |45.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/query_data/kqp_query_data.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/util.cpp |47.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/appdata.cpp |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |48.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/basics/runtime.cpp |49.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/wait_events.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_meta.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/next_token.cpp |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |51.6%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |51.6%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_schedulable_task.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/tree/dynamic.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_schedulable_actor.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_arrow_memory_pool.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_fulltext_analyze.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/counters.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_effects.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_join_helpers.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_program_builder.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_table.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_factory.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_sequencer_factory.cpp |51.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_service.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/tree/snapshot.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/config_examples.cpp |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ycsb/info_collector.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/aggregated_result.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/archive.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/interconnect_load.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/percentile.h_serialized.cpp |51.6%| PREPARE $(OS_SDK_ROOT-sbr:243881345) - 8.40 MB |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/backup/import/session.cpp |51.5%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |51.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split_antlr4/libproto_ast-gen-v1_proto_split_antlr4.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_data/kqp_predictor.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/log_backend/log_backend_build.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/address_classification/net_classifier.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_prune_columns_stage.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rename_unused_stage.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/common/columnshard.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_type_ann.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_session_info.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_dp_cost_based.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_rm_service.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/local_pgwire.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_operator.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/actors/scheme_actors.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_index.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_rules.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/virtual_group.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/common/kqp_ut_common.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/log_backend/log_backend.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update_index.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/actors/cpu_load_actors.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_vector_index.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/kqp_workload_service.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_statistics.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/request_controller_info.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_resource_estimation.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/console_interaction.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_service.cpp |51.4%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/node_report.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/register_node.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/monitoring.cpp |51.6%| [CC] 
{default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |51.6%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_defaults.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/run_script_actor/kqp_run_script_actor.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_resource_info_exchanger.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |51.7%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/local_pgwire_connection.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_data/kqp_prepared_query.cpp |51.5%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |51.5%| PREPARE $(GDB) - 25.18 MB |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_compute_statistics.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/select_groups.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_vector_actor.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/update_seen_operational.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_plan_conversion_utils.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_transformer.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_snapshot_manager.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_effects.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |51.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/bsc_audit.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/yql_single_query.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/grouper.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/vdisk_write.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config_fit_groups.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/propose_group_key.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/pgwire_kqp_proxy.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/common.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/pdisk_write.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_delete_index.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_databases_cache.cpp |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update.cpp |51.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_constant_folding_stage.cpp 
|51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/service_actor.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_returning.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/grpc_service.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config_fit_pdisks.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_indexes.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_buffer_lookup_actor.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/group_write.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/bulk_mkql_upsert.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/tables/table_queries.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/pdisk_log.cpp |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/quota/libpersqueue-pqtablet-quota.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/local_pgwire/local_pgwire_auth_actor.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/counters/libpersqueue-public-counters.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/datastreams_proxy.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/codecs/libpersqueue-public-codecs.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/balancer.cpp |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/describer/libpersqueue-public-describer.a |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/put_records_actor.cpp |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/libnc_private-common-v1.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/keyvalue_write.cpp |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert.cpp |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_index_generator/libpersqueue-public-partition_index_generator.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/write_meta/libpersqueue-public-write_meta.a |51.7%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_key_range/libpersqueue-public-partition_key_range.a |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/pdisk_read.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libcore-protos-nbs.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/scrub.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |51.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/out/libcore-protos-out.a |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/memory.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_fulltext_index.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__create_tablet.cpp |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/libydb-core-quoter.a |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/kqp_upsert.cpp |50.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |51.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |51.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/security/sasl/libcore-security-sasl.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |51.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/libydb-core-public_http.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |50.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/token_manager/libcore-security-token_manager.a |50.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libydb-core-protos.a |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/test_load_read_iterator.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/langver/libessentials-public-langver.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |50.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/libydb-core-security.a |50.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/leader_tablet_info.cpp |50.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/test_load_actor.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |50.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |51.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |51.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |51.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |50.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_uniq_helper.cpp |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |50.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__configure_subdomain.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |50.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |51.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |51.1%| [AR] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |51.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/drain.cpp |51.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |51.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/libydb-core-tablet.a |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |51.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_full_text_source.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |51.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive.cpp |51.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/follower_tablet_info.cpp |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/import/protos/libbackup-import-protos.a |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/domain_info.cpp |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/libydb-core-testlib.a |51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/kqp.cpp |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/backup/iscan/libcolumnshard-backup-iscan.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |51.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_balancer.cpp |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |52.0%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |52.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/testlib/actor_helpers.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/testlib/storage_helpers.cpp |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |51.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |51.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/remove.cpp |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/common.cpp |51.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |51.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/read.cpp |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/blob_set.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/read.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/boot_queue.cpp |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/error_collector.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_log.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/read.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/fill.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/duplicate_filtering.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_domains.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_pool_info.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__delete_tablet_result.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tablet_info.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/columnshard.h_serialized.cpp |52.1%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert_index.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__block_storage_result.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/common_data.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__adopt_tablet.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_group_info.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/columnshard.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/common.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_impl.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/splitter.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ycsb/kqp_select.cpp |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/histogram_borders.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/monitoring.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/req_tracer.cpp |52.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/scan.h_serialized.cpp |52.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/rbo/kqp_convert_to_physical.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__reassign_groups_on_decommit.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__response_tablet_seq.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__init_scheme.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/background_controller.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__reassign_groups.cpp |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/node_info.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/writes_monitor.cpp |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters/column_tables.cpp |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |52.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/address.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__configure_scale_recommender.cpp |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_statics.cpp |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang20-rt/lib/asan_static/libclang_rt.asan_static-x86_64.a |52.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |52.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/constructor.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_peer_stats_calculator.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |52.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__lock_tablet.cpp |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__resume_tablet.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__register_node.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_pile.cpp |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_dc_followers.cpp |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__request_tablet_owners.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |52.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__seize_tablets.cpp |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |52.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__request_tablet_seq.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ibdrv/libcontrib-libs-ibdrv.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__status.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/mind/hive/tx__disconnect_node.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_domain.cpp |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__load_everything.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__kill_node.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__release_tablets_reply.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libutils-fetch-proto.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libssh2/libcontrib-libs-libssh2.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__process_pending_operations.cpp |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__switch_drain.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__stop_tablet.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |52.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |52.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |52.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config_cmd.cpp |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |52.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_bridge.cpp |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |52.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__tablet_owners_reply.cpp |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |52.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/settings.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__set_down.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |52.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__seize_tablets_reply.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/disk_metrics.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__generate_data_ut.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__sync_tablets.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cluster_balancing.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__unlock_tablet.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_drive_status.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/rpc_client/libcpp-mapreduce-rpc_client.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_mapper.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/drop_donor.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/bridge.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/get_group.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__restart_tablet.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tablet_move_info.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/compaction_info.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/init_scheme.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_box.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_host_config.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/cmds_storage_pool.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__release_tablets.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/config.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/write.cpp |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablet_status.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/storage_stats_calculator.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__start_tablet.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/bsc.cpp |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablet_groups.cpp |52.6%| [AR] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |52.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |52.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_metrics_exchange.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/commit_config.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/update_group_latencies.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/update_last_seen_ready.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/common/result.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablet_metrics.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/self_heal.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/filter.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__process_boot_queue.cpp |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/migrate.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/load_everything.cpp |52.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/defs.cpp |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |52.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |52.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_layout_checker.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/read_coordinator.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/stat_processor.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/sys_view.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/predicate/predicate.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/events.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/fake_coordinator.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/base_with_blobs.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/common.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/action.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/actor.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/scan.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storage.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/shred.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/events/delete_blobs.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/local/storage.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tablet_flat_dummy.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/indexation.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/backup/iscan/iscan.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/manager.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/blobs_manager.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/engine_logs.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tx_helpers.cpp |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/stats.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_info.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/queue.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/remove.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/column_fetching/manager.cpp |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/portions.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tenant_runtime.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/tablet_helpers.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/layout_helpers.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |52.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script_counters.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/common_helper.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_reader/task.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/counters_manager.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/constructor.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/construction/context.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/blob_manager_db.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/logic.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/crypto/quictls/libngtcp2-crypto-quictls.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/controller/controller.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/manager.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/control.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/counters/portion_index.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |52.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |52.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/control.cpp |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/transfer.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/cs_helper.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/constructor.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/status.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/manager.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/request.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/test_client.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/transfer.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/changes.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/manager.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/merger.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/column_fetching/cache_policy.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/data_locks/manager/manager.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/merger.cpp |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/column_engine.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/move_portions.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/remove_portions.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/predicate/range.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/collector.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/metadata_accessor.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/merged_column.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |52.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snowball/libcontrib-libs-snowball.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/diagnostics/scan_diagnostics_actor.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/logic.cpp |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |52.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/data_accessor.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |52.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/compacted.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/libnc_private-iam-v1.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/column_engine_logs.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/common/context.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/remap.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/events/change_owning.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_portion_chunk.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |52.9%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/libcontrib-restricted-abseil-cpp-tstring.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/dump_helpers/libyt-lib-dump_helpers.a |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |52.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/libcontrib-restricted-abseil-cpp.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/ucontext_impl/libboost-context-ucontext_impl.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_accessor.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/portion_info.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/predicate/container.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/tracing/libengines-reader-tracing.a |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/logic.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/full_capture/libyt-lib-full_capture.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/iterator.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/db_wrapper.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/builder.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/index_chunk.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/utf8_range/librestricted-google-utf8_range.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/loading/stages.cpp |52.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |53.0%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |53.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/merge_subset.cpp |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/tier_info.cpp |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/checker.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |53.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/header.cpp |53.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/column_record.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/comparable.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/common.h_serialized.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/tiering/common.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/common.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_portion.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/write_with_blobs.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/abstract.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_cursor.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.cpp |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/description.cpp |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/written.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script.cpp |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |53.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/resolver.cpp |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/abstract.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/meta.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common/result.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetch_steps.cpp |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_context.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/predicate/filter.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/constructor.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/read_with_blobs.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/iterator.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/concurrent_hash/libcpp-containers-concurrent_hash.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetched_data.cpp |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/constructor.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/temp_files/libyt-lib-temp_files.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/splitter.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |53.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/cron_expression/liblibrary-cpp-cron_expression.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |53.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script_cursor.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/default_fetching.cpp |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |53.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |53.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessors_ordering.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/logic.cpp |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |53.4%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/context.cpp |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/portions/constructors.cpp |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/scanner.cpp |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_file_download/libyt-lib-yt_file_download.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |53.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/manager.cpp |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |53.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/schema.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/limit_sorted.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/context.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |53.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/http/misc/libcpp-http-misc.a |53.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/events.cpp |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |53.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetching.cpp |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |53.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/full_scan_sorted.cpp |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/executor.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/mime/types/libcpp-mime-types.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessor_callback.cpp |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/counters.cpp |52.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/read_metadata.cpp |52.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/put_status.cpp |52.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/constructor.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/merge.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/sub_columns_fetching.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/metadata.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/abstract.cpp |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |52.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/constructors.cpp |52.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetching.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_file_client.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |53.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/iterator.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/plain_read_data.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/context.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/schema.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/private_events.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetching.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/not_sorted.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/chaos_lease.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/cluster_directory.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/target_cluster_injecting_channel.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |53.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/schema.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/aggr.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |53.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/common.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/constructors.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetched_data.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/ut_common.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |53.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/metadata.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/plain_read_data.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/dynamic.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/provided.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/abstract.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/constructor.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/key_bound_compressor.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/context.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/constrained_schema.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/limit.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/scanner.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/schema.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/source.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/counters.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/chaos_lease_base.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_file_session.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/source.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/source.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.global.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/source.cpp |53.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/iterator.cpp |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |53.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/metadata.cpp |53.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/schema.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/tiering/tier_info.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/metadata.cpp |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetched_data.cpp |52.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/snapshot_scheme.cpp |52.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/source.cpp |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/yt/yt/core/libyt-yt-core.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/objects_cache.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/constructor.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/actor/actor.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/constructor.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/preset_schemas.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/result.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/meta.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/meta.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/interval.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/constructor.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/filtered_scheme.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/abstract_scheme.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/constructor.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/counters.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/constructor.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/fetcher.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/tiering.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blob.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/metadata.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/column_features.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/abstract_scheme.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/source.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/scheme.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/collection.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/counters.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/index/index.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_version.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/engines/storage/granule/storage.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/filtered_scheme.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/const.cpp |53.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters.cpp |53.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/schema.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_diff.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/fulltext/libfulltext_udf.global.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/versioned_index.cpp |54.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/index_info.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/context.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/constructor.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/meta.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/scheme/snapshot_scheme.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/meta.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |54.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/column.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/container.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/counters.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |54.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/optimizer.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/header.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/service/counters.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/blob_constructor.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/meta.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/constructor.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/meta.cpp |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/service/common.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/constructor.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/stages.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/constructor.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |54.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/chunk_meta.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/settings.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/abstract.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portions_index.cpp |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tx_completed/libsubscriber-events-tx_completed.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/snapshot.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/meta.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/zero_level.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tracing/libtx-columnshard-tracing.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/granule/granule.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/one_layer.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/transparent.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/constructor.cpp |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/abstract.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/snapshot.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |54.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/columnshard/transactions/locks/interaction.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/zero_level.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/constructor.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/constructor.cpp |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/optimizer.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/tz/libcpp-type_info-tz.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/write_controller.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/empty.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tz_types/libyt-library-tz_types.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/write.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/read_finished.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/data.cpp |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/abstract/abstract.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/tracing/libtx-conveyor-tracing.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/constructor.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_appearance_snapshot.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/transparent.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_deprecated_snapshot.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/constructor.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/tracing/libtx-conveyor_composite-tracing.a |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor/service/worker.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/constructor.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_unused_tables_template.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |54.2%| 
[AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/one_layer.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_version_info.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/abstract.cpp |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/usage/common.h_serialized.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/normalizer.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.cpp |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/copy_blob_ids_to_v2.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_v0_meta.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_index_columns.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_empty.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/empty.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_info.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_actualization.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/defs.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/granule/normalizer.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/granule/clean_granule.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_hooks.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/operations/batch_builder/builder.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/batch_builder/restore.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/tables/normalizer.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__overload.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/broken_blobs.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/tiling.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/abstract.cpp |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/batch_builder/merger.cpp |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/category.cpp |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/background_controller.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/worker.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v1_chunks.cpp |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/version/libversion_definition.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v2_chunks.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/special_cleaner.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_sub_columns_portions.cpp |53.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/long_timer.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/read_http_reply_protocol.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__locks.cpp |53.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/name_service_client_protocol.cpp |53.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/common/context.cpp |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/libydb-core-tx.a |53.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/tracing/liblimiter-grouped_memory-tracing.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/source/libtx-general_cache-source.a |53.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/audit/libydb-core-audit.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/libydb-core-base.a |53.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy_schemereq.cpp |53.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/message_seqno.cpp |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |52.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer.cpp |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |53.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |53.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |53.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor2.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_actor.cpp |53.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/libcore-base-generated.a |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |53.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/leaked_blobs.cpp |53.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/events.cpp |53.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/blob_info.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/tablet/gc_counters.cpp |53.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/manager.cpp |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/slice_builder/pack_builder.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/batch_slice.cpp 
|54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/events.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_subdomain_path_id.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/write.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/tablet/broken_txs.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/probes.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/overload_manager/overload_subscribers.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/range_ops.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/service.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/resource_subscriber/task.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/process.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/column_info.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/chunks.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_inserted_portions.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/portion/portion.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/html.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/vdisk_priorities.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/resource_subscriber/actor.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/scan_common.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/workers_pool.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/write_actor.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/blobstorage_vdiskid.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/datashard_s3_upload.h_serialized.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/usage/config.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_service.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/usage/service.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/stream_scan_common.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/type_serialization.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/resource_subscriber/events.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/service/scope.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/usage/abstract.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__check.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/tablet/write_queue.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/deprecated/yaml_config_parser.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_download.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/usage/events.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/tablet/ext_tx_base.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/mediator_queue.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/service/service.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor/usage/events.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/usage/service.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/locks/dependencies.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/locks/read_start.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/actorsys_test/single_thread_ic_mock.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/restore.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/columnshard_splitter.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_state.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/actor_activity_names.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_user_table.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/usage/common.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/erase_rows_condition.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__schema_upgrade.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__acquire_read_step.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/usage/config.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__restore_params.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/audit/audit_log_impl.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/execution_unit_kind.h_serialized.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/coordinator/coordinator__restore_transaction.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_iface.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__stop_guard.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/send_data_protocol.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__configure.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/audit/audit_config/audit_config.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/incr_restore_helpers.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__init.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_impl.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__mediators_confirmations.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__monitoring.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/key_conflicts.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/version/version_definition.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/read_data_protocol.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/feature_flags_service.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/backtrace.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/auth.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__schema.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_relevance.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_bsc.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/shard_writer.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_grouptype.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/event_filter.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/actorsys_test/testactorsys.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/boot_type.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/domain.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/write_data.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/boot_type.h_serialized.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/counters.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/path.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/local_user_token.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_state.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/debug_log.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/group_stat.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/memory_controller_iface.h_serialized.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/logoblob.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/fulltext.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/base/localdb.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/kmeans_clusters.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/services_assert.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/row_version.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/pool_stats_collector.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__plan_step.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/audit/heartbeat_actor/heartbeat_actor.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/traceid.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/storage_pools.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_pipe.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/table_index.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/subdomain.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/ut_helpers.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__last_step_subscriptions.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/tablet_status_checker.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/tablet.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/tx_processing.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_monitoring.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/space_monitor.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/secondary_index.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/data_events/shards_splitter.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/connect_socket_protocol.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator__read_step_subscriptions.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/testing.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |54.2%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/remove_locks.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/operation.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |54.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/prefix_kmeans.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/recompute_kmeans.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/restore_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/backup/impl/local_partition_reader.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/upload_stats.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/base/blobstorage_events.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/memory_state_migration.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_proxy.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/backup_restore_traits.h_serialized.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/rotate_cdc_stream_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_guardian.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/blobstorage_syncstate.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/change_exchange.h_serialized.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_replica.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/export_common.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execution_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/backup_restore_traits.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/appdata.cpp |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/request.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_exchange.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_body_serializer.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/channel_kind.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/blocks.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/change_record_cdc_serializer.cpp |54.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_block.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/blob_depot.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/base/statestorage.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/export_scan.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/unisched.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/agent.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_lookup.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/tablet_killer.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_replica.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_publish.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/key_validator.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3_upload.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/import_s3.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/given_id_range.cpp |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_in_rs_unit.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_event_filter.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/assimilator.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_decommit.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/coro_tx.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/bridge.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/garbage.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/wilson_tracing_control.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_resolve.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_get_block.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/event.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/load_based_timeout.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/blob_mapping_cache.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_load.cpp |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_put.cpp |54.4%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/read.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_range.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/follower_edge.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/group_metrics_exchange.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/metrics.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/s3.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/status.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_commit_blob_seq.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/query.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/blocks.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_effects.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_failpoints.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_trash.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_patch.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_mon.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_gc.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |54.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3_scan.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_delete_rows.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/comm.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_collect_garbage.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/mon_main.cpp |54.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/events.cpp |54.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/abstract.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_get.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blob_depot/agent/resolved_value.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/stage_features.cpp |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_init_schema.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_load.cpp |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/data_uncertain.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/op_apply_config.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/proxy.cpp |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_status.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/garbage_collection.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_discover.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3_write.cpp |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/s3_delete.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/agent/storage_check_integrity.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/filter_kmeans.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/fulltext_dict.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/sample_k.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/tx/datashard/build_index/unique_index.cpp |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/fulltext.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |54.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/block_fail_point_unit.cpp |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/local_kmeans.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/backup_unit.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |54.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_drivemodel_db.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_defs.h_serialized.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_delayed_cost_loop.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/kmeans_helper.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/group_stat_aggregator.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__init.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_uncommitted.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_log_cache.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_params.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/drivedata_serializer.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_signal_event.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_atomicblockcounter.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/usage/service.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_flightcontrol.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/usage/config.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/other/mon_get_blob_page.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/other/mon_blob_range_page.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |54.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tracing/http.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/program/libcore-tx-program.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover.cpp |54.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/reshuffle_kmeans.cpp |54.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3of4.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_block.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_impl.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_blob.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__write.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/bridge/syncer/syncer.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/mock/dsproxy_mock.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_state.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_algo.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_collect.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_common.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__vacuum.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_blackboard.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemon.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__configure.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_assimilate.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_mon.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_impl.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_status.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/worker.h_serialized.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/vdisk/common/vdisk_outofspace.cpp |54.7%| [CP] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm16/yql/essentials/minikql/computation/mkql_computation_node_codegen.h |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_histograms.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_handle_class.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/group_sessions.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_patch.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_stat.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_hugeblobctx.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_strategy_base.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_histogram_latency.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_check_integrity_get.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_performance_params.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_block.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_log.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_monactor.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put_impl.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/bridge/proxy/bridge_proxy.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3dc.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/program/registry.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_delete.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/program/program.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_range.cpp |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/program/builder.cpp |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |54.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_requestimpl.cpp |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_cdc_stream_common.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/kqp_gateway.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_mon.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_sectorrestorator.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemonactor.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hulloptlsn.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_read.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_syslogreader.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_encrypt.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/kqp_ic_gateway.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multicollect.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/topic_message.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_http.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/execute_queue.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/two_part_description.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_metadata.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_mongroups.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_mon.cpp |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_writer.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_read_log.cpp |53.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/subscriber.h_serialized.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/dsproxy/dsproxy_request_reporting.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_indexrestoreget.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_driveestimator.cpp |53.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |53.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_completion_impl.cpp |53.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/scheme_cache.h_serialized.cpp |53.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |54.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_log.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_scan.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/metadata/blobstorage_pdisk_metadata.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__schema_upgrade.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multiget.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_console.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__init.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_dynamic.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_logreader.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_resource.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group_resolver.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pdisk.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator__schema.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_alloc.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_internal_interface.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/balancing_actor.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_bridge.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_static_group.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_defrag.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_validate.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_mon.cpp |54.7%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_scrub.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_proxy.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_cache.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_persistent_storage.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pipe.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_async.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_connectivity.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_impl.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/other/mon_vdisk_stream.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/handoff_map.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_fsm.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_quorum.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisfinder.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_stat_aggr.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_vdisk_guids.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_osiris.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/node_warden_vdisk.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_common.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_status.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/deleter.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_cost_tracker.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_tools.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_binding.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/worker.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_bridge.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_osiris.cpp |54.8%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_helpers.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/tablet_queue.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugedefs.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_event_filter.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/sender.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_actor.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_costmodel.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_events.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_state_storage.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_storage_config.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/hulldb_bulksst_add.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_dblogcutter.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_defs.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/opaque_path_description.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_rewriter.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullactor.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugerecovery.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/service.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisproxy.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisrunner.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_datasnap.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_essence.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_public.cpp |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_log.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/make_request/libimpl-internal-make_request.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/vdisk/common/vdisk_recoverylogwriter.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstvec.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/base_table_writer.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_selector.cpp |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_generate.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/helpers.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_cache.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_login_helper.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/monitoring.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_entryserialize.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_quantum.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_chain.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idx.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllogcutternotify.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/blobstorage_hullcompdelete.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/recovery/hulldb_recovery.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_tx_infly.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_subop_types.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/balance/utils.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_subop_state_types.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_response.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_subop_state_types.h_serialized.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_scatter_gather.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/kqp_metadata_loader.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/populator.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_system_names.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_context.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstslice.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_selfheal.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hull.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/replica.cpp |54.9%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/program/resolver.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/events.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_public.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hulldb_bulksstmngr.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_cache/scheme_cache.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/load_test.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_statestorage_config_generator.cpp |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/signals/libyt-library-signals.a |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/libvdisk-synclog-phantom_flag_storage.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/metadata/metadata_actor.cpp |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_proxy.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idxsnap.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_compactfreshappendix.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_committer.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/subscriber.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/cache.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_reader.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_logreplay.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/assimilation.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_defs.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgimpl.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/backup.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogformat.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_stattablet.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_readbatch.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_extr.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_readactor.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_continuous_backup_cleaner.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_barrier.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmem.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_statdb.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_stathuge.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgwriter.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogneighbors.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_readbulksst.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_public.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/query/query_range.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import_helpers.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_sysviews_update.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |54.7%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/compile_context.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/db_key_resolver.cpp |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__list.cpp |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/libcore-client-server.a |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/compile_result.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__forget.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__list.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__progress.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_configdummy.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/ic_nodes_cache_service.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_thresholds.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/http_ping.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_snapshot.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__get.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog_private_events.cpp |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/util/libcms-console-util.a |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/libcore-cms-console.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_index_utils.cpp |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_state.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__get.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__forget.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_block_and_get.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_db.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmovedpatch_actor.cpp |55.0%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_lock.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_index_build_info.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_shred.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/topic_sdk_test_setup.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_incremental_restore_scan.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_overload_handler.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_tracker.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_committer.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogrecovery.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_propagator.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxywrite.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_task.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |55.1%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_incremental_restore_finalize.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_finalize.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_audit.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmultiput_actor.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_mon_dbmainpage.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/grpc_library_helper.cpp |54.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_scheduler.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/libcore-config-init.a |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfullhandler.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_state.cpp |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/txusage_fixture.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/tx_processor.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxyobtain.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_loggedrec.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_syncloghttp.cpp |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/libcore-config-validation.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_committer.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/libcore-control-lib.a |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/libydb-core-control.a |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_actor.cpp |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata_proxy.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/base/libcontrol-lib-base.a |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgreader.cpp |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_recovery.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/lib/immediate_control_board_html_renderer.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/lib/dynamic_control_board_impl.cpp |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/discovery/libydb-core-discovery.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_firstrun.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_builder.cpp |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogreader.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfull.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/counters_info/libydb-core-counters_info.a |54.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |54.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |54.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |54.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/minikql_compile/mkql_compile_service.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_compactionstate.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_monactors.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_ic_debug.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/columns/update.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_tablet_state.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/grpc_proxy_status.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/columns/schema.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_cms.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_test_shard_request.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__configure.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_console.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_http_server.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_proxy.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_blobstorage_config.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/manager/manager.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_tx_request.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_pool_state.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_config_subscriptions.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__shred_manager.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_types.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_node_registration.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_tablet_counters.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator_impl.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_hive_create_tablet.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/cms/console/modifications_validator.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_fill_node.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_metacache.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/util/config_index.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/cli_utils/melancholic_gopher.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/grpc_server.cpp |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/registry.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__init_scheme.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server.cpp |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/librun.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configuration_info_collector.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/indexes/schema.cpp |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_config_subscription.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_drain_node.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_dispatcher_proxy.cpp |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/version/libversion.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__replace_yaml_config.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_bootstrap.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__log_cleanup.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_cache.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configs_provider.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_read_session_info.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/init_noop.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/indexes/update.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_persqueue.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_tenant_failed.cpp |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_computational_units.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__revert_pool_state.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_scheme_request.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__create_tenant.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_nameservice.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_source_factory.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_tenant_done.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_scheme_initroot.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_data_source.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__root_shred_manager.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/validation_functions.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/core_validators.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__alter_tenant.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__get_log_tail.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/immediate_controls_configurator.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_dispatcher.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_source_builder.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/http.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.a |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__remove_tenant.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/minikql/minikql_engine_host.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/private_events.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/immediate_control_board_actor.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_resolve_node.cpp |55.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/logger.cpp |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/lag_provider.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/abstract.cpp |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_handshake.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_confirmed_subdomain.cpp |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/assign_internal.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/visitor.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/aggr_common.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/graph_optimization.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/stream_logic.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/formats/arrow/program/reserve.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/original.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/abstract.h_serialized.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/assign_const.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/common.cpp |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/collection.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__replace_config_subscriptions.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/log_settings_configurator.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/execution.h_serialized.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_config.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/aggr_common.h_serialized.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/filter.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/hash.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/tier/s3_uri.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_last_provided_config.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/functions.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/header.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/tier/identifier.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__drop_yaml_config.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/unboxed_reader.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/aggr_keys.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__get_yaml_metadata.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/custom_registry.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_hash_func_propagate_transformer.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__get_yaml_config.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/projection.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/graph_execute.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/program/index.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configs_manager.cpp |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_tenant_state.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/special_keys.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/arrow_helpers_minikql.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/converter.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/config/validation/auth_config_validator.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/process_columns.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/net_classifier_updater.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_subdomain_key.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/arrow_batch_builder.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/event_util.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/size_calcer.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/arrow_filter.cpp |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/libydb-core-viewer.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_configs_subscriber.cpp |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/permutations.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/column_shard_config_validator.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/jaeger_tracing_configurator.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/converter.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/init.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/upload_rows_counters.h_serialized.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/upload_rows_counters.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/mon.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/dummy.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/global.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/read_table_impl.h_serialized.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/layout/layout.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/ttl/schema.cpp |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__cleanup_subscriptions.cpp |55.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/libydb-core-util.a |55.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/discovery/discovery.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_tenants_manager.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/validators.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/feature_flags_configurator.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_kicli.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/ttl/update.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/client.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/viewer/wb_filter.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/wb_merge.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/wb_aggregate.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__add_config_subscription.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__set_config.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/address_classifier.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/backoff.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/stlog.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/aws.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fast_tls.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/console.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cpuinfo.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/init.h_serialized.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cache.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/lib/immediate_control_board_impl.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/source_location.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/gen_step.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/context.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/concurrent_rw_hash.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/random.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/format.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/base_utils/node_by_host.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hazard.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/page_map.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fragmented_buffer.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/update.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hyperlog_counter.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_heap.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ulid.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ui64id.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/text.cpp |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/replication.h_serialized.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_table.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/resource_id_resolver.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/session_info.cpp |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_consumer_remover.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/proto/libactor-cloud_events-proto.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_remover.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/attributes/libcore-ymq-attributes.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/update.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__update_tenant_pool_config.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/config_helpers.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/update.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/user_settings_names.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_config_base/config_base.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/update.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmd_config.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_discovery.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/object.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/base_utils/format_info.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/object.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/update.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tablet.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/update.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_admin.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_actorsystem_perftest.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/auth_mocks.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_debug.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/object.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/update.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/auto_config_initializer.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_stress.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_root.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/update.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/log.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/olap/schema/update.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/infly.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/program/execution.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_initroot.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/version/version.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/local_rate_limiter_allocator.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_console.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/message_delay_stats.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_cms.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/base_utils/format_util.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/factories.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/options/update.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/create_table.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/sha256.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/common/common.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_cluster_discovery.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tenant.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_genconfig.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_fakeinitshard.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/hash_intervals.cpp |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue.cpp |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/schema/schema.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/topic_description.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/sharding.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_sink_precompute.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_store.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_alterer.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/config.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/engine/minikql/flat_local_tx_factory.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/xml_builder.cpp |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/parser.rl6.cpp |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_base.cpp |55.5%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/logging.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy.cpp |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_disk.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/sys_params.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/queues/fifo/schema.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/queues/std/schema.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/rdma/cq_actor/libinterconnect-rdma-cq_actor.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/async/liblibrary-actors-async.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator__reserve.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/address/libactors-interconnect-address.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/nodes_manager.cpp |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/poller/libactors-interconnect-poller.a |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_transfer.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_with_stream.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/main.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/rdma/libactors-interconnect-rdma.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/cloud_enums.h_serialized.cpp |55.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/error/libcore-ymq-error.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/acl.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/helpers.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/service_initializer.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/time_cast/time_cast.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/object_storage.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/queue_id.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/store/store.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/query_id.h_serialized.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_cache_append.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/probes.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/upload_rows.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/options/schema.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/tenant_resolver.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/read_table_impl.cpp |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator__scheme.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/config_helpers.cpp |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/create_store.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/hash_modulo.cpp |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/failure_injection.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |55.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/config_parser.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |55.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/fetcher.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/object.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator_client/actor_client.cpp |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/upload_columns.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_validate_config.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/schemereq.cpp |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/stream_creator.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/table/table.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/hashes_checker/liblibrary-login-hashes_checker.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/tier/object.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/locks_db.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/sasl/liblibrary-login-sasl.a |55.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/ttl/validator.cpp |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/resolvereq.cpp |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_store.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |55.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/column_families.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/controller.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_remover.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/plan2svg/libydb-library-plan2svg.a |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/persqueue/topic_parser/counters.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/upload_rows_common_impl.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_description.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_type_ann.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/arrow_helpers.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_bs.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_dead_letter_source_queues.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/range_treap/libydb-library-range_treap.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/count_queues.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang20-rt/lib/asan/libclang_rt.asan-x86_64.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/util/memory_tracker.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_drop_dst_result.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/random.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_pq.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_impl.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_operation.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer_request.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/run.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_pipe_req.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |55.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/manager.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/describe.cpp |55.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |55.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |55.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/signals/libydb-library-signals.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/yajl/libcontrib-deprecated-yajl.a |55.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/rpc_long_tx.cpp |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/hash_slider.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/slide_limiter/usage/events.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/slide_limiter/usage/abstract.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/histogram.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/private.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/commitreq.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/states.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/client.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/datareq.cpp |55.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer_topic_data.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_vdisk.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/auth_factory.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang20-rt/lib/asan_cxx/libclang_rt.asan_cxx-x86_64.a |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/object_counter.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/actor.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/signals/agent.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/dec/libbrotli-c-dec.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/common/libbrotli-c-common.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/snapshotreq.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/enc/libbrotli-c-enc.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_profiles.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/kikimr_services_initializers.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |55.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |55.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |55.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |55.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_settings.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cleanup_queue_data.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_query.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator_impl.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_browse.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/error.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/tx_proxy_status.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/secret_resolver.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cfg/cfg.cpp |55.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/create_queue.cpp |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config.pb.cc |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/proxy_service.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_scheme.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/get_queue_url.cpp |56.0%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/untag_queue.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/delete_queue.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_pdisk.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/create_user.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/libdq-comp_nodes-hash_join_utils.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/garbage_collector.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_permissions.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |56.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/comp_nodes/llvm16/libdq-comp_nodes-llvm16.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/delete_message.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/time_counters.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/executor.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_queues.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/modify_permissions.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |56.0%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_wb_req.cpp |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_meta_extractor.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/delete_user.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/async_io/probes.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_read_actor_base.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/test_helper/program_constructor.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/test_helper/kernels_wrapper.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |56.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/libclient-nc_private-audit.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |56.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/libaudit-v1-common.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/backup/import/control.cpp |56.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/auth_multi_factory.cpp |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/opt/physical/predicate_collector.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |56.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/error/error.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/get_queue_attributes.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/dlq_helpers.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_create.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_users.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_hash.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_table_meta.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_utils.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_output.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_filter.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/run_query.cpp |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/change_visibility.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_ytql.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_input.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_epoch.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_cbo_helpers.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/tag_queue.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/send_message.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_horizontal_join.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/http/xml.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_logical_optimize.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_gateway.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_write_actor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yql/providers/yt/provider/yql_yt_forwarding_gateway.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/queue_leader.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_optimize.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_integration.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_partitions.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_intent_determination.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_layers_integration.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_key.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/secure_protobuf_printer.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_context.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table_desc.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_rd_read_actor.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/user_settings_reader.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_reorder.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_optimize.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_optimize.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/retention.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_helpers.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_integration.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_discovery_targets_result.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_settings.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_peephole.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_impl.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_optimize.cpp |56.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/libydb-core-cms.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/libapi-client-nc_private.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |56.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/api/protos/out/libapi-protos-out.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_wide_flow.cpp |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/index_events_processor.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |56.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |56.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/base/libpublic-lib-base.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/common/libimpl-internal-common.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_create_dst_result.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/list_queue_tags.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_impl.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/queues/std/queries.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/task.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/fifo_cleanup.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_read_actor.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/audit_log.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/json_handlers_viewer.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/purge.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_settings.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/events_writer.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/schema.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/http/http.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_assign_stream_name.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/node_tracker.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/queue_attributes.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/metering.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/queue_schema.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/service.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/replication/controller/tx_alter_replication.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_resolve_database_result.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/proxy_actor.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/action.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/receive_message.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_create_replication.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/services.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_creator.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/services.h_serialized.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_extract.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/common.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/counters.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_provider.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/http/types.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_create_stream_result.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/status_channel.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_alter_dst_result.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/ini_config/libini_config.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/purge_queue.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/queues/fifo/queries.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_datasource.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__toggle_config_validator.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/utils/libintegration-topic-utils.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_drop_replication.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_indexes.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/query_actor/query_actor.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_describe_replication.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/library/yaml_config/yaml_config_helpers.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/persqueue/topic_parser/topic_parser.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_opt_build.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/slide_limiter/usage/service.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/slide_limiter/usage/config.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_cbo.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_opt.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_resolve_secret_result.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_datasink.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/slide_limiter/service/service.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config.pb.cc |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/signals/owner.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_worker_error.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_effects.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/monitoring.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin_compact.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/queues_list_reader.cpp |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_assign_tx_id.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_filter.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/read_attributes_utils.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/executor/libclient-impl-executor.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/locks/locks.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/serialize_deserialize.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/table_creator/table_creator.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_helpers.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges_predext.cpp 
|56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_build_stage.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_results.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/discovery_mutator/libclient-extensions-discovery_mutator.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/console_dumper.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_agg.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/rewrite_io_utils.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/set_queue_attributes.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_limit.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_exec.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_join.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/endpoints/libclient-impl-endpoints.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/thread_pool/libimpl-internal-thread_pool.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/grpc_connections/libimpl-internal-grpc_connections.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/session/libclient-impl-session.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/db_driver_state/libimpl-internal-db_driver_state.a |56.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/column_families/schema.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_helpers.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_sort.cpp |56.6%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sort.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_common.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_precompute.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/logger/libimpl-internal-logger.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_parser.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_source.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage__serialization.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/actor.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann_pg.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/downtime.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/dynamic_node.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/base/msgbus.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/stats/libclient-impl-stats.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/result.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/retry/libimpl-internal-retry.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/src/kqp_runner.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/api_adapters.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/helper.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/info_collector.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_store_walle_task.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/session.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |56.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/kikimr.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_permissions.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/cms/cluster_info.cpp |56.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_process_notification.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/logger.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/schema.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp |56.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_update_downtimes.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_stage_float_up.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/node_checkers.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mirrorer/mirrorer.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_remove_task_adapter.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/erasure_checkers.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/dread_cache_service/caching_service.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_log_cleanup.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/query.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_list_tasks_adapter.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/cluster_tracker/cluster_tracker.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/src/actors.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_log_and_send.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/http.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tracing/trace.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/test_meta/libpy3tests-library-test_meta.global.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/shard_reader.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_get_log_tail.cpp |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/src/ydb_setup.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_init_scheme.cpp |56.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |56.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |56.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/objcopy_287a0728f8b1ad204ac0396eb2.o |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/local/libcomplete-analysis-local.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/libname-object-simple.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_expired_notifications.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/configurator.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/shard_writer.cpp |56.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/core/libydb-mvp-core.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/interface/libtable_data_service-local-interface.a |56.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.global.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_update_config.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/sentinel.cpp |56.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/appdata.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_api_handler.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/parser.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_task.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/reducer.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/merger.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_test_runtime.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_load_state.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/view/manager.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_check_task_adapter.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_mem_profiler.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mapper.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/cache_policy.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_tokens.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/filter.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/test_helper/controllers.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/meta_cache.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/kicli/error.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/mvp.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_store_permissions.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_reject_notification.cpp |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |56.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/schema/libname-service-schema.a |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/yql_facade_run/libessentials-tools-yql_facade_run.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/proto_helpers/libcoordinator-interface-proto_helpers.a |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_store_first_boot_timestamp.cpp |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/impl/libfmr-yt_job_service-impl.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/proto_helpers/libfmr-request_options-proto_helpers.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tx_remove_request.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/data_plane_helpers.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/interface/libfmr-job-interface.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/file/libfmr-yt_job_service-file.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/tools/ytrun/lib/libtools-ytrun-lib.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/impl/libtable_data_service-local-impl.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/walle_create_task_adapter.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/interface/libfmr-worker-interface.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/impl/libfmr-worker-impl.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__op_traits.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pq.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/fmr/coordinator/client/libfmr-coordinator-client.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/impl/libfmr-gc_service-impl.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/fmr_tool_lib/libyt-fmr-fmr_tool_lib.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut_service/common/libfederated_query-ut_service-common.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.a |56.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__login_finalize.cpp |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/solomon_helpers/liblibrary-testlib-solomon_helpers.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.global.a |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.a |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/column_families/update.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/binding/libname-service-binding.a |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/check/libv1-complete-check.a |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/column/libname-service-column.a |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_repl.cpp |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.8%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/check/libv1-format-check.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/retry/py3/libpy3python-retry-py3.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/union/libname-service-union.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replproxy.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/static/libobject-simple-static.a |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/libcomplete-name-service.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |56.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |56.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/objcopy_de67ee476035f2cc7c8d34c996.o |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_message_enricher.cpp |56.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/objcopy_c96c333b4f7fc5cb2b98b27907.o |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp 
|56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_blobmap_ut.cpp |56.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_partlayout_ut.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/ctas/ctas |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/objcopy_3cb499a0fcc9aa014af2855233.o |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/libpy3ctas.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/workload/libpy3stress-ctas-workload.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_context.cpp |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |56.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_b306c2955ce13e6db6cae73363.o |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |56.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |56.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |56.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |56.4%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullrepljob.cpp |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/cluster/libname-service-cluster.a |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_init.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.grpc.pb.cc |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/util.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.h_serialized.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_info.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_info.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replmonhandler.cpp |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.pb.cc |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.grpc.pb.cc |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.grpc.pb.cc 
|56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.pb.cc |56.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.grpc.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.grpc.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.grpc.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.pb.cc |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/py3/libpy3python-moto-py3.global.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.grpc.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.grpc.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.grpc.pb.cc |56.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console__load_state.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.grpc.pb.cc |56.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libydb-core-protos.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_change_path_state.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.grpc.pb.cc |56.4%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.h_serialized.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.grpc.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.pb.cc |56.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.pb.cc |56.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.grpc.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.pb.cc |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.pb.cc |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.grpc.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/client/grpc_client.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.pb.cc |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/behaviour.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.pb.cc |56.9%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.pb.cc |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/scheme_cache_lib/yql_db_scheme_resolver.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.grpc.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/deprecated/client/msgbus_client.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.pb.cc |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/attributes/ut/attributes_md5_ut.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.grpc.pb.cc |56.9%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.grpc.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.pb.cc |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replbroker.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_streaming_query.cpp |57.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.pb.cc |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/transfer/transfer |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/channel_purpose.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.pb.cc |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/libpy3transfer.global.a |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.grpc.pb.cc |57.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/objcopy_b632f28ee823f938d14c0e85f9.o |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/workload/libpy3stress-transfer-workload.global.a |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/workload_mixed |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.pb.cc |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/attributes/ut/ydb-core-ymq-attributes-ut |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.grpc.pb.cc |57.1%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.grpc.pb.cc |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.grpc.pb.cc |57.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.pb.cc |57.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_c0a0299090f532c29f2ee6dc87.o |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/libpy3workload_mixed.global.a |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.h_serialized.cpp 
|57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_resolve_resource_id_result.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.grpc.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.cc |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.pb.cc |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.grpc.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.grpc.pb.cc |57.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.pb.cc |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/libfmr-coordinator-interface.a |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/interface/libfmr-job_factory-interface.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/memusage_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebuf_ut.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/simple/libcore-cbo-simple.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.pb.cc |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_schemeshard_build_index_helpers.cpp |57.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/libyt-fmr-request_options.a |57.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/objcopy_4508aef343f36758ea760320db.o |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tracing/tablet_info.cpp |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_log_cache_ut.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/libpy3s3_backups.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/global/libcomplete-analysis-global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/workload/libpy3stress-s3_backups-workload.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cluster/static/libname-cluster-static.a |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |57.3%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/utils/libyt-fmr-utils.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libpy3utils-fetch-proto.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/lib/libpy3tests-datashard-lib.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/core/core_ydbc.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/fmr/libyt-gateway-fmr.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/database_resolver_mock.cpp |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/defaults.cpp |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/objcopy_bcf2142e31bf537964dc063d11.o |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/libpy3stress-oltp_workload-workload.global.a |57.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/url_matcher_ut.cpp |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rotate_cdc_stream.cpp |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |57.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_check.cpp |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/datashard/select/py3test |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_crypto_ut.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/cdc/cdc |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/tasks_list.cpp |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_afb48e06933bdee6c5245db82e.o |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |57.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |57.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/libpy3cdc.global.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/workload/libpy3stress-cdc-workload.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/objcopy_7d7339f4588397fc771e31030c.o |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_shred_manager.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/tx_init_schema.cpp |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_1c931ae3496b883d009f58ef8e.o |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_5233daad98e22a16d60b4b7b86.o |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_38c6001204b7ada03b8b3e421d.o |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload/libpy3stress-testshard_workload-workload.global.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_4488a798bff4dfb5b10f31aa13.o |57.5%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/testshard_workload/workload/objcopy_8dbc00b2eb10d4ff1ed6e47c3a.o |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_bc86b379db0e87d023f98a592e.o |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_pdisk_config.cpp |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/core/core_ydb.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/backup/backup_stress |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/libpy3backup_stress.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/workload/libpy3stress-backup-workload.global.a |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/objcopy_4f24d349611ecfcf19e3001adb.o |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/get_value.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_iter_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |57.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.5%| 
[CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/common/ut_helpers/dq_fake_ca.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/meta/meta_versions.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/libpy3pile_promotion_workload.global.a |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/objcopy_e99c9b04005e36c324dfb9fd3b.o |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/workload/libpy3scheme_board-pile_promotion-workload.global.a |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_failure.cpp |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/actualization.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_b9aaa278b10ed44e5645b3ef2f.o |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_816e2dba53f55d924139cdb3c5.o |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_e4166f3d104a6751b45e7e712f.o |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/type/libpy3oltp_workload-workload-type.global.a |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sysview.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/ut_utils.cpp |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/abstract.cpp |57.5%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/select.cpp |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/yql_testlib/yql_testlib.cpp |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_10b0cfa01297f7d7392eb4d9e4.o |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/bulk_upsert.cpp |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/stress/libpy3tests-library-stress.global.a |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_4ffdb694eb351ca96de1930bf2.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_3310cbcd39c3373557308c8e76.o |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info.pb.cc |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_e91d43b449a687b2b36f1f5526.o |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_2f0e0ac8198858b9ec9901778e.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_83efacabe56767ae4f106a6d27.o |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/meta/meta.cpp |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_f738234258cd034cd5383f92ad.o |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/interface/libcoordinator-yt_coordinator_service-interface.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/file/libcoordinator-yt_coordinator_service-file.a |57.6%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/postgres_integrations/library/ut/objcopy_cf5836766ac30ca7ea957ce368.o |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/objcopy_899316667b8914fe8ec3af85d9.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/objcopy_daba02a22b66dd174e40603586.o |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/impl/libcoordinator-yt_coordinator_service-impl.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/metadata/impl/libfile-metadata-impl.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/upload/interface/libfile-upload-interface.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/secret_masker/dummy/liblib-secret_masker-dummy.a |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/interface/libfmr-table_data_service-interface.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/file/libtable_data_service-discovery-file.a |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/datastreams_fixture/datastreams_fixture.cpp |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/response_tasks.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/provider/yql_s3_listing_strategy_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/utils.cpp |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/executor.cpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/compaction.cpp 
|57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/config.cpp |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.grpc.pb.cc |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/control_plane_storage_counters.cpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/request_validators.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/probes.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/util.cpp |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/objcopy_dcbdf62672440a626e79a64e14.o |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_771af70678639da3d434ea1416.o |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_2983697b65fe3fbc437e0acf1f.o |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/variator.cpp |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_aac602e1ee9075b2856f1975ca.o |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/node_broker/node_broker |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest |57.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.pb.cc |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/objcopy_2a9fba044b5f98d2ff5f5c7f44.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/libpy3node_broker.global.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ut_helpers/test_table.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/workload/libpy3stress-node_broker-workload.global.a |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_base_init.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_shared.cpp |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |57.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_953328e5c3275a286b65dc3b1d.o |57.4%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/node_broker/tests/objcopy_d0e1cde98d2ab34e72d18aae9c.o |57.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |57.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_d2d4e3343da9b011ee6a983244.o |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/combinatory/execute.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/04e5923f93dc6c03020c3f8d33_raw.auxcpp |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/bac1e141ce77ba9e4c154c061f_raw.auxcpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/09a86fdc074f6a74dbba3a1b6e_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/b617e7e0eedbafd573bb3e9378_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/28772d2d2f5fde0c47bbc1de81_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/ut_service/common/helpers.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/b04a8a4d880a6ab0d69f34e52c_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/2930ccca3fa511623aa0aab8b7_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/270c4cd48c2ce9ffde15347522_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/0f2203cca2515318778dba67ae_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/088cc01aea14911dadcfebb95f_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/0cb6148a9c8d23440fde108932_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/57a59c4f29be80bc3757f0df62_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/4f7a77b7c07bd4c3b8c227a3dc_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/85072bb936b0763f4b03040c4c_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/32d9902665afca9adad9d776d1_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/acd2063e47ff389d5e0998ace2_raw.auxcpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/35de4b0c5be07377d8d8f200fc_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/4d9337d0b5cdda129c615b9e11_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/372aaebe3111734f92cbd37587_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/375cc4d1d0d92664640057207f_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/7feea27966efd470217fa2b2d1_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/591fa537d34dc5e1559e2742ca_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/6a96b1975d3608d98f10dd6dca_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/52c88553bb1a046e514747d3a7_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/3058c699ce7778757ab83e8afa_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/662a23ac7b446c346c395d8a31_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/test_server.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/766923bd339f6159a29120618a_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/a8c0f40ab3816060c313900901_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/7f3256b83e7a93b527e0c65dde_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/8a767031478fdfcd8bd7f2032a_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/a5bfdf1dd024a54c5cafb2fe5e_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/96e42ebc6885f7ea88797bd6b5_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/8a74541690c10210582bbca6b9_raw.auxcpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/8750dadd4a699d4221d697bf03_raw.auxcpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/8da89ce514e40d6c15eea9462b_raw.auxcpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/9b21eec3caa6e63f2ba0789476_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/a5711c2d47c8a47e6e5840f40e_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/a4882a1596508571e159cddf6a_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/a11f017358d423e4ddbe87133e_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/fb094c33644271733cf9dfed37_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/3cc9cf139ba2036ae9abb9c09e_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/91b14d2095f07b781a0dfcd0e9_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fa57d989d640af1c4f644d61f0_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/c5a1d987730a9719a91d55e40a_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ce697fc3b324cb6152c4d7223d_raw.auxcpp |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.grpc.pb.cc |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/d52903a0693870f83d0bbe0ab8_raw.auxcpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/d42f916259e88c99d94b15ec0e_raw.auxcpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/e6ce42a762195cf7e946ca411e_raw.auxcpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/92868a760cbaac2b810517d2d5_raw.auxcpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/e72d7c890a4e6d107b4b82a390_raw.auxcpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/d2dac6d3870f7e57f4cc1314ed_raw.auxcpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/libpy3olap_workload.global.a |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/objcopy_9de271b22d7bcc64ef77cc3cde.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_342e8590e41686b18307d054a9.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/type/libpy3olap_workload-workload-type.global.a |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/io_formats/arrow/scheme/csv_arrow_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/generated/runtime_feature_flags_ut.cpp |57.8%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yaml_config/ut_transform/objcopy_b5b36403e069f48d06f8367722.o |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/objcopy_c693478edc1220e7a9143567d1.o |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libpy3core-protos-nbs.global.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/c110657bb6bb660f1355e297dd_raw.auxcpp |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_242486256e1af973cd1d5376d1.o |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_5d73baff4bb68923ddbe5f4fcd.o |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/e9ba3ee2f0ee1966e63998b143_raw.auxcpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_2efdf95387a81f55cf9c81071a.o |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/c5aa441d48d4124514e9d62ba2_raw.auxcpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/29dbda6d5a6888d44695a7480c_raw.auxcpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/yql/libcomplete-analysis-yql.a |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/libpy3oltp_workload.global.a |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.9%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/olap_workload |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |57.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/workload/libpy3stress-mixedpy-workload.global.a |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/client/libpy3tools-solomon_emulator-client.global.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/audit/audit_ut.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk/unittest 
|57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/proto_helpers/libtable_data_service-client-proto_helpers.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/impl/libfmr-job_factory-impl.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/process/libyt-fmr-process.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/impl/libtable_data_service-client-impl.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_launcher/libyt-fmr-job_launcher.a |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/interface/libfmr-yt_job_service-interface.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/interface/libtable_data_service-discovery-interface.a |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/microseconds_sliding_window_ut.cpp |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_block_ut.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/metadata/interface/libfile-metadata-interface.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/file/upload/impl/libfile-upload-impl.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/interface/libfmr-gc_service-interface.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/proto/libyt-fmr-proto.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/common/liblibrary-testlib-common.a |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/src/common.cpp |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/impl/libfmr-job-impl.a |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/mlp/ut/common/libmlp-ut-common.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |57.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/lib/libpy3tests-sql-lib.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_04f56802b68450abc8421282d0.o |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_6403bfa5c5e35b29a21c73fb0e.o |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/objcopy_e64be2702e6aadcfe4f62214e0.o |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/common/common.cpp |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-sqs/liblibs-aws-sdk-cpp-aws-cpp-sdk-sqs.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/src/actors.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/src/fq_runner.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/public/types_ut.cpp 
|58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_data_source_ut.cpp |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/iceberg_ddl_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/common/autoscaling_ut_common.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/object_storage_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/external_source_builder_ut.cpp |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |57.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/lib/objectwithstate.cpp |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/context.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/extension.cpp |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_settings.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page_yandex.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/openid_connect.cpp |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_49e50404ca7d8e93240b3617b2.o |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create_yandex.cpp |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create_nebius.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create_handler.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/extension_manager.cpp |58.1%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_impersonate_start_page_nebius.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/extension_final.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_impersonate_stop_page_nebius.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_client.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/cracked_page.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.pb.cc |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/extension_whoami.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_session_create.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page_nebius.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_cleanup_page.cpp |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page_handler.cpp |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_4e3ea6c3c5a0438f05942dbc81.o |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_28c396580e7e319c4a82e15fc9.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_75e82e9b2ff2024ae902b7d5e4.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut_helpers.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.pb.cc |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_protected_page.cpp |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/src/fq_setup.cpp |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_7c0098f27edc25092453a8033c.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_b9fcf9641e3e569e88014f85ff.o |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_7f02665786b7523f76c02ad1dd.o |58.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ftxui/libcontrib-libs-ftxui.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/responses/py3/libpy3python-responses-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ut_helpers/test_topic.cpp |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |58.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |58.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_7479409fb33baf855b74c3e835.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_acf74a4313fbcafa6df239e3ec.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_df4191b43fee1a7d77acb3207f.o |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/partcheck/main.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_outofspace_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebufstream_ut.cpp |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebufresize_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_lsnmngr_ut.cpp |58.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_syncneighbors_ut.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/persqueue/topic_parser/ut/topic_names_converter_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/query_executor.cpp |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/system/libsystem_allocator.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/libcomplete-name-object.a |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |58.1%| [TS] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.a |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/http_client.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/connector_client_mock.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/writer.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/testlib/pq_helpers/mock_pq_gateway.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/tools/dqrun/lib/dqrun_lib.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/typed_local.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_5923b362516b6632b9769a5db2.o |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_6b37760fb6a28054d0feafd61d.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_278b1a63a14648a80c4b930adb.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_4826ee2207124da1bc398e3bd8.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_5b5c3367c789898aa5a6cae866.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_ping.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_8e57113197bb359e3999b04aab.o |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/log_backend/json_envelope_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_lease.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_9be8b6745d0fa150928bab4206.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/objcopy_c9ab749ab3188a8582c5cefa5e.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_e68ca1a2fa9943132c020ae028.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8e19d47784789c55156c57f816.o |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_quotas.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_pdiskfit/lib/basic_test.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/objcopy_e2cd022168ff179d1441f5d3df.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/objcopy_0ade7a5662c6292edc3a8de02f.o |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/core/libv1-complete-core.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_connections.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/common/ut/ydb-core-persqueue-common-ut |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_gate.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/auth_config_validator_ut/auth_config_validator_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_folder.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_1815f02732d96389c328f04d90.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_f0d8fb718a757998dc9403df32.o |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_df04396057094f2483296b9dbe.o |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/workload/libpy3show_create-table-workload.global.a |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_87b299e07b15c86f4f50f458ef.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_f3c323ef80ada193284f036d44.o |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_state_name_to_int.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/nodes_health_check.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/labeled_counters_merger.cpp |58.1%| 
[PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_af18efc2f04dd1af5ca802c329.o |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/metering/time_grid_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/metering/stream_ru_calculator_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/basic_example/gtest |58.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/io_formats/arrow/scheme/ut/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_impl.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/common/pq_ut_common.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_8e0f2cf91b35e6051ad82535a4.o |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/node_broker/tests/py3test |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_e55498abceca534315a6428452.o |58.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_e508a8abac843a0a0f92fc62eb.o |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/workload/libpy3stress-viewer-workload.global.a |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/aggregation.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_bindings.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_ymq_v1.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_object_storage_v1.{pb.h ... grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_replication_v1.{pb.h ... 
grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_compute_database.cpp |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_watch.cpp |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_6508d12aaafde6f0a60fe8fff3.o |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_cd9abca883cad9b25e20bf2f08.o |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_bd84885c5c24478d181ba9e493.o |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_result_write.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/validators.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/ptr_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/batched_vec_ut.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/bufferwithgaps_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/rate_limiter_resources.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.0%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/plugins/cpp_styleguide/cpp_styleguide |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |58.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_tx_info.h_serialized.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/helpers/local.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_replication.pb.{h, cc} |58.0%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/protoc |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... 
results_accumulator.log} |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/table/show_create_table |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_get.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/pgproxy/pg_proxy_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/libpy3show_create_table.global.a |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/objcopy_970514ee5aa7605a49b54b8feb.o |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/init/init_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/tools/protobuf_plugin/config_proto_plugin |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/objcopy_33061e6ec0580baa7f93f1ce23.o |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/libpy3workload_topic_kafka.global.a |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_storage/in_memory_control_plane_storage.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/objcopy_c96ef635306ccee8a5cf6359f1.o |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/service/etcd_grpc.cpp |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ut_helpers/mock_service.cpp |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_service.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/config/ut/unittest |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_logstore.pb.{h, cc} |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.{pb.h ... 
grpc.pb.h} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |58.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_export_v1.{pb.h ... grpc.pb.h} |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |58.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/splitter/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_dbstat.cpp |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info.{pb.h ... 
grpc.pb.h} |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |58.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libpy3contrib-libs-googleapis-common-protos.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_load.cpp |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/blobstorage/vdisk/syncer/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_many.cpp |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/dataset.cpp |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_defrag.cpp |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.grpc.pb.cc |58.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/log_backend/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/federated_query/ut/unittest |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_outofspace.cpp |58.6%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... 
defs.inl.h} |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/ut/common/common.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/helpers_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/common/util_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_localrecovery.cpp |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.{pb.h ... grpc.pb.h} |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_synclog.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/objcopy_c55121179eeb3b5753498290c4.o |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-pytest/libpy3contrib-python-allure-pytest.global.a |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/actor/yc_search_ut/unittest |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_brokendevice.cpp |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/backup/unittest |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-python-commons/libpy3contrib-python-allure-python-commons.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |58.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/vdisk_mock.cpp |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/runlib/application.cpp |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/ut/test_connection_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_bad_blobid.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/failing_mtpq.cpp |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_volatile/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_huge.cpp |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_volatile/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/protos/out/out.cpp |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/external_sources/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/counters.cpp |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_monitoring_v1.{pb.h ... grpc.pb.h} |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/oidc_proxy/mvp.cpp |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_repl.cpp |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/test_env.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/helpers.cpp |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/provider/ut/pushdown/pushdown_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/runlib/kikimr_setup.cpp |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.{pb.h ... 
grpc.pb.h} |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/compute/common/ut/unittest |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_1555e67a3dd43a3e7f09bf8eee.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_3db6af291678d4ac330517956a.o |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_488333b1ebd4c1d6d8ec5bcb8f.o |58.7%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload/tsserver |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_operation_v1.{pb.h ... grpc.pb.h} |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_scheme_v1.{pb.h ... grpc.pb.h} |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_inference/libydb-library-arrow_inference.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_simplebs.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_monitoring_proxy.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_chunk_tracker.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_view_v1.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_logstore_v1.{pb.h ... grpc.pb.h} |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/fq_v1.{pb.h ... grpc.pb.h} |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/annotations.{pb.h ... grpc.pb.h} |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytz/py3/libpy3python-pytz-py3.global.a |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/api/annotations.{pb.h ... 
grpc.pb.h} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_faketablet.cpp |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.global.a |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompiler/rescompiler |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |58.7%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompressor/rescompressor |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/vector/liblibrary-workload-vector.a |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_76cd981cf66123b7633d25b898.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/common/row_dispatcher_settings.cpp |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_60a4829fdc305e3a74a7ddcb41.o |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_22b5b8dd6ea05f4194f60e6181.o |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sqs_workload/sqs_json/libsqs_json.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/shred_helpers.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sqs_workload/libsqs_workload.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/runlib/utils.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/blob/type_codecs_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/blob/blob_ut.cpp |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_helpers/export_reboots_common.cpp |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcc/liblibrary-workload-tpcc.a |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/metadata/ut/functions_metadata_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/ut_common/ut_common.cpp |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/fq/libs/common/ut/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |58.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... results_accumulator.log} |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk2/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_settings.cpp |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk2/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk2/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk2/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk2/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk2/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk2/unittest |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_zip.cpp |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk2/unittest 
|58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk2/unittest |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk2/unittest |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_blockbs.cpp |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/metering/ut/unittest |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/common/row_dispatcher_settings.h_serialized.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |58.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/providers/s3/actors/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/workload/libpy3stress-topic_kafka-workload.global.a |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/fetcher/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_compaction/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/yq_internal.pb.{h, cc} |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_followers/unittest |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_export/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/quota_internal.pb.{h, cc} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chopper.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_top_sort.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chain_map.cpp |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... 
grpc.pb.h} |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/providers/s3/common/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |58.8%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |58.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/external_sources/object_storage/inference/ut/gtest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |58.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.pb.cc |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.{pb.h ... 
grpc.pb.h} |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |58.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/libpy3workload_testshard.global.a |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/objcopy_06a563d04a0e897145ef1061d2.o |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_sausage_meta.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_scan_iface.h_serialized.cpp |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/fq_private_v1.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_table.cpp |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_part_group_iter_create.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_sausagecache.cpp |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |58.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/viewer.pb.{h, cc} |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_column_filter.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_reader.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_hash.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_arrow.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/blob/ut/ydb-core-persqueue-pqtablet-blob-ut |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_program_step.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/libcomplete-name-cache.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/local/libname-cache-local.a |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/storage_pools.cpp |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/cached/libobject-simple-cached.a |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers_ut.cpp |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/constructor.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/fixtures/libpy3tests-library-fixtures.global.a |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/olap/objcopy_994f7d36d0c12371f6d6ec62e4.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/impatient/libname-service-impatient.a |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/config/grpc_service.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/libpy3tests-library-compatibility.global.a |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/olap/objcopy_8dd70891bfbac8135389af5f53.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/olap/objcopy_c7c0405528f55543f02099b70d.o |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.a |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libpy3client-yc_public-common.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libpy3client-yc_public-iam.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/common/libpy3tests-olap-common.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.global.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_36807918bd7a86c1ea37310c9c.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_656baae3c1e24959f5bcc457d7.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/helpers/libpy3olap-scenario-helpers.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/lib/libpy3tests-olap-lib.global.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_0ab925f82bbba07bf3b749dc3c.o |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/709f125727d9ea4165df516509_raw.auxcpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_5992d4831c5055a481712a2a80.o |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/compatibility/olap/ydb-tests-compatibility-olap |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/object.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.global.a |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.a |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |58.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |59.0%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_1ab2a5a6dd84a6c9ff5d5c50b0.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_3bdea7737a87c43bfaa0aaf4c3.o |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_6887bde1dc99f5c5c2f0922842.o |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |59.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |59.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |59.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_auth.pb.{h, cc} |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |59.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |59.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |59.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |59.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |59.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.{pb.h ... grpc.pb.h} |59.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |59.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |59.0%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |59.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |59.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/access/access.{pb.h ... 
grpc.pb.h} |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |59.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/long_tx_service/public/ut/unittest |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/executer_actor/ut/unittest |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/raw_socket/ut/unittest |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ticket_parser.cpp |58.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/actors/scheme.cpp |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/iam_token_service_subject.{pb.h ... grpc.pb.h} |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_locks/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_locks/unittest |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |59.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_pg/flat_database_pg_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/backup.{pb.h ... 
grpc.pb.h} |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_locks/unittest |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_9a3dabea847c21e0b4fa4cda26.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_b34c6a8a5501db208eebc5d8e4.o |59.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/aba998449c2518e3272d8e87fb_raw.auxcpp |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_e32003454342267c2263935765.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_cca8dcd66462c9ca3c57fcb78e.o |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2492aafb6862566a2398c9f27e.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_e66920085df69f6f7e41547063.o |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_3df021aac8504049c53286aea0.o |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |58.9%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_422ca1effff14e5a08952658d0.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_5f161468ff5322b803d4d0dc79.o |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_8ac5034640eee44b1cd5fa5253.o |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_testshard/unittest |59.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/transfer/ut/column_table/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/add_column/py3test |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |58.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/config/validation/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/transfer/ut/functional/unittest |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... 
grpc.pb.h} |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/manager.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/tools/decrypt/main.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/objcopy_f4efacd00293c5fe09c3f84a62.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/objcopy_988cc467d4da79de606ebf50ee.o |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/workload/libpy3stress-reconfig_state_storage_workload-workload.global.a |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/objcopy_1f78e7638ae0f2e308bd7331f9.o |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest 
|59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/registration.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |59.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/describer/ut/unittest |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/quoter/ut/unittest |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/federated_query/ut_service/fast/unittest |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.pb.cc |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/row_dispatcher/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |59.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.{pb.h ... grpc.pb.h} |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/discovery_actor.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/manager.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/validators/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/validators/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/validators/ut/unittest |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/validators/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/validators/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/validators/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/validators/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/validators/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/validators/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/validators/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/delete/objcopy_609c2613d8f9c513602350c6a8.o |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/delete/objcopy_ffc5f76f7501b8251738448541.o |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_181bdcd1743e9a1a78fafe4b60.o |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49e9948af399bc60603a7d2db5.o |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/4399546af28cb40e5d74ea4a4b_raw.auxcpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49bad8251d240ad7c49d384b91.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/delete/objcopy_e6184a39b8332c221c5cda3c2f.o |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... 
grpc.pb.h} |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/error.pb.{h, cc} |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h} |59.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_scan/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydbd/export/libapps-ydbd-export.a |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |59.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/ut/ut_slicer.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_scan/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/auth_actors.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_scan/unittest |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |59.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/mock/yql_mock.cpp |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |59.4%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |59.4%| [UN] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |59.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.grpc.pb.cc |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.grpc.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.grpc.pb.cc |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/common/py3test |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/fetcher.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.pb.cc |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/control_plane_proxy.cpp |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |59.4%| [LD] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.pb.cc |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.pb.cc |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/protos/libmvp-core-protos.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/security/simple/libmvp-security-simple.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/meta_cache_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.grpc.pb.cc |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/aggregator_impl.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/access/libclient-yc_private-access.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.grpc.pb.cc |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/quota/libclient-yc_private-quota.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_group_path_struct.cpp |59.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.grpc.pb.cc |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... 
grpc.pb.h} |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/reservoir_sampling/libreservoir_sampling_udf.so |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/codecs_ut.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/libyc_private-ydb-v1.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgimpl_ut.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmem_ut.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgwriter_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/main.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/freetype/libcontrib-libs-freetype.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lcms2/libcontrib-libs-lcms2.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/common/config.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libpng/libcontrib-libs-libpng.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/libetcd-grpc.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libwebp/sharpyuv/liblibs-libwebp-sharpyuv.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/quota/account_read_quoter.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/tools/decrypt/decrypt |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openjpeg/libcontrib-libs-openjpeg.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/memory_controller/memory_controller.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libwebp/libcontrib-libs-libwebp.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cppy/libpy3contrib-python-cppy.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libjpeg-turbo/libcontrib-libs-libjpeg-turbo.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/contourpy/libpy3contrib-python-contourpy.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pillow/py3/libpy3python-Pillow-py3.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/qhull/libcontrib-libs-qhull.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libtiff/libcontrib-libs-libtiff.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cycler/py3/libpy3python-cycler-py3.global.a |59.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_id_dict_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pillow/py3/libpy3python-Pillow-py3.a |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.pb.cc |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/fonttools/libpy3contrib-python-fonttools.global.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.grpc.pb.cc |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/contourpy/libpy3contrib-python-contourpy.a |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.{pb.h ... grpc.pb.h} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/edaf602b2011baa1519a223d63_raw.auxcpp |59.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/gateway/ut/gtest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_schemeshard_stats.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_1007df29dec27b0b7a1587d49f.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_b91160bcee04ad1f57e80af064.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_1326afc143d720f2af434cd836.o |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut/ydb-core-kqp-federated_query-ut |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.grpc.pb.cc |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_sqs_topic_v1.{pb.h ... grpc.pb.h} |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher_service.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... 
grpc.pb.h} |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib/py3/extern/agg24-svn/libpy3-extern-agg24-svn.a |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_1339ee5ef04af3a5a49d43a6c9.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_fcc835b175560db56b04f51f44.o |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kiwisolver/py3/libpy3python-kiwisolver-py3.a |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_fdd48fc620c42f480ae38b77f5.o |59.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_dae5a42f53b4f98bf1b9fd8118.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_b08299d456f3448b368e814cb8.o |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_9f43001a877b9e371fe700c81d.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_25d3afea4b7778a202a80125cb.o |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kiwisolver/py3/libpy3python-kiwisolver-py3.global.a |59.5%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib/py3/extern/ttconv/libpy3py3-extern-ttconv.a |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_7a185a4b35de7733fde931d298.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_6b8c453743f8fd2c5380af70c6.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_0291c815ab38527766323f743e.o |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/olefile/py3/libpy3python-olefile-py3.global.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/38dcacd12926621ca72e30ce1b_raw.auxcpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_ae5b9f6e7a00f305f01a3dde87.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_e774a92982177091add3614f41.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_1ba56637fdb9b3e1bc9b45aa93.o |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.pb.cc |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/events.pb.{h, cc} |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib/py3/libpy3python-matplotlib-py3.a |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/http_req.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_2b682e146a665bfa19210b0fd9.o |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib/py3/libpy3python-matplotlib-py3.global.a |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/column_table/ydb-core-transfer-ut-column_table |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/functional/ydb-core-transfer-ut-functional |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |59.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/ut_utils/ut_utils.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest-timeout/py3/libpy3python-pytest-timeout-py3.global.a |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyparsing/py3/libpy3python-pyparsing-py3.global.a |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_c623700776b43ee95ec93c56f9.o |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/query_history_ut.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/actors/ut/yql_arrow_push_down_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |59.5%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/example/objcopy_e0aef87c4bf15cfdc957f4bdd1.o |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_12d01741952bd4afa836364d84.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_15e284a8ecb30c90903e842e70.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_cee1e02beaf827051149b5ca30.o |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.grpc.pb.cc |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/error.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/plan2svg/py3test |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_test_runtime.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_tokens.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/objcopy_5accfe00d45fb7ebcc30e116b2.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/objcopy_b783a1a2aacb855daa1e55fad6.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/objcopy_93665db601a12d4842de4565e2.o |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/double_indexed_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/table_bindings_from_bindings.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/view/grpc_service.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/ydbd/main.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_0664e2ab2eb37ae9f02538e483.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_4b767dce2ddf7a5424aef828d6.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_19422d2b60428207055b4ed843.o |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_bd8a6d25e26a719f80141d0711.o |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/etcd_proxy/proxy.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/proxy_private.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/service/counters.cpp |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/13360e4ecdf34efe6c3a817a44_raw.auxcpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_cf3971576aced18377e99f5367.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_7c81cbfa6b5ce112674cb0a849.o |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_e2acb41e7099c0db4fe54a1587.o |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_08f7acdb6eb761b28bf6990862.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_c7c229be41e9b028572ad1aab3.o |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yaml_config/ut_transform/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/workload_kv |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_5294a064c14cf5a49516321590.o |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/libpy3workload_kv.global.a |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/objcopy_691607a9cbabb8d8c0161d1a6d.o |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.grpc.pb.cc |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.pb.cc |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... 
grpc.pb.h} |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/flat_table_part.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/decoder.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_disk_quotas/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.grpc.pb.cc |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut_pg/unittest |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut_pg/unittest |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut_pg/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_object_storage.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/kafka/tests/py3test |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut_pg/unittest |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut_pg/unittest |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut_pg/unittest |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut_pg/unittest |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/lib/libpy3functional-tpc-lib.global.a |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_7e6470c67310c26b57384706e4.o |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut_pg/unittest |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_e5d897582dc0fbda7c578cb53f.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_11e4572b38d275456acaf6e9ab.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_824785b12dcc08862746468e4b.o |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/lib/libpy3olap-load-lib.global.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.pb.cc |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_5333c1912ecbac0f64ff97551f.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_48884f6b745ced4d3e78997cb1.o |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut_pg/unittest |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_aebf7c73fcaf6a54715cc177c8.o |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.pb.cc |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/base/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/counters.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.{pb.h ... 
grpc.pb.h} |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut_pg/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/bridge_get.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_e31620202d3ba8df14ff2a18e1.o |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_f8b2cbafb1fed0e25bf9683c2d.o |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_388aef0b6ac03d4f661ae7a30e.o |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/events.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/ids.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.pb.cc |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_44c54191096d916f4750b2c8ff.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_f5bffdc9c705ff664d677c40c7.o |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_b452f781b2aebe749e4c7d53ab.o |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_c50fed028c36a9f2013227a33d.o |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_65ac58c27d43a55d0ea4eda626.o |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/objcopy_b81ec5f0c21c081e83c17a0eef.o |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.grpc.pb.cc |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/actors_factory.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.grpc.pb.cc |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.{pb.h ... 
grpc.pb.h} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_simple_db_flat.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_filter_kmeans.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/actor.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/parsing.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut_large/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/probes.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/ydb-tests-compatibility |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_helpers.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext_dict.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/s3_bench/main.cpp |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_transfer/unittest |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/plan2svg/ydb-tests-functional-kqp-plan2svg |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_transfer/unittest |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/kv.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/synclog/ut/unittest |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/synclog/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/synclog/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/synclog/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/synclog/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/synclog/ut/unittest |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/session_service.{pb.h ... 
grpc.pb.h} |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/synclog/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/test_shard/grpc_service.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/synclog/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/synclog/ut/unittest |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/objcopy_48e09f84949dd34b82c51f21a3.o |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/s3_bench/ydb_s3_bench |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/objcopy_3b212908932716bae8a8e38b2c.o |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut/inside_ydb_ut.cpp |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/objcopy_ce63bab0f89a8715a42271a26a.o |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/core/mvp_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/synclog/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/streaming/tests/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.pb.cc |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/task_command_executor.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/run_actor.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/api.{pb.h ... grpc.pb.h} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/rpc.{pb.h ... 
grpc.pb.h} |59.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/direct_read_actor.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/workload/libpy3stress-kv-workload.global.a |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/phantom_blobs.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/describer/describer_ut.cpp |59.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/partition_writer.cpp |59.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/init.h_serialized.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/pending_fetcher.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/serializer/abstract.cpp |59.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |59.5%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/error.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/scheme_ranges_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/scheme_types_proto_ut.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/scheme_borders_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/scheme_tablecell_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/bridge/ydb-tests-functional-bridge |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/local_cluster/local_cluster |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/local_cluster/libpy3local_cluster.global.a |59.4%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/rename/dc048c91e67372877fc6ad2dfc_raw.auxcpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/bridge/objcopy_d80f811b3fe32bcd2128d6ab6f.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/bridge/objcopy_4b2ec656f7e85bc05586d7e6fc.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/bridge/objcopy_c0b503af0486d120ebabb4c64b.o |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5db899a01c2ec6f53648af6840.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_bfa810e70cd1de18c5d4a18a62.o |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/read_session_actor.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_00c87b13e2f685811a9825079d.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_c02c3d9f840d02af9fad858a55.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5865a174a6c25ca1a2d6386702.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/local_cluster/objcopy_e83326c043824a10ed26240d03.o |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut/ydb-core-scheme-ut |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_2a98e2f0e66f286cb125620511.o |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_94f66830f5c535f3f015b42e43.o |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_8cba80b2275265b72407436cdf.o |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/c4711c742b4f72331dccea9c2a_raw.auxcpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_3382de65b417782bf648c475b1.o |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/protos/mvp.pb.{h, cc} |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/accessor/secret_id.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... 
results_accumulator.log} |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/events.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.grpc.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_disk_quotas.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_secrets_ut.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/92868a760cbaac2b810517d2d5_raw.auxcpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_user_management_ut.cpp |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minstep/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minstep/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.h_serialized.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |59.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/c110657bb6bb660f1355e297dd_raw.auxcpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/events.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs__intpy3___pb2.py.p5ju.yapyc3 |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |59.5%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache__intpy3___pb2.py.p5ju.yapyc3 |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |59.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py{ ... i} |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs__intpy3___pb2.py{ ... i} |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage__intpy3___pb2.py.p5ju.yapyc3 |59.5%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc__intpy3___pb2.py.p5ju.yapyc3 |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage__intpy3___pb2.py{ ... i} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache__intpy3___pb2.py{ ... 
i} |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy__intpy3___pb2.py.p5ju.yapyc3 |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/health_check/health_check_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/task_ping.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/hive_ut.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_sequence/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |59.5%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy__intpy3___pb2.py{ ... i} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health__intpy3___pb2.py{ ... i} |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.{pb.h ... grpc.pb.h} |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/http_api/py3test |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/probes.cpp |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/plugins/grpc_python/grpc_python |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_runtime.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_db.cpp |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/py3cc |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_impl.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_config_get.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_dummy.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_balance_actor_sql.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/cluster_balancing.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/py3cc/slow/py3cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_alter_configs_actor.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_describe.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_add.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_delete.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kafka_proxy/actors/kafka_describe_configs_actor.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_create_topics_actor.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_48a08121f0a68da2f2666b0341.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_791e2f78c18891d943ecce5e41.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_7211c23d9494c46f0f60063e9e.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/secondary_index/objcopy_6b62c1db41e3ebd0278a84dced.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/secondary_index/objcopy_716263ce181e67161f84180281.o |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_acquire.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/secondary_index/objcopy_b83d9052e0bc89877bbe223294.o |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup_collection/unittest |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_produce_actor.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/knn/kqp_knn_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_utils.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_create.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup_collection/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_batch_operations.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_lwtrace_probes.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/query_actor/query_actor_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |59.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/datastreams.pb.{h, cc} |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.{pb.h ... 
grpc.pb.h} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_proxy.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/rate_accounting.cpp |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_config_v1.{pb.h ... grpc.pb.h} |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/write_session_actor.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_fetch_actor.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_init_producer_id_actor.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_list_offsets_actor.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_metadata_actor.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/ut/ut_helper.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_config_set.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_output_channel_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_arrow_helpers_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/modification.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_actor.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/statistics_internal/libstatistics_internal_udf.global.a |59.4%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_addmember.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_append.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_aggrcount.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_init.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/config/validation/auth_config_validator_ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_count.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_factory.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/field_transformation.pb.{h, cc} |59.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/c5aa441d48d4124514e9d62ba2_raw.auxcpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_container.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/secret.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_some.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_coalesce.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_sum.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.{pb.h ... 
grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_func.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_exists.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_getelem.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_minmax.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/request/common.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_actor.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata__intpy3___pb2.py{ ... i} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation__intpy3___pb2.py{ ... i} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_if.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation__intpy3___pb2.py.p5ju.yapyc3 |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.5%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py.p5ju.yapyc3 |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_map_join.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log__intpy3___pb2_grpc.py.p5ju.yapyc3 |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/events.pb.{h, cc} |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/records.pb.{h, cc} |59.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/base/ut/gtest |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_check_args.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain1_map.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_self_check.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_coalesce.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_top.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_collect.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_blocks.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_event_impl.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense1.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_contains.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dictitems.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/monitoring/grpc_service.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mod.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_discard.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dynamic_variant.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_timeouts.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_2f7ac0f750374152d13c6bfbcf.o 
|59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_854d6cc7a0cc5cdd793cfc1e6d.o |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_a926d3332cb769ac3e6c9e6e37.o |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ensure.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_expand_map.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.grpc.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_extend.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_filter.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.pb.cc |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/topic.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flow.cpp |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_frombytes.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromyson.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/database/database.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_guess.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/grpc_request_context_wrapper.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/pgproxy.pb.{h, cc} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hopping.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/counters_coordinator.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_invoke.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ifpresent.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.pb.cc |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterable.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterator.cpp |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_length.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join_dict.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.grpc.pb.cc |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.pb.cc |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_state.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_listfromrange.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs |59.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_tx.cpp |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lookup.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mapnext.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_logical.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_measure_arg.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... grpc.pb.h} |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_helpers.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |59.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map_join.cpp |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/libpy3tstool.global.a |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/objcopy_6077c98b9810fee0e2250a36a4.o |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multihopping.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_next_value.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backtrace.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mutdict.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_nop.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot_isolation.cpp |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_now.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_pickle.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_queue.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_random.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_removemember.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_keyvalue.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_range.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reverse.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/schema.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.{pb.h ... 
grpc.pb.h} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_seq.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_info.pb.cc |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_round.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_skip.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_size.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_source.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_to_list.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_scalar_apply.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/tstool/tstool |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_take.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_time_order_recover.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.grpc.pb.cc |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_timezone.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.pb.cc |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_toindexdict.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |59.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_udf.cpp |59.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/providers/generic/actors/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_varitem.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_way.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.grpc.pb.cc |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_while.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/clusters_from_connections.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/allocation.h_serialized.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_todict.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.pb.cc |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sequenceshard.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_part_loader.h_serialized.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/dq/runtime/dq_channel_service_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.pb.cc |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |59.7%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.pb.cc |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_map.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain_map.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_fwd_misc.cpp |59.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |59.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/persqueue/topic_parser/ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/shutdown/state.cpp |59.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/persqueue/topic_parser/ut/unittest |59.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/persqueue/topic_parser/ut/unittest |59.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/persqueue/topic_parser/ut/unittest |59.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/persqueue/topic_parser/ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |59.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/persqueue/topic_parser/ut/unittest |59.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/persqueue/topic_parser/ut/unittest |59.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/persqueue/topic_parser/ut/unittest |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |59.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/persqueue/topic_parser/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |59.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/persqueue/topic_parser/ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/table_creator/table_creator_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/request/request_actor_cb.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/config/bsconfig_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/grpc_service.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/public_http/http_req.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.global.a |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.a |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/services_initializer.cpp |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_44fac4fe441507735704a000ad.o |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_calls.cpp |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_994fcbd53c4e2174c302bdb5ab.o |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_7c328c2741f9dd7697a2e0e8b1.o |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/service/actor.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/secrets/942364ad5a5e5e098d1e17591f_raw.auxcpp |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/secrets/objcopy_768030afc62b16afdaf2ee3f97.o |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/flavours/libpy3tests-library-flavours.global.a |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/secrets/objcopy_3c254544f1506576cfb57420e0.o |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/secrets/objcopy_950c001e67cca8190a7777a25e.o |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_delete.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_4b2e093abff756c97b675c0a31.o |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_89b3e69f7cdba68b4eefcae48c.o |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/objcopy_a6e393b6d53f4c73feac80b55c.o |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/secrets/ydb-tests-functional-secrets |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/behaviour.cpp |59.7%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.grpc.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/validation.{pb.h ... grpc.pb.h} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/common.{pb.h ... grpc.pb.h} |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/result_set_meta.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |59.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... results_accumulator.log} |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |59.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.grpc.pb.cc |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/object.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_comp_create.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |59.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_auth/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_auth/unittest |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_auth/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_auth/unittest |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/tablet_flat_executed.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/private_grpc.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/aggregator.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |59.7%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_auth/unittest |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_auth/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_auth/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_auth/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_auth/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_session_destroy.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/debug_info.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/resource.{pb.h ... grpc.pb.h} |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_auth/unittest |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... 
grpc.pb.h} |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/grpc_service.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/postgresql/py3test |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_changer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_common.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/mixedpy/tests/py3test |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils.cpp |59.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_io.pb.{h, cc} |59.7%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/runtime_feature_flags.h |59.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/metadata/ut/unittest |59.7%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/ctas_workload |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/objcopy_3fdb568d483b57acc8e627f8c2.o |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/libpy3tests-tools-ydb_serializable.global.a |59.5%| RESOURCE $(sbr:4966407557) |59.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.grpc.pb.cc |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/persqueue.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |59.7%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/control_board_proto.h |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |59.7%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.pb.cc |59.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/fs_backup_validation_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |59.7%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut_util/unittest |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.8%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service_ut.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |59.8%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.pb.cc |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.pb.cc |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.grpc.pb.cc |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon_alloc/memory_info.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |59.9%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/nemesis |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp 
|59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/import/protos/task.pb.{h, cc} |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/view/unittest |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/data.pb.{h, cc} |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.8%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/transfer_workload |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/durationpy/libpy3contrib-python-durationpy.global.a |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/blobs.pb.{h, cc} |59.9%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/statistics_workload |59.9%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/simple_queue |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/events.pb.{h, cc} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/blob_range.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_scripting_v1.{pb.h ... grpc.pb.h} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/data.pb.{h, cc} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/sessions.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/initiator.pb.{h, cc} |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/events.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/links.pb.{h, cc} |59.9%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/ydb_cli |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_selfheal/unittest |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/portion_info.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_auth_v1.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.grpc.pb.cc |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/task.pb.{h, cc} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/replication/unittest |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |59.9%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.{pb.h ... 
grpc.pb.h} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_sequenceshard.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/group_members.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_writer_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_rs/unittest |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/ut_helpers.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/slot_indexes_pool.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.grpc.pb.cc |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_backup.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/quoter_service.cpp |59.9%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_fulltext_ut.cpp |59.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/users.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/snap_vec_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_changer_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_reader_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_stream_indexes_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/autopartitioning_manager.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_bio_actor.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.grpc.pb.cc |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/sessions/sessions.cpp |59.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/permissions.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/monitor.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/common/tracing_support.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.pb.cc |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_config.pb.{h, cc} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/openid_connect.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/driver/nemesis |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/k8s_api/libpy3tools-cfg-k8s_api.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/objcopy_81ae81681ce2388a653cfa5ba3.o |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/groups.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/libpy3nemesis.global.a |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__migrate_state.cpp |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/dynamic_nameserver_mon.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/key.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/heartbeat.cpp |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/metrics_queue.pb.{h, cc} |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.{pb.h ... grpc.pb.h} |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... 
grpc.pb.h} |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/microseconds_sliding_window.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/dynamic_nameserver.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_tx_env.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |60.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/long_tx_service/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/local.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__extend_lease.cpp |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__init_scheme.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize_dump.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__register_node.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_node_enumeration.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... 
grpc.pb.h} |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_app.cpp |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__update_epoch.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__update_config.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_17cef60c2dd0eb7ea46181ba87.o |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__load_state.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/metadata_initializers.cpp |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_065e9244d685c2b8f0ab66e414.o |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_461999da7ba13deab5689c18ec.o |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/transaction.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__assign_free_slots.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/throttler.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_background_compaction/unittest |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/lease_holder.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.{pb.h ... grpc.pb.h} |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/transfer/tests/py3test |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/request_discriminator.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/libpy3local_ydb.global.a |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__init_scheme.cpp |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/objcopy_8d2ea3c78a255bb4c87c2fc54a.o |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_config.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/sampling_throttling_control.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/sampling_throttling_control_internals.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.grpc.pb.cc |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/consumer_offset_tracker.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.{pb.h ... 
grpc.pb.h} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controller.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_pool_status.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/configured_tablet_bootstrapper.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_charge_create.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/cms/cms_ut.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/local_ydb/local_ydb |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.{pb.h ... grpc.pb.h} |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_04ccb9b757b207bc74705e9bb1.o |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_b9596990f3fd41de0fa350fc68.o |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_d54fb5ab35d376fe3311e9feea.o |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_read.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compactification.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/graph_description.pb.{h, cc} |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/mdb_endpoint_generator_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/secret_behaviour.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |60.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/generated/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/ydb.h_serialized.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_blob_encoder.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/message_id_deduplicator.cpp |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/events/events.cpp |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/common/event_helpers.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_mlp.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/service_account.{pb.h ... grpc.pb.h} |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/common/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/common/actor.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/common/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/common/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__delete_node.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/common/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__update_tablets_object.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/tcmalloc.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/common/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/common/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/common/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/common/ut/unittest |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/operation.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/tenant_user_account.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/user_account.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/metadata.{pb.h ... grpc.pb.h} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/profile_service.{pb.h ... 
grpc.pb.h} |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/common/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/common/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... results_accumulator.log} |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/sourceid.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.pb.cc |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_dump.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_executer_ut.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/oltp_workload/tests/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/user_info.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/fetcher.cpp |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_node_location.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.grpc.pb.cc |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_backup.pb.{h, cc} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |59.9%| [PB] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/index.pb.{h, cc} |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_helpers.cpp |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_store_hotdog.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/shared_cache_s3fifo.h_serialized.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_table_btree_index_histogram.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/local_table_client.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.pb.cc |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__check_slot_status.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libutils-fetch-proto.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/generated/codegen/main.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/bootstrapper.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_delayedresp_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/kqp/gateway/behaviour/streaming_query/initializer.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/subscriber.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_transaction_actor.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/filesystem/librestricted-boost-filesystem.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/utf8_range/librestricted-google-utf8_range.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/libcontrib-restricted-abseil-cpp-tstring.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/libcontrib-restricted-abseil-cpp.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/ownerinfo.cpp |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libcore-protos-nbs.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/jinja2cpp/libcontrib-libs-jinja2cpp.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/snapshot.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_semaphore_timeout.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/legacy/rpc_legacy.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/build/cow/on/libbuild-cow-on.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.grpc.pb.cc |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_broker.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_snapshot.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.grpc.pb.cc |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.grpc.pb.cc |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.grpc.pb.cc |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.pb.cc |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/helpers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_maintenance.pb.{h, cc} |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.{pb.h ... grpc.pb.h} |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/table/behaviour.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/counters/counters.cpp |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/snapshot.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_replication/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_replication/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/auth_factory.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/txn_actor_response_builder.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7eade8c49389813f8c36b72b5b.o |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7bfd03a31f5e230607792f10cc.o |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_1a1e300767b552f4c13c3295d0.o |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_4f92526e13553482736b942b2c.o |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_3209cda00462f2963f3cbbc912.o |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/libpy3ydb_configure.global.a |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/objcopy_940b9a794cb8fbc6ebdf926276.o |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_list_renderer.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/kqp_common.h_serialized.cpp |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/cfg/bin/ydb_configure |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_keys/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/helpers/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_test_shard_v1.{pb.h ... grpc.pb.h} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/fetch_database.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |60.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/pqtablet/partition/ut/gtest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_sourcemanager.cpp |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/kqp_compile_actor.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/shard_impl.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/perf/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_startup.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/initializer.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/abstract.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_rebuildhistory.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/kqp_compile_service.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/ydb/ydb.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/show_create/show_create.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_init_schema.cpp |60.4%| 
[LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_store_metrics.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/counters.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/operation_helpers.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_monitoring.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/ut/kqp_compile_fallback_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |60.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console.pb.cc |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.{pb.h ... 
grpc.pb.h} |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |60.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_boot_misc.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.pb.cc |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.grpc.pb.cc |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |60.4%| [PB] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/googleapis-common-protos/google/rpc/status.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_io_state.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_bridge_v1.{pb.h ... grpc.pb.h} |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/streaming_queries/streaming_queries.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/table_record.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_messages.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/abstract.cpp |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_8120ef49e7e653ed0601604313.o |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d3af02c7d57ea2cbbe5d381baa.o |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_f93c60b04a0499f2ec6880591a.o |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_backup.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/tablet_flat_executor.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.grpc.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/health.pb.cc |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.pb.cc |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.grpc.pb.cc |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.grpc.pb.cc |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.grpc.pb.cc |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.grpc.pb.cc |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.pb.cc |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/persqueue.pb.{h, cc} |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/restore_controller.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/backpressure.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/group_size_in_units.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.grpc.pb.cc |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/tx_event.pb.{h, cc} |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_column_stats/unittest |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/initializer.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_clickhouse_internal.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/checkpointing/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/large/objcopy_04f2935f3ada8eb9d01ebaba6b.o |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/large/objcopy_28f172e1aa977d907bdfa0a81b.o |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/large/objcopy_6af7a7ce8a1ee5e67d75a2978a.o |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/bsc_cache.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... 
results_accumulator.log} |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/serializer/utils.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.grpc.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_describe.cpp |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_d3749b17b0bc2057695c3e736a.o |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_ee633aebbed65e39e09fbf4271.o |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_6bdc69403f0fa7c03edbf19c28.o |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/example/py3test |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/vector_index/large/ydb-tests-datashard-vector_index-large |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/cancellation.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/load_test.grpc.pb.cc |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_sessions_describe.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/viewer/tests/py3test |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.pb.cc |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_db_mon.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.grpc.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/processor/schema.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.grpc.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/load_test.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.pb.cc |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.grpc.pb.cc |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.grpc.pb.cc |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/dq_task_params.pb.{h, cc} |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.pb.cc |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/testlib/actors/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/testlib/actors/ut/unittest |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/rate_limiter.cpp |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/corrupted_reads.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/testlib/actors/ut/unittest |60.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut/unittest |60.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut/unittest |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/access_behaviour.cpp |60.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut/unittest |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/testlib/actors/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/testlib/actors/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/testlib/actors/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/testlib/actors/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_scheme.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/check_integrity.cpp |60.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut/unittest |60.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/testlib/actors/ut/unittest |60.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/testlib/actors/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |60.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/top_ut.cpp |60.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/testlib/actors/ut/unittest |60.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut/unittest |60.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/scheme/ut/unittest |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/bin/objcopy_9509442a50bd9d1393fa0d54e4.o |60.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/providers/s3/provider/ut/unittest |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |60.3%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/generated/dispatch_op.h |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/alter.cpp |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/object.cpp |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/read_init_auth_actor.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/kafka.h_serialized.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/partition_writer_cache_actor.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/columnshard/ut_schema/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_list_objects_in_s3_export.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_group/main.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_system_view.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_query.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collect_operation.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_bridge.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_locks/locks/list.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_stored_state_data.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/grpc_service.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_locks/locks/snapshot.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/datastreams/datastreams_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/partition_stats/top_partitions.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_consumer_protocol.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer__mlp_balancing.cpp |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/shutdown/controller.cpp |60.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/libpy3yaml-config-protos.global.a |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/keyvalue/grpc_service_v2.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_top_partitions.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/table_settings.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/bridge/grpc_service.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/partition_stats/partition_stats.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health.grpc.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/service.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/retry_options.pb.{h, cc} |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.grpc.pb.cc |60.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.grpc.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/behaviour.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/ydb_value_operator.cpp |60.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/http_request.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/ut_pg/scheme_tablecell_pg_ut.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_import.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/readproxy/readproxy.cpp |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_cms.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_fetch_actor.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_init_schema.cpp |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_b8aa61f402be805d2e3e9e75a2.o |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_c65a9d5efe13dc05c1466090ba.o |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/db_counters.cpp |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_d23500649301df2a8de48ba70d.o |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_2aa1916d45dca98014edb3d732.o |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_1574e8a5a6c530c7bfd6378c4d.o |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_504b845d57f1a23561e970de61.o |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.grpc.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.grpc.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/recoveryshard_config.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pdiskfit.grpc.pb.cc |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/time_cast/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.grpc.pb.cc |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.grpc.pb.cc |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_table_impl.{pb.h ... grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.grpc.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.grpc.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board_mon.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/cms/grpc_service.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/backup/grpc_service.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.pb.cc |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/version/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/streaming/objcopy_e6cee7a0d001013375fe4474ef.o |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/streaming/objcopy_7658f3232c7b17d24a0a184809.o |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.grpc.pb.cc |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/streaming/objcopy_15b0d0854ad50b151b07652021.o |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.grpc.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.grpc.pb.cc |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.pb.cc |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.{pb.h ... grpc.pb.h} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... 
grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/actor.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/processor_impl.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_state.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.pb.cc |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/abstract/request_features.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/select_builder.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.pb.cc |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema.cpp |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read_actor.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/sdk_session.cpp |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/events.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_view.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/schema.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/mon_proto.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_test_shard.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/fetch_config.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/streaming/ydb-tests-compatibility-streaming |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... grpc.pb.h} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... 
grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/exceptions_mapping.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.grpc.pb.cc |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/cloud.{pb.h ... grpc.pb.h} |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_session_detach.cpp |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/operation.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/resource_preset_service.{pb.h ... grpc.pb.h} |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/solomon/py3test |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/quota/quota.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.{pb.h ... grpc.pb.h} |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/blockstore.{pb.h ... grpc.pb.h} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/sdk_table_client.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_state_collect.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_delete.cpp |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |60.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/activation.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_index_record.cpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_user_request_context.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/fmr.pb.{h, cc} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |60.5%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/fb094c33644271733cf9dfed37_raw.auxcpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_storage_request.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... 
results_accumulator.log} |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_row_builder.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... grpc.pb.h} |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.6%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags__intpy3___pb2.py{ ... i} |60.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.pb.cc |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |60.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.pb.cc |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/receive_message.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.grpc.pb.cc |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/pq_database.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/storage_type_service.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.{pb.h ... 
grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ydb_local_connection.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_publisher_service_actor.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/show_create/view/tests/py3test |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/yandex_passport_cookie.{pb.h ... grpc.pb.h} |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.grpc.pb.cc |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/action_type.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/token_service.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/iam_token_service.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_transactional_producers_initializers.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/metrics_actor.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.grpc.pb.cc |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_external_data_source.cpp |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/pg_tables/pg_tables.cpp |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/table_exists.cpp |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/counters/kqp_counters.cpp |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/analyze_actor.cpp |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_consumer_members_metadata_initializers.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/oauth_request.{pb.h ... grpc.pb.h} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/api/http.{pb.h ... grpc.pb.h} |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/common/timeout.cpp |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_response.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/encryption/py3test |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/optimization.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/public/tools/lib/cmds/ut/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/initializer.cpp |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/scan.cpp |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... grpc.pb.h} |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read.cpp |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_config.cpp |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/query_stats/query_stats.cpp |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_fq.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.pb.cc |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.pb.cc |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/action.{pb.h ... grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/general_cache/usage/events.cpp |60.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.grpc.pb.cc |60.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.grpc.pb.cc |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |60.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.pb.cc |60.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.pb.cc |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/service_impl.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/fetcher.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_consumer_groups_metadata_initializers.cpp |60.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/query_stats/query_metrics.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |60.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |60.6%| [CC] {tool} $(B)/library/cpp/build_info/build_info.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [CC] {tool} $(S)/library/cpp/svnversion/svn_interface.c |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |60.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |60.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_delivery_problem.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/ydb/query_actor.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_navigate.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.grpc.pb.cc |60.7%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/data_integrity_trails.grpc.pb.cc |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.grpc.pb.cc |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.grpc.pb.cc |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/fetch_config.pb.{h, cc} |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.grpc.pb.cc |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.pb.cc |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.pb.cc |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.pb.cc |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.grpc.pb.cc |60.7%| [BI] {tool} $(B)/library/cpp/build_info/buildinfo_data.h |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.grpc.pb.cc |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.grpc.pb.cc |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |60.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.pb.cc |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |60.6%| [CC] {tool} $(S)/library/cpp/build_info/build_info_static.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.grpc.pb.cc |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.{pb.h ... grpc.pb.h} |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.pb.cc |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor_recovery.h_serialized.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_bridge_common.pb.{h, cc} |60.7%| COMPACTING CACHE 992.0KiB |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache.cpp |60.6%| [CC] {tool} $(S)/library/cpp/svnversion/svnversion.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/distributed_commit_helper.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_offsets_actor.cpp |60.7%| [CC] {BAZEL_DOWNLOAD, 
FAILED} $(S)/ydb/core/security/login_page.cpp |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [CC] {tool} $(B)/library/cpp/build_info/sandbox.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/health/health.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_testshard.grpc.pb.cc |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_handshake_actor.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/nodes_manager.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_range_ops/unittest |60.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |60.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata.grpc.pb.cc |60.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_comp_gen.h_serialized.cpp |60.8%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_events.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.grpc.pb.cc |60.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_borrowlogic.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.pb.cc |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_backup.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.grpc.pb.cc |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_counters.cpp |60.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |60.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.pb.cc |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_comp_gen.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/contexts.cpp |60.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a 
|60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/probes.cpp |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sequenceshard/public/ut/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.grpc.pb.cc |60.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ymq.pb.{h, cc} |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.grpc.pb.cc |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/general_cache/usage/abstract.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.pb.cc |60.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_fetcher_actor.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/quota/read_quoter.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/fetching_steps.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/queries.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/read_metadata.cpp |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/bool_test_enums.h_serialized.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/checker.cpp |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/quota/write_quoter.cpp |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage_2_ring_groups.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/metadata.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/behaviour.cpp |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/source.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/behaviour.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/ut/ut_manager.cpp |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_b8d63b589074145793d63c27a3.o |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_f928a40774b17a9d6cd7cabd2c.o |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |60.8%| [PY] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_e7477203b27fa0321cf18fd7ee.o |60.8%| [CC] {tool} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |60.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/849c58233edc33539cbeb93a31_raw.auxcpp |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_bf578b7161cc94bf18488d04ca.o |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/objcopy_4f055c289b3de8f2a1e827ae5c.o |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_exec_commit_mgr.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/public_http/http_service.cpp |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_2194854d9f8cbb3e0ba798b861.o |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_703c8e1d9a9a2b271b8b995a29.o |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_52e86d5ee8fadefdbb415ca379.o |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_912038ceef7de48e0e15c25307.o |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/validation/validators_ut.cpp |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_bac05c8b5a79735451f58d9322.o |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/owners.cpp |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/auth/groups.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer_app.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/test/testhull_index.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.8%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/e6ce42a762195cf7e946ca411e_raw.auxcpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/write_id.cpp |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/test_connection/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |60.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/token.{pb.h ... grpc.pb.h} |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_protobuf.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/private/aggregated_counters.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_recovery.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/show_create/create_view_formatter.cpp |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |60.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/config/validation/column_shard_config_validator_ut/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/labeled_db_counters.cpp |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic.cpp |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_tracing_signals.cpp |60.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |60.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config__intpy3___pb2.py{ ... i} |60.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap__intpy3___pb2.py{ ... 
i} |60.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap__intpy3___pb2.py.p5ju.yapyc3 |60.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper__intpy3___pb2.py.p5ju.yapyc3 |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_bootlogic.cpp |60.9%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper__intpy3___pb2.py{ ... i} |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ut_consistent_copy_tables.cpp |60.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap__intpy3___pb2_grpc.py.p5ju.yapyc3 |60.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config__intpy3___pb2.py.p5ju.yapyc3 |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/dq_effects.pb.{h, cc} |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |60.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ydb_sdk_connection.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache.cpp |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/service.pb.{h, cc} |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/compaction_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/control_plane_storage_requester_actor.cpp |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/bool_ut.cpp |60.9%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py{ ... i} |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |60.9%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/topic_workload |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/restarts/py3test |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor.pb.{h, cc} |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_index_iter_create.cpp |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... 
grpc.pb.h} |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/show_create/formatters_common.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |60.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |60.8%| [CC] {tool} $(B)/ydb/core/protos/feature_flags.pb.cc |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_charge_range.cpp |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |60.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_tablet_v1.{pb.h ... grpc.pb.h} |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/server_restart/gtest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_rate_limiter.pb.{h, cc} |60.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |60.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |60.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |60.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |60.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |61.0%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |61.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |61.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scripting.pb.{h, cc} |61.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |61.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.{pb.h ... grpc.pb.h} |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/pq_async_io/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log} |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |61.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/graph/shard/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_ut_configs.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ymq/utils.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/memory_controller/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/control_plane_proxy/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ydb_checkpoint_storage.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/rate_limiter/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |62.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |62.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/common/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |62.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_message.pb.{h, cc} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/user_account.{pb.h ... grpc.pb.h} |62.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.{pb.h ... grpc.pb.h} |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/kqp_query_session/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_stats/unittest |62.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_vacuum_logic.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/config/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/huge/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/service_account.{pb.h ... grpc.pb.h} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/huge/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/huge/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/huge/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/huge/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/huge/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/huge/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |62.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/huge/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/mediator/ut/unittest |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |62.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/tests/sql/solomon/pytest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_pipe_client.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.pb.cc |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |62.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/resource_broker.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/huge/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/olap_workload/tests/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/huge/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_parser/enum_parser |62.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/backpressure/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_reset.cpp |62.4%| PREPARE $(FLAKE8_PY2-2255386470) - 6.56 MB |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/partition_chooser_impl.cpp |62.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_counters_aggregator.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.5%| 
[CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/backpressure/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/backpressure/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/backpressure/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/backpressure/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/backpressure/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/backpressure/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/backpressure/ut/unittest |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_init/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/backpressure/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/audit_log.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... 
grpc.pb.h} |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/sampler_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/jaeger_tracing/throttler_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/backpressure/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/constructor.cpp |62.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/simple_reader_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_findlatest.cpp |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... 
grpc.pb.h} |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |63.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_boot_lease.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/quota_tracker.cpp |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |62.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/s3_backups/tests/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |63.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest 
|63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_8fca143a218b930f297b779e3a.o |63.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_9314464e3560b2511ac931acd9.o |63.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/objcopy_8db6616d40f8020d0632222fe3.o |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_cd57da3671b96739ee73293fb1.o |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_e8c94c485e81b4b2899f52f594.o |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |63.3%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_3bb523a1011c0a7019f2684a90.o |63.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/statuses.cpp |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/workload_topic |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |63.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_types.cpp |63.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |63.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.grpc.pb.cc |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/quota_service.{pb.h ... grpc.pb.h} |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |63.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/libpy3workload_topic.global.a |63.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/objcopy_e9d954952def1b899e1fb63731.o |63.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/scheme_describe.cpp |63.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/value/ut/gtest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest 
|63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/base/generated/codegen/main.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/auth_ut.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_spacetracker_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/bin/main.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/mind/ut/unittest |64.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |64.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |64.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |64.3%| [TA] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} |64.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |64.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |64.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |64.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |64.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |64.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |64.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/add_index.cpp |64.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |64.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |64.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |64.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |64.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |64.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |64.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |64.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |64.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/manager.cpp |64.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |64.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/value/ut/ydb-public-lib-value-ut |64.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |64.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |64.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |64.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |64.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |64.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/value/ut/value_ut.cpp |64.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/arrow/libsrc-client-arrow.a |64.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/registry/libcpp-dwarf_backtrace-registry.global.a |64.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |64.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |64.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |64.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |64.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |64.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_c98e5b95c64b8486a12f10d408.o |64.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_b06d27009e49b9ba3df883a226.o |64.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |64.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/knn/objcopy_6cfe787a44e849877f4b79a557.o |64.7%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/tools/pq_read/test/objcopy_0035b673555f394234ae284e25.o |64.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_45b6981aed17dda33d43217f52.o |64.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_9818d2b70aad7db98a0f9c044c.o |64.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |64.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |64.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/knn/objcopy_5f1f131b295e1fe534a12648c6.o |64.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_11720a94357f4e62a79e34a420.o |64.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/knn/ydb-tests-datashard-knn |64.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |64.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |64.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |64.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_scale_manager_graph_cmp_ut.cpp |64.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_0446f521b26a2e8128f94ac50f.o |64.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_367e2bc5d83faa0907a06d2976.o |64.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_49a1ca9559288648fba9cf7b65.o |64.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |64.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |64.9%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |64.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |64.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |65.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |64.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/knn/objcopy_e73aed5f96567ff87b3489e87b.o |64.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl_qs.cpp |65.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |65.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |65.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |65.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_927a1f7611cf94fb1cd21ef8cf.o |65.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |65.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/util_pool_ut.cpp |65.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_96b8686cd075e874d95d4aa5c5.o |65.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/util_string_ut.cpp |65.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_6a5c78aa9f679a0920be5264fe.o |65.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_b031a661ba244dffa03ab0c7ec.o |65.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_d0255dda539959b69d421868a2.o |65.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/5a2f230528097042fdaf726fed_raw.auxcpp |65.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/comp_nodes/ut/unittest |65.1%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |65.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/pq_read/test/py3test |65.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp |65.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp |65.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |65.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |65.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |65.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |65.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |65.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |65.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |65.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |65.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |65.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |65.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |65.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |65.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |65.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |65.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |65.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |65.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... 
results_accumulator.log} |65.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.grpc.pb.cc |65.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/simple_queue/tests/py3test |65.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/limiter/grouped_memory/ut/unittest |65.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_clusters_updater_actor.cpp |65.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |65.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/workload/libpy3stress-topic-workload.global.a |65.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |65.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |65.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl.cpp |65.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/libpy3ydb-dstool.global.a |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/objcopy_fca89909cedb628068681e1038.o |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |65.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/lib/libpy3dstool_lib.global.a |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.grpc.pb.cc |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/yt/kqp_yt_import/py3test |65.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_discovery_v1.{pb.h ... grpc.pb.h} |65.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |65.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lazy_list.cpp |65.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.{pb.h ... grpc.pb.h} |65.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.pb.cc |65.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.{pb.h ... grpc.pb.h} |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |65.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |65.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |65.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/db_metadata_cache.pb.cc |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |65.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/nodes_health_check.cpp |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |65.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |65.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage_ut.cpp |65.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |65.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |65.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |65.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.pb.cc |65.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |65.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |65.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_login_ut.cpp |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |65.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |65.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |65.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_bcbbd2d8f2367d5f3ed5199234.o |65.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_f05ead59375a9db120b95dd730.o |65.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_1aeeb50f676472f975830c135d.o |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/join_perf/libkqp-tools-join_perf.a |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest 
|65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/combiner_perf/libkqp-tools-combiner_perf.a |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/mvp/meta/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |65.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |65.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/ydb_schema_query_actor.h_serialized.cpp |65.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |65.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.grpc.pb.cc |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |65.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |65.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_import_ut.cpp |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/tablet/ut/unittest |65.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/dstool/ydb-dstool |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/result_writer.cpp |65.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_2900a0c4957bb4f1bc1729508c.o |65.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_auth_processor.cpp |65.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_c386e2211742a44d16094d73d0.o |65.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_dac3ec236f3cba753ea226bb96.o |65.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |65.9%| [PR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/include/llvm/IR/Attributes.inc{, .d} |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |65.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_table_ut.cpp |65.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |65.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |65.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/ydb_result_set_old.pb.cc |65.9%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... grpc.pb.h} |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |65.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_query_ut.cpp |65.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_ut.cpp |65.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/sink.pb.{h, cc} |65.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/actors/codecs.cpp |65.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |65.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/cloud_user.{pb.h ... grpc.pb.h} |65.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_counters_ut.cpp |65.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/claims.{pb.h ... grpc.pb.h} |65.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/auth.{pb.h ... grpc.pb.h} |65.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |65.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |65.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... 
grpc.pb.h} |65.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |65.9%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |65.9%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |65.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |66.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |66.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/auth/gtest |66.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |66.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |66.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |66.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |66.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |66.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |66.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |66.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |66.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |66.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |66.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/leader_election.cpp |66.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.{pb.h ... grpc.pb.h} |66.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |66.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |66.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |66.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |66.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |66.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/kqp_indexes/unittest |66.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hasitems.cpp |66.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |66.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |66.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... 
grpc.pb.h} |66.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |66.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |66.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.pb.cc |66.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |66.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |66.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |66.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ticket_parser_ut.cpp |66.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |66.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_ut.cpp |66.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_init_schema.cpp |66.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/json/json_ut.cpp |66.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |66.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_join_ut.cpp |66.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |66.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |66.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_combine_ut.cpp |66.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} |66.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/task_get.cpp |66.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_script_executions.cpp |66.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/ut/utils/libcomp_nodes-ut-utils.a |67.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |67.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/surg/main.cpp |67.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |67.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |67.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |67.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |67.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |67.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |67.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |67.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |67.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |67.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |67.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/streaming/streaming |67.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |67.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |67.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |67.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |67.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |67.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/libpy3streaming.global.a |67.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/objcopy_1ac33161075322a9a0efb211c2.o |67.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |67.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |67.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/workload/libpy3stress-streaming-workload.global.a |67.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_keyvalue_v1.{pb.h ... grpc.pb.h} |67.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old__intpy3___pb2.py.p5ju.yapyc3 |67.3%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/29dbda6d5a6888d44695a7480c_raw.auxcpp |67.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_apply.cpp |67.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover_ut.cpp |67.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_locks_helper.cpp |67.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/topic_kafka/tests/py3test |67.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest |67.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |67.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_just.cpp |67.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/source.pb.{h, cc} |67.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... 
grpc.pb.h} |67.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |67.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/minikql_compile/ut/unittest |67.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_bridge.pb.{h, cc} |67.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config__intpy3___pb2.py.p5ju.yapyc3 |67.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction__intpy3___pb2_grpc.py.p5ju.yapyc3 |67.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |67.7%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config__intpy3___pb2_grpc.py.p5ju.yapyc3 |67.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |67.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_6d8369510b03c08a300f2e2657.o |67.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_7d0deb4120fbddf720c11b5358.o |67.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_e1e64d508ce59834ec0a40f731.o |67.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/5c5fdf614c3039a8dba94a4f38_raw.auxcpp |67.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_1161ad4136b0254d3201b549ad.o |67.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_c77713875cf17988efd8fc0fb3.o |67.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_0ec6204c03bcc69b725a944498.o |67.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_bac9f9173c971a3536b196d071.o |67.8%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config__intpy3___pb2.py.p5ju.yapyc3 |67.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics__intpy3___pb2.py.p5ju.yapyc3 |67.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |67.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |67.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |67.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |68.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_debug.cpp |68.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |68.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction__intpy3___pb2.py{ ... 
i} |68.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |68.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chopper.cpp |68.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/fetcher.cpp |68.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/workload/libpy3stress-kafka-workload.global.a |68.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/grpc_pq_write.cpp |68.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_combine.cpp |68.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |68.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense.cpp |68.0%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |68.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_div.cpp |68.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |68.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |68.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_enumerate.cpp |68.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut_pg/unittest |68.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut_pg/unittest |68.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut_pg/unittest |68.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut_pg/unittest |68.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |68.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut_pg/unittest |68.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut_pg/unittest |68.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut_pg/unittest |68.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut_pg/unittest |68.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut_pg/unittest |68.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |68.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |68.1%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |68.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut_pg/unittest |68.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_metrics_actor.cpp |68.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_7f9e816a97aaeee837ac316091.o |68.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.pb.cc |68.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a38b1580810a6e4b419da99dcf.o |68.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_9ea5b1fb7a4f8e1b0b8d7cf345.o |68.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a5874452d3dbd6f6e49cd08be6.o |68.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/schema_actors.cpp |68.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |68.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_b9fd5c62781ec3b78d111a0ba7.o |68.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |68.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold.cpp |68.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold1.cpp 
|68.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.grpc.pb.cc |68.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_debug_v1.{pb.h ... grpc.pb.h} |68.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_if.cpp |68.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |68.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/sqs_topic_proxy.cpp |68.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_metadata__intpy3___pb2.py.p5ju.yapyc3 |68.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... results_accumulator.log} |68.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_decimal.cpp |68.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} |68.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/events.cpp |68.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |68.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |68.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |68.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |68.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |68.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |68.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_auth_actor.cpp |68.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |68.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |68.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |68.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_base.cpp |68.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_14c03c6aecffbe39cb01ddf2ed.o |68.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/bin/main.cpp |68.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_40779f0570229cef213050a4fa.o |68.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |68.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |68.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |68.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_d52256d4fa9895f38df6030445.o |68.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |68.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |68.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/proxy/proxy.cpp |68.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |68.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |68.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |68.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |68.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |68.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |68.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/control.cpp |68.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/events.pb.{h, cc} |68.7%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |68.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_simple.cpp |68.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_rows_formatter.cpp |68.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_long_tx_service.cpp |68.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |68.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_rh_hash.cpp |68.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |68.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_narrow_map.cpp |68.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/auth/ydb-public-sdk-cpp-tests-integration-auth |68.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reduce.cpp |68.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |68.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |68.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map.cpp |68.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/4342cd9f302f261f8b1a8137d8_raw.auxcpp |68.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/cancel_tx_ut.cpp |68.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |68.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/objcopy_599af7074d669a6697054e1001.o |68.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |68.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/objcopy_388676493f4fc142dc0926df96.o |68.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/objcopy_bb95af667e01d0dbfb707dfb90.o |68.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/read_info_actor.cpp |68.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |68.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/proto/ymq.{pb.h ... grpc.pb.h} |68.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_sort.cpp |69.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/objcopy_7406de026bf25e30e96a88517d.o |69.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_weakmember.cpp |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |69.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/security/ydb-tests-functional-security |69.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.pb.cc |69.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/attributes/ut/unittest |69.1%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/mypy-protobuf/bin/protoc-gen-mypy/protoc-gen-mypy |69.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... 
grpc.pb.h} |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/object_storage_listing_ut.cpp |69.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_safe_circular_buffer.cpp |69.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_null.cpp |69.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_heap.cpp |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.grpc.pb.cc |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |69.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |69.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |69.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_proxy.grpc.pb.cc |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/update_offsets_in_transaction_actor.cpp |69.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.grpc.pb.cc |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/replication/grpc_service.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.pb.cc |69.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.grpc.pb.cc |69.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/tool |69.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |69.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_filter.cpp |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/flat_ut.cpp |69.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/scheduler/libproviders-dq-scheduler.a |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/common/service.cpp |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_actor.cpp |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |69.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/global_worker_manager/libproviders-dq-global_worker_manager.a |69.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |69.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/metrics/libproviders-dq-metrics.a |69.0%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags__intpy3___pb2.py{ ... i} |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_partitioned_executer.cpp |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_groups_actor.cpp |69.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/raw_socket/ut/buffered_writer_ut.cpp |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/object.cpp |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_253d734e8c901d319d84fcc6e9.o |69.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_303f7409bfab4277e367bbd11a.o |69.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_363b5875cc5c5e5745458b16b8.o |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |69.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_e2a089b95d9316f6e26025d3e3.o |69.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |69.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/manager.cpp |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/send_message.cpp |69.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |69.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/yt/libdq-actors-yt.a |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/shutdown/events.cpp |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/kesus/grpc_service.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_seat.cpp |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_registry.cpp |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/locks_ut.cpp |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |69.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_list_groups_actor.cpp |69.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_double_indexed/unittest |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/maintenance/grpc_service.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_log_codec.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/appendable_compressed_file.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |69.1%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py{ ... i} |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/run/run_ut.cpp |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |69.1%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py{ ... i} |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/common.cpp |69.1%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py.p5ju.yapyc3 |69.1%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |69.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... 
grpc.pb.h} |69.1%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2_grpc.py.p5ju.yapyc3 |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp.cpp |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/restore.cpp |69.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/checker_secret.cpp |69.0%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/c5aa441d48d4124514e9d62ba2_raw.auxcpp |69.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.{pb.h ... grpc.pb.h} |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/tablet/ydb_tablet.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |69.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |69.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |69.1%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/e6ce42a762195cf7e946ca411e_raw.auxcpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |69.1%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/node_broker_workload |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |69.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/access_service.{pb.h ... grpc.pb.h} |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |69.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/connector.pb.{h, cc} |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |69.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker.cpp |69.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |69.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.grpc.pb.cc |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |69.1%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/yql_pq_expr_nodes.{gen.h ... 
defs.inl.h} |69.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/tsserver/main.cpp |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/pdisks.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |69.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/tsserver/tsserver |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |68.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |68.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/bin/mvp_meta |68.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |68.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/kqprun/tests/py3test |68.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |68.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |68.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |68.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |68.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |68.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.grpc.pb.cc |69.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tostring.cpp |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/audit_denylist.cpp |69.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/mvp/core/ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |69.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/mon_ut.cpp |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.0%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... 
grpc.pb.h} |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/repl/ut/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/repl/ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/repl/ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/repl/ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/repl/ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/repl/ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/repl/ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/repl/ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/repl/ut/unittest |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_e7dfca3c87220ea0ed36a65f9f.o |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_643fa2679e88d9b2d33558b050.o |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_22edc4e58ff43cb5e83c9bbe2c.o |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_external_table.cpp |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/repl/ut/unittest |69.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |68.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/daf02fd86bb7e2296f1437ae1f_raw.auxcpp |68.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |68.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |68.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |68.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |68.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |68.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |68.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |68.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} |69.0%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/yql_s3_expr_nodes.{gen.h ... 
defs.inl.h} |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/dynamic_config/grpc_service.cpp |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_3d6916930a438b51675ef6dda7.o |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_64cecb639c5f85fbf868097a08.o |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_93dc3386250916dfae1ecb9b13.o |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |69.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |69.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |69.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/c5aa441d48d4124514e9d62ba2_raw.auxcpp |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |69.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/0dcc46b1d394aa60fd3d37d468_raw.auxcpp |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_e89cf02a9ed3d3ce4d135f1b6a.o |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_8ebbbeea46de68e6f72977a547.o |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_dc090340e6aea1b64bea92b3f8.o |69.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/e6ce42a762195cf7e946ca411e_raw.auxcpp |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_0c11f1377612879bf0a028062e.o |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_ecce6cdc5283ee9e09809febc9.o |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |69.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_9001a43ebb2f39da4516c33deb.o |69.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/objcopy_b500d4c016dd56011b777a1283.o |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |69.1%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/cfg |69.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |69.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/sensitive.{pb.h ... grpc.pb.h} |69.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |69.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/options.{pb.h ... grpc.pb.h} |69.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} |69.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |69.1%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/expr_nodes/dqs_expr_nodes.{gen.h ... defs.inl.h} |69.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.{pb.h ... grpc.pb.h} |69.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hullop/ut/unittest |69.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.grpc.pb.cc |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hullop/ut/unittest |69.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hullop/ut/unittest |69.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hullop/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hullop/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hullop/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |69.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |69.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |69.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |69.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |69.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |69.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |69.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |69.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |69.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |69.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hullop/ut/unittest |69.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hullop/ut/unittest |69.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hullop/ut/unittest |69.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |69.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |69.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |69.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |69.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hullop/ut/unittest |69.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |69.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |69.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming |69.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |69.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |69.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |69.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |69.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... 
grpc.pb.h} |69.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |69.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |69.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |69.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/service/manager.cpp |69.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |69.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} |69.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |69.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |69.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.pb.cc |69.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |69.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |69.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/event.pb.{h, cc} |69.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |69.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |69.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |69.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |69.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |69.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |69.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |69.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |69.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.{pb.h ... grpc.pb.h} |69.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |69.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |69.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |69.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |69.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/resource.{pb.h ... grpc.pb.h} |69.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |69.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |69.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |69.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.{pb.h ... grpc.pb.h} |69.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.grpc.pb.cc |69.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |69.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/formats/arrow/ut/unittest |69.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |69.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |69.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |69.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... 
results_accumulator.log} |69.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_large.cpp |69.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest |69.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_counters.cpp |69.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |69.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/percentiles.cpp |69.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_common.cpp |69.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |69.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |69.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |69.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |69.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write_actor.cpp |69.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |69.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |69.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/audit/audit.cpp |69.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/memory_stats_ut.cpp |69.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/localdb_ut.cpp |69.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/statestorage_guardian_impl_ut.cpp |69.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |69.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/fulltext_ut.cpp |69.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_790c6ea4aad5e761d21421b25d.o |69.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_4e45fac9e6e2cbc502659b10eb.o |69.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_c19b3eb5266bf8e49b2b628bc2.o |69.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |69.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |69.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_grouptype_ut.cpp |69.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/kmeans_ut.cpp |69.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_auth.cpp |69.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/table_index_ut.cpp |69.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/partition_scale_manager.cpp |70.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/path_ut.cpp |70.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |70.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |70.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/logoblob_ut.cpp |70.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_kqp.cpp |70.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |70.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_common.cpp |70.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |70.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |70.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |70.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |70.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/delete_message.cpp |70.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |70.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |70.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |70.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |70.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_8685c3ae88e5169a5acffc7bc4.o |70.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/ut_labeled.cpp |70.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_d191482d8b66f1c03ea8df56d3.o |70.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_ff581f3cff717ab223922f0cd8.o |70.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |70.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |70.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/ut_service/slow/kqp_federated_query_actors_ut.cpp |70.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |70.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/libpy3tools-ydb_serializable-replay.global.a |70.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/config.pb.{h, cc} |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |70.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/objcopy_efd352795aee39d7ac6e163a2d.o |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export_fs.cpp |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/control/lib/generated/codegen/main.cpp |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut/ydb-core-base-ut |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |70.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |70.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_withcontext.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/logs/log.cpp |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_tests/py3test |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay_yt/main.cpp |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |70.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.{pb.h ... grpc.pb.h} |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/node_tablet_monitor.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |70.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_31d605682329607481eb568ed0.o |70.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_245adf3e28f56e6467e034d9f2.o |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/offload_actor.cpp |70.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_7648c2519d02b8456f762efc4b.o |70.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/statestorage_ut.cpp |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_monitoring.cpp |70.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_589d529f9477963cf67237781c.o |70.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_82d6d29ac7be3798b7e748facc.o |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/grpc_pq_read.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |70.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_b1ab101896e634020e0c6ffeaf.o |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |70.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app_sendreadset.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/common/common_app.cpp |70.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/mock/libcommon-http_gateway-mock.a |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_release.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |70.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/storage_meta.pb.{h, cc} |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sequence/unittest |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sequence/unittest |70.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |70.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_f022524c80d73c5ff5d59211dd.o |70.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_6dc907e08e2d6bd66a6c998081.o |70.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_4d329553dae503930cad7a3eec.o |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/node_whiteboard.cpp |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/high_load/unittest |70.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h} |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controlling_service_base.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__update_slot_status.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serializable/py3test |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/probes.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |70.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |70.3%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/yql_pg_expr_nodes.{gen.h ... defs.inl.h} |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_client_cache.cpp |70.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... 
grpc.pb.h} |70.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |70.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |70.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/util.cpp |70.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |70.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |70.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} |70.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |70.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |70.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |70.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/serializer/stream.cpp |70.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.pb.cc |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |70.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |70.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |70.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore_fs.cpp |70.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |70.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |70.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/testshard_workload/tests/py3test |70.3%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/graph/ut/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |70.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.{pb.h ... 
grpc.pb.h} |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/service_node/main.cpp |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |70.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_row_eggs.h_serialized.cpp |70.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/ut/message_id_deduplicator_ut.cpp |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |70.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/sqs_topic/receipt.cpp |70.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |70.5%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.pb.{h, cc} |70.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |70.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |70.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |70.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/shared_cache_events.h_serialized.cpp |70.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/local_discovery/grpc_service.cpp |70.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |70.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.{pb.h ... grpc.pb.h} |70.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |70.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |70.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |70.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |70.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |70.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |70.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |70.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |70.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |70.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |70.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |70.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/ut_helpers.cpp |70.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |70.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |70.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_committed.cpp |70.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |70.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} |70.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/initializer.cpp |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cluster_info_ut.cpp |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut_common.cpp |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |70.7%| [AR] {tool} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |70.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |70.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_maintenance_v1.{pb.h ... grpc.pb.h} |70.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/downtime_ut.cpp |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_tenants_ut.cpp |70.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/cdc/tests/py3test |70.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |70.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_0359848ae21601186c5b0d9873.o |70.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_277b7e8f79021687bec95be8db.o |70.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_afdf6d60c4f76ae91a235d460b.o |70.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_observer.cpp |70.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.{pb.h ... grpc.pb.h} |70.7%| [AR] {tool} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |70.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/combiner_perf/bin/main.cpp |70.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/backup/tests/py3test |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |70.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |70.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log} |70.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_misc.cpp |70.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |70.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |70.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_records.cpp |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |70.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut.cpp |70.8%| [PR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/yql_yt_expr_nodes.{gen.h ... defs.inl.h} |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |70.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.{pb.h ... grpc.pb.h} |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |70.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |70.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.grpc.pb.cc |70.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.pb.cc |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/kv/tests/py3test |70.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_backup_v1.{pb.h ... grpc.pb.h} |70.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |70.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kafka.grpc.pb.cc |70.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |70.9%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/type/dayofweek.{pb.h ... grpc.pb.h} |70.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |71.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/cancelation/cancelation.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |71.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |71.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/service/ext_counters.cpp |71.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/objcopy_caf222d14387d4810b5cb3e853.o |71.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/config_ut.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_pool.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_get_metrics.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.pb.cc |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/events/internal.cpp |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_writelog.cpp |71.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/http/ut/unittest |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.grpc.pb.cc |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.pb.cc |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/common/metadata_ut.cpp |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/common/encryption_ut.cpp |71.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/join_perf/bin/main.cpp |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.pb.cc |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.grpc.pb.cc |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.pb.cc |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__graceful_shutdown.cpp |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.grpc.pb.cc |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.pb.cc |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.pb.cc |71.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.grpc.pb.cc |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut_sequence/unittest |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |71.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/snapshot.cpp |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |71.0%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp/unittest |71.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log} |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp/unittest |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_snapshot_readonly.cpp |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/commit_offset_actor.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_init.cpp |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... grpc.pb.h} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |71.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/service_node/service_node |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/ut/consumer_offset_tracker_ut.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.grpc.pb.cc |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |71.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/common/ut/unittest |71.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_refresh.cpp |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/vslots.cpp |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.{pb.h ... 
grpc.pb.h} |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.grpc.pb.cc |71.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_stat.cpp |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/object.cpp |71.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |71.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |71.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.pb.cc |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.pb.cc |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/kafka_streams_test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/load_test/ut_ycsb.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/discovery/grpc_service.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |71.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_27c0687ceeb7ce4ff5e4cea90a.o |71.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_d68e1e5b762e412afe6a534487.o |71.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/objcopy_e0d6c3883613601e455029f31f.o |71.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_7eab954373d77ffb1fab95ca0d.o |71.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/libpy3kafka_streams_test.global.a |71.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |71.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.pb.cc |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.pb.cc |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.pb.cc |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.grpc.pb.cc |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.pb.cc |71.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.grpc.pb.cc |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/resource_pools/resource_pools.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/board_subscriber_ut.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |71.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/tpch/objcopy_7c9715e23edebba4ffb82d27d5.o |71.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/tpch/objcopy_385ba1144ebdaae0c967a41e83.o |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql.cpp |71.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/tpch/objcopy_84b6c628d15e9e575bec5be5c5.o |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.cc |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.grpc.pb.cc |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.grpc.pb.cc |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/persqueue.{pb.h ... grpc.pb.h} |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/partition_actor.cpp |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.grpc.pb.cc |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... 
grpc.pb.h} |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/show_create/table/tests/py3test |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_session_attach.cpp |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/preparation_controller.cpp |71.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |71.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |71.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_collect.cpp |71.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |71.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/backends.cpp |71.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_d709b1895f91108d9f51b703ea.o |71.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_ec9bc627b6d56d1a941c2b7e4f.o |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_large/ut_btree_index_large.cpp |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_reader/fetching_executor.cpp |71.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_dfbd751fc64901b06ded4354c8.o |71.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |71.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |71.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_replication.cpp |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/processor.cpp |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/lib/auth/auth_helpers.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_interval_summary.cpp |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/storage_type.{pb.h ... 
grpc.pb.h} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |71.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/table/show_create_table |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |71.1%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/table/show_create_table |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/export/session/task.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/mirror_describer.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.grpc.pb.cc |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/resource_preset.{pb.h ... grpc.pb.h} |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_login.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/behaviour.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/auth/grpc_service.cpp |71.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/local_session.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_update.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_comp.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... grpc.pb.h} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.{pb.h ... grpc.pb.h} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config__intpy3___pb2.py{ ... i} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/token_exchange_service.{pb.h ... 
grpc.pb.h} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.pb.cc |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.{pb.h ... grpc.pb.h} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.pb.cc |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op__intpy3___pb2.py{ ... i} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_metrics.cpp |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |71.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/annotations.{pb.h ... 
grpc.pb.h} |71.2%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/ut/ydb-core-persqueue-pqtablet-partition-ut |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/federated_queries/objcopy_600a00846e81e804b6821a1e7d.o |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |71.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/federated_queries/objcopy_cdb71a03369b35054573fbb9d8.o |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/federated_queries/objcopy_b44faf459bbd6fc49621c663b8.o |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |71.2%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/bac1e141ce77ba9e4c154c061f_raw.auxcpp |71.2%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/3058c699ce7778757ab83e8afa_raw.auxcpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/coordinator/ut/unittest |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config.pb.{h, cc} |71.2%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_intermediate.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_slice.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_import.pb.{h, cc} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/init/init.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.grpc.pb.cc |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.{pb.h ... grpc.pb.h} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.grpc.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_utils.cpp |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health__intpy3___pb2.py.p5ju.yapyc3 |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon__intpy3___pb2.py.p5ju.yapyc3 |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.pb.cc |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units__intpy3___pb2.py.p5ju.yapyc3 |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/backup.pb.cc |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console__intpy3___pb2.py{ ... i} |71.2%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp |71.2%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |71.2%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py{ ... i} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units__intpy3___pb2.py{ ... i} |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/federated_queries/ydb-tests-compatibility-federated_queries |71.2%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... defs.inl.h} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base__intpy3___pb2.py{ ... i} |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/profiler.pb.cc |71.2%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py.p5ju.yapyc3 |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_health__intpy3___pb2.py{ ... i} |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_kesus.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.pb.cc |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon__intpy3___pb2.py{ ... 
i} |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_reassign/unittest |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine__intpy3___pb2.py{ ... i} |71.2%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console__intpy3___pb2.py.p5ju.yapyc3 |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/query_interval.cpp |71.2%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_outset.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_cms.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_statistics_aggregator.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.grpc.pb.cc |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.grpc.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_mem_warm.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_dbase_scheme.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.grpc.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/config.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_exec_commit.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot.grpc.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.grpc.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/compile_cache/compile_cache.cpp |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/composite.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/export/session/control.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_connection.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |70.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |70.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.grpc.pb.cc |70.4%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/c5a1d987730a9719a91d55e40a_raw.auxcpp |70.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |70.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_auth_utils.cpp |70.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |70.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.pb.cc |70.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... 
results_accumulator.log} |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing.cpp |70.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |70.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |70.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |70.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.5%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console__intpy3___pb2.py{ ... i} |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.6%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py{ ... i} |70.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console__intpy3___pb2_grpc.py.p5ju.yapyc3 |70.7%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console__intpy3___pb2.py.p5ju.yapyc3 |70.7%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus__intpy3___pb2_grpc.py.p5ju.yapyc3 |70.7%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py.p5ju.yapyc3 |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/tests/functional/audit/py3test |70.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |70.7%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/bac1e141ce77ba9e4c154c061f_raw.auxcpp |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic/workload_topic |70.7%| [LD] {RESULT} $(B)/ydb/tests/stress/topic/workload_topic |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/common/ss_dialog.cpp |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_partition_stats/ut_top_cpu_usage.cpp |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/topic_workload |70.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/audit/py3test |70.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/objcopy_da2669c2228a88c83cd32d45da.o |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/objcopy_ce0222bab1634be9f9a52f715d.o |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/objcopy_ec94bbf9004678001f4c8195e3.o |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/slow/txusage_slow_ut.cpp |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |70.8%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |70.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |70.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/tablet/tx_session_timeout.cpp |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullstorageratio_ut.cpp |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_helper.cpp |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_03f75cad4510fd9d018635026c.o |70.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_64bde13108f9284b2e9f0bbb7a.o |70.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_52d3e6a0651990fc997ab40ba2.o |70.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_ce073e3cc612363936bdd04210.o |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_c43ce24509a50b033fa4050a33.o |70.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_dc1e8788b8287c02880cfe2814.o |70.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_6cfba3dbee97ec121b2f346459.o |70.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/audit_logins.cpp |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/c664ef6ca80e747b410e1da324_raw.auxcpp |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/http_service.cpp |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/pq_rl_helpers.cpp |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/bac1e141ce77ba9e4c154c061f_raw.auxcpp |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |70.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_response_tablet_distribution.cpp |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |70.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/utils.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/kqp_common.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/quoter/kesus_quoter_proxy.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker_ut.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/tablets/tablets.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_datashard_scan_response.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_init_schema.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_service.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/show_create/create_table_formatter.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/writer.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_0c451aebc6dafbdf0d9da2ab02.o |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_blob_ut.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_ut_pool.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/pqtablet/blob/ut/unittest |71.2%| [PB] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_mon.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_tablet.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/protos/out/out_cms.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_persqueue_v1.{pb.h ... grpc.pb.h} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/fq_private.pb.{h, cc} |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_responsiveness_pinger.cpp |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_table.cpp |71.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |71.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |71.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_coordination_v1.{pb.h ... grpc.pb.h} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/coordinator.cpp |71.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqrb/partition_scale_manager_graph_cmp.cpp |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_query_v1.{pb.h ... 
grpc.pb.h} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [PD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/yaml-config-protos.{self.protodesc, protosrc} |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/access.{pb.h ... grpc.pb.h} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/blockstore.{pb.h ... grpc.pb.h} |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/access_service.{pb.h ... grpc.pb.h} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |71.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/mlp/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/cms/console/ut_configs_dispatcher/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |71.1%| [PD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config.proto.{desc, 236947a227eabf309dc2ce63434b3df8.rawproto} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |71.1%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/d42f916259e88c99d94b15ec0e_raw.auxcpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [PD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config.proto.{desc, 236947a227eabf309dc2ce63434b3df8.rawproto} |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/ctas/tests/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test 
|71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/graph.pb.{h, cc} |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.grpc.pb.cc |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_ut_local.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |71.1%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/a8c0f40ab3816060c313900901_raw.auxcpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.pb.cc |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/reference.{pb.h ... grpc.pb.h} |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |71.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_page_label.cpp |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/service_account_service.{pb.h ... grpc.pb.h} |71.1%| [PD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/config.proto.{desc, 236947a227eabf309dc2ce63434b3df8.rawproto} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... grpc.pb.h} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |71.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sequenceshard/ut/unittest |71.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |71.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |71.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_replication.{pb.h ... grpc.pb.h} |71.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction__intpy3___pb2.py.p5ju.yapyc3 |71.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/operation_id.pb.{h, cc} |71.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |71.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/backup/backup_stress |71.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms__intpy3___pb2.py{ ... i} |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [PD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/yaml-config-protos.{self.protodesc, protosrc} |71.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |71.1%| [LD] {RESULT} $(B)/ydb/tests/stress/backup/backup_stress |71.1%| [PD] {RESULT} $(B)/ydb/library/yaml_config/protos/yaml-config-protos.{self.protodesc, protosrc} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags__intpy3___pb2.py.p5ju.yapyc3 |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2.py.p5ju.yapyc3 |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags__intpy3___pb2.py.p5ju.yapyc3 |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest 
|71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/objcopy_589315062f5401a368910248f0.o |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/objcopy_c114cbf6b820d92320c1e2c912.o |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/objcopy_61613f0bd98876f149d8574891.o |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest 
|71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_actor.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose__intpy3___pb2.py{ ... i} |71.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |71.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/test_shard.pb.cc |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/query/ut/unittest |71.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/query/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/query/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/query/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/query/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/query/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/query/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/query/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/query/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/query/ut/unittest |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |71.2%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_column_build_reboots/ut_column_build_reboots.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/gc.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.grpc.pb.cc |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tx_quoter_resource_update.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_keyvalue.{pb.h ... grpc.pb.h} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/events.pb.{h, cc} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.{pb.h ... 
grpc.pb.h} |71.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/yaml_config/ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/helpers/ut/ydb-core-kqp-compile_service-helpers-ut |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_finish_trasersal.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/incompatibility_rules_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/common.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_logstore.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_sys.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_reader.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/login_shared_func.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/actors/analyze_actor.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_schedule_traversal.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |71.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/service/service.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/partition_key_range/partition_key_range_sequence_ut.cpp |71.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_1de592266ca9bc1d10b20d8e9a.o |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_c4b248e24a6215fa53b9e5552d.o |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/config/ut/ydb-core-config-ut |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_e25036fa51e72ace049084c308.o |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |71.2%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/e9ba3ee2f0ee1966e63998b143_raw.auxcpp |71.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/utils/libcore-config-utils.a |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_coordinator.grpc.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/cdc/cdc |71.2%| [LD] {RESULT} $(B)/ydb/tests/stress/cdc/cdc |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_logical.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/schema.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/transfer/ut/row_table/unittest |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/large/ydb-tests-olap-large |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree.cpp |71.2%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/92868a760cbaac2b810517d2d5_raw.auxcpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/large/objcopy_65ca1c9a2830c14c52b6f356c1.o |71.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/transfer/transfer |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/large/objcopy_632c8f121033cba272f8b156b6.o |71.2%| [LD] {RESULT} $(B)/ydb/tests/stress/transfer/transfer |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/large/objcopy_a3b27567b6c6c0a98826db6a0d.o |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |71.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config__intpy3___pb2.py{ ... 
i} |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/ingress/blobstorage_ingress_ut.cpp |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config__intpy3___pb2.py.p5ju.yapyc3 |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config__intpy3___pb2.py.p5ju.yapyc3 |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/ingress/blobstorage_ingress_matrix_ut.cpp |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.grpc.pb.cc |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config__intpy3___pb2.py{ ... i} |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/allocation.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |71.2%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/transfer_workload |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.grpc.pb.cc |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_keyvalue_v2.{pb.h ... grpc.pb.h} |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/partition_key_range/ut/ydb-core-persqueue-public-partition_key_range-ut |71.2%| [PK] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |71.2%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py.p5ju.yapyc3 |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/serializer/parsing.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |71.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/partition_key_range/ut/unittest |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.pb.cc |71.2%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py.p5ju.yapyc3 |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_element.cpp |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_exists.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |71.2%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/e9ba3ee2f0ee1966e63998b143_raw.auxcpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics__intpy3___pb2.py{ ... 
i} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_import.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_group.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_compress.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/process.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |71.2%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.{gen.h ... defs.inl.h} |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/partition_scale_request.cpp |71.2%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/3058c699ce7778757ab83e8afa_raw.auxcpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromstring.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_view.pb.{h, cc} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/helpers/ut/kqp_compile_cache_helpers_ut.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/rate_limiter_resources.cpp |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.{pb.h ... grpc.pb.h} |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/data.pb.{h, cc} |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multimap.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/maintenance.grpc.pb.cc |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |71.2%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/topic_kafka_workload |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/google/type/timeofday.{pb.h ... grpc.pb.h} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... 
grpc.pb.h} |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_linear.cpp |71.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/92868a760cbaac2b810517d2d5_raw.auxcpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.pb.cc |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/device_perf_test.{pb.h ... grpc.pb.h} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |71.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |71.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |71.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |71.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |71.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |71.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |71.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |71.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |71.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_balancer_actor.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/rbo/objcopy_8a4839ce59e28719331c9dfe7e.o |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |71.3%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/serializable/objcopy_3ea8aa67e7c24c4f0e3b0406b9.o |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_445797246443360525d31550d1.o |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_51b071d7746089933668451b33.o |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor.cpp |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.grpc.pb.cc |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |71.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/e9ba3ee2f0ee1966e63998b143_raw.auxcpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/inotify.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.pb.cc |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_data.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/query_actor/ut/unittest |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.{pb.h ... grpc.pb.h} |71.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/3058c699ce7778757ab83e8afa_raw.auxcpp |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/folder_service.{pb.h ... grpc.pb.h} |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/transitional/folder_service.{pb.h ... grpc.pb.h} |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... 
grpc.pb.h} |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/deleting.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/tests/library/ut/py3test |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/dq_solomon_shard.pb.{h, cc} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/fmr.pb.{h, cc} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/fetcher.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.{pb.h ... grpc.pb.h} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/keyvalue/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/config/ut/main.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... grpc.pb.h} |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/rate_limiter/grpc_service.cpp |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/manager.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |71.4%| 
[PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group__intpy3___pb2.py{ ... i} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings__intpy3___pb2.py{ ... i} |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op__intpy3___pb2.py.p5ju.yapyc3 |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/build_index/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.pb.cc |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/console_service.{pb.h ... grpc.pb.h} |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/database.{pb.h ... grpc.pb.h} |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.grpc.pb.cc |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme__intpy3___pb2.py{ ... i} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/access_service.{pb.h ... grpc.pb.h} |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/groupinfo/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/groupinfo/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/groupinfo/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/groupinfo/ut/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/groupinfo/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/groupinfo/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/groupinfo/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/groupinfo/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/groupinfo/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/initialization.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kqp_helper.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/groupinfo/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_database.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/minikql_engine.pb.cc |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/url_matcher.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_status_proxy.{pb.h ... 
grpc.pb.h} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_table.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_api_versions_actor.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.pb.cc |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split_antlr4/SQLv1Antlr4Parser.pb.{code0.cc ... main.h} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_part_loader.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compile_service/kqp_compile_computation_pattern_service.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/viewer/json/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/fq.pb.{h, cc} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/kqp_query_svc/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |71.2%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/d52903a0693870f83d0bbe0ab8_raw.auxcpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/dqs.pb.{h, cc} |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_178e64ce5db822fc6aa8b3e608.o |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/keyvalue/grpc_service_v1.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_f9b0feecd0e36f08cbf5c53562.o |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_0a1f127d9343562caddfbacf79.o |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_b866963286293af0b6f2139fed.o |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config__intpy3___pb2.py.p5ju.yapyc3 |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |71.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/ctas/ctas |71.3%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info.{pb.h ... grpc.pb.h} |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk__intpy3___pb2.py{ ... i} |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/aae788a890ddcb1702c659c8aa_raw.auxcpp |71.3%| [LD] {RESULT} $(B)/ydb/tests/stress/ctas/ctas |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_86ad37399122e504f3e6d8378d.o |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_5a4a401f33f46c70417a65f584.o |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_e317764e105a7e9e48b67a7b7e.o |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_951c70889c9404d1662da27090.o |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |71.3%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/ctas_workload |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_ut.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing_app.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_debug.pb.{h, cc} |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_condense.cpp |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/counters_shard.pb.{h, cc} |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |71.3%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/olap_workload |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_test_shard.pb.{h, cc} |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.grpc.pb.cc |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/incrhuge/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/incrhuge/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/incrhuge/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/wardens/py3test |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/incrhuge/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/incrhuge/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_mediator.{pb.h ... grpc.pb.h} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/incrhuge/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/incrhuge/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/incrhuge/ut/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel_unstable/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/incrhuge/ut/unittest |71.5%| [PB] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py{, i} |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |71.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config.pb.{h, cc} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/incrhuge/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |71.5%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/expr_nodes/dq_expr_nodes.{gen.h ... defs.inl.h} |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |71.5%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/kafka_workload |71.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/mvp/oidc_proxy/ut/unittest |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |71.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.{pb.h ... grpc.pb.h} |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/metering_sink.cpp |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_helpers.cpp |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/list_topics/ut/unittest |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_cluster_state.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_change_backend.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_type_operation.pb.cc |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/shard_iterator.cpp |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... 
grpc.pb.h} |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/generic_manager.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |71.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/row_table/ydb-core-transfer-ut-row_table |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/lib/actors/pq_schema_actor.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_ping.cpp |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |71.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |71.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.grpc.pb.cc |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_yql_ut.cpp |71.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/accurate_accumulate/liblibrary-cpp-accurate_accumulate.a |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/jaeger_tracing/sampling_throttling_configurator.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_combine.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_pg_ut.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/profiler.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |71.5%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py{, i} |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app.cpp |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... 
grpc.pb.h} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_request.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/sourceid_info.h_serialized.cpp |71.3%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/config.pb.{h, cc} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc.pb.cc |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info.pb.cc |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_transaction_actor_sql.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_init.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/graph_params.pb.{h, cc} |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.pb.cc |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/unittests.pb.{h, cc} |71.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_yql.h_serialized.cpp |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_part_overlay.cpp |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |71.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_0aefef587c181350d3a25f70e0.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_965640ca94893d27c182c611e2.o |71.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_c068ee86eb127df13256bfbe45.o |71.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/linear_regression/liblibrary-cpp-linear_regression.a |71.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/writer/source_id_encoding.cpp |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... 
grpc.pb.h} |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compaction.cpp |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/trie.cpp |71.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |71.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.grpc.pb.cc |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_ack_timeout.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme__intpy3___pb2.py.p5ju.yapyc3 |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor_compaction_logic.h_serialized.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_tx_allocator.{pb.h ... grpc.pb.h} |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |71.5%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/protoc/plugins/grpc_cpp/grpc_cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/workload_mixed |71.4%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/workload_mixed |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.pb.cc |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_server.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/checker_access.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/accessor_init.cpp |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |71.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_blob_depot.grpc.pb.cc |71.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.pb.cc |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/storage/storage_stats.cpp |71.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.pb.cc |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |71.5%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2.py{ ... i} |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_tablet.pb.{h, cc} |71.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/config/init/ut/unittest |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/manager/alter_impl.cpp |71.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_txloglogic.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.{pb.h ... grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon/mon.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_topic_v1.{pb.h ... grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.h_serialized.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mon_alloc/stats.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/topic/tests/py3test |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/formats/arrow/serializer/native.cpp |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/cloud_service.{pb.h ... grpc.pb.h} |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/shared_sausagecache.cpp |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/access.cpp |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_collector.cpp |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |71.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_compaction_logic.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |71.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.grpc.pb.cc |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_backup.cpp |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/service/ut/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/service/ut/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/service/ut/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/service/ut/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/service/ut/unittest |71.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/benchmark/libcpp-testing-benchmark.a |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/service/ut/unittest |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_actor.cpp |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/service/ut/unittest |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/service/ut/unittest |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/kafka_transactions_coordinator.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/service/ut/unittest |71.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/benchmark/main/libtesting-benchmark-main.global.a |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/service/ut/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/backup/s3_path_style/unittest |71.5%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/service/sysview_service.cpp |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |71.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/user_account_service.{pb.h ... grpc.pb.h} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |71.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_dynamic_config_v1.{pb.h ... 
grpc.pb.h} |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__load_state.cpp |71.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |71.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |71.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/issue_id.pb.{h, cc} |71.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/config.cpp |71.0%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |71.1%| [PB] {tool} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |71.1%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cluster_state_info.{pb.h ... grpc.pb.h} |71.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/move_pdisk.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_factory.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config__intpy3___pb2.py{ ... i} |71.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |71.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |71.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |71.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |71.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2.py{ ... i} |71.1%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |71.1%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console.{pb.h ... 
grpc.pb.h} |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |71.2%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |71.1%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... grpc.pb.h} |71.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |71.1%| [PB] {tool} $(B)/ydb/core/protos/cluster_state_info.{pb.h ... grpc.pb.h} |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |71.2%| [PB] {tool} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |71.2%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2.py.p5ju.yapyc3 |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/trie.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_benchmark/main.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |71.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/filesystem/librestricted-boost-filesystem.a |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_helpers.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |71.2%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/perf/colons.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/base/generated/codegen/main.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |71.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/perf/main.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |71.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/appendable_compressed_file.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/inotify.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut_common.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/misc/serialize.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize_dump.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/tx_aggregate_data.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controlling_service_base.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |71.3%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controller.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |71.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_helpers.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_log_codec.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp 
|71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adaptive_hedging_manager.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |71.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jinja2cpp/libcontrib-libs-jinja2cpp.a |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backtrace.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group__intpy3___pb2.py.p5ju.yapyc3 |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_sequenceshard.grpc.pb.cc |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/database_basic_sausage_metainfo.grpc.pb.cc |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/common/events.cpp |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_stat_table_btree_index.cpp |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_callable.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.grpc.pb.cc |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old__intpy3___pb2.py{ ... i} |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |71.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config__intpy3___pb2.py{ ... i} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_html.cpp |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_accessor/cache_policy/policy.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.pb.cc |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |71.3%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py.siec.yapyc3 |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/oom/py3test |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_algo_ut.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |71.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py.siec.yapyc3 |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/ingress/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/ingress/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/ingress/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/ingress/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/ingress/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |71.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/grpc__intpy3___pb2.py{ ... 
i} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/ingress/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/ingress/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/ingress/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_configure.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/ingress/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.grpc.pb.cc |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/ingress/ut/unittest |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/logger_config.pb.{h, cc} |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ru_calculator/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.pb.cc |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_export.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_load_blob_queue.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/cluster_state_info.grpc.pb.cc |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/cache/pq_l2_cache.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_aggr_stat_response.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/behaviour.cpp |71.3%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/public/mlp/mlp_writer.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.grpc.pb.cc |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |71.4%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config__intpy3___pb2.py{ ... i} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__cut_tablet_history.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |71.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/kqp_expr_nodes.{gen.h ... defs.inl.h} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |71.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |71.4%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/grpc__intpy3___pb2.py{ ... i} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |71.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config__intpy3___pb2.py.p5ju.yapyc3 |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_helpers.cpp |71.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/grpc__intpy3___pb2.py.p5ju.yapyc3 |71.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/grpc__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... 
grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mul.cpp |71.4%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/29dbda6d5a6888d44695a7480c_raw.auxcpp |71.4%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/c110657bb6bb660f1355e297dd_raw.auxcpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |71.4%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/oltp_workload |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/request/config.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/cms_ut_common.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/database_service.{pb.h ... grpc.pb.h} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_rate_limiter_v1.{pb.h ... grpc.pb.h} |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export__intpy3___pb2.py.p5ju.yapyc3 |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/cms/sentinel_ut.cpp |71.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/health_check/health_check.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_counters.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/private/labeled_db_counters.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color__intpy3___pb2.py{ ... i} |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_dummy.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/blobsan/blobsan |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color__intpy3___pb2.py.p5ju.yapyc3 |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/cursor.pb.{h, cc} |71.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_59eb97971e5f83d3296e6c33b5.o |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_60e08504076128d310212c6460.o |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_5acd2383ed2cd599cfd64f7c8a.o |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |71.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/workload/libpy3show_create-view-workload.global.a |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_95b3eecc97c453f0c55c456659.o |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_3ddbad334a37a829b3772ddb05.o |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_4352b8b3e3cf61532c865b371b.o |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/d78d0f74a3f72be1016c0cf8cf_raw.auxcpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqrb/read_balancer__metrics.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |71.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/workload/libpy3stress-statistics_workload-workload.global.a |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/objcopy_b4ebb94deb4cea673457b77fcc.o |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/libpy3statistics_workload.global.a |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_scripting.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_interval_metrics.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/c110657bb6bb660f1355e297dd_raw.auxcpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/meta.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_errors/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_errors/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_errors/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/29dbda6d5a6888d44695a7480c_raw.auxcpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/actors/helpers.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/fetcher.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.pb.cc |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.pb.cc |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ydb_state_storage.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |71.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/yql_generic_expr_nodes.{gen.h ... defs.inl.h} |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... grpc.pb.h} |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |71.3%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py{ ... i} |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_sysview_processor.{pb.h ... grpc.pb.h} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_mirror3of4/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/usage/service.cpp |71.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_page_iface.h_serialized.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_prepend.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/credentials.pb.{h, cc} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/general_cache/usage/config.cpp |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_aggregate.cpp |71.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_operation.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/mon.{pb.h ... grpc.pb.h} |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_compute_events.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/run_ydb.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/ydb-dump.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/supported_codecs_fixture.cpp |71.3%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/supported_codecs.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/workload-transfer-topic-to-table.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/result_set_format/objcopy_8a4d9ee45734faa2e2e415766d.o |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/result_set_format/objcopy_3aec050ca8485886fd4b8eafb6.o |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/result_set_format/objcopy_8b4bbe6a7e2b5ae1e2417f51a2.o |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_external_table/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/ut/workload-topic.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sequenceproxy/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/defrag/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/query_compiler.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.grpc.pb.cc |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.4%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py{ ... i} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/db_counters.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/query_proccessor.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |71.3%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/acd2063e47ff389d5e0998ace2_raw.auxcpp |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings__intpy3___pb2.py.p5ju.yapyc3 |71.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/public_http/ut/unittest |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/manager/modification_controller.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.grpc.pb.cc |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/service.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/crossref.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py.p5ju.yapyc3 |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join_imp.cpp |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |71.3%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py.p5ju.yapyc3 |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/medium/objcopy_1583476a2a074be936cf5a393e.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/medium/objcopy_71b7c7df3e7853e6e7cd11e484.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/medium/objcopy_cc203073bb2a03b31e52a78f24.o |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base__intpy3___pb2.py.p5ju.yapyc3 |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/counters/proxy_counters.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_cms_v1.{pb.h ... grpc.pb.h} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... 
grpc.pb.h} |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant__intpy3___pb2.py.p5ju.yapyc3 |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters__intpy3___pb2.py{ ... i} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adaptive_hedging_manager.cpp |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/pgproxy/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/tests/kikimr_tpch/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp.cpp |71.4%| [EN] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/stock.h_serialized.{cpp, h} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/resource_broker_ut.cpp |71.4%| [EN] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/kv.h_serialized.{cpp, h} |71.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config__intpy3___pb2.py{ ... i} |71.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |71.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.pb.cc |71.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |71.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export__intpy3___pb2.py{ ... i} |71.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_table_v1.{pb.h ... grpc.pb.h} |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/iam_token.{pb.h ... grpc.pb.h} |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tracing_signals.{pb.h ... grpc.pb.h} |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/query_cache/py3test |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/actors/actors.h_serialized.cpp |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/comp_defrag.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/backpressure/ut_client/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_commit_actor.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/result_set_format/ydb-tests-compatibility-result_set_format |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_state_ut.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/query_replay.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/processor/tx_init.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_a457e57e9ccca716aa1224bf90.o |71.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_a14abb13ecebd457a15fc48470.o |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/limiter/grouped_memory/service/manager.cpp |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_f152d89e868e3e70c582478d88.o |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/probes.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_skiptake.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/change_exchange.pb.cc |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/providers/solomon/actors/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.grpc.pb.cc |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/folder.{pb.h ... grpc.pb.h} |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.grpc.pb.cc |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrapper.grpc.pb.cc |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/blobsan/main.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |71.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_6e0da74b1512d0ffe19c5dc500.o |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_8491a772a9425d10f304e6f0e9.o |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_fd8d9957a06c9923c501e36fd9.o |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_clickhouse_internal_v1.{pb.h ... grpc.pb.h} |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/jaeger_tracing/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/jaeger_tracing/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_visitall.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config__intpy3___pb2.py{, i} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/jaeger_tracing/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/jaeger_tracing/ut/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/pq_impl.cpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/jaeger_tracing/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/jaeger_tracing/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/jaeger_tracing/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/jaeger_tracing/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/jaeger_tracing/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |71.4%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console_config__intpy3___pb2.py{ ... 
i} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot_isolation/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/jaeger_tracing/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/query_replay/main.cpp |71.3%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/fb094c33644271733cf9dfed37_raw.auxcpp |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms__intpy3___pb2.py.p5ju.yapyc3 |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/import.{pb.h ... grpc.pb.h} |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... grpc.pb.h} |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.pb.h_serialized.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_switch.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.grpc.pb.cc |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_proxy.cpp |71.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |71.4%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/a8c0f40ab3816060c313900901_raw.auxcpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |71.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags.pb.cc |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.{pb.h ... 
grpc.pb.h} |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/sql/py3test |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... results_accumulator.log} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/snapshot.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/blobstorage_config__intpy3___pb2.py.siec.yapyc3 |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.pb.cc |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |71.3%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/util/ut/unittest |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/util/ut/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/simple_queue |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/util/ut/unittest |71.4%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/util/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/util/ut/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/util/ut/unittest |71.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_cdc_stream/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/util/ut/unittest |71.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/libpy3simple_queue.global.a |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/objcopy_6c8bedcdc8efb835a928b278ce.o |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/util/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_info.grpc.pb.cc |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_cdc_stream/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/util/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/task_result_write.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/retry_config.pb.{h, cc} |71.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_node_broker.pb.cc |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_datastreams_v1.{pb.h ... grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tooptional.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_linux.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |71.4%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/s3_backups_workload |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/util/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/manager.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |71.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} |71.4%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/c5a1d987730a9719a91d55e40a_raw.auxcpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/labels_maintainer.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/column_statistic_eval.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.pb.cc |71.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/generated/codegen/main.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_import_v1.{pb.h ... grpc.pb.h} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/backup_service.{pb.h ... grpc.pb.h} |71.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/unit/client/oauth2_token_exchange/helpers/libclient-oauth2_token_exchange-helpers.a |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/view/show_create_view |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |71.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/ydb-public-sdk-cpp-tests-integration-sessions |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources__intpy3___pb2.py.p5ju.yapyc3 |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |71.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/libpy3show_create_view.global.a |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/objcopy_9ccdc4f01b578a43bc35d4d519.o |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |71.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_unwrap.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/finalize_script_service/kqp_check_script_lease_actor.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_create_partitions_actor.cpp |71.4%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... 
grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.{pb.h ... grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ymq/ymq_proxy.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flatmap.cpp |71.4%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/d52903a0693870f83d0bbe0ab8_raw.auxcpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ymq/grpc_service.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_load.grpc.pb.cc |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_replicate.cpp |71.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/fb094c33644271733cf9dfed37_raw.auxcpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.pb.cc |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/actors/persqueue_utils.cpp |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_dbase_apply.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |71.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/a8c0f40ab3816060c313900901_raw.auxcpp |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |71.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/tests/postgres_integrations/library/ut/py3test |71.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_bs_controller.grpc.pb.cc |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/row_dispatcher/topic_session.cpp |71.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/sqs_topic/utils.cpp |71.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_info.{pb.h ... 
grpc.pb.h} |71.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |71.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_resolve.cpp |71.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/tenant_slot_broker__alter_tenant.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |71.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/c5a1d987730a9719a91d55e40a_raw.auxcpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rowlocks.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_configure.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/security/certificate_check/cert_check.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/fq/ydb_over_fq.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_manager.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/tx_analyze_deadline.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |71.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/d52903a0693870f83d0bbe0ab8_raw.auxcpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/executor.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/service.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_find_split_key.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_tiered_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/abstract/common.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |71.4%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config.{pb.h ... 
grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/keyvalue/keyvalue.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/normalizer/schema_version/version.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/service/add_data.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/s3_backups/objcopy_70be8d5dc43dbc1df67ecd59c9.o |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/s3_backups/objcopy_abdf75b6b1f064446bfb0de382.o |71.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/compatibility/s3_backups/objcopy_71f52eacd4ede06f6cee6faac3.o |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |71.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/resource_pool_classifiers/resource_pool_classifiers.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_265d7fd505d52534f38ea6fb7f.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_a52eb3c900a84eaad86a211549.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_40226ff8497733c6e798ee3940.o |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/compatibility/s3_backups/ydb-tests-compatibility-s3_backups |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/request/request_actor.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |71.4%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_12c0c2de60710b6e317b585312.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_698c9f8753a1b6a5d23e436b7f.o |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_2cc418e8604751e5b8f9029a81.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_b2bcc4e0bf3c59d73d91f4f423.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_f458fc1e40a33beb49b77512d1.o |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/olap/oom/objcopy_a0543c2dc30365e9b2ad3d0ca6.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_df0cb3f315162a3110ee243ecd.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_e0331f455507fe5ac3b71d0537.o |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup_collection/853739aab8b8326c327792b514_raw.auxcpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ydb/ydb_export.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup_collection/objcopy_4a2af4000c37712b7785122f9f.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup_collection/objcopy_0033302419990e7c37da7fa23e.o |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup_collection/objcopy_ab13df51a06a892447a4ac624e.o |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ydb-tests-olap |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/discovery/unittest |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel__intpy3___pb2.py{ ... i} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/sys_view/nodes/nodes.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources__intpy3___pb2.py{ ... i} |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.grpc.pb.cc |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/kafka_messages_int.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_metrics.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/node_broker__update_config_subscription.cpp |71.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_group/unittest |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_state.cpp |71.4%| [PB] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/backup_collection/ydb-tests-functional-backup_collection |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/metadata/manager.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_worker.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... grpc.pb.h} |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant__intpy3___pb2.py{ ... i} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/mind/hive/tx__delete_tablet.cpp |71.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... 
grpc.pb.h} |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/persqueue/pqtablet/partition/partition_write.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/group.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tobytes.cpp |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_1dba5118ef0a485f3bf803be50.o |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_yql.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_5954401d16336469a08f24b688.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_6e536fb2c379a4ebe79c499de8.o |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_28372cda055f052750260342ed.o |71.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_list.cpp |71.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_2d296dfaf373f7f15e6312517a.o |71.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |71.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |71.4%| [PY] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/protos/console_config__intpy3___pb2.py.p5ju.yapyc3 |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |71.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |71.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |71.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |71.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/viewer/tests/py3test |71.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |71.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |71.6%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/actors/actors.h_serialized.cpp |71.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags.grpc.pb.cc |71.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/bac1e141ce77ba9e4c154c061f_raw.auxcpp |71.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/c5aa441d48d4124514e9d62ba2_raw.auxcpp |71.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/92868a760cbaac2b810517d2d5_raw.auxcpp |71.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/e9ba3ee2f0ee1966e63998b143_raw.auxcpp |71.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/e6ce42a762195cf7e946ca411e_raw.auxcpp |71.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/c5a1d987730a9719a91d55e40a_raw.auxcpp |71.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/d52903a0693870f83d0bbe0ab8_raw.auxcpp |71.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |71.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/transfer/transfer |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/cdc/cdc |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/backup/backup_stress |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/table/show_create_table |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic/workload_topic |71.8%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/workload_mixed |71.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/3058c699ce7778757ab83e8afa_raw.auxcpp |71.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/fb094c33644271733cf9dfed37_raw.auxcpp |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/ctas/ctas |71.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/a8c0f40ab3816060c313900901_raw.auxcpp |71.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/c110657bb6bb660f1355e297dd_raw.auxcpp |71.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/29dbda6d5a6888d44695a7480c_raw.auxcpp |71.8%| [PD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config.proto.{desc, 236947a227eabf309dc2ce63434b3df8.rawproto} |71.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |71.8%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/c110657bb6bb660f1355e297dd_raw.auxcpp |71.8%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/c5a1d987730a9719a91d55e40a_raw.auxcpp |71.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console__intpy3___pb2.py{ ... i} |71.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config__intpy3___pb2.py.p5ju.yapyc3 |71.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.8%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/fb094c33644271733cf9dfed37_raw.auxcpp |71.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc__intpy3___pb2.py{ ... i} |71.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2.py.p5ju.yapyc3 |71.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags.pb.cc |71.8%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/e6ce42a762195cf7e946ca411e_raw.auxcpp |71.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py{ ... i} |71.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/generated/codegen/main.cpp |71.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info__intpy3___pb2.py{ ... i} |71.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config__intpy3___pb2.py{ ... i} |71.8%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/transfer_workload |71.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |71.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |71.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc__intpy3___pb2.py.p5ju.yapyc3 |71.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py.p5ju.yapyc3 |71.8%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/a8c0f40ab3816060c313900901_raw.auxcpp |71.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_drop.cpp |71.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info.{pb.h ... grpc.pb.h} |71.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |71.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |71.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.{pb.h ... 
grpc.pb.h} |71.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare.cpp |71.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare_scheme.cpp |71.8%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/ctas_workload |71.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_query.cpp |71.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/main.cpp |71.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/commands.cpp |71.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |71.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.9%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console__intpy3___pb2.py.p5ju.yapyc3 |71.9%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.8%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config__intpy3___pb2.py.p5ju.yapyc3 |71.9%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/92868a760cbaac2b810517d2d5_raw.auxcpp |71.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |71.9%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/e9ba3ee2f0ee1966e63998b143_raw.auxcpp |71.9%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/c5aa441d48d4124514e9d62ba2_raw.auxcpp |71.9%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |71.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |71.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |71.9%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags__intpy3___pb2.py.p5ju.yapyc3 |71.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |71.9%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2_grpc.py.p5ju.yapyc3 |71.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config__intpy3___pb2.py{ ... i} |71.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |71.9%| [PB] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config.pb.{h, cc} |72.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config__intpy3___pb2.py{ ... i} |72.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py{ ... i} |72.0%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/3058c699ce7778757ab83e8afa_raw.auxcpp |72.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2_grpc.py.p5ju.yapyc3 |72.0%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/29dbda6d5a6888d44695a7480c_raw.auxcpp |72.0%| [CC] {tool} $(B)/ydb/core/protos/grpc.pb.cc |72.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py{, i} |72.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/feature_flags__intpy3___pb2.py{ ... i} |72.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py.p5ju.yapyc3 |72.0%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal__intpy3___pb2.py{ ... 
i} |72.0%| [PY] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config__intpy3___pb2.py.siec.yapyc3 |72.1%| [CC] {tool} $(B)/ydb/core/protos/console.grpc.pb.cc |72.5%| [CC] {tool} $(B)/ydb/core/protos/console.pb.cc |72.5%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.{pb.h ... grpc.pb.h} |72.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |72.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/iam_token_service.{pb.h ... grpc.pb.h} |72.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |72.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.pb.cc |72.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.grpc.pb.cc |72.9%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus__intpy3___pb2.py.p5ju.yapyc3 |72.9%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/topic_workload |72.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.pb.cc |72.9%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console__intpy3___pb2_grpc.py.p5ju.yapyc3 |72.9%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/bac1e141ce77ba9e4c154c061f_raw.auxcpp |72.9%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/d52903a0693870f83d0bbe0ab8_raw.auxcpp |72.9%| [PD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/yaml-config-protos.{self.protodesc, protosrc} |72.9%| [PR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/4306a854d105ac9e8a68bf91ca_raw.auxcpp |73.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/apps/ydb/ut/parse_command_line.cpp |73.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |73.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |73.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |73.4%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.{pb.h ... grpc.pb.h} |73.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |73.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |73.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |73.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |73.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |73.7%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.{pb.h ... 
grpc.pb.h} |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/view/show_create_view |73.7%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/view/show_create_view |73.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |73.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |73.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/view/show_create_view |73.8%| [CC] {tool} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |73.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |73.8%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |73.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/viewer |73.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |74.0%| [CC] {tool} $(B)/ydb/core/protos/cluster_state_info.grpc.pb.cc |74.0%| [CC] {tool} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |74.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |74.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info.grpc.pb.cc |74.1%| [CC] {tool} $(B)/ydb/core/protos/grpc.grpc.pb.cc |74.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |74.1%| [CC] {tool} $(B)/ydb/core/protos/console_config.grpc.pb.cc |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/streaming/streaming |74.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.grpc.pb.cc |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/streaming/streaming |74.1%| [LD] {RESULT} $(B)/ydb/tests/stress/streaming/streaming |74.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.grpc.pb.cc |74.1%| [CC] {tool} $(B)/ydb/core/protos/cluster_state_info.pb.cc |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |74.1%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |74.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |74.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info.pb.cc |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/node_broker/node_broker |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |74.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/node_broker/node_broker |74.3%| [LD] {RESULT} $(B)/ydb/tests/stress/node_broker/node_broker |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kafka/kafka_streams_test |74.4%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/kafka_streams_test |74.4%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/kafka_streams_test |74.5%| [LD] {RESULT} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |74.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |74.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |74.6%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |74.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |74.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |74.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet/tablet_resolver.cpp |74.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |74.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |74.7%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |74.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |74.7%| [CC] {tool} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/workload_kv |74.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |74.7%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/workload_kv |74.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_08a4b5d38a76e21591db0c3424.o |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/workload_kv |74.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_f4b44a5d280d0f27f5ffd278e8.o |74.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_e2637cea0f2e4db109b364a246.o |74.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |74.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |75.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/audit/audit_config/audit_config.cpp |75.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |75.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |75.1%| [AR] {RESULT} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |75.1%| [CC] {tool} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |75.1%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |75.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/ds_table/accessor_subscribe.cpp |75.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/grpc_pq_schema.cpp |75.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |75.1%| [CC] {tool} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |75.1%| [AR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |75.1%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |75.1%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |75.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/tpch/tpch |75.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/public-sdk-cpp-tests-integration-sessions_pool |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ydb |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tools/sql2yql/sql2yql |75.1%| [CC] {tool} $(S)/ydb/core/control/lib/generated/codegen/main.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/audit/audit_config/audit_config.cpp |75.1%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |75.1%| [LD] {RESULT} 
$(B)/yql/essentials/tools/sql2yql/sql2yql |75.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |75.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |75.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |75.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |75.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/log_backend/log_backend_build.cpp |75.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |75.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kafka_proxy/actors/kafka_find_coordinator_actor.cpp |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |75.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/log_backend/log_backend_build.cpp |75.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/lib/generated/codegen/main.cpp |75.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |75.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |75.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |75.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |75.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |75.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |75.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/topic_data_ut.cpp |75.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/libpy3yaml-config-protos.global.a |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/bits_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ulid_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ui64id_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/circular_queue_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/wildcard_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/token_bucket_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hyperlog_counter_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/stlog_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/simple_cache_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/queue_oneone_inplace_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/queue_inplace_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/interval_set_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/page_map_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/lz4_data_generator_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/operation_queue_priority_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/operation_queue_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/viewer/viewer_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/log_priority_mute_checker_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/lf_stack_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/util/intrusive_stack_ut.cpp |75.8%| [CC] {tool} $(B)/ydb/core/protos/console_config.pb.cc |75.8%| [AR] {RESULT} $(B)/ydb/library/yaml_config/protos/libpy3yaml-config-protos.global.a |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_heap_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_fixed_hash_set_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cache_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fragmented_buffer_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/concurrent_rw_hash_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_cow_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/event_priority_queue_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/address_classifier_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fast_lookup_unique_list_ut.cpp |75.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hazard_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fast_tls_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |75.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/libpy3yaml-config-protos.global.a |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |75.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.pb.cc |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/ut/ydb-core-util-ut |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/events.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_connection_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/audit/audit_log_impl.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |75.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/manager.cpp |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/simple_queue |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/events.cpp |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/simple_queue |75.9%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/simple_queue |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/manager.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/certificate_check/cert_check.cpp |75.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/audit/libydb-core-audit.a |75.9%| [AR] {RESULT} $(B)/ydb/core/audit/libydb-core-audit.a |75.9%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |75.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/streaming_service.{pb.h ... grpc.pb.h} |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |75.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/other/mon_vdisk_stream.cpp |75.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_benches_ut.cpp |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/audit/audit_log_impl.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |75.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |75.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |75.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_generator.cpp |75.9%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a |75.9%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |75.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_check.cpp |75.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |75.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/federated_query/ut_service/fast/kqp_federated_query_actors_ut.cpp |76.0%| [CC] {tool} $(B)/ydb/core/protos/config.grpc.pb.cc |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |76.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |76.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.grpc.pb.cc |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/other/mon_vdisk_stream.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |76.0%| [CC] 
{BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/ut/graph_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |75.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_ut.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/join/kqp_complex_join_query_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |75.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_ut.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |75.9%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/yt/yt/core/libyt-yt-core.a |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |75.9%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |75.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_fulltext_index_build_reboots.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |75.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_defrag.cpp |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_defrag.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/rm_service/kqp_resource_estimation.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/request/config.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_resource_estimation.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/request/config.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/discovery_actor.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_cost_tracker.cpp |76.0%| [CC] {tool} $(B)/ydb/core/protos/msgbus.pb.cc |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/discovery_actor.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_cost_tracker.cpp |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.pb.cc |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_settings.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_settings.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/general_cache/usage/config.cpp |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/usage/common.cpp |76.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_federation_discovery_v1.{pb.h ... 
grpc.pb.h} |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/usage/config.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_alloc.cpp |76.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/usage/common.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_alloc.cpp |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |76.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |76.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |76.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config.pb.cc |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_internal_interface.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_internal_interface.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_scan.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_scan.cpp 
|76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_read_log.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery_read_log.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/config.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/config.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/general_cache/usage/service.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/usage/service.cpp |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/usage/config.cpp |76.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/usage/config.cpp |76.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |76.0%| [AR] {RESULT} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |76.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_linux.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cluster_state_info.pb.cc |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_linux.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info.pb.cc |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/common/row_dispatcher_settings.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_write.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/common/row_dispatcher_settings.cpp |76.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/common/liblibs-row_dispatcher-common.a |76.1%| [AR] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/common/liblibs-row_dispatcher-common.a |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/common/liblibs-row_dispatcher-common.a |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_writelog.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_read.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_writelog.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/usage/service.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_read.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/usage/service.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/jaeger_tracing/sampling_throttling_configurator.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/jaeger_tracing/sampling_throttling_configurator.cpp |76.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |76.1%| [AR] {RESULT} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/worker.cpp |76.1%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/worker.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_writer.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/certificate_check/cert_auth_utils.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor/usage/events.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_writer.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/usage/events.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_exec_commit_mgr.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_exec_commit_mgr.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_auth_utils.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/slide_limiter/usage/config.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/slide_limiter/usage/config.cpp |76.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |76.1%| [AR] {RESULT} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |76.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/tablet_killer.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/tablet_killer.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/limiter/grouped_memory/service/manager.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/manager.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/cluster_state_info.grpc.pb.cc |76.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/cluster_state_info.grpc.pb.cc |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/category.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/category.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/general_cache/service/manager.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/service/manager.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/formats/arrow/serializer/native.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/usage/events.cpp |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/general_cache/service/service.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/usage/events.cpp |76.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |76.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/common/service.cpp |76.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/service/service.cpp |76.1%| [AR] {RESULT} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/common/service.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/serializer/native.cpp |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |76.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |76.2%| [AR] {RESULT} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |76.2%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/persqueue_utils.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/persqueue_utils.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/node_checkers.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_backup.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/node_checkers.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_backup.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/serverless_proxy_config.grpc.pb.cc |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/queue_attributes.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/serialize_deserialize.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/queue_attributes.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_recovery.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/serialize_deserialize.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_delete.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/audit_logins.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_worker.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/slide_limiter/service/service.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_findlatest.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/audit_logins.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_delete.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_worker.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_findlatest.cpp |76.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |76.2%| [AR] {RESULT} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/slide_limiter/service/service.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/events_writer.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/events_writer.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/cancelation/cancelation.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/cancelation/cancelation.cpp |76.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |76.2%| [AR] {RESULT} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |76.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/http_service.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/http_service.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tablet/tablet_req_blockbs.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_log.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_log.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_common.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/incrhuge_keeper_common.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/slide_limiter/usage/service.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_planner_strategy.cpp |76.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/slide_limiter/usage/service.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor/service/service.cpp |76.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/config.pb.cc |76.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |76.2%| [AR] {RESULT} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_sectorrestorator.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/service/service.cpp |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_sectorrestorator.cpp |76.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |76.3%| [AR] {RESULT} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_requestimpl.cpp |76.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |76.3%| [AR] {RESULT} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_requestimpl.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/protos/config.pb.cc |76.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |76.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |76.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |76.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.grpc.pb.cc |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/pq_rl_helpers.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/pq_rl_helpers.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/limiter/grouped_memory/usage/config.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/config.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_compute_events.cpp |76.3%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/kqp/common/kqp_timeouts.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_events.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_timeouts.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/login_shared_func.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/login_shared_func.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/yaml_config_helpers.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_rebuildhistory.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_helpers.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_rebuildhistory.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/deprecated/yaml_config_parser.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/deprecated/yaml_config_parser.cpp |76.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |76.3%| [AR] {RESULT} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |76.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor/usage/abstract.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/usage/abstract.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/ydb/ydb.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/dlq_helpers.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_delete.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/dlq_helpers.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/ydb/ydb.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_delete.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_txloglogic.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_txloglogic.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/pdisk_log.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/public_http/http_req.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/pdisk_log.cpp |76.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/public_http/http_req.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/auto_config_initializer.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer.cpp |76.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_api_versions_actor.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_api_versions_actor.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/config_helpers.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/config_helpers.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/wilson_tracing_control.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/bridge.cpp |76.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |76.4%| [AR] {RESULT} 
$(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/bridge.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/wilson_tracing_control.cpp |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/limiter/grouped_memory/usage/service.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage_event_filter.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/limiter/grouped_memory/service/actor.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/usage/service.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_event_filter.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/audit_dml_operations.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.pb.cc |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/yql_single_query.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor/usage/service.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/yql_single_query.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/usage/service.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor/usage/config.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/service/actor.cpp |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor/usage/config.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/mock/dsproxy_mock.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/scope.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/yaml_config_parser.cpp |76.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/scope.cpp |76.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |76.4%| [AR] {RESULT} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |76.4%| [AR] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_parser.cpp |76.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/mock/dsproxy_mock.cpp |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |76.4%| [AR] {RESULT} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/formats/arrow/serializer/abstract.cpp |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/serializer/abstract.cpp |76.4%| [AR] {default-linux-x86_64, 
release, asan} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |76.4%| [AR] {RESULT} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_find_coordinator_actor.cpp |76.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |76.4%| [AR] {RESULT} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |76.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_find_coordinator_actor.cpp |76.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |76.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_entryserialize.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_entryserialize.cpp |76.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |76.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_distributed_config.grpc.pb.cc |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/version/version.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/version/version.cpp |76.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/version/libversion.a |76.5%| [AR] {RESULT} $(B)/ydb/core/driver_lib/version/libversion.a |76.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_cluster_discovery/counters.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/counters.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/shutdown/controller.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_calls.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_calls.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/shutdown/controller.cpp |76.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |76.5%| [AR] {RESULT} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |76.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/txn_actor_response_builder.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/txn_actor_response_builder.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/base/blobstorage_events.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/queues/fifo/queries.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/audit/heartbeat_actor/heartbeat_actor.cpp |76.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |76.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |76.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |76.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |76.5%| [AR] {RESULT} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |76.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |76.5%| [AR] {RESULT} 
$(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/base/blobstorage_events.cpp |76.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/audit/heartbeat_actor/heartbeat_actor.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/queues/fifo/queries.cpp |76.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/workers_pool.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/workers_pool.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/audit_log.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/audit_log.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_whoami.cpp |76.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/version/version_definition.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_internal.pb.cc |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/version/version_definition.cpp |76.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/apps/version/libversion_definition.a |76.5%| [AR] {RESULT} $(B)/ydb/apps/version/libversion_definition.a |76.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/apps/version/libversion_definition.a |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/formats/arrow/arrow_helpers.cpp |76.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/arrow_helpers.cpp |76.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |76.5%| [AR] {RESULT} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |76.5%| [CC] {tool} $(B)/ydb/core/protos/config.pb.cc |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |76.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_event_impl.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.pb.cc |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_event_impl.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage_monitoring.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_monitoring.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/process.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/process.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/opaque_path_description.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/opaque_path_description.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/formats/arrow/program/execution.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/program/execution.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/auth_config_validator.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/auth_config_validator.cpp |76.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |76.6%| [AR] {RESULT} 
$(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |76.6%| [AR] {tool} $(B)/ydb/core/protos/libydb-core-protos.a |76.6%| [AR] {RESULT} $(B)/ydb/core/protos/libydb-core-protos.a |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/general_cache/service/counters.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/general_cache/service/counters.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/events.cpp |76.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |76.6%| [AR] {RESULT} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/events.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/auth_config_validator_ut/auth_config_validator_ut.cpp |76.6%| [LD] {tool} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |76.6%| [LD] {RESULT} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/queues/std/queries.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/auth_config_validator_ut/auth_config_validator_ut.cpp |76.6%| [LD] {tool} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |76.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/core_validators.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config.grpc.pb.cc |76.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |76.6%| [AR] {RESULT} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/core_validators.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.grpc.pb.cc |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/validator.cpp |76.6%| [LD] {tool} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |76.6%| [LD] {RESULT} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |76.6%| [LD] {tool} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/bridge/syncer/syncer.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator.cpp |76.6%| [LD] {RESULT} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |76.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/priorities/usage/config.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/usage/config.cpp |76.6%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |76.6%| [AR] {RESULT} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/pdisk_write.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/queues/std/queries.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugedefs.cpp |76.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/pdisk_write.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_event_filter.cpp |76.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugedefs.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_event_filter.cpp |76.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_scan_common.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_common.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/service/service.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/priorities/usage/service.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/usage/service.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/board_replica.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/service/service.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/priorities/service/manager.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/service/manager.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_costmodel.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_replica.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/bridge/syncer/syncer.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_costmodel.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/column_shard_config_validator.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/registry.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/column_shard_config_validator.cpp |76.7%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/lib/generated/control_board_proto.h |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/registry.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/audit_helpers/audit_helper.cpp |76.7%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.h |76.7%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/generated/dispatch_op.h |76.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |76.7%| [AR] {RESULT} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |76.7%| [PR] 
{BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/generated/dispatch_op.h |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/kqp.cpp |76.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/init/dummy.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage_proxy.cpp |76.7%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |76.7%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/dummy.cpp |76.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_proxy.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/kqp.cpp |76.7%| [PB] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/helpers.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/helpers.cpp |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/audit_helpers/audit_helper.cpp |76.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap.cpp |76.7%| [AR] {RESULT} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |76.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |76.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/generated/codegen/codegen |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap.cpp |76.8%| [PB] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... 
grpc.pb.h} |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |76.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/priorities/service/service.cpp |76.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |76.8%| [AR] {RESULT} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/priorities/service/service.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_proxy.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_proxy.cpp |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |76.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen |76.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |76.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |76.8%| [AR] {RESULT} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/dataset.cpp |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/dataset.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/nodes_manager.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/nodes_manager.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kafka_metrics.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/formats/arrow/ut/ut_slicer.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_scan_events.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/ut/ut_slicer.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_metrics.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_events.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events.cpp |76.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |76.8%| [AR] {RESULT} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_pdiskfit/lib/basic_test.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/lib/basic_test.cpp |76.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |76.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/library/compatibility/configs/dump/dumper/main.cpp |76.8%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/validators.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/validators.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |76.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/validation/libcore-config-validation.a |76.8%| [AR] {RESULT} $(B)/ydb/core/config/validation/libcore-config-validation.a |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/ut/main.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |76.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/ut/main.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_metrics_actor.cpp |76.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_metrics_actor.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |76.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/replica.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/replica.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/grpc.pb.cc |76.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.pb.cc |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compile_service/helpers/ut/kqp_compile_cache_helpers_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/helpers/ut/kqp_compile_cache_helpers_ut.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon_alloc/stats.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/stats.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon/audit/audit_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/audit/audit_ut.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ut_helpers/mock_service.cpp |76.9%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ut_helpers/mock_service.cpp |76.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |76.9%| [AR] {RESULT} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |76.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon/ut_utils/ut_utils.cpp |76.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |76.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/ut_utils/ut_utils.cpp |76.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |76.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |76.9%| [AR] {RESULT} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |76.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/fqrun/src/common.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/src/common.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/validation/validators_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/validation/validators_ut.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_settings.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_settings.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/cms/node_checkers.h_serialized.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/write.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/write.cpp |76.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |76.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console.pb.cc |76.9%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.pb.cc |76.9%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console.grpc.pb.cc |77.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console.grpc.pb.cc |77.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/grpc.grpc.pb.cc |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/limiter/grouped_memory/ut/ut_manager.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/grpc.grpc.pb.cc |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_index_record.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/ut/ut_manager.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console_config.grpc.pb.cc |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_index_record.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |77.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.grpc.pb.cc |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.grpc.pb.cc |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_ut.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_utils.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_utils.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/etcd_proxy/proxy.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/proxy.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/error.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/error.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/fqrun/src/actors.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/src/actors.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/logs/log.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/logs/log.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/column.cpp |77.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |77.0%| [AR] {RESULT} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/statuses.cpp |77.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/storage/chunks/column.cpp |77.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/statuses.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/meta.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/meta.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/merged_column.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/merged_column.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/logic.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request.cpp |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request.cpp |77.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |77.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/logic.cpp |77.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/merger.cpp |77.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |77.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/merger.cpp |77.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |77.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |77.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/client/grpc_client.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/client/grpc_client.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/secure_protobuf_printer.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/secure_protobuf_printer.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/snapshot_scheme.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/base/msgbus.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_export.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/snapshot_scheme.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/meta.cpp |77.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/base/libpublic-lib-base.a |77.1%| [AR] {RESULT} $(B)/ydb/public/lib/base/libpublic-lib-base.a |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/base/msgbus.cpp |77.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_export.cpp |77.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/meta.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/client/msgbus_client.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/client/msgbus_client.cpp |77.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |77.1%| [AR] {RESULT} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |77.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/fqrun/src/fq_runner.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/src/fq_runner.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__update_config_subscription.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__update_config_subscription.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_operation.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/ydb/ut/parse_command_line.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/context.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/context.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_operation.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/ydb/ut/parse_command_line.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/shared_sausagecache.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/shared_sausagecache.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/action.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/action.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__graceful_shutdown.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__graceful_shutdown.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_object_storage.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_object_storage.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/http/types.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console_config.pb.cc |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_scripting.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/http/types.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/protos/out/out.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_scripting.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config.pb.cc |77.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/out/libcore-protos-out.a |77.1%| [AR] {RESULT} $(B)/ydb/core/protos/out/libcore-protos-out.a |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_diff.cpp |77.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/protos/out/out.cpp |77.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_diff.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/error/error.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/monitoring/grpc_service.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/view/grpc_service.cpp |77.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/services/test_shard/grpc_service.cpp |77.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/error/libcore-ymq-error.a |77.1%| [AR] {RESULT} $(B)/ydb/core/ymq/error/libcore-ymq-error.a |77.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |77.2%| [AR] {RESULT} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |77.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/error/libcore-ymq-error.a |77.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/view/libydb-services-view.a |77.2%| [AR] {RESULT} $(B)/ydb/services/view/libydb-services-view.a |77.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/test_shard/libydb-services-test_shard.a |77.2%| [AR] {RESULT} $(B)/ydb/services/test_shard/libydb-services-test_shard.a |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/error/error.cpp |77.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/view/libydb-services-view.a |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |77.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/test_shard/libydb-services-test_shard.a |77.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/view/grpc_service.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/monitoring/grpc_service.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/test_shard/grpc_service.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/collection.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/collection.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/filtered_scheme.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/predicate/range.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/filtered_scheme.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__update_config.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/predicate/range.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__update_config.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/partition_writer_cache_actor.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/abstract_scheme.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/partition_writer_cache_actor.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/abstract_scheme.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/portion_index.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/portion_index.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__init_scheme.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__init_scheme.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/query.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__load_state.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/query.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__load_state.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/mind/node_broker__update_epoch.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__update_epoch.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/error.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/error.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__migrate_state.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__migrate_state.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/console_dumper.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/console_dumper.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/column_features.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/msgbus.pb.cc |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/column_features.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/request.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/request.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/msgbus.pb.cc |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/error.cpp |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/logic.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/error.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/logic.cpp |77.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |77.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |77.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |77.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/counters.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/control/lib/immediate_control_board_impl.cpp |77.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/counters.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/lib/immediate_control_board_impl.cpp |77.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |77.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/filtered_scheme.cpp |77.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |77.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/lib/libcore-control-lib.a |77.3%| [AR] {RESULT} $(B)/ydb/core/control/lib/libcore-control-lib.a |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/filtered_scheme.cpp |77.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/http/xml.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/http/xml.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/constructor.cpp |77.3%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/constructor.cpp |77.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |77.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |77.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/configurator.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/configurator.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/partition_writer.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/partition_writer.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/objects_cache.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/objects_cache.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/tiering/tier_info.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/tiering/tier_info.cpp |77.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |77.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |77.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/config/grpc_service.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/config/grpc_service.cpp |77.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/config/libydb-services-config.a |77.3%| [AR] {RESULT} $(B)/ydb/services/config/libydb-services-config.a |77.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/config/libydb-services-config.a |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/schema.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/schema.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/kikimr.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/kikimr.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon_alloc/profiler.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/profiler.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/result.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/result.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/schema.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/schema.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/grpc_helper.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_helper.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/deprecated/kicli/dynamic_node.cpp |77.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/deprecated/kicli/dynamic_node.cpp |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |77.3%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |77.3%| [AR] {RESULT} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |77.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/snapshot_scheme.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/snapshot_scheme.cpp |77.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/common.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/common.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/tier/remove.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/remove.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/config.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/control.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/config.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/control.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/action.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/action.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/mock/yql_mock.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/base_utils/format_info.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/mock/yql_mock.cpp |77.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/base_utils/format_info.cpp |77.4%| [AR] {RESULT} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |77.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_produce_actor.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_produce_actor.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_config_base/config_base.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_config_base/config_base.cpp |77.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |77.4%| [AR] {RESULT} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |77.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/base_utils/node_by_host.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/base_utils/node_by_host.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/config.pb.cc |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/portions.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/portions.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/data_events/write_data.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/config.pb.cc |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/write_data.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/splitter.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/splitter.cpp |77.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/libydb-core-protos.a |77.4%| [AR] {RESULT} $(B)/ydb/core/protos/libydb-core-protos.a |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/config_parser.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmd_config.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/base_utils/format_util.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/config_parser.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmd_config.cpp |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/base_utils/format_util.cpp |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/value/ut/ydb-public-lib-value-ut |77.4%| [LD] {RESULT} $(B)/ydb/public/lib/value/ut/ydb-public-lib-value-ut |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/keyvalue_write.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon_alloc/tcmalloc.cpp |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/attributes/ut/ydb-core-ymq-attributes-ut |77.4%| [LD] {RESULT} $(B)/ydb/core/ymq/attributes/ut/ydb-core-ymq-attributes-ut |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/keyvalue_write.cpp |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/tsserver/tsserver |77.4%| [LD] {RESULT} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |77.4%| [LD] {RESULT} $(B)/ydb/tools/tsserver/tsserver |77.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/tcmalloc.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/auth/grpc_service.cpp |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/scheme/ut/ydb-core-scheme-ut |77.5%| [LD] {RESULT} $(B)/ydb/core/scheme/ut/ydb-core-scheme-ut |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers.cpp |77.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.cpp |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/partition_key_range/ut/ydb-core-persqueue-public-partition_key_range-ut |77.5%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/partition_key_range/ut/ydb-core-persqueue-public-partition_key_range-ut |77.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/auth/libydb-services-auth.a |77.5%| [AR] {RESULT} $(B)/ydb/services/auth/libydb-services-auth.a |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |77.5%| [LD] {RESULT} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers.cpp |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compile_service/helpers/ut/ydb-core-kqp-compile_service-helpers-ut |77.5%| [LD] {RESULT} $(B)/ydb/core/kqp/compile_service/helpers/ut/ydb-core-kqp-compile_service-helpers-ut |77.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |77.5%| 
[AR] {BAZEL_UPLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |77.5%| [AR] {RESULT} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/ut/ydb-core-config-ut |77.5%| [LD] {RESULT} $(B)/ydb/core/config/ut/ydb-core-config-ut |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |77.5%| [LD] {RESULT} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/monitoring.cpp |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |77.5%| [LD] {RESULT} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |77.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/monitoring.cpp |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |77.5%| [LD] {RESULT} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/auth/grpc_service.cpp |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |77.5%| [LD] {RESULT} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_base/cli_kicli.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/log_backend/log_backend.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_kicli.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/constructor.cpp |77.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |77.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |77.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |77.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |77.5%| [AR] {RESULT} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/log_backend/log_backend.cpp |77.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/logic.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/constructor.cpp |77.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |77.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |77.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/run_query.cpp |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |77.5%| [LD] {RESULT} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/run_query.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/logic.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/resolver.cpp |77.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/resolver.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/cleanup_queue_data.cpp |77.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/defs.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/defs.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cleanup_queue_data.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_completion_impl.cpp |77.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_completion_impl.cpp |77.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/public/lib/value/ut/ydb-public-lib-value-ut |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/column_fetching/manager.cpp |77.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/partition_key_range/ut/ydb-core-persqueue-public-partition_key_range-ut |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/column_fetching/manager.cpp |77.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/attributes/ut/ydb-core-ymq-attributes-ut |77.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/scheme/ut/ydb-core-scheme-ut |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_import.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_discovery.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_import.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_root.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_discovery.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_root.cpp |77.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/codegen/ydb-core-base-generated-codegen |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/splitter/blob_info.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/blob_info.cpp |77.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/tsserver/tsserver |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/local/storage.cpp |77.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |77.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |77.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp |77.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/splitter/column_info.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/local/storage.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/column_info.cpp |77.6%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_info.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_base/cli_cmds_db.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_info.cpp |77.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |77.6%| [AR] {RESULT} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/replication/grpc_service.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/discovery/grpc_service.cpp |77.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |77.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/replication/libydb-services-replication.a |77.6%| [AR] {RESULT} $(B)/ydb/services/replication/libydb-services-replication.a |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/predicate/filter.cpp |77.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/discovery/libydb-services-discovery.a |77.6%| [AR] {RESULT} $(B)/ydb/services/discovery/libydb-services-discovery.a |77.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/predicate/filter.cpp |77.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/grpc_service.cpp |77.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/abstract.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/abstract.cpp |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/grpc_service.cpp |77.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/ut/ydb-core-config-ut |77.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/replication/grpc_service.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/discovery/grpc_service.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_meta.cpp |77.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_syslogreader.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_reader/fetching_executor.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_syslogreader.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/fetching_executor.cpp |77.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/library/compatibility/configs/dump/dumper/ydb-config-meta-dumper |77.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compile_service/helpers/ut/ydb-core-kqp-compile_service-helpers-ut |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_logreader.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_logreader.cpp |77.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_cursor.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_cursor.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/engines/portions/index_chunk.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/index_chunk.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/common/context.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/common/context.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/rate_limiter/grpc_service.cpp |77.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |77.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |77.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |77.7%| [AR] {RESULT} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |77.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/rate_limiter/grpc_service.cpp |77.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_debug.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/backup/grpc_service.cpp |77.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/backup/libydb-services-backup.a |77.7%| [AR] {RESULT} $(B)/ydb/services/backup/libydb-services-backup.a |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_debug.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/writer/write_controller.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/empty.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/backup/grpc_service.cpp |77.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/write_controller.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/locks/read_start.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/empty.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/read_start.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/local_discovery/grpc_service.cpp |77.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |77.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp |77.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |77.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |77.7%| [AR] {RESULT} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/local_discovery/grpc_service.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp |77.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |77.7%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/manager.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/manager.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/writer/blob_constructor.cpp |77.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/blob_constructor.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/splitter/batch_slice.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/configs_config.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/batch_slice.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_config.cpp |77.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/keyvalue/grpc_service_v2.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/batch_builder/merger.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/batch_builder/merger.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/keyvalue/grpc_service_v2.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_metadata.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_metadata.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/transparent.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/transparent.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/iterator.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/iterator.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/cms/grpc_service.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/cms/grpc_service.cpp |77.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/libydb-services-cms.a |77.8%| [AR] {RESULT} $(B)/ydb/services/cms/libydb-services-cms.a |77.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_logstore.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/abstract.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/empty.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_logstore.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/abstract.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/empty.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/snapshot.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/actors/wait_events.cpp |77.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/snapshot.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/wait_events.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/constructor.cpp |77.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |77.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/constructor.cpp |77.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/builder.cpp |77.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |77.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/builder.cpp |77.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/transparent.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/transparent.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/snapshot.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/common/context.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/snapshot.cpp |77.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |77.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/common/context.cpp |77.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/maintenance/grpc_service.cpp |77.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |77.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |77.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/maintenance/grpc_service.cpp |77.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |77.8%| 
[AR] {RESULT} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_log.cpp |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_portion_chunk.cpp |77.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |77.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/bridge/grpc_service.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/plain/column_portion_chunk.cpp |77.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_log.cpp |77.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/bridge/libydb-services-bridge.a |77.9%| [AR] {RESULT} $(B)/ydb/services/bridge/libydb-services-bridge.a |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/bridge/grpc_service.cpp |77.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/bridge/libydb-services-bridge.a |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/logic.cpp |77.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |77.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/predicate/container.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/keyvalue/grpc_service_v1.cpp |77.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |77.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |77.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/tablet/ydb_tablet.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/header.cpp |77.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/logic.cpp |77.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/tablet/libydb-services-tablet.a |77.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |77.9%| [AR] {RESULT} $(B)/ydb/services/tablet/libydb-services-tablet.a |77.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/predicate/container.cpp |77.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |77.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |77.9%| [AR] {RESULT} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/header.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/tablet/ydb_tablet.cpp |77.9%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/services/tablet/libydb-services-tablet.a |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/actors/block_events.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/keyvalue/grpc_service_v1.cpp |77.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/block_events.cpp |77.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |77.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |77.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/meta.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/meta.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/abstract.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/remap.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/validator_nameservice.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/remap.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_nameservice.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/abstract.cpp |77.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |77.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |77.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |77.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |77.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |77.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/util.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_query.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/util.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_query.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/http.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/http.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |77.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |77.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/modifications_validator.cpp |77.9%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/actors/actors.h_serialized.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/modifications_validator.cpp |78.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kafka_proxy/actors/actors.h_serialized.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/init/init_noop.cpp |78.0%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/init_noop.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |78.0%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/node_broker_workload |78.0%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/node_broker_workload |78.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |78.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |78.0%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/simple_queue |78.0%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/simple_queue |78.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |78.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |78.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |78.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/meta/meta_versions.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/meta/meta_versions.cpp |78.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |78.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/dynamic_config/grpc_service.cpp |78.0%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload/tsserver |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/dynamic_config/grpc_service.cpp |78.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |78.0%| [AR] {RESULT} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |78.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/configs_dispatcher_proxy.cpp |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_proxy.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/etcd_proxy/service/etcd_gate.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/validators/validator_bootstrap.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/util/config_index.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/jaeger_tracing_configurator.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/core/core_ydbc.cpp |78.0%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/topic_kafka_workload |78.0%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/topic_kafka_workload |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_gate.cpp |78.0%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/generated/runtime_feature_flags.h |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/validators/validator_bootstrap.cpp |78.0%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/kafka_workload |78.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/util/libcms-console-util.a |78.0%| [BN] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/stability/tool/kafka_workload |78.0%| [AR] {RESULT} $(B)/ydb/core/cms/console/util/libcms-console-util.a |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/core/core_ydbc.cpp |78.0%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/core/core_ydb.cpp |78.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |78.0%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_bc86b379db0e87d023f98a592e.o |78.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/util/config_index.cpp |78.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |78.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload/libpy3stress-testshard_workload-workload.global.a |78.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |78.0%| [AR] {RESULT} $(B)/ydb/tests/stress/testshard_workload/workload/libpy3stress-testshard_workload-workload.global.a |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_scheme.cpp |78.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |78.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/init/init.h_serialized.cpp |78.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |78.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |78.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/init.h_serialized.cpp |78.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/core/core_ydb.cpp |78.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/etcd_proxy/service/etcd_watch.cpp |78.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/core/libydb-mvp-core.a |78.1%| [AR] {RESULT} $(B)/ydb/mvp/core/libydb-mvp-core.a |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_watch.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/log_settings_configurator.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_scheme.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/log_settings_configurator.cpp |78.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/mvp/core/libydb-mvp-core.a |78.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/objcopy_bc86b379db0e87d023f98a592e.o |78.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |78.1%| [BN] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/tsserver |78.1%| [AR] {RESULT} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |78.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_configuration_info_collector.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configuration_info_collector.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__replace_config_subscriptions.cpp |78.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/cms/console/console__replace_config_subscriptions.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |78.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/libpy3stress-testshard_workload-workload.global.a |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |78.1%| [LD] {RESULT} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/testlib/pq_helpers/mock_pq_gateway.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/testlib/pq_helpers/mock_pq_gateway.cpp |78.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |78.1%| [AR] {RESULT} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |78.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/fq/ydb_over_fq.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/fq/private_grpc.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/ydb_over_fq.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/private_grpc.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/etcd_proxy/service/etcd_lease.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_lease.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/oidc_proxy/mvp.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/oidc_proxy/mvp.cpp |78.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |78.1%| [AR] {RESULT} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |78.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__get_yaml_config.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__get_yaml_config.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__log_cleanup.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__log_cleanup.cpp |78.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |78.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_state.h_serialized.cpp |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |78.1%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tests/tpch/tpch |78.1%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/tpch/tpch |78.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__drop_yaml_config.cpp |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tests/tpch/tpch 
|78.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__drop_yaml_config.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__get_log_tail.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__add_config_subscription.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__get_log_tail.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__add_config_subscription.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__remove_config_subscriptions.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_config_subscriptions.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__remove_config_subscription.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_config_subscription.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__update_last_provided_config.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_last_provided_config.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__get_yaml_metadata.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__get_yaml_metadata.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/actors/actors.h_serialized.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kafka_proxy/actors/actors.h_serialized.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/metering.h_serialized.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__configure.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__configure.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |78.2%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/services/fq/grpc_service.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/grpc_service.cpp |78.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/libydb-services-fq.a |78.2%| [AR] {RESULT} $(B)/ydb/services/fq/libydb-services-fq.a |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_chunk_tracker.cpp |78.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_chunk_tracker.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/meta/meta.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_pdisk_config.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/meta/meta.cpp |78.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_pdisk_config.cpp |78.2%| [AR] {RESULT} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |78.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/tests/kikimr_tpch/kqp_tpch_ut.cpp |78.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/etcd_proxy/service/etcd_grpc.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_grpc.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/events/delete_blobs.cpp |78.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/events/delete_blobs.cpp |78.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |78.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |78.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/query_data/kqp_predictor.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_data/kqp_predictor.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/src/common.h_serialized.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |78.3%| [LD] {RESULT} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch 
|78.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compute_actor/kqp_compute_state.h_serialized.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/address_classification/net_classifier.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_failure.cpp |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/meta/bin/mvp_meta |78.3%| [LD] {RESULT} $(B)/ydb/mvp/meta/bin/mvp_meta |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |78.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |78.3%| [AR] {RESULT} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_failure.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/address_classification/net_classifier.cpp |78.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/meta/bin/mvp_meta |78.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_reader/contexts.h_serialized.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/portion_info.h_serialized.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/columns_set.h_serialized.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |78.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/init/init_ut.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/init_ut.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/auth_factory.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/auth_factory.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_range.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_range.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/counters/counters.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/counters/counters.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/shard_impl.cpp |78.3%| 
[AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |78.3%| [AR] {RESULT} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/shard_impl.cpp |78.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_startup.cpp |78.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_startup.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |78.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_list_renderer.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_list_renderer.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/initializer.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/initializer.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_common/rpc_common_kqp_session.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_readbulksst.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_readbulksst.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_stathuge.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_stathuge.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/config/init/init.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/config/init/init.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/tx_semaphore_timeout.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_semaphore_timeout.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/tablet/gc_counters.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/tablet/gc_counters.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_kqp_tx.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_statdb.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_public.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_statdb.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_public.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/scan.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/scan.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_fetch_script_results.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_snapshot.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_barrier.cpp |78.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/vdisk/query/query_barrier.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/snapshot.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/snapshot.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_huge.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_readactor.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_readactor.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storage.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storage.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_prune_columns_stage.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_prune_columns_stage.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_script.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/config_fit_pdisks.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config_fit_pdisks.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/bootstrapper.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/bootstrapper.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/shred.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/shred.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_actor.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_actor.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_sourcemanager.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/common/actor.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_sourcemanager.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/common/actor.cpp |78.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |78.4%| [AR] {RESULT} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |78.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/table/behaviour.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__check_slot_status.cpp |78.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/snapshot.cpp |78.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__check_slot_status.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/snapshot.cpp |78.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |78.5%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/table/behaviour.cpp |78.5%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_extr.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_queue.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_extr.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/initializer.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__update_node_location.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/initializer.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_node_location.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_readbatch.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_readbatch.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/fetcher.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/fetcher.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_process.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_pdisk.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/legacy/rpc_legacy.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/legacy/rpc_legacy.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/subscriber.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/subscriber.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/tiering.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/tiering.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_indexes.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_indexes.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/ownerinfo.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/ownerinfo.cpp |78.5%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_returning.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_returning.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/query_stattablet.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_stattablet.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_sub_columns_portions.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_sub_columns_portions.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/blob_recovery_request.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_defs.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_defs.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/user_info.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/user_info.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/leaked_blobs.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/leaked_blobs.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/backup.cpp |78.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/backup.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/lease_holder.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/lease_holder.cpp |78.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/special_cleaner.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/special_cleaner.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_mon.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_unreadable.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_tablets_object.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablets_object.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_transaction_actor.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_transaction_actor.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/sourceid.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/sourceid.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor_sst.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/common/event_helpers.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/common/event_helpers.cpp |78.6%| [AR] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |78.6%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor2.cpp |78.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor2.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__delete_node.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__delete_node.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/events/events.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/events/events.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_node_enumeration.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_node_enumeration.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__update_pool_status.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_pool_status.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/secret_behaviour.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/secret_behaviour.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v2_chunks.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v2_chunks.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/scrub_actor.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__update_config.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_config.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_reader/actor.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/actor.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/query/assimilation.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__locks.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__locks.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/query/assimilation.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/message_id_deduplicator.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/message_id_deduplicator.cpp |78.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |78.6%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_logreplay.cpp |78.6%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_mlp.cpp |78.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_logreplay.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compile_service/kqp_compile_actor.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_mlp.cpp |78.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/kqp_compile_actor.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__init_scheme.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__init_scheme.cpp |78.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__assign_free_slots.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__assign_free_slots.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_blob_encoder.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_blob_encoder.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/consumer_offset_tracker.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/consumer_offset_tracker.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_table_resolver.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/writer/metadata_initializers.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/metadata_initializers.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/service_actor.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/service_actor.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__register_node.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__register_node.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compactification.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compactification.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/configured_tablet_bootstrapper.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker__extend_lease.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/configured_tablet_bootstrapper.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker__extend_lease.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_compactfreshappendix.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_compactfreshappendix.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/subscriber.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/subscriber.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition.cpp |78.7%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/mon_alloc/monitor.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon_alloc/monitor.cpp |78.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |78.7%| [AR] {RESULT} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |78.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/request/request_actor.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/request/request_actor.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/metadata/metadata_actor.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/metadata/metadata_actor.cpp |78.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |78.7%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |78.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v1_chunks.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_v1_chunks.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_reader.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_reader.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_reader/events.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/events.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_copy_tables.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/dynamic_nameserver_mon.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/dynamic_nameserver_mon.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_read.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_read.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_rollback_transaction.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_scheme_executer.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/transaction.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/transaction.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/background_controller.cpp |78.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/background_controller.cpp |78.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idxsnap.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idxsnap.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/grpc_pq_schema.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/grpc_pq_schema.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_bio_actor.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_bio_actor.cpp 
|78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/dynamic_nameserver.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/dynamic_nameserver.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_committer.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_committer.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/service.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/service.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/abstract.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/abstract.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/utils.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_constant_folding_stage.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/local.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/utils.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_constant_folding_stage.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/local_rate_limiter.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/local.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllog.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/load_test.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/load_test.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/scrub/restore_corrupted_blob_actor.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/service/add_data.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/add_data.cpp |78.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |78.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |78.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/explain_data_query.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hulldb_bulksstmngr.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hulldb_bulksstmngr.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_configure.cpp |78.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/statistics/aggregator/tx_configure.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/labels_maintainer.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/labels_maintainer.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/sys_view.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/sys_view.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_query_stats.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/broken_blobs.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/broken_blobs.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/cache.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_context.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/cache.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_context.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |78.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_endpoint_publish_actor.cpp |78.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/storage/groups.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/groups.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_fq_internal.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_reader/read_coordinator.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/read_coordinator.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hull.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hull.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_public.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/quoter_service.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/localrecovery/localrecovery_public.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/manager.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/manager.cpp |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |78.9%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |78.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |78.9%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_execute_yql_script.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/stat_processor.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/stat_processor.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/nodes/nodes.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/nodes/nodes.cpp |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |78.9%| [AR] {RESULT} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/tiling.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/tiling.cpp |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |78.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstslice.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstslice.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_response.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_response.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__process_boot_queue.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__process_boot_queue.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_statestorage_config_generator.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_statestorage_config_generator.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/auth/permissions.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/permissions.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/tables/normalizer.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/tables/normalizer.cpp |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |78.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |78.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/balance/utils.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/utils.cpp |78.9%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/blobstorage_hullcompdelete.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/populator.cpp |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table.cpp |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |78.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |78.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/populator.cpp |78.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/sessions/sessions.cpp |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |78.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |79.0%| [AR] {RESULT} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/blobstorage_hullcompdelete.cpp |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/sessions/sessions.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_selfheal.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_selfheal.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_aggregate.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_aggregate.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/auth/users.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/users.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy.cpp |79.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |79.0%| [AR] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/group_layout_checker.cpp |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_layout_checker.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_interval_metrics.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_interval_metrics.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_load_blob_queue.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_load_blob_queue.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_proxy.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/storage_proxy.cpp |79.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |79.0%| [AR] {RESULT} 
$(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ymq/ymq_proxy.cpp |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ymq/ymq_proxy.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/fetcher.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/fetcher.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/batch_builder/restore.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/batch_builder/restore.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/recovery/hulldb_recovery.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/recovery/hulldb_recovery.cpp |79.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |79.0%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |79.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_explain_yql_script.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/tablet_flat_executed.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/tablet_flat_executed.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/deprecated/persqueue_v0/persqueue.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/service.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/persqueue.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/service.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/auth/group_members.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/batch_builder/builder.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/group_members.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/batch_builder/builder.cpp |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllogcutternotify.cpp |79.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hulllogcutternotify.cpp |79.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |79.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |79.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |79.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_scatter_gather.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_scatter_gather.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor.cpp 
|79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_chain.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_chain.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__overload.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__overload.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/datastreams/grpc_service.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/grpc_service.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_configure.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_configure.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/granule/clean_granule.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/granule/clean_granule.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/aggregator.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/aggregator.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_aggregate_data.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_aggregate_data.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_commit_actor.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_commit_actor.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/migrate.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/migrate.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idx.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_idx.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/load_everything.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/load_everything.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/behaviour.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/behaviour.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/granule/normalizer.cpp |79.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |79.1%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/object.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_actualization.cpp |79.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/object.cpp |79.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |79.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |79.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_actualization.cpp |79.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/granule/normalizer.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_compaction_logic.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_compaction_logic.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_changer.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_changer.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__cut_tablet_history.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__cut_tablet_history.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |79.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |79.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |79.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_writer.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_aggr_stat_response.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_writer.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_aggr_stat_response.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/behaviour.cpp |79.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/behaviour.cpp |79.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |79.1%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |79.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_export.cpp |79.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_dummy.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_export.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_dummy.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_info.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_info.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/service/sysview_service.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/service/sysview_service.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/request/request_actor_cb.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/request/request_actor_cb.cpp |79.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |79.2%| [AR] {RESULT} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |79.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |79.2%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/services/ymq/grpc_service.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ymq/grpc_service.cpp |79.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/ymq/libydb-services-ymq.a |79.2%| [AR] {RESULT} $(B)/ydb/services/ymq/libydb-services-ymq.a |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_collector.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_discovery.cpp |79.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_cluster_discovery/grpc_service.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/grpc_service.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |79.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |79.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |79.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__load_state.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__load_state.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/read_balancer__metrics.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/cache/pq_l2_cache.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer__metrics.cpp |79.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |79.2%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/cache/pq_l2_cache.cpp |79.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/access.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/access.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_backup.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/alter_impl.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/alter_impl.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_backup.cpp |79.2%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_maintenance.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_quantum.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_quantum.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/monitoring.cpp |79.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/monitoring.cpp |79.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |79.2%| [AR] {RESULT} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |79.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/metering_sink.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/metering_sink.cpp |79.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_monitoring.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon/mon.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/mon.cpp |79.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/libydb-core-mon.a |79.3%| [AR] {RESULT} $(B)/ydb/core/mon/libydb-core-mon.a |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/checker_access.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/checker_access.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/grouper.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/grouper.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kafka_transactions_coordinator.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_transactions_coordinator.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/create_session.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_part_loader.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_part_loader.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_impl.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_databases_cache.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_databases_cache.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstvec.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sstvec.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_ack_timeout.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_ack_timeout.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/writer/source_id_encoding.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/source_id_encoding.cpp |79.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |79.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |79.3%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_change_backend.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_change_backend.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_log.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_log.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_recoverylogwriter.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_recoverylogwriter.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_actor.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_actor.cpp |79.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |79.3%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/generic_manager.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/generic_manager.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |79.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |79.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |79.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/services_initializer.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/services_initializer.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_request.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_request.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_empty.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_essence.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_empty.cpp |79.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_essence.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/health_check/health_check.cpp |79.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/storage/storage_stats.cpp |79.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/libydb-core-health_check.a |79.4%| [AR] {RESULT} $(B)/ydb/core/health_check/libydb-core-health_check.a |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/health_check/health_check.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/storage_stats.cpp |79.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |79.4%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_public.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_public.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/abstract/initialization.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/initialization.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_delete_index.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_selector.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_delete_index.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/base_table_writer.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_selector.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/base_table_writer.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_ping.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_ping.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compaction.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_compaction.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_tablet_metrics.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_init.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablet_metrics.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_init.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_cache.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_cache.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/lib/actors/pq_schema_actor.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/lib/actors/pq_schema_actor.cpp |79.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |79.4%| [AR] {RESULT} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_index_columns.cpp |79.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_index_columns.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/pq_impl.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/pq_impl.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_datasnap.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_datasnap.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/self_heal.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/self_heal.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors.cpp |79.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |79.4%| [AR] {RESULT} 
$(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |79.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisrunner.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisrunner.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisproxy.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisproxy.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugerecovery.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugerecovery.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ycsb/common.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/common.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/metadata/fetcher.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/fetcher.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_generate.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_generate.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/update_last_seen_ready.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kqp_helper.cpp |79.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/update_last_seen_ready.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kqp_helper.cpp |79.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/update_group_latencies.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/update_group_latencies.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/service.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/service.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_defs.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_defs.cpp |79.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |79.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/tx_self_check.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_self_check.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/local_pgwire/pgwire_kqp_proxy.cpp |79.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/local_pgwire/pgwire_kqp_proxy.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/manager.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/manager.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_partition_helper.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_rewriter.cpp |79.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |79.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_rewriter.cpp |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/hulldb_bulksst_add.cpp |79.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |79.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/hulldb_bulksst_add.cpp |79.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |79.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_dblogcutter.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_dblogcutter.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_graph.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/database/database.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/database/database.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/copy_blob_ids_to_v2.cpp |79.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |79.5%| [AR] {RESULT} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/copy_blob_ids_to_v2.cpp |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/modification.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/modification.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_helpers.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_helpers.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service.cpp |79.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |79.5%| [AR] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |79.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/secret.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_v0_meta.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/secret.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks.cpp |79.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/chunks_v0_meta.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/service/deleting.cpp |79.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_compute_state.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/deleting.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_state.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/tx_init.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_init.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing_app.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing_app.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_tx.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_tx.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/tx_config_set.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_config_set.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/group_metrics_exchange.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_metrics_exchange.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/optimizer.cpp |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |79.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compile_service/kqp_compile_computation_pattern_service.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/kqp_compile_computation_pattern_service.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/commit_config.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/commit_config.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/partition_scale_request.cpp |79.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/persqueue/pqrb/partition_scale_request.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_unused_tables_template.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_unused_tables_template.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_version_info.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_ttl_preset_setting_version_info.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/rate_accounting.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/rate_accounting.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_literal_executer.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_responsiveness_pinger.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_responsiveness_pinger.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_events.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_events.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_scan_query.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hulldefs.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_actor.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_tablet_groups.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablet_groups.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/local_proxy/local_partition_actor.cpp |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |79.6%| [AR] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/normalizer.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/normalizer.cpp |79.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |79.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |79.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |79.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/common/kqp_resolve.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment.cpp |79.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_cluster_state.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_cluster_state.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/common.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/common.cpp |79.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_sys.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_rows.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_sys.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/one_layer.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/one_layer.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_balancer_actor.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_balancer_actor.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/balance/sender.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/sender.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_actor.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_execute_scheme_query.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_actor.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_finish_trasersal.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_finish_trasersal.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank.cpp |79.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |79.7%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_actor.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_actor.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_reader.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_reader.cpp |79.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |79.7%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_osiris.cpp |79.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_osiris.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/vdisk_write.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/vdisk_write.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_storage_config.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_storage_config.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_state_storage.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_state_storage.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/utils.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/utils.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/actors/analyze_actor.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/actors/analyze_actor.cpp |79.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |79.7%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/constructor.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/constructor.cpp |79.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |79.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_utils.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_utils.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/worker.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/worker.cpp |79.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |79.7%| [AR] {RESULT} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_init_schema.cpp |79.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_init_schema.cpp |79.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_list_offsets_actor.cpp |79.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_list_offsets_actor.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/kesus_quoter_proxy.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/kesus_quoter_proxy.cpp |79.8%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/quoter/libydb-core-quoter.a |79.8%| [AR] {RESULT} $(B)/ydb/core/quoter/libydb-core-quoter.a |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/tx_session_timeout.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tx_session_timeout.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_compute_actor_factory.cpp |79.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |79.8%| [AR] {RESULT} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/bsc.cpp |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/bsc.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_deprecated_snapshot.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_deprecated_snapshot.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_pure_compute_actor.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/abstract/kqp_common.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/kqp_common.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/writer/writer.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/writer.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_appearance_snapshot.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/restore_appearance_snapshot.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_metadata_actor.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_metadata_actor.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_intermediate.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_intermediate.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/show_create/create_table_formatter.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/show_create/create_table_formatter.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_datashard_scan_response.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_datashard_scan_response.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/read_balancer.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/mind/hive/tx__start_tablet.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__start_tablet.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |79.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |79.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_node_registration.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/constructor.cpp |79.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |79.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/constructor.cpp |79.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_response_tablet_distribution.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_response_tablet_distribution.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_service.cpp |79.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullactor.cpp |79.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/finalize_script_service/kqp_finalize_script_service.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullactor.cpp |79.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |79.9%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/propose_group_key.cpp |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/propose_group_key.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/lib/auth/auth_helpers.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/lib/auth/auth_helpers.cpp |79.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |79.9%| [AR] {RESULT} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_configs_actor.cpp |79.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kafka_proxy/actors/kafka_describe_configs_actor.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/data.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/chunks/data.cpp |79.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |79.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_proxy.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_read_session_proxy.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_init_producer_id_actor.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_init_producer_id_actor.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_create_topics_actor.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_create_topics_actor.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_scheme_base.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/config_fit_groups.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config_fit_groups.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/abstract/abstract.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_bridge.cpp |79.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/abstract/abstract.cpp |79.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_bridge.cpp |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_fetch_actor.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_offset_fetch_actor.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_alter_configs_actor.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_alter_configs_actor.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_tools.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_tools.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/constructor.cpp |79.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/constructor.cpp |79.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |79.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |79.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_interval_summary.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_interval_summary.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_collect.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_collect.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/processor.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/processor.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/storage_stats_calculator.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_status.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_status.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/storage_stats_calculator.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_tablet_status.cpp |79.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_tablet_status.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_vdisk_guids.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/blobstorage_vdisk_guids.cpp |79.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/constructor.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/topic.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/constructor.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/topic.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/balance/deleter.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/deleter.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/metadata/behaviour.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/behaviour.cpp |80.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |80.0%| [AR] {RESULT} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/write_session_actor.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/write_session_actor.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_remove_directory.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_osiris.cpp |80.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_osiris.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_rename_tables.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_binding.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_binding.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_path.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_login.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_login.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_balance_actor_sql.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_balance_actor_sql.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_data_executer.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/compile_cache/compile_cache.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/compile_cache/compile_cache.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |80.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |80.0%| [AR] {RESULT} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_explain_data_query.cpp |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_replication.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_replication.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/accessor/secret_id.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/accessor/secret_id.cpp |80.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |80.0%| [AR] {RESULT} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/service/ext_counters.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/init/init.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/service/ext_counters.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/init/init.cpp |80.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |80.0%| [AR] {RESULT} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |80.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |80.0%| [AR] {RESULT} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |80.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |80.0%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/mirror_describer.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/object.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/mirror_describer.cpp |80.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/object.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_prepare_data_query.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_vdisk.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_copy_table.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_vdisk.cpp |80.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/optimizer.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisfinder.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubisfinder.cpp |80.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |80.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/optimizer.cpp |80.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |80.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data.cpp |80.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |80.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_stat_aggr.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_stat_aggr.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/balance/handoff_map.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/handoff_map.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/constructor.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/portions/constructor.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_get_metrics.cpp |80.1%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_get_metrics.cpp |80.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kafka_connection.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_connection.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/metadata/snapshot.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/snapshot.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree.cpp |80.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |80.1%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/accessor_refresh.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_refresh.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/resource_pools/resource_pools.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__release_tablets.cpp |80.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/resource_pools/resource_pools.cpp |80.1%| [AR] {RESULT} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__release_tablets.cpp |80.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/storage/vslots.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_pool.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/vslots.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_pool.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_common.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_common.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pipe.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pipe.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/meta.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/events/internal.cpp |80.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_cache.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/events/internal.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_cache.cpp |80.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/meta.cpp |80.1%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |80.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/initializer.cpp |80.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/initializer.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/constructor.cpp |80.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |80.2%| [AR] {RESULT} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |80.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/max/constructor.cpp |80.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |80.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |80.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/zero_level.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/zero_level.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_async.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_async.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_scrub.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_scrub.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_dynamic_config.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cmds_box.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_box.cpp |80.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |80.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |80.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/commit_offset_actor.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/commit_offset_actor.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/partition_actor.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/partition_actor.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/persqueue/pqtablet/partition/partition_init.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cmds_storage_pool.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_init.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_quorum.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_storage_pool.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_quorum.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/node_whiteboard.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/node_whiteboard.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cmds_host_config.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_host_config.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_proxy.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_table.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_proxy.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_table.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/constructor.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/constructor.cpp |80.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |80.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |80.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__update_slot_status.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_connectivity.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__update_slot_status.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_connectivity.cpp |80.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/balance/balancing_actor.cpp |80.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/balance/balancing_actor.cpp |80.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |80.2%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/one_layer.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/one_layer.cpp |80.2%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |80.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_fsm.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_fsm.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/rm_service/kqp_resource_info_exchanger.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_resource_info_exchanger.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/constructor.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/config.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/constructor.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/offload_actor.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/offload_actor.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/zero_level.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/zero_level.cpp |80.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |80.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_persistent_storage.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_persistent_storage.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/granule/granule.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/granule.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portions_index.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/portions_index.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_transport.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/meta.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon/audit/audit.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |80.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |80.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |80.3%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/mon/audit/libcore-mon-audit.a |80.3%| [AR] {RESULT} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/meta.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_static_group.cpp |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |80.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |80.3%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_invoke_static_group.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/audit/audit.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/actors/query_utils.cpp |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_validate.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/common/config.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_validate.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/common/config.cpp |80.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |80.3%| [AR] {RESULT} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |80.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/actor.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/actor.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/direct_read_actor.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/direct_read_actor.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group_resolver.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group_resolver.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_bridge.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_group.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_bridge.cpp |80.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__restart_tablet.cpp |80.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__restart_tablet.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_mon.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_mon.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/constructor.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/constructor.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_mongroups.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kqp/opt/rbo/kqp_plan_conversion_utils.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_mongroups.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_plan_conversion_utils.cpp |80.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |80.4%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tablet_move_info.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tablet_move_info.cpp |80.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multiget.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multiget.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pdisk.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_pdisk.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/versioned_index.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/versioned_index.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |80.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |80.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |80.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/metadata/blobstorage_pdisk_metadata.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/metadata/blobstorage_pdisk_metadata.cpp |80.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a |80.4%| [AR] {RESULT} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a |80.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/init_scheme.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/init_scheme.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/granule/stages.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/stages.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_impl.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_impl.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request_reporting.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request_reporting.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_execute_query.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/read_session_actor.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/read_session_actor.cpp 
|80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_dynamic.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_dynamic.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_driveestimator.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_driveestimator.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_schemeshard_stats.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_schemeshard_stats.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/schema.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/schema.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/const.cpp |80.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |80.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_indexrestoreget.cpp |80.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/const.cpp |80.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_indexrestoreget.cpp |80.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |80.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |80.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/index_info.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/index_info.cpp |80.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/memory_controller/memory_controller.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/memory_controller/memory_controller.cpp |80.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |80.5%| [AR] {RESULT} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |80.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/auth_actors.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/auth_actors.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/actors/run_actor.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/run_actor.cpp |80.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |80.5%| [AR] {RESULT} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |80.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/quota/account_read_quoter.cpp 
|80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/account_read_quoter.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_console.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_console.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_response.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/registration.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_resource.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/registration.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_resource.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/node_warden_mon.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/node_warden_mon.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/delete_message.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/delete_message.cpp |80.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |80.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |80.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/counters.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multicollect.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/counters.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_multicollect.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/get_group.cpp |80.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |80.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/get_group.cpp |80.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_encrypt.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_encrypt.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/optimizer.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/optimizer.cpp |80.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |80.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |80.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemonactor.cpp |80.5%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemonactor.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/aggregator_impl.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_actor.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/aggregator_impl.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/object.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/object.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_http.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl_http.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/metadata/object.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/object.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/manager.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/manager.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/constructor.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/constructor.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_reader/fetcher.cpp |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/fetcher.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/bridge.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/bridge.cpp |80.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/ticket_parser.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ticket_parser.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/persqueue.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/granule/storage.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/drop_donor.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/granule/storage.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/drop_donor.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_mon.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_mon.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/describe_table.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_monitoring_proxy.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_monitoring_proxy.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/manager.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/manager.cpp |80.6%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |80.6%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/kesus/grpc_service.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/kesus/grpc_service.cpp |80.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/kesus/libydb-services-kesus.a |80.6%| [AR] {RESULT} $(B)/ydb/services/kesus/libydb-services-kesus.a |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_simple.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_range.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_simple.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_worker_common.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_range.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_session_actor.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/update_offsets_in_transaction_actor.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/update_offsets_in_transaction_actor.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/actors/scheme.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/actors/scheme.cpp |80.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3dc.cpp |80.6%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3dc.cpp |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/counters.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/counters.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/storage/storage_pools.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/storage_pools.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_version.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/schema_version.cpp |80.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |80.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/group_mapper.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/engines/storage/actualizer/index/index.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/index/index.cpp |80.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |80.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |80.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put_impl.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |80.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put_impl.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_distributed_erase.cpp |80.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp |80.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |80.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |80.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_base.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_snapshot_base.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_block.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_block.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_stat.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_stat.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_check_integrity_get.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_check_integrity_get.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/sqs_topic_proxy.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/sqs_topic_proxy.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_common_upload.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/constructor.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/constructor.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_monactor.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_monactor.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/dsproxy/dsproxy_strategy_base.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_request.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_strategy_base.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cmds_drive_status.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_drive_status.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__unlock_tablet.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__unlock_tablet.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/scheme.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_transformer.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/scheme.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_transformer.cpp |80.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |80.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_cdc_stream_common.cpp |80.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_cdc_stream_common.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/bridge/proxy/bridge_proxy.cpp |80.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |80.7%| [AR] {RESULT} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/bridge/proxy/bridge_proxy.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__sync_tablets.cpp |80.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__sync_tablets.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_put.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_helpers/export_reboots_common.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/export_reboots_common.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/group_sessions.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/group_sessions.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/rm_service/kqp_snapshot_manager.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_snapshot_manager.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cluster_balancing.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cluster_balancing.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__mon_reset_schema_version.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/fetcher.cpp |80.7%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/fetcher.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_status.cpp |80.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_status.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/grpc_pq_write.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/grpc_pq_write.cpp |80.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_impl.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_impl.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_helpers/shred_helpers.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/shred_helpers.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_effects.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_effects.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/abstract_scheme.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/abstract_scheme.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/schema_actors.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/schema_actors.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/source.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/source.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/metadata.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/metadata.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_patch.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_patch.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_assimilate.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_assimilate.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_mon.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_mon.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_helpers/failing_mtpq.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/failing_mtpq.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__set_down.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__set_down.cpp |80.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |80.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |80.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemon.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_nodemon.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/mind/hive/tx__generate_data_ut.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_scan_executer.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__generate_data_ut.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_partitioned_executer.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_partitioned_executer.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/disk_metrics.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/disk_metrics.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_vacuum_logic.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_vacuum_logic.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/internal/rate_limiter_resources.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/rate_limiter_resources.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_blackboard.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_blackboard.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/preset_schemas.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_reset.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/validators.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/versions/preset_schemas.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_reset.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/validators.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_proxy/control_plane_proxy.cpp |80.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |80.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_result_write.cpp |80.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |80.8%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_pipe_client.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_result_write.cpp |80.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_pipe_client.cpp |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/control_plane_proxy.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/resource_broker.cpp |80.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |80.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/resource_broker.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_boot_lease.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_boot_lease.cpp |80.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |80.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/executer_actor/kqp_tasks_validate.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_get.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_get.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/meta.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/meta.cpp |80.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |80.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |80.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/constructor.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/constructor.cpp |80.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |80.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |80.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/internal/nodes_health_check.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/nodes_health_check.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_cancel_operation.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/constructor.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/constructor.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_compute_database.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_compute_database.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_folder.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_folder.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__seize_tablets_reply.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__seize_tablets_reply.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_collect.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_collect.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/interval.cpp |80.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/interval.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/scheme_describe.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/scheme_describe.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_bindings.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_bindings.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_state.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_state.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_quotas.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_quotas.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp_compute.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/in_memory_control_plane_storage.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/in_memory_control_plane_storage.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__tablet_owners_reply.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__tablet_owners_reply.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_block.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_block.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_connections.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_connections.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/update_seen_operational.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/update_seen_operational.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/result.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/result.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetched_data.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetched_data.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_ping.cpp |80.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/task_ping.cpp |80.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |80.9%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |80.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |80.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3of4.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover_m3of4.cpp |80.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_dep_tracker.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |81.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |81.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__vacuum.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__vacuum.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sending.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_impl.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_get_impl.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_compute_statistics.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_compute_statistics.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_helpers/test_env.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/test_env.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__stop_tablet.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__stop_tablet.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/schema.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/schema.cpp |81.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |81.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/cmds_bridge.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/cmds_bridge.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_kqp.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/dsproxy_discover.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/writer/partition_chooser_impl.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/writer/partition_chooser_impl.cpp |81.0%| [AR] {default-linux-x86_64, release, 
asan} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |81.0%| [AR] {RESULT} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp |81.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |81.0%| [AR] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_helpers/helpers.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/common/pq_ut_common.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/completed_operations_unit.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/common/pq_ut_common.cpp |81.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |81.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/actor/actor.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/actor/actor.cpp |81.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |81.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__read_columns.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_change_receiving.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp |81.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp |81.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |81.0%| [AR] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |81.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/constructor.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/constructor.cpp |81.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/iterator.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/iterator.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |81.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/datashard/check_distributed_erase_tx_unit.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__process_pending_operations.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__process_pending_operations.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__switch_drain.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__switch_drain.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/config_cmd.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/config_cmd.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_vector_actor.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_vector_actor.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_active_transaction.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__store_table_path.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__write.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__write.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/select_groups.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/select_groups.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__schema_changed.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/reshuffle_kmeans.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/reshuffle_kmeans.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/constructor.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/constructor.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/http_req.cpp |81.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |81.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |81.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |81.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |81.1%| [AR] {RESULT} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/http_req.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__stats.cpp |81.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_bootlogic.cpp |81.1%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_bootlogic.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_upload.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/source.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/metadata.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__kill_node.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/source.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__kill_node.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/metadata.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/schema.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_incr_restore.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/schema.cpp |81.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |81.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |81.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__release_tablets_reply.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__release_tablets_reply.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/metadata.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/metadata.cpp |81.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a |81.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a |81.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_query_state.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_transaction.cpp |81.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_direct_erase.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/source.cpp |81.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/source.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/behaviour.cpp |81.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |81.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool/behaviour.cpp |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |81.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |81.2%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__conditional_erase_rows.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/source.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/source.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/source.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/source.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replbroker.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replbroker.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_cdc_stream_unit.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_tx.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_download_txs.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_domain.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_domain.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_change_sender_activation.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__store_scan_state.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_commit_writes_tx_unit.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/login_page.cpp |81.2%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/security/login_page.cpp |81.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/libydb-core-security.a |81.2%| [AR] {RESULT} $(B)/ydb/core/security/libydb-core-security.a |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/security/libydb-core-security.a |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/source.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/source.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/quota/write_quoter.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/write_quoter.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_handshake_actor.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_handshake_actor.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__readset.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/scheme_cache_lib/yql_db_scheme_resolver.cpp |81.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |81.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |81.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |81.2%| [AR] {RESULT} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/scheme_cache_lib/yql_db_scheme_resolver.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |81.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/limit.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/limit.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/keep_alive.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |81.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_loans.cpp |81.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/quota/read_quoter.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/read_quoter.cpp |81.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/quota/libpersqueue-pqtablet-quota.a |81.3%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/quota/libpersqueue-pqtablet-quota.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/quota/libpersqueue-pqtablet-quota.a |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |81.3%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__s3_upload_txs.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_reader/fetching_steps.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/fetching_steps.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/tasks_list.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/tasks_list.cpp |81.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |81.3%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/ydb/query_actor.cpp |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/ydb/query_actor.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__monitoring.cpp |81.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |81.3%| [AR] {RESULT} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__status.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/common/autoscaling_ut_common.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__status.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/common/autoscaling_ut_common.cpp |81.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |81.3%| [AR] {RESULT} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__object_storage_listing.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/scanner.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/scanner.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__disconnect_node.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_resend_rs.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__disconnect_node.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/meta.cpp |81.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |81.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/common/timeout.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/common/timeout.cpp |81.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/meta.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_navigate.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_navigate.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__compact_borrowed.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector_cdc_stream.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetched_data.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__load_everything.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetched_data.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__load_everything.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/query_data/kqp_prepared_query.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_data/kqp_prepared_query.cpp |81.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |81.3%| [AR] {RESULT} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__migrate_schemeshard.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/ut_common.cpp |81.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/ut_common.cpp |81.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |81.3%| [AR] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |81.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/fetcher.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/fetcher.cpp |81.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/context.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/context.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/execute_data_query.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/plain_read_data.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/plain_read_data.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cancel_tx_proposal.cpp |81.4%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/metadata.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/metadata.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_create_coordination_node.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/source.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/constructor.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/source.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replmonhandler.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/constructor.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replmonhandler.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/abstract.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/abstract.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_alter_coordination_node.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__cleanup_uncommitted.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_uncommitted.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__read_iterator.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_scheme_tx_unit.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/distributed_commit_helper.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/distributed_commit_helper.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/local_pgwire/local_pgwire_connection.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/local_pgwire_connection.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__plan_step.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/read_metadata.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/read_metadata.cpp |81.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |81.4%| [AR] {RESULT} 
$(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/counters/kqp_counters.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/counters/kqp_counters.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/initializer.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/query_stats/query_metrics.cpp |81.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |81.4%| [AR] {RESULT} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/initializer.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/query_stats/query_metrics.cpp |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__op_rows.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_in_rs.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/table_exists.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/table_exists.cpp |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/metadata.cpp |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/metadata.cpp |81.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/service_impl.cpp |81.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |81.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |81.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/service_impl.cpp |81.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |81.4%| [AR] {RESULT} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |81.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/analyze_actor.cpp |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/analyze_actor.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_data_tx_unit.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_state_collect.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_state_collect.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_external_data_source.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_external_data_source.cpp |81.5%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/constructors.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/constructors.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/schema.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/schema.cpp |81.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |81.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_get_operation.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_drop_table.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replproxy.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replproxy.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__request_tablet_seq.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__request_tablet_seq.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/complete_data_tx_unit.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullrepljob.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullrepljob.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__engine_host.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/optimization.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/optimization.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_view.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_view.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_message_enricher.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_message_enricher.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_dc_followers.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_dc_followers.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/query_stats/query_stats.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats.cpp 
|81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_kh_describe.cpp |81.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |81.5%| [AR] {RESULT} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |81.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/column_families/update.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/column_families/update.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__seize_tablets.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__seize_tablets.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/db_counters.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/db_counters.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_reader/actor.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/actor.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_repl.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_repl.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/service/activation.cpp |81.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |81.5%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__request_tablet_owners.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/activation.cpp |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__request_tablet_owners.cpp |81.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_read_unit.cpp |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |81.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_write_unit.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_volatile_snapshot_unit.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_import.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_import.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__propose_tx_base.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/private_events.cpp |81.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/private_events.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_kqp_base.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kafka_consumer_members_metadata_initializers.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/behaviour.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/behaviour.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_consumer_members_metadata_initializers.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_rate_limiter_api.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector_base.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/service.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/service.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/schema.cpp |81.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/libydb-services-metadata.a |81.6%| [AR] {RESULT} $(B)/ydb/services/metadata/libydb-services-metadata.a |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/schema.cpp |81.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |81.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/aggr.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/aggr.cpp |81.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |81.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |81.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/check_snapshot_tx_unit.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_begin_transaction.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/actors/cpu_load_actors.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/actors/cpu_load_actors.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/grpc_services/rpc_read_table.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_table.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__progress_tx.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/not_sorted.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/not_sorted.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_modify_permissions.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__compaction.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__get_state_tx.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__cleanup_borrowed.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/context.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/context.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_exchange_split.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__init.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__init.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__snapshot_txs.cpp |81.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetching.cpp |81.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/fetching.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__column_stats.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_stream_execute_yql_script.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/object.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/object.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__update_pile.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__update_pile.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_locks/locks/list.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/list.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/partition_stats/top_partitions.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/partition_stats/top_partitions.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pq.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/read_balancer__mlp_balancing.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_table_options.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer__mlp_balancing.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pq.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/read_init_auth_actor.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/read_init_auth_actor.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/walle_check_task_adapter.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/walle_create_task_adapter.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_check_task_adapter.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_create_task_adapter.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_log_store.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_locks_db.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/metadata.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/metadata.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/kmeans_helper.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/kmeans_helper.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/iterator.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rename_unused_stage.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/iterator.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rename_unused_stage.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__register_node.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__register_node.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_remove_request.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_store_first_boot_timestamp.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_request.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_store_first_boot_timestamp.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/console_interaction.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/console_interaction.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard__kqp_scan.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/register_node.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/register_node.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_reject_notification.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_reject_notification.cpp |81.7%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__lock_tablet.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__lock_tablet.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_store_permissions.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_store_permissions.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_async_index.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_remove_task.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_task.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetching.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetching.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__resume_tablet.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__resume_tablet.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/walle_api_handler.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_api_handler.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |81.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |81.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_cdc_stream.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/conflicts_cache.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_data_tx_out_rs_unit.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_defaults.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_defaults.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_and_wait_dependencies_unit.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/make_scan_snapshot_unit.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/plain_read_data.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/plain_read_data.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/alter_cdc_stream_unit.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/sub_columns_fetching.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender_table_base.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/sub_columns_fetching.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/complete_write_unit.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/cms/cms_tx_load_state.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_update_config.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_load_state.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_update_config.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/merge.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/merge.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/backup_unit.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/backup_unit.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_sender.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_sender.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/constructors.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/constructors.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/sentinel.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/sentinel.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_collector.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_peer_stats_calculator.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_peer_stats_calculator.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/abstract.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/abstract.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_heartbeat.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive_statics.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_statics.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_store_metrics.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_store_metrics.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_init_scheme.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_init_scheme.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_remove_expired_notifications.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_expired_notifications.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_init_schema.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_init_schema.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_get_log_tail.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_get_log_tail.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/walle_list_tasks_adapter.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_list_tasks_adapter.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/cluster_tracker/cluster_tracker.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/cluster_tracker/cluster_tracker.cpp |81.8%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |81.8%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/run_script_actor/kqp_run_script_actor.cpp |81.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/run_script_actor/kqp_run_script_actor.cpp |81.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |81.8%| [AR] {RESULT} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |81.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/local_kmeans.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/local_kmeans.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/walle_remove_task_adapter.cpp |81.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/erasure_checkers.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/walle_remove_task_adapter.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/erasure_checkers.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_log_and_send.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_log_and_send.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/block_fail_point_unit.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/block_fail_point_unit.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/behaviour_registrator_actor.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_log_cleanup.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_log_cleanup.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/monitoring.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/monitoring.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_pipeline.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessor_callback.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessor_callback.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_update_downtimes.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_update_downtimes.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/session.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/session.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/fulltext.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/unique_index.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/constructor.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/fulltext.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/unique_index.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/constructor.cpp |81.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |81.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |81.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/cdc_stream_scan.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__configure_scale_recommender.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__configure_scale_recommender.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_process_notification.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_process_notification.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/fulltext_dict.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/fulltext_dict.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/sample_k.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/sample_k.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/logger.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/logger.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/dread_cache_service/caching_service.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/dread_cache_service/caching_service.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cluster_info.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cluster_info.cpp |81.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |81.9%| [AR] {RESULT} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |81.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_overload.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_check_integrity.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_check_integrity.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/executor.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/executor.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_remove_permissions.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_remove_permissions.cpp |81.9%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_write_out_rs_unit.cpp |81.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mirrorer/mirrorer.cpp |81.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mirrorer/mirrorer.cpp |81.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |82.0%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |82.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tx_store_walle_task.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tx_store_walle_task.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/test_helper/shard_writer.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/shard_writer.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/test_helper/shard_reader.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/shard_reader.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_incremental_restore_src_unit.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/http.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/http.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/merge.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/info_collector.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/info_collector.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/alter_table_unit.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/actor.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/actor.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_distributed_erase_tx_out_rs_unit.cpp |82.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |82.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |82.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/filter_kmeans.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/filter_kmeans.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blob_depot/s3_write.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3_write.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/read_metadata.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/read_metadata.cpp |82.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |82.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |82.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/s3.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/full_scan_sorted.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/full_scan_sorted.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/node_info.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_kqp_data_tx_out_rs_unit.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/node_info.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/downtime.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/downtime.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_table_unit.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/api_adapters.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/api_adapters.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/s3_delete.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/column_families/schema.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3_delete.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/test_helper/helper.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/column_families/schema.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/helper.cpp |82.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/libydb-core-cms.a |82.0%| [AR] {RESULT} $(B)/ydb/core/cms/libydb-core-cms.a |82.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |82.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |82.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |82.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/test_helper/controllers.cpp |82.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/controllers.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/events.cpp |82.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/events.cpp |82.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_status.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_status.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compile_service/kqp_compile_service.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/kqp_compile_service.cpp |82.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |82.1%| [AR] {RESULT} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |82.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/context.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/context.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_discover.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_discover.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yaml_config/yaml_config.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/src/actors.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yaml_config/yaml_config.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/src/actors.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/node_report.cpp |82.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |82.1%| [AR] {RESULT} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetching.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/node_report.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/fetching.cpp |82.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_outreadset.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__reassign_groups.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__reassign_groups.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/proxy.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/proxy.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/garbage_collection.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/garbage_collection.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_common.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_common.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_actor.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage__serialization.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage__serialization.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blob_depot/agent/resolved_value.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/resolved_value.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/op_init_schema.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_init_schema.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_uncertain.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/signals/owner.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_uncertain.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/signals/owner.cpp |82.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/signals/libydb-library-signals.a |82.1%| [AR] {RESULT} $(B)/ydb/library/signals/libydb-library-signals.a |82.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/op_apply_config.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_apply_config.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/change_collector_async_index.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/op_load.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_load.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__response_tablet_seq.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__response_tablet_seq.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_get.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_get.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/constructors.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_resolver.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructors.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_resolver.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/mon_main.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/mon_main.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_collect_garbage.cpp |82.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_collect_garbage.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_metrics.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_metrics.cpp |82.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/limit_sorted.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/limit_sorted.cpp |82.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |82.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |82.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/move_index_unit.cpp |82.2%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/library/persqueue/topic_parser/topic_parser.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/persqueue/topic_parser/topic_parser.cpp |82.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |82.2%| [AR] {RESULT} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |82.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/comm.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/comm.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/set_queue_attributes.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/set_queue_attributes.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__reassign_groups_on_decommit.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__reassign_groups_on_decommit.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__init_scheme.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__init_scheme.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/ds_table/accessor_subscribe.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_gc.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/ds_table/accessor_subscribe.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_gc.cpp |82.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |82.2%| [AR] {RESULT} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |82.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_mon.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/src/ydb_setup.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_mon.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/src/ydb_setup.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/queues_list_reader.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/queues_list_reader.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/default_fetching.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/default_fetching.cpp |82.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |82.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |82.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_patch.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_patch.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ycsb/kqp_select.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/kqp_select.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script_cursor.cpp |82.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script_cursor.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/s3_scan.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/scanner.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/scanner.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3_scan.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/abstract/common.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/common.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/table_creator/table_creator.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/base/counters.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/base/counters.cpp |82.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/blocks.cpp |82.2%| [AR] {RESULT} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/blocks.cpp |82.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/table_creator/table_creator.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |82.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |82.2%| [AR] {RESULT} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_create_table.cpp |82.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/manager.cpp |82.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/manager.cpp |82.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |82.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |82.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/constructor.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/constructor.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/query.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/query.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_and_wait_in_rs_unit.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/query_actor/query_actor.cpp |82.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_trash.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessors_ordering.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/context.cpp |82.3%| [AR] 
{default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |82.3%| [AR] {RESULT} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/accessors_ordering.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_trash.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__delete_tablet.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/context.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/query_actor/query_actor.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__delete_tablet.cpp |82.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/src/kqp_runner.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/src/kqp_runner.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/status.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/status.cpp |82.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |82.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |82.3%| [AR] {RESULT} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |82.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |82.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |82.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_keep_alive.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/metrics.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/metrics.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetched_data.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetched_data.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker__alter_tenant.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker__alter_tenant.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/op_commit_blob_seq.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/op_commit_blob_seq.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finalize_plan_tx_unit.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/topic_session.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/topic_session.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/iterator.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/iterator.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_analyze_deadline.cpp |82.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/statistics/aggregator/tx_analyze_deadline.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/metadata/manager.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/manager.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_actor.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_drop_coordination_node.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/service/executor.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/executor.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_resolve.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_resolve.cpp |82.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/test_helper/columnshard_ut_common.cpp |82.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |82.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |82.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/s3.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/s3.cpp |82.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/proxy_actor.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/proxy_actor.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/schema_version/version.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/schema_version/version.cpp |82.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |82.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |82.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/group_metrics_exchange.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/group_metrics_exchange.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/load_based_timeout.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/load_based_timeout.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_write.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_write.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_create_partitions_actor.cpp |82.4%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |82.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_create_partitions_actor.cpp |82.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/purge_queue.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/purge_queue.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/snapshot.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/snapshot.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_range.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_range.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/resource_pool_classifiers/resource_pool_classifiers.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/resource_pool_classifiers/resource_pool_classifiers.cpp |82.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |82.4%| [AR] {RESULT} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |82.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/metering.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/metering.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_service.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_service.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/node_tracker.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/node_tracker.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/request_controller_info.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/request_controller_info.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/storage_group_info.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_group_info.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/finalize_script_service/kqp_check_script_lease_actor.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/finalize_script_service/kqp_check_script_lease_actor.cpp |82.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |82.4%| [AR] {RESULT} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |82.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_statistics.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_statistics.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/engines/reader/abstract/constructor.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/constructor.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/http/http.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/http/http.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_manager.cpp |82.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |82.4%| [AR] {RESULT} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_manager.cpp |82.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/queue.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_put.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_put.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_read_actor.cpp |82.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_read_actor.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/read.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/read_with_blobs.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/read.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/read_with_blobs.cpp |82.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/purge.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/purge.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/event.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/event.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/monitoring.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/receive_message.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/monitoring.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_context.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/receive_message.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_context.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_init.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_init.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/blob_mapping_cache.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/follower_edge.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/blob_mapping_cache.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/follower_edge.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/fifo_cleanup.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/fifo_cleanup.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/library/yql/providers/pq/async_io/dq_pq_write_actor.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_write_actor.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/retention.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/retention.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/meta.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/meta.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_load.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_load.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/schema.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/schema.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/user_settings_reader.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/user_settings_reader.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetch_steps.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/fetch_steps.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_get_block.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_get_block.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/meta.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/meta.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__adopt_tablet.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common/result.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__adopt_tablet.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/result.cpp |82.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |82.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |82.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__block_storage_result.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__block_storage_result.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/list_queue_tags.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_queue_tags.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/queue_schema.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/queue_schema.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/garbage.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/garbage.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_convert_to_physical.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_convert_to_physical.cpp 
|82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/index_events_processor.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/index_events_processor.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/service.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/service.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_rd_read_actor.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions.cpp |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/pq/async_io/dq_pq_rd_read_actor.cpp |82.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |82.5%| [AR] {RESULT} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |82.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions.cpp |82.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/coro_tx.cpp |82.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/coro_tx.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/conveyor_task.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/auth_multi_factory.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/auth_multi_factory.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/common/script.cpp |82.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |82.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |82.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_tx_details_unit.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/cfg/cfg.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cfg/cfg.cpp |82.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |82.6%| [AR] {RESULT} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |82.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_distributed_erase_tx_unit.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/send_message.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/send_message.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_worker_actor.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/tx_proxy_status.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/tx_proxy_status.cpp |82.6%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/list_users.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_users.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive_impl.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_impl.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/cache_policy/policy.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/cache_policy/policy.cpp |82.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/queue_leader.cpp |82.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |82.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/queue_leader.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/change_visibility.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tablet_info.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/change_visibility.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tablet_info.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_resolve.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_resolve.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/executor.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/executor.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/given_id_range.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/given_id_range.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/written.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/written.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common/description.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/description.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator/txallocator_impl.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_impl.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |82.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |82.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |82.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/get_queue_attributes.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/get_queue_attributes.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/tag_queue.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/tag_queue.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/modify_permissions.cpp |82.6%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/modify_permissions.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/board_publish.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_publish.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.cpp |82.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/write_with_blobs.cpp |82.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/list_permissions.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/write_with_blobs.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_permissions.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/garbage_collector.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/garbage_collector.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/list_queues.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_queues.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/delete_message.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/delete_message.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/create_user.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/create_user.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/table_profiles.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_profiles.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/table_settings.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_settings.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_stats.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_write_details_unit.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/delete_user.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/delete_user.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/incr_restore_scan.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/s3_upload.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/s3_upload.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/auth_factory.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/auth_factory.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/storage_pool_info.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_pool_info.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/accessor_init.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/accessor_init.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/load_in_rs_unit.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/board_lookup.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/load_in_rs_unit.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/mind/hive/tx__delete_tablet_result.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_lookup.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__delete_tablet_result.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_upload_rows.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage_replica.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_replica.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/get_queue_url.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/get_queue_url.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/data_decommit.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/data_decommit.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert_index.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert_index.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/snapshotreq.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/snapshotreq.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_pdisk.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_pdisk.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/unisched.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/unisched.cpp |82.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |82.7%| [AR] {RESULT} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |82.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/assimilator.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/assimilator.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/proxy_service.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/proxy_service.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/untag_queue.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/untag_queue.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/create_queue.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/create_queue.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_portion.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_portion.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/commitreq.cpp |82.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/commitreq.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/column_record.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/column_record.cpp |82.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/import_s3.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/import_s3.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/delete_queue.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/delete_queue.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/agent.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/export_scan.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/agent.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/export_scan.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/common/comparable.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common/comparable.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/util/memory_tracker.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/memory_tracker.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sharding/hash_slider.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/hash_slider.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/key_validator.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/key_validator.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_scheme.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_scheme.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/actor.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/actor.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_table.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_table.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/session_actor/kqp_temp_tables_manager.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_browse.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_browse.cpp |82.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |82.8%| [AR] {RESULT} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_wb_req.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/describe.cpp |82.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/describe.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_wb_req.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/rpc_long_tx.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/rpc_long_tx.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive_domains.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/column_families.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/kqp_workload_service.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_domains.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/column_families.cpp |82.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |82.8%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/kqp_workload_service.cpp |82.8%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/tx/tx_proxy/datareq.cpp |82.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/datareq.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_query.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_query.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/storage_block.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/storage_block.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sharding/random.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/random.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tiering/tier/object.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/tier/object.cpp |82.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |82.8%| [AR] {RESULT} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |82.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/viewer.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/blob_depot.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/blob_depot.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/resolvereq.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/resolvereq.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/table_description.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_description.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/upload_rows_common_impl.cpp |82.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/upload_rows_common_impl.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_subdomain_path_id.cpp |82.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/blocks.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_trans_queue.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/blocks.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_vdisk.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_vdisk.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator_client/actor_client.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/portion_info.cpp |82.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |82.9%| [AR] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/portion_info.cpp |82.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive_log.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_log.cpp |82.9%| [CC] {default-linux-x86_64, release, 
asan} $(S)/ydb/core/viewer/viewer_topic_data.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer_topic_data.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/fill.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/fill.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/channel_kind.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/channel_kind.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/count_queues.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/count_queues.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/list_dead_letter_source_queues.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/initiate_build_index_unit.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/list_dead_letter_source_queues.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator/txallocator.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/follower_tablet_info.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/follower_tablet_info.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_bs.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_bs.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/merge_subset.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/merge_subset.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/boot_queue.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/boot_queue.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_pq.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_pq.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/db_wrapper.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/db_wrapper.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tiering/manager.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/manager.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_scan_tx_unit.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/util/failure_injection.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/failure_injection.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_schedule_traversal.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_schedule_traversal.cpp |82.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/util/libydb-core-util.a |82.9%| [AR] {RESULT} $(B)/ydb/core/util/libydb-core-util.a |82.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/util/libydb-core-util.a |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_pipe_req.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_pipe_req.cpp |82.9%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/viewer/json_handlers_operation.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_operation.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_commit_writes_tx_unit.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_accessor.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/constructor_accessor.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_split_src.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_kqp_data_tx_unit.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/viewer_request.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer_request.cpp |82.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/loading/stages.cpp |82.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/loading/stages.cpp |82.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |83.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/schemereq.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator/txallocator__scheme.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator__scheme.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_worker.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/table/table.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/table/table.cpp |83.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |83.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/upload_columns.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/upload_columns.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finish_propose_unit.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/storage_balancer.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_and_send_write_out_rs_unit.cpp |83.0%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/mind/hive/storage_balancer.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tiering/fetcher.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_cdc_stream_unit.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/fetcher.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/service_initializer.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/service_initializer.cpp |83.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |83.0%| [AR] {RESULT} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_vector_index.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_vector_index.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_impl.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_impl.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/options/schema.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/options/schema.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_validate_config.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_validate_config.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/modification/events/change_owning.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/events/change_owning.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/time_cast/time_cast.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/time_cast/time_cast.cpp |83.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |83.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |83.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |83.0%| [AR] {RESULT} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/appdata.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/appdata.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_store.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_store.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_volatile_snapshot_unit.cpp |83.0%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/services/metadata/common/ss_dialog.cpp |83.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/common/ss_dialog.cpp |83.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |83.0%| [AR] {RESULT} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |83.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent/request.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent/request.cpp |83.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sharding/hash_modulo.cpp |83.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |83.1%| [AR] {RESULT} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/hash_modulo.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy.cpp |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/ttl/validator.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/ttl/validator.cpp |83.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |83.1%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finalize_build_index_unit.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/finish_propose_write_unit.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator/txallocator__reserve.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/read_table_impl.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator__reserve.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/external_sources/object_storage.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/read_table_impl.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/object_storage.cpp |83.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |83.1%| [AR] {RESULT} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_actor.cpp |83.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |83.1%| [AR] {RESULT} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_actor.cpp |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |83.1%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/external_sources/libydb-core-external_sources.a |83.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |83.1%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/export_common.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/export_common.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/export_s3_uploader.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/topic_description.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/topic_description.cpp |83.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |83.1%| [AR] {RESULT} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/object.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execution_unit.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/object.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execution_unit.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/upload_rows.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/upload_rows.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/domain_info.cpp |83.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |83.1%| [AR] {RESULT} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/domain_info.cpp |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_data_tx_unit.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/compacted.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/compacted.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_cache_append.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_cache_append.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/engine/minikql/flat_local_tx_factory.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/engine/minikql/flat_local_tx_factory.cpp |83.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |83.1%| [AR] {RESULT} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |83.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/schema/schema.cpp |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/schema/schema.cpp |83.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/store/store.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |83.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |83.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_kh_snapshots.cpp |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/store/store.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_get_shard_locations.cpp |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sharding/sharding.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/sharding.cpp |83.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/portions/data_accessor.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sharding/hash_intervals.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/portions/data_accessor.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/run.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/hash_intervals.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/run.cpp |83.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/common/common.cpp |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/common/common.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/diagnostics/scan_diagnostics_actor.cpp |83.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/diagnostics/scan_diagnostics_actor.cpp |83.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |83.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.a |83.2%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/sys_view/tablets/tablets.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_commit_writes_tx_unit.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |83.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |83.2%| [AR] {RESULT} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_table_scan.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/tablets/tablets.cpp |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/column_engine_logs.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/column_engine_logs.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/main.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/main.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/create_store.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/options/update.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/options/update.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/create_store.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_split_dst.cpp |83.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |83.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |83.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |83.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_table.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |83.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_user_db.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_server.cpp |83.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/wait_for_stream_clearance_unit.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |83.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/kikimr_services_initializers.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/kikimr_services_initializers.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_uploads.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update_index.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_update_index.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_disk.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_disk.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_fakeinitshard.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_fakeinitshard.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/statestorage_guardian.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/statestorage_guardian.cpp |83.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/libydb-core-base.a |83.3%| [AR] {RESULT} $(B)/ydb/core/base/libydb-core-base.a |83.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/base/libydb-core-base.a |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/factories.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/factories.cpp |83.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/librun.a |83.3%| [AR] {RESULT} $(B)/ydb/core/driver_lib/run/librun.a |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_schema_snapshots.cpp |83.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/run/librun.a |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_store.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter_store.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/schema/update.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_and_send_out_rs_unit.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/schema/update.cpp |83.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |83.3%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |83.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer__balancing.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/json_handlers_viewer.cpp |83.3%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_cluster_discovery.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_cluster_discovery.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/json_handlers_viewer.cpp |83.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/libydb-core-viewer.a |83.3%| [AR] {RESULT} $(B)/ydb/core/viewer/libydb-core-viewer.a |83.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/execute_write_unit.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_write_operation.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tenant.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tenant.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp |83.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |83.3%| [AR] {RESULT} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_config.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_operator.cpp |83.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_operator.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/metadata_accessor.cpp |83.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/metadata_accessor.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_snapshots.cpp |83.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/drain.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/drain.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_genconfig.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_genconfig.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/upload_stats.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/upload_stats.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_index_notice_unit.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |83.4%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/wait_for_plan_unit.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_initroot.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_scheme_initroot.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_console.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_console.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/direct_tx_unit.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_write_unit.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_cms.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_cms.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/drop_table.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/update.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/update.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |83.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |83.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_apply.cpp |83.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_stress.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_persqueue_stress.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_debug.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_debug.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/common/columnshard.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/common/columnshard.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tablet.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_admin.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_tablet.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_admin.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/collector.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/abstract/collector.cpp |83.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |83.4%| [AR] {RESULT} 
$(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |83.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/create_table.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/create_table.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_actorsystem_perftest.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_actorsystem_perftest.cpp |83.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |83.4%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |83.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/rotate_cdc_stream_unit.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/rotate_cdc_stream_unit.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/backup/impl/local_partition_reader.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/backup/impl/local_partition_reader.cpp |83.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |83.4%| [AR] {RESULT} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |83.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/make_snapshot_unit.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/volatile_tx.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_query_compiler.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__configure_subdomain.cpp |83.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__configure_subdomain.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/object.cpp |83.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/update.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/object.cpp |83.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |83.5%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/update.cpp |83.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |83.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |83.5%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |83.5%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |83.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_full_text_source.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_full_text_source.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/move_portions.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/memory_state_migration.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/move_portions.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/memory_state_migration.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_cleanup_unit.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/update.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/update.cpp |83.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |83.5%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |83.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/cli_utils/cli_cmds_node.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/node_tablet_monitor.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/node_tablet_monitor.cpp |83.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |83.5%| [AR] {RESULT} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/converter.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/converter.cpp |83.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/control/immediate_control_board_actor.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/update.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/update.cpp |83.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/libydb-core-control.a |83.5%| [AR] {RESULT} $(B)/ydb/core/control/libydb-core-control.a |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/immediate_control_board_actor.cpp |83.5%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/control/libydb-core-control.a |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/common/kqp_ut_common.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/common/kqp_ut_common.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/column_engine.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/object.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/column_engine.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/object.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/discovery/discovery.cpp |83.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |83.5%| [AR] {RESULT} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/discovery/discovery.cpp |83.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/discovery/libydb-core-discovery.a |83.5%| [AR] {RESULT} $(B)/ydb/core/discovery/libydb-core-discovery.a |83.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |83.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/remove_portions.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/remove_portions.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_persistent_snapshot_unit.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/update.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/update.cpp |83.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |83.5%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |83.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/update.cpp |83.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/update.cpp |83.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/context.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/context.cpp |83.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |83.6%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |83.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/object.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/common/object.cpp |83.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |83.6%| [AR] {RESULT} 
$(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |83.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/virtual_group.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/virtual_group.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/update.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/update.cpp |83.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |83.6%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |83.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/restore_unit.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_slot_broker.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/restore_unit.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_slot_broker.cpp |83.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/libydb-core-mind.a |83.6%| [AR] {RESULT} $(B)/ydb/core/mind/libydb-core-mind.a |83.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_locks/manager/manager.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_locks/manager/manager.cpp |83.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |83.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |83.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/drop_table_unit.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_distributed_erase_tx_unit.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app_sendreadset.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/pq_impl_app_sendreadset.cpp |83.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |83.6%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |83.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_uniq_helper.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_uniq_helper.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ycsb/test_load_actor.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_s3_downloads.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/test_load_actor.cpp 
|83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/recompute_kmeans.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/recompute_kmeans.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/update.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/restore.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/restore.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/update.cpp |83.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |83.6%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |83.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_data_tx_unit.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/common.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/common.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/prefix_kmeans.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/prefix_kmeans.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/partition_monitoring.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/partition_monitoring.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/grpc_pq_read.cpp |83.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |83.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |83.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |83.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/grpc_pq_read.cpp |83.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |83.6%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |83.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |83.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |83.7%| [AR] {RESULT} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |83.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_resolve_node.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/column_fetching/cache_policy.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_resolve_node.cpp |83.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/column_fetching/cache_policy.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/actors/scheme_actors.cpp |83.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |83.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |83.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/actors/scheme_actors.cpp |83.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |83.7%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |83.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/volatile_tx_mon.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/receive_snapshot_unit.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/checker_secret.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/checker_secret.cpp |83.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |83.7%| [AR] {RESULT} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |83.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_external_table.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_external_table.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/merger.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction/merger.cpp |83.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |83.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |83.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/protect_scheme_echoes_unit.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/move_table_unit.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/layout/layout.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/layout/layout.cpp |83.7%| [AR] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |83.7%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |83.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/storage/pdisks.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/storage/pdisks.cpp |83.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |83.7%| [AR] {RESULT} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |83.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ycsb/test_load_read_iterator.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/test_load_read_iterator.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/remove_locks.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/remove_locks.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/operation.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/operation.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/connect_socket_protocol.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/connect_socket_protocol.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/leader_tablet_info.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/leader_tablet_info.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_mkql_compiler.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/testing.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_commit_transaction.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/partition_scale_manager.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/testing.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/partition_scale_manager.cpp |83.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/proxy/proxy.cpp |83.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/proxy/proxy.cpp |83.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |83.7%| [AR] {RESULT} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |83.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/manager.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/local_db/manager.cpp |83.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |83.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |83.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_scheme_initroot.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_scheme_initroot.cpp |83.8%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/manager.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/manager.cpp |83.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |83.8%| [AR] {RESULT} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |83.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write_actor.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_write_actor.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/create_persistent_snapshot_unit.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__read_step_subscriptions.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/remove_lock_change_records.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__read_step_subscriptions.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/memory.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/memory.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_write_tx_in_rs_unit.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_op_unit.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/abstract/fetcher.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/abstract/fetcher.cpp |83.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |83.8%| [AR] {RESULT} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |83.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/indexes/update.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/indexes/update.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_snapshot_tx_unit.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_kqp_data_tx_in_rs_unit.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/send_message.cpp |83.8%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/services/sqs_topic/send_message.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_rules.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_rules.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_auth_actor.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/agent.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_sasl_auth_actor.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/agent.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/indexes/schema.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/indexes/schema.cpp |83.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |83.8%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |83.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/status.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/status.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/data_events/shards_splitter.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_scheme_tx_in_rs_unit.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/shards_splitter.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_list_groups_actor.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_list_groups_actor.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/transfer.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/transfer.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_scheme_request.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_scheme_request.cpp |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/manager.cpp |83.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/manager.cpp |83.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |83.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |83.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |83.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/actors/read_info_actor.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/actors/read_info_actor.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |83.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |83.9%| [AR] {RESULT} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp 
|83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp |83.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx.cpp |83.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/libydb-core-tx.a |83.9%| [AR] {RESULT} $(B)/ydb/core/tx/libydb-core-tx.a |83.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__last_step_subscriptions.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__last_step_subscriptions.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_drain_node.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_drain_node.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/plan_queue_unit.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_groups_actor.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_describe_groups_actor.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/store_scheme_tx_unit.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/changes.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/changes.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_http_server.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_http_server.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/space_monitor.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/space_monitor.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/query_compiler/kqp_olap_compiler.cpp |83.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |83.9%| [AR] {RESULT} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ycsb/kqp_upsert.cpp |83.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/kqp_upsert.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_persqueue.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_persqueue.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/tx__create_tablet.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/tx__create_tablet.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp 
|83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_data_tx_in_rs_unit.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_fill_node.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_fill_node.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/secondary_index.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/secondary_index.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_clusters_updater_actor.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_clusters_updater_actor.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_tablet_counters.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tablet_counters.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/transfer.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/transfer.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_pq_read_session_info.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_read_session_info.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_types.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_types.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |83.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |83.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |83.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_tx_request.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tx_request.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_hive_create_tablet.cpp |83.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_hive_create_tablet.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/columns/schema.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/columns/schema.cpp |83.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_index.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_compactionstate.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_upsert_index.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_compactionstate.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/remove_schema_snapshots.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |84.0%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/read_table_scan_unit.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_repl_offsets_client.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_node_registration.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__plan_step.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_node_registration.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__plan_step.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_blobstorage_config.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_blobstorage_config.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/minikql_compile/mkql_compile_service.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/minikql_compile/mkql_compile_service.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogreader.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogreader.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_scheme_tx_out_rs_unit.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/service/add_index.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/service/add_index.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/manager/manager.cpp |84.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |84.0%| [AR] {RESULT} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/manager/manager.cpp |84.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |84.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |84.0%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |84.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/manager.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/manager.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_pq_metacache.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metacache.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/util/actorsys_test/testactorsys.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/actorsys_test/testactorsys.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_monactors.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_monactors.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfull.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/prepare_distributed_erase_tx_in_rs_unit.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfull.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_console.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_console.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_proxy.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_proxy.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_counters_aggregator.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/read_data_protocol.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/read_data_protocol.cpp |84.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/libydb-core-tablet.a |84.0%| [AR] {RESULT} $(B)/ydb/core/tablet/libydb-core-tablet.a |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_builder.cpp |84.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_builder.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgreader.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgreader.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/local_pgwire/local_pgwire.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/local_pgwire.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |84.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |84.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |84.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_tablet_state.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_tablet_state.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_test_shard_request.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/grpc_proxy_status.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/grpc_proxy_status.cpp |84.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_firstrun.cpp |84.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_test_shard_request.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_firstrun.cpp |84.1%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/ut_helpers.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/ut_helpers.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/row_dispatcher.cpp |84.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |84.1%| [AR] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_recovery.cpp |84.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_recovery.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__schema.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__schema.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_actor.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_pipe.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_compute_actor.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_pipe.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata_proxy.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata_proxy.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_ic_debug.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_ic_debug.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_fulltext_index.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_fulltext_index.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_syncloghttp.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_actor.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_syncloghttp.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_actor.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/data_events/shard_writer.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/shard_writer.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_cms.cpp |84.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/client/server/msgbus_server_cms.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeletonfront.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/pdisk_read.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/pdisk_read.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxyobtain.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxyobtain.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__mediators_confirmations.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__mediators_confirmations.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/send_data_protocol.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/send_data_protocol.cpp |84.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |84.1%| [AR] {RESULT} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_committer.cpp |84.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_committer.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/debug_log.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/debug_log.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_loggedrec.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_loggedrec.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/olap/columns/update.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/olap/columns/update.cpp |84.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |84.1%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |84.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_state.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_state.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_state.cpp |84.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/manager.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_state.cpp |84.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_accessor/in_mem/manager.cpp |84.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |84.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |84.2%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__monitoring.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__monitoring.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__init.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__init.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/control.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/events/control.cpp |84.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |84.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/grpc_server.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/grpc_server.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmultiput_actor.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmultiput_actor.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_mon_dbmainpage.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__configure.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_mon_dbmainpage.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__configure.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator_impl.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__stop_guard.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__stop_guard.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/scrub.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_impl.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/scrub.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_type_ann.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_type_ann.cpp |84.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |84.2%| [AR] {RESULT} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql.cpp |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |84.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/libcore-client-server.a |84.2%| [AR] {RESULT} $(B)/ydb/core/client/server/libcore-client-server.a |84.2%| [AR] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |84.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_write_table.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/control.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_read_iterator_common.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/events/control.cpp |84.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |84.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfullhandler.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_syncfullhandler.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_dp_cost_based.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo_dp_cost_based.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/constructor.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/constructor.cpp |84.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |84.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |84.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_scheduler.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_scheduler.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__restore_params.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__restore_params.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__restore_transaction.cpp |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__restore_transaction.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/external_data_source/behaviour.cpp |84.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/gateway/behaviour/external_data_source/behaviour.cpp |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__acquire_read_step.cpp |84.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |84.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__acquire_read_step.cpp |84.2%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |84.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_bsc.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/lib/node_warden_mock_bsc.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |84.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/health/health.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/health/health.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |84.3%| [AR] {RESULT} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/public_http/http_service.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/public_http/http_service.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/libydb-core-public_http.a |84.3%| [AR] {RESULT} $(B)/ydb/core/public_http/libydb-core-public_http.a |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_recoverlostdata.cpp |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/controller/controller.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/controller/controller.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |84.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |84.3%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxywrite.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_proxywrite.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__schema_upgrade.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__schema_upgrade.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_propagator.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/guid_propagator.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogrecovery.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogrecovery.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator_state.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_state.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_analyze.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/counters_manager.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/counters_manager.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |84.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_task.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/syncer_job_task.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/auth/groups.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/blob_manager_db.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/groups.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |84.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/blob_manager_db.cpp |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |84.3%| [AR] {default-linux-x86_64, 
release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |84.3%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/datastreams/put_records_actor.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/put_records_actor.cpp |84.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_committer.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_committer.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert.cpp |84.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/effects/kqp_opt_phy_insert.cpp |84.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |84.3%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/checker.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/checker.cpp |84.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |84.4%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/auth/owners.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/auth/owners.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |84.4%| [AR] {RESULT} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_reader/contexts.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_reader/contexts.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |84.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/rm_service/kqp_rm_service.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_service.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |84.4%| [AR] {RESULT} 
$(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_reader/task.cpp |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_tracker.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_tracker.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_reader/task.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqrb/read_balancer_app.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqrb/read_balancer_app.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |84.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/mediator_queue.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/mediator_queue.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/util/actorsys_test/single_thread_ic_mock.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/util/actorsys_test/single_thread_ic_mock.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |84.4%| [AR] {RESULT} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/construction/context.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_delivery_problem.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/data_events/columnshard_splitter.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_delivery_problem.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |84.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/actualization/construction/context.cpp |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_overload_handler.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_coordination_node.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_overload_handler.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/data_events/columnshard_splitter.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |84.4%| [AR] {RESULT} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |84.4%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_offsets_actor.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_topic_offsets_actor.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/balancer.cpp |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/balancer.cpp |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |84.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_sequencer_actor.cpp |84.4%| [AR] {RESULT} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |84.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |84.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |84.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |84.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_response.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/tx_analyze_shard_response.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |84.5%| [AR] {RESULT} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_alter_table.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kafka_transactional_producers_initializers.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_transactional_producers_initializers.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_config.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_config.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator__check.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_storage_request.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_request.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator__check.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |84.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |84.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |84.5%| [AR] {RESULT} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/locks/dependencies.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks/dependencies.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |84.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmovedpatch_actor.cpp |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vmovedpatch_actor.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_block_and_get.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_block_and_get.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/local_pgwire/local_pgwire_auth_actor.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/scan.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/local_pgwire/local_pgwire_auth_actor.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |84.5%| [AR] {RESULT} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/scan.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |84.5%| [AR] {RESULT} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_shred.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_shred.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/kafka_consumer_groups_metadata_initializers.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/kafka_consumer_groups_metadata_initializers.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_forget_operation.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_db.cpp |84.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_db.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_session_info.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_session_info.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |84.5%| [AR] {RESULT} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_init_schema.cpp |84.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_init_schema.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_list_operations.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/queries.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |84.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/streaming_query/queries.cpp |84.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_make_directory.cpp |84.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |84.5%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_get_scale_recommendation.cpp |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/sqs_topic/receive_message.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/sqs_topic/receive_message.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/processor_impl.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/processor_impl.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/sqs_topic/libydb-services-sqs_topic.a |84.6%| [AR] {RESULT} $(B)/ydb/services/sqs_topic/libydb-services-sqs_topic.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/pg_tables/pg_tables.cpp |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/sqs_topic/libydb-services-sqs_topic.a |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_object_storage.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |84.6%| [AR] {RESULT} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/pg_tables/pg_tables.cpp |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |84.6%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/restore.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/restore.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/layout_helpers.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/rbo/kqp_rbo.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/layout_helpers.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/table_settings.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/table_settings.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |84.6%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/resource_subscriber/events.cpp |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/events.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |84.6%| [AR] {RESULT} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_state.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/readproxy/readproxy.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_state.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |84.6%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_import_data.cpp |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/readproxy/readproxy.cpp |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |84.6%| [AR] {RESULT} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |84.6%| [AR] {RESULT} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |84.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |84.6%| [AR] {RESULT} 
$(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |84.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/processor/tx_top_partitions.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/processor/tx_top_partitions.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compute_actor/kqp_scan_fetcher_actor.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_service.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/overload_manager/overload_manager_service.cpp |84.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compute_actor/kqp_scan_fetcher_actor.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_read_columns.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_state.cpp |84.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_cms.cpp |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |84.7%| [AR] {RESULT} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_state.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_cms.cpp |84.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |84.7%| [AR] {RESULT} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/query/rpc_attach_session.cpp |84.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/blobstorage_skeleton.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/datastreams/datastreams_proxy.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/datastreams_proxy.cpp |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |84.7%| [AR] {RESULT} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |84.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/actors/kafka_fetch_actor.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/actors/kafka_fetch_actor.cpp 
|84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog_private_events.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclog_private_events.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_test_shard.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_test_shard.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |84.7%| [AR] {RESULT} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |84.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |84.7%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |84.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |84.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |84.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_planner.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |84.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |84.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_snapshot.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_tasks_runner.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_storage_snapshot.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/blobs_manager.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/blobs_manager.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_fq.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_fq.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/resource_subscriber/actor.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/actor.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/txusage_fixture.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/load_test/ycsb/bulk_mkql_upsert.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/txusage_fixture.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ycsb/bulk_mkql_upsert.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/tablet/ext_tx_base.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/tablet/ext_tx_base.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_thresholds.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/phantom_flag_thresholds.cpp |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/libvdisk-synclog-phantom_flag_storage.a |84.7%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/libvdisk-synclog-phantom_flag_storage.a |84.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/libvdisk-synclog-phantom_flag_storage.a |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/indexation.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/indexation.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/overload_manager/overload_subscribers.cpp |84.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/overload_manager/overload_subscribers.cpp |84.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |84.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_describe_system_view.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_describe_system_view.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/tablet/write_queue.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/tablet/write_queue.cpp |84.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/counters/engine_logs.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |84.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/alter.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/alter.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/counters/engine_logs.cpp |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |84.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_list_objects_in_s3_export.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_list_objects_in_s3_export.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/grpc_services/rpc_bridge.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_bridge.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_keyvalue.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |84.8%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_db_mon.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/access_behaviour.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_db_mon.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/access_behaviour.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |84.8%| [AR] {RESULT} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/portion.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/portion/portion.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/tablet_flat_executor.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/tablet_flat_executor.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/partition_stats/partition_stats.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/tables/table_queries.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |84.8%| [AR] {RESULT} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/tables/table_queries.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |84.8%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |84.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |84.8%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/resolve_local_db_table.cpp |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_output_stream.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/resource_subscriber/task.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/resource_subscriber/task.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/splitter/chunks.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/data_locks/locks/snapshot.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |84.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_locks/locks/snapshot.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/splitter/chunks.cpp |84.8%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/init/init.h_serialized.cpp |84.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/init.h_serialized.cpp |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |84.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read_actor.cpp |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |84.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |84.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/init/libcore-config-init.a |84.8%| [AR] {RESULT} $(B)/ydb/core/config/init/libcore-config-init.a |84.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |84.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_boot_misc.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_boot_misc.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/deprecated/persqueue_v0/grpc_pq_read_actor.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/group_write.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |84.9%| [AR] {RESULT} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/group_write.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/portion/clean_inserted_portions.cpp |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |84.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/normalizer/portion/clean_inserted_portions.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_load_rows.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/metadata/initializer.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/metadata/initializer.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |84.9%| [AR] {RESULT} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/tx_monitoring.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/tx_monitoring.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/libydb-core-load_test.a |84.9%| [AR] {RESULT} $(B)/ydb/core/load_test/libydb-core-load_test.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |84.9%| [AR] {RESULT} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |84.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |84.9%| [AR] {RESULT} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |84.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |84.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |84.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/topic_sdk_test_setup.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/operation_helpers.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/topic_sdk_test_setup.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/operation_helpers.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |84.9%| [AR] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |84.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/write_actor.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/write_actor.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/manager/abstract.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/manager/abstract.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/normalizer/tablet/broken_txs.cpp |84.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/normalizer/tablet/broken_txs.cpp |84.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |84.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_clickhouse_internal.cpp |84.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_clickhouse_internal.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_buffer_lookup_actor.cpp |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/libydb-services-ydb.a |85.0%| [AR] {RESULT} $(B)/ydb/services/ydb/libydb-services-ydb.a |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_buffer_lookup_actor.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/backup/iscan/iscan.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/backup/iscan/iscan.cpp |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |85.0%| [AR] {RESULT} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/backup/iscan/libcolumnshard-backup-iscan.a |85.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/backup/iscan/libcolumnshard-backup-iscan.a |85.0%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/backup/iscan/libcolumnshard-backup-iscan.a |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy_simple.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/transfer/column_table.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/column_table.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_continuous_backup_cleaner.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_continuous_backup_cleaner.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/locks/locks.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/locks/locks.cpp |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |85.0%| [AR] {RESULT} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/ydb_over_fq/list_directory.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import_helpers.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_helpers.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/add_column.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/add_column.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__update_subdomain_key.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_subdomain_key.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/show_create/show_create.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/show_create/show_create.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |85.0%| [AR] {RESULT} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard_subdomain_path_id.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_subdomain_path_id.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/replication/controller/tx_worker_error.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_worker_error.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/events.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/events.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/write.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/write.cpp |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |85.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_handshake.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/streaming_queries/streaming_queries.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_handshake.cpp |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |85.0%| [AR] {RESULT} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/streaming_queries/streaming_queries.cpp |85.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |85.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_cache/scheme_cache.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |85.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_cache/scheme_cache.cpp |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |85.0%| [AR] {RESULT} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |85.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |85.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |85.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |85.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__load_state.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__load_state.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/operations/slice_builder/pack_builder.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/slice_builder/pack_builder.cpp |85.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |85.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |85.1%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/fake_coordinator.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/fake_coordinator.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin_compact.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin_compact.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_drop_replication.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_drop_replication.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_request_proxy.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_describe_replication.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_describe_replication.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_create_stream_result.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_create_stream_result.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/rpc_execute_data_query.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |85.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |85.1%| [AR] {RESULT} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__toggle_config_validator.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__toggle_config_validator.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_configs_provider.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_hash_func_propagate_transformer.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configs_provider.cpp |85.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/opt/kqp_opt_hash_func_propagate_transformer.cpp |85.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_cbo.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_cbo.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_configs_manager.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/load_actor_write.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_write.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configs_manager.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_opt.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_opt.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/load_actor_delete.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_delete.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/program/resolver.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/program/resolver.cpp |85.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/program/libcore-tx-program.a |85.1%| [AR] {RESULT} $(B)/ydb/core/tx/program/libcore-tx-program.a |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |85.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_assign_stream_name.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_assign_stream_name.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |85.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_alter_replication.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_alter_replication.cpp |85.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/basics/runtime.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/runtime.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_helpers.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_helpers.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/long_tx_service/commit_impl.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/commit_impl.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_create_dst_result.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_create_dst_result.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/dst_creator.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_runner.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_runner.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_discovery_targets_result.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_discovery_targets_result.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/stream_creator.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_extract.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_extract.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/view/behaviour.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/view/behaviour.cpp |85.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |85.2%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |85.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_provider.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_datasource.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_datasource.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/load_actor_mon.cpp |85.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/test_tablet/load_actor_mon.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/load_actor_state.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_state.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/tablet_queue.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/tablet_queue.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/transfer/purecalc.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/purecalc.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/basics/appdata.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/appdata.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/dst_alterer.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_alterer.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/feature_flags_configurator.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/secret_resolver.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/secret_resolver.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/actors/test_runtime.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/test_runtime.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/node_service/kqp_node_service.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/node_service/kqp_node_service.cpp |85.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |85.2%| [AR] {RESULT} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |85.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator__schema.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__schema.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_indexes.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_indexes.cpp |85.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/manager.cpp |85.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/manager.cpp |85.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |85.2%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |85.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_opt_build.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_opt_build.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |85.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/opt/kqp_constant_folding_transformer.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/load_actor_read_validate.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_read_validate.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_host.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_host.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__set_config.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__set_config.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/backup/import/session.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/backup/import/session.cpp |85.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.global.a |85.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.global.a |85.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.global.a |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator__init.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__init.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/long_tx_service/long_tx_service_impl.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_impl.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__cleanup_subscriptions.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__cleanup_subscriptions.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator__schema_upgrade.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__schema_upgrade.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/kqp_metadata_loader.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/kqp_metadata_loader.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/immediate_controls_configurator.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_statistics_transformer.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/long_tx_service/acquire_snapshot_impl.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/long_tx_service/acquire_snapshot_impl.cpp |85.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |85.3%| [AR] {RESULT} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |85.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__update_tenant_state.cpp |85.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/cms/console/console__update_tenant_state.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_sink_precompute.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_sink_precompute.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_index_utils.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_index_utils.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/config/bsconfig_ut.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__remove_tenant.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_tenant.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/execute_queue.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/execute_queue.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_write_constraint.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_faketablet.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_faketablet.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/ydbd/main.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/ydbd/main.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__alter_tenant.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_simplebs.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__alter_tenant.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_simplebs.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/prepare.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__create_tenant.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__create_tenant.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_gateway_proxy.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_bad_blobid.cpp |85.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_bad_blobid.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_huge.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_huge.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers_ut.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_helpers_ut.cpp |85.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp 
|85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_tenants_manager.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_tenants_manager.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_brokendevice.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_brokendevice.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/query_actor/query_actor_ut.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_localrecovery.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_repl.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_localrecovery.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_repl.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/helpers.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/helpers.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_drop_stream_result.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_drop_stream_result.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__revert_pool_state.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__revert_pool_state.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_outofspace.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_outofspace.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/configs_dispatcher.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_dispatcher.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/vdisk_mock.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/vdisk_mock.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_defrag.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_defrag.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_load.cpp 
|85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_load.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_many.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_many.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_synclog.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_synclog.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_dbstat.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_dbstat.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/lib/test_gc.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |85.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |85.4%| [AR] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |85.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/describer/describer_ut.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/describer/describer_ut.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/generated/runtime_feature_flags_ut.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/generated/runtime_feature_flags_ut.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/kqp_gateway.cpp |85.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/kqp_gateway.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |85.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__root_shred_manager.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__root_shred_manager.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_sharding.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_sharding.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phase.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator__configure.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator__configure.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_disk_quotas.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_disk_quotas.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot_isolation.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot_isolation.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/runlib/utils.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/runlib/utils.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_filter_kmeans.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_filter_kmeans.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_index.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_index.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/tx_init_scheme.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/tx_init_scheme.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_effects.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/test_client.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/test_client.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/abstract.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/abstract.cpp |85.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |85.5%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |85.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/runlib/application.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/runlib/application.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/test_shard_context.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/test_shard_context.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/apps/etcd_proxy/service/etcd_impl.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/apps/etcd_proxy/service/etcd_impl.cpp |85.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |85.5%| [AR] {RESULT} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |85.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_build_phy_query.cpp |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tracing/tablet_info.cpp |85.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tracing/tablet_info.cpp |85.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tracing/libydb-core-tracing.a |85.5%| [AR] {RESULT} $(B)/ydb/core/tracing/libydb-core-tracing.a |85.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/pq_async_io/ut_helpers.cpp |85.5%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/pq_async_io/ut_helpers.cpp |85.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |85.5%| [AR] {RESULT} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |85.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |85.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/kqprun/runlib/kikimr_setup.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/kqprun/runlib/kikimr_setup.cpp |85.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |85.6%| [AR] {RESULT} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |85.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_opt.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_opt.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_column_statistics_requester.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/common/ut_helpers/dq_fake_ca.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/common/ut_helpers/dq_fake_ca.cpp |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |85.6%| [AR] {RESULT} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |85.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__get.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__get.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/helpers/local.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/local.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/kqp_ic_gateway.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/kqp_ic_gateway.cpp |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |85.6%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |85.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__forget.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__forget.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/ut/common/common.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/ut/common/common.cpp |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/mlp/ut/common/libmlp-ut-common.a |85.6%| [AR] {RESULT} $(B)/ydb/core/persqueue/public/mlp/ut/common/libmlp-ut-common.a |85.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/public/mlp/ut/common/libmlp-ut-common.a |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/helpers/writer.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/writer.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sysview.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sysview.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/helpers/query_executor.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/query_executor.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/helpers/typed_local.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/typed_local.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/common/common.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/common/common.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/test_server.cpp |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |85.6%| [AR] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/test_server.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/federated_query/ut_service/common/helpers.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/connector_client_mock.cpp |85.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/ut_service/common/libfederated_query-ut_service-common.a |85.6%| [AR] {RESULT} $(B)/ydb/core/kqp/federated_query/ut_service/common/libfederated_query-ut_service-common.a |85.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |85.6%| [AR] {RESULT} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |85.6%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/kqp/federated_query/ut_service/common/libfederated_query-ut_service-common.a |85.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/ut_service/common/helpers.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/connector_client_mock.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/execute.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/execute.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |85.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |85.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/bulk_upsert.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/bulk_upsert.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_check.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_check.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/fqrun/src/fq_setup.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/src/fq_setup.cpp |85.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |85.7%| [AR] {RESULT} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |85.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/helpers/aggregation.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/helpers/aggregation.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |85.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |85.7%| [AR] {RESULT} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |85.7%| 
[CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/compaction.cpp |85.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/variator.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/compaction.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/variator.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/abstract.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/abstract.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/backup/async_jobs/import_downloader.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/backup/async_jobs/import_downloader.cpp |85.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/backup/async_jobs/libcolumnshard-backup-async_jobs.a |85.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/backup/async_jobs/libcolumnshard-backup-async_jobs.a |85.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/backup/async_jobs/libcolumnshard-backup-async_jobs.a |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_shred_manager.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/ut/datastreams_fixture/datastreams_fixture.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_shred_manager.cpp |85.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |85.7%| [AR] {RESULT} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/datastreams_fixture/datastreams_fixture.cpp |85.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_finalize.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_finalize.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/actualization.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/actualization.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_column.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/alter_column.cpp |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/ut_utils.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/ut_utils.cpp |85.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |85.7%| [AR] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |85.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/behaviour.cpp |85.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/behaviour.cpp |85.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |85.7%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |85.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp |85.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_kql.cpp |85.8%| [AR] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |85.8%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_streaming_query.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_streaming_query.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/executor.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/executor.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_incremental_restore_finalize.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_incremental_restore_finalize.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/transfer/row_table.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/row_table.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_init.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_init.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/local_rpc/helper.cpp 
|85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/local_rpc/helper.cpp |85.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |85.8%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |85.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/utils/scheme_helpers.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/utils/scheme_helpers.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_change_path_state.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_change_path_state.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |85.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |85.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |85.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__forget.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__forget.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__login_finalize.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login_finalize.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/tablet_helpers.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tablet_helpers.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/test_tablet.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/test_tablet.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |85.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/transfer/purecalc_output.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/purecalc_output.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_sysviews_update.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_sysviews_update.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__op_traits.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__op_traits.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/view/manager.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/view/manager.cpp |85.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |85.9%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |85.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/basics/helpers.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/helpers.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |85.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |85.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_stage_float_up.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_stage_float_up.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_alter_dst_result.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_alter_dst_result.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann_pg.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/transfer/purecalc_input.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/transfer/scheme.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann_pg.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/purecalc_input.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/scheme.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_assign_tx_id.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_assign_tx_id.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_resolve_secret_result.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole_wide_read.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_resolve_secret_result.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_create_replication.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_create_replication.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_resolve_database_result.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_resolve_database_result.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sort.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sort.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_precompute.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_precompute.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_sqlin.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log.cpp |86.0%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_results.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_results.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_helpers.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_source.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_helpers.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_sort.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_source.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_sort.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/state_server_interface.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/state_server_interface.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_drop_dst_result.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_drop_dst_result.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |86.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/dst_remover.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_remover.cpp |86.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/read_attributes_utils.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_heartbeat.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_heartbeat.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/logging.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/peephole/kqp_opt_peephole.cpp |86.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |86.1%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/logging.cpp |86.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/rewrite_io_utils.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_limit.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/rewrite_io_utils.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_limit.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__remove_tenant_done.cpp |86.1%| 
[CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_tenant_done.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/utils/metadata_helpers.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/utils/metadata_helpers.cpp |86.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |86.1%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |86.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_effects.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_effects.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_agg.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_agg.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__remove_tenant_failed.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_tenant_failed.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/config_helpers.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/config_helpers.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/controller.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/controller.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/basics/services.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/tx_initialize.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/tx_initialize.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/basics/services.cpp |86.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |86.1%| [AR] {RESULT} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |86.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_init_schema.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_init_schema.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/net_classifier_updater.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/net_classifier_updater.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_configs_subscriber.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_configs_subscriber.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/backup/import/import_actor.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/backup/import/import_actor.cpp |86.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.a |86.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.a |86.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/backup/import/libcolumnshard-backup-import.a |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_filter.cpp |86.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_olap_filter.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_join.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_join.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_type_ann.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_type_ann.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/tx_resolve_resource_id_result.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/tx_resolve_resource_id_result.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/tx_load_everything.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_index.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/upsert_index.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/tx_load_everything.cpp |86.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |86.1%| [AR] {RESULT} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |86.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/transfer/transfer_writer.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/transfer/transfer_writer.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |86.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/transfer/libydb-core-transfer.a |86.1%| [AR] {RESULT} $(B)/ydb/core/transfer/libydb-core-transfer.a |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_statement_rewrite.cpp |86.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/transfer/libydb-core-transfer.a |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_type_ann.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |86.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__update_tenant_pool_config.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_tenant_pool_config.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/tools/dqrun/lib/dqrun_lib.cpp |86.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/tools/dqrun/lib/dqrun_lib.cpp |86.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |86.2%| [AR] {RESULT} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |86.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_query_blocks_transformer.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phy_check.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__update_confirmed_subdomain.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_confirmed_subdomain.cpp |86.2%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_transform.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_transform.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |86.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |86.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |86.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_helpers.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_helpers.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/logger.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/logger.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/configs_cache.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator_impl.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_cache.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator_impl.cpp |86.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |86.2%| [AR] {RESULT} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |86.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__remove_computational_units.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__remove_computational_units.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_phy_finalize.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_exec.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_exec.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext_dict.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext_dict.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rotate_cdc_stream.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rotate_cdc_stream.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/yql_testlib/yql_testlib.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/yql_testlib/yql_testlib.cpp |86.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |86.2%| [AR] {RESULT} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |86.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__replace_yaml_config.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_datasink.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__replace_yaml_config.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_datasink.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |86.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |86.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__init_scheme.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__init_scheme.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console__update_pool_state.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console__update_pool_state.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/combinatory/select.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/combinatory/select.cpp |86.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |86.3%| [AR] {RESULT} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |86.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |86.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/libcore-cms-console.a |86.3%| [AR] {RESULT} $(B)/ydb/core/cms/console/libcore-cms-console.a |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |86.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_lock.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_lock.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__shred_manager.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__shred_manager.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_index_build_info.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_index_build_info.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/tablet_flat_dummy.cpp |86.3%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/testlib/tablet_flat_dummy.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__get.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__get.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/common_helper.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/common_helper.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/node_service/kqp_node_state.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/node_service/kqp_node_state.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |86.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |86.3%| [AR] {RESULT} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |86.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_explain_prepared.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/test_shard_mon.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/test_shard_mon.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_column.cpp |86.3%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__list.cpp |86.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/gateway/behaviour/tablestore/operations/drop_column.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__list.cpp |86.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |86.3%| [AR] {RESULT} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |86.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/test_tablet/load_actor_impl.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/test_tablet/load_actor_impl.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/tx_helpers.cpp |86.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |86.4%| [AR] {RESULT} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tx_helpers.cpp |86.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_opt_build_txs.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges_predext.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/logical/kqp_opt_log_ranges_predext.cpp |86.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |86.4%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |86.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/cs_helper.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/cs_helper.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |86.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |86.4%| [AR] {RESULT} 
$(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |86.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_incremental_restore_scan.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_incremental_restore_scan.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_build_stage.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/physical/kqp_opt_phy_build_stage.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |86.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |86.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |86.4%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |86.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |86.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__progress.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__progress.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |86.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/tenant_runtime.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__list.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/tenant_runtime.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__list.cpp |86.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/testlib/libydb-core-testlib.a |86.5%| [AR] {RESULT} $(B)/ydb/core/testlib/libydb-core-testlib.a |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |86.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/host/kqp_translate.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/host/kqp_translate.cpp |86.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |86.5%| [AR] {RESULT} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |86.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |86.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |86.5%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_fulltext_index_build_reboots.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_fulltext_index_build_reboots.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |86.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/base/board_subscriber_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |86.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scheme/kqp_user_management_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_user_management_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/ut/consumer_offset_tracker_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/ut/consumer_offset_tracker_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kesus/tablet/ut_helpers.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_ut_common.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/ut_helpers.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_column_build_reboots/ut_column_build_reboots.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build_reboots/ut_column_build_reboots.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cluster_info_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_partition_stats/ut_top_cpu_usage.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_partition_stats/ut_top_cpu_usage.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |86.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |86.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ut_ycsb.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_complex_join_query_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_complex_join_query_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_tenants_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_block_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_block_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export_fs.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export_fs.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/test/testhull_index.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/test/testhull_index.cpp |86.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |86.7%| [AR] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |86.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |86.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |86.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |86.7%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/executer_actor/kqp_executer_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_counters_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_counters_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl_qs.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl_qs.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |86.8%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ut_consistent_copy_tables.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ut_consistent_copy_tables.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/object_storage_listing_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/backpressure.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/backpressure.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |86.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |86.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_ut_local.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/cluster_balancing.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/cluster_balancing.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/sentinel_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/topic_data_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/health_check/health_check_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |86.9%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/core/driver_lib/run/run_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/driver_lib/run/run_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/viewer/viewer_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |86.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |86.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay_yt/main.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/ut_helpers.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kqp/ut/scheme/kqp_secrets_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_secrets_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/blobsan/main.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/blobsan/main.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_yql_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_large.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_yql_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/simple_reader_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/simple_reader_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_common.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay/main.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/main.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_fulltext_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_fulltext_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |87.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |87.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay/query_proccessor.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay/query_replay.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/indexes/kqp_stream_indexes_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_stream_indexes_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |87.1%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/group_size_in_units.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_size_in_units.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_ut_configs.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/quoter_service_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay/query_compiler.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |87.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |87.1%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/table_creator/table_creator_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/datastreams/datastreams_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage_2_ring_groups.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage_2_ring_groups.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/yql/dq/runtime/dq_channel_service_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/yql/dq/runtime/dq_channel_service_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp 
|87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/ut/graph_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/library/ycloud/impl/access_service_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |87.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |87.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_query_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |87.3%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/flat_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/flat_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_ut_common.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |87.3%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |87.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |87.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp |87.4%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/compaction_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/compaction_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/tools/stress_tool/device_test_tool.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/phantom_blobs.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/phantom_blobs.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |87.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |87.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp 
|87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/bridge_get.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/bridge_get.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_auth.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_auth.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore_fs.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore_fs.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/service/kqp_snapshot_readonly.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_snapshot_readonly.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_changer_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_changer_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/slow/txusage_slow_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/slow/txusage_slow_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_login_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |87.5%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |87.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |87.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/tablet_state_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/tablet_state_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/corrupted_reads.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/corrupted_reads.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |87.6%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/cms_ut_common.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp 
|87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/mvp/core/mvp_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/mvp/core/mvp_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/knn/kqp_knn_ut.cpp |87.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/knn/kqp_knn_ut.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |87.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet/resource_broker_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/locks_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/locks_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/cms/cms_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/cms/cms_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |87.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/move_pdisk.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/move_pdisk.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/federated_query/ut_service/slow/kqp_federated_query_actors_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_kqp.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/ut_service/slow/kqp_federated_query_actors_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/backup_ut/fs_backup_validation_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/fs_backup_validation_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |87.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |87.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/cancel_tx_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/check_integrity.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/check_integrity.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/comp_defrag.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/comp_defrag.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/downtime_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/hive_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/compile_service/ut/kqp_compile_fallback_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/compile_service/ut/kqp_compile_fallback_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_connection_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_connection_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_pg_ut.cpp |87.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_pg_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |87.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/tenant_ut_pool.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_labeled.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mon/mon_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mon/mon_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_writer_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_writer_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/persqueue/ut/pq_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/federated_query/ut_service/fast/kqp_federated_query_actors_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/federated_query/ut_service/fast/kqp_federated_query_actors_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} 
$(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_group/main.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_group/main.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |87.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |87.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |88.0%| [CC] 
{default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/cancellation.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/cancellation.cpp |88.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |88.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |88.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |88.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |88.0%| [EN] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |88.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |88.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |88.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |88.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |88.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |88.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |88.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |88.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |88.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |88.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |88.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |88.0%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |88.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |88.0%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/public/mlp/mlp_reader_ut.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_reader_ut.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |88.1%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/ydb_cli |88.1%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/ydb_cli |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/bsc_cache.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/bsc_cache.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_counters.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |88.1%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |88.1%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/security/ticket_parser_ut.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/bool_ut.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/bool_ut.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |88.1%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/generated/libcore-base-generated.a |88.1%| [AR] {RESULT} $(B)/ydb/core/base/generated/libcore-base-generated.a |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp |88.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |88.1%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/d42f916259e88c99d94b15ec0e_raw.auxcpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |88.1%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/d42f916259e88c99d94b15ec0e_raw.auxcpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |88.1%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/console_config__intpy3___pb2.py.p5ju.yapyc3 |88.1%| [PY] {BAZEL_UPLOAD} $(B)/ydb/core/protos/console_config__intpy3___pb2.py.p5ju.yapyc3 |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |88.1%| [LD] {RESULT} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_import_ut.cpp |88.1%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |88.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |88.1%| [EN] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/d42f916259e88c99d94b15ec0e_raw.auxcpp |88.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/d42f916259e88c99d94b15ec0e_raw.auxcpp |88.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |88.1%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |88.2%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |88.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |88.2%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |88.2%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |88.2%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |88.2%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |88.2%| [PR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/acd2063e47ff389d5e0998ace2_raw.auxcpp |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |88.2%| [PR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/acd2063e47ff389d5e0998ace2_raw.auxcpp |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |88.2%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |88.2%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |88.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/acd2063e47ff389d5e0998ace2_raw.auxcpp |88.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/protos/acd2063e47ff389d5e0998ace2_raw.auxcpp |88.2%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |88.2%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |88.2%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |88.2%| [AR] {RESULT} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |88.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |88.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/fifo_cleanup.h_serialized.cpp |88.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |88.2%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |88.2%| [EN] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |88.2%| [EN] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |88.2%| [EN] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp >> Scheme::NotEmptyCell [GOOD] >> Scheme::NullCell [GOOD] |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/s3_backups/s3_backups |88.2%| [LD] {RESULT} $(B)/ydb/tests/stress/s3_backups/s3_backups |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/s3_backups/s3_backups |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |88.2%| [LD] {RESULT} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |88.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |88.2%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |88.2%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |88.2%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests >> TypesProto::DecimalNoTypeInfo [GOOD] >> TypesProto::Decimal35 [GOOD] |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |88.2%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/s3_backups_workload |88.2%| [LD] {RESULT} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |88.2%| [LD] {RESULT} 
$(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |88.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::NullCell [GOOD] |88.2%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/s3_backups_workload |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |88.2%| [LD] {RESULT} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |88.2%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut/unittest |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |88.2%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/olap_workload |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/olap_workload |88.2%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/olap_workload >> Scheme::NonEmptyOwnedCellVec [GOOD] >> Scheme::EmptyOwnedCellVec [GOOD] |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |88.2%| [LD] {RESULT} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |88.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> TypesProto::Decimal35 [GOOD] |88.2%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut/unittest |88.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::EmptyOwnedCellVec [GOOD] |88.2%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut/unittest |88.2%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/olap_workload |88.2%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/olap_workload |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |88.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |88.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_ut.cpp |88.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_ut.cpp |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/compile_service/helpers/ut/unittest |88.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.2%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut_pg/unittest |88.3%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpStringCells >> PgTest::DumpStringCells [GOOD] |88.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |88.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/queue_schema.h_serialized.cpp |88.2%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |88.2%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |88.3%| [LD] {RESULT} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |88.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp >> PgTest::DumpIntCells |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpStringCells [GOOD] |88.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |88.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |88.3%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpIntCells [GOOD] |88.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |88.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/volatile_tx.h_serialized.cpp |88.3%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut_pg/unittest |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.3%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut_pg/unittest |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |88.3%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common >> ThrottlerControlTests::Overflow_1 [GOOD] |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |88.3%| [LD] {RESULT} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |88.3%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest >> PgTest::DumpIntCells [GOOD] |88.3%| [TS] {BAZEL_UPLOAD} 
ydb/core/scheme/ut_pg/unittest |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |88.3%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |88.3%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |88.3%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |88.3%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut_pg/unittest |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |88.3%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut_pg/unittest |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |88.3%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |88.3%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |88.3%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_1 [GOOD] >> ThrottlerControlTests::LongIdle [GOOD] |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/jaeger_tracing/ut/unittest |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |88.3%| [LD] {RESULT} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.3%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut_pg/unittest >> SamplingControlTests::EdgeCaseLower [GOOD] >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step >> ThrottlerControlTests::Simple [GOOD] >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step |88.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut_pg/unittest |88.3%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut_pg/unittest >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::LongIdle [GOOD] |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/jaeger_tracing/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest 
>> SamplingControlTests::EdgeCaseLower [GOOD] |88.3%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/jaeger_tracing/ut/unittest |88.3%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard.h_serialized.cpp >> SamplingControlTests::Simple [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Simple [GOOD] |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/jaeger_tracing/ut/unittest |88.3%| [TA] $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} >> ThrottlerControlTests::Overflow_2 [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/jaeger_tracing/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::Simple [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_2 [GOOD] >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/jaeger_tracing/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/jaeger_tracing/ut/unittest |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests >> SamplingControlTests::EdgeCaseUpper [GOOD] |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |88.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |88.4%| [TA] {RESULT} $(B)/ydb/core/scheme/ut_pg/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.4%| [LD] {RESULT} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |88.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/execution_unit.h_serialized.cpp |88.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |88.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/source.h_serialized.cpp |88.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/ut_common/ut_common.cpp |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] |88.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/jaeger_tracing/ut/unittest |88.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/datashard_active_transaction.h_serialized.cpp |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/ut_common/ut_common.cpp |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseUpper [GOOD] |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/jaeger_tracing/ut/unittest |88.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |88.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_provider.h_serialized.cpp |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/mind/node_broker_ut.cpp |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |88.4%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |88.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |88.4%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TA] $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest >> SchemeBorders::Partial [GOOD] >> SchemeRanges::CmpBorders [GOOD] |88.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TA] {RESULT} $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [LD] {RESULT} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |88.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> SchemeRanges::CmpBorders [GOOD] |88.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut/unittest |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |88.4%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.cpp |88.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |88.4%| [AR] {RESULT} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |88.4%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |88.4%| [LD] {RESULT} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |88.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |88.4%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |88.4%| [AR] {RESULT} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |88.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |88.4%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |88.4%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/example/ydb-tests-example |88.4%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |88.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |88.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |88.5%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |88.5%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest >> KqpCompileServiceHelpers::CheckInvalidator [GOOD] |88.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp >> 
Scheme::CompareWithNullSemantics [GOOD] >> Scheme::EmptyCell [GOOD] >> TypesProto::Decimal22 [GOOD] >> SchemeRanges::RangesBorders [GOOD] >> Scheme::TSerializedCellMatrix [GOOD] >> Scheme::CellVecTryParse [GOOD] >> Scheme::CompareOrder [GOOD] >> Scheme::CompareUuidCells [GOOD] >> Scheme::OwnedCellVecFromSerialized [GOOD] |88.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |88.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> KqpCompileServiceHelpers::CheckInvalidator [GOOD] Test command err: empty ResourceManager { MkqlHeavyProgramMemoryLimit: 31457280 } empty ResourceManager { MkqlHeavyProgramMemoryLimit: 31457281 } modified: MkqlHeavyProgramMemoryLimit: 31457281 -> 31457280 ResourceManager { MkqlHeavyProgramMemoryLimit: 31457281 } EnableKqpScanQuerySourceRead: true ResourceManager { MkqlHeavyProgramMemoryLimit: 31457281 } EnableKqpScanQuerySourceRead: true EnableKqpScanQueryStreamIdxLookupJoin: true ResourceManager { MkqlHeavyProgramMemoryLimit: 31457281 } EnableKqpScanQuerySourceRead: true EnableKqpScanQueryStreamIdxLookupJoin: true EnableKqpScanQuerySourceRead: true EnableKqpScanQueryStreamIdxLookupJoin: true ResourceManager { MkqlHeavyProgramMemoryLimit: 31457281 } EnableKqpScanQuerySourceRead: true EnableKqpScanQueryStreamIdxLookupJoin: true ResourceManager { MkqlHeavyProgramMemoryLimit: 31457281 } EnableKqpScanQuerySourceRead: true EnableKqpScanQueryStreamIdxLookupJoin: true |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/kqp_query_plan.h_serialized.cpp |88.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::OwnedCellVecFromSerialized [GOOD] |88.5%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/schema.h_serialized.cpp |88.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::EmptyCell [GOOD] |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> SchemeRanges::RangesBorders [GOOD] >> SchemeBorders::Full [GOOD] >> Scheme::YqlTypesMustBeDefined [GOOD] |88.5%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut/unittest |88.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::CompareUuidCells [GOOD] |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut/unittest |88.5%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut/unittest >> KqpCompileServiceHelpers::OnlyRmAndTopLevelOptionsAreSupportedToInvalidate |88.5%| [TM] {BAZEL_UPLOAD} 
ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |88.5%| [AR] {RESULT} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a >> KqpCompileServiceHelpers::OnlyRmAndTopLevelOptionsAreSupportedToInvalidate [GOOD] |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/common/ut/ydb-core-persqueue-common-ut |88.5%| [LD] {RESULT} $(B)/ydb/core/persqueue/common/ut/ydb-core-persqueue-common-ut |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::YqlTypesMustBeDefined [GOOD] |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |88.5%| [LD] {RESULT} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest >> KqpCompileServiceHelpers::OnlyRmAndTopLevelOptionsAreSupportedToInvalidate [GOOD] |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |88.5%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |88.5%| [LD] {RESULT} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |88.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a >> RuntimeFeatureFlags::UpdatingRuntimeFlags [GOOD] >> RuntimeFeatureFlags::ConversionToProto [GOOD] >> RuntimeFeatureFlags::ConversionFromProto [GOOD] >> RuntimeFeatureFlags::DefaultValues [GOOD] >> TMicrosecondsSlidingWindow::Basic [GOOD] >> TValue::IsNull [GOOD] >> TValue::FieldsCount [GOOD] |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |88.6%| [LD] {RESULT} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/base/generated/ut/unittest >> RuntimeFeatureFlags::DefaultValues [GOOD] >> 
ConfigProto::ForbidNewRequired |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/helpers/ut/unittest |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/common/ut/unittest >> TMicrosecondsSlidingWindow::Basic [GOOD] >> AttributesMD5Test::AmazonSampleWithString [GOOD] >> AttributesMD5Test::AmazonSampleWithBinary [GOOD] |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/base/generated/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/helpers/ut/unittest |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/persqueue/common/ut/unittest >> LongTxServicePublicTypes::LongTxId [GOOD] >> LongTxServicePublicTypes::Snapshot [GOOD] >> ConfigProto::ForbidNewRequired [GOOD] >> LongTxServicePublicTypes::SnapshotMaxTxId [GOOD] |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/public/lib/value/ut/gtest >> TValue::FieldsCount [GOOD] >> LongTxServicePublicTypes::SnapshotReadOnly [GOOD] >> Json::BasicRendering [GOOD] >> JsonEnvelopeTest::Simple [GOOD] >> JsonEnvelopeTest::NoReplace [GOOD] >> JsonEnvelopeTest::ArrayItem [GOOD] >> JsonEnvelopeTest::Escape [GOOD] >> JsonEnvelopeTest::BinaryData [GOOD] |88.6%| [TS] {RESULT} ydb/core/base/generated/ut/unittest |88.6%| [TS] {BAZEL_UPLOAD} ydb/public/lib/value/ut/gtest |88.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |88.6%| [TS] {RESULT} ydb/core/persqueue/common/ut/unittest >> Scheme::TSerializedCellVec [GOOD] >> Scheme::UnsafeAppend [GOOD] |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/attributes/ut/unittest >> AttributesMD5Test::AmazonSampleWithBinary [GOOD] |88.6%| [TS] {RESULT} ydb/public/lib/value/ut/gtest |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/attributes/ut/unittest |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/public/ut/unittest >> LongTxServicePublicTypes::SnapshotReadOnly [GOOD] |88.6%| [TS] {RESULT} ydb/core/ymq/attributes/ut/unittest |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/ut/unittest >> ConfigProto::ForbidNewRequired [GOOD] |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/log_backend/ut/unittest >> JsonEnvelopeTest::BinaryData [GOOD] |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/long_tx_service/public/ut/unittest |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |88.6%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/viewer/json/ut/unittest >> Json::BasicRendering [GOOD] >> TPartitionKeyRangeSequenceTest::ValidSequence [GOOD] >> TPartitionKeyRangeSequenceTest::ValidSinglePartition [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidOverlap [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidOverlapLong [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidContains [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidGap [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidOrder [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidFullCoverHi [GOOD] >> TPartitionKeyRangeSequenceTest::InvalidFullCoverLo [GOOD] >> TPartitionKeyRangeSequenceTest::EmptyInput [GOOD] >> 
TPartitionKeyRangeSequenceTest::ValidFivePartitions [GOOD] |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/config/ut/unittest |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/log_backend/ut/unittest >> OldFormat::SameVersion [GOOD] >> OldFormat::DefaultRules [GOOD] >> OldFormat::PrevYear [GOOD] >> OldFormat::Trunk [GOOD] >> OldFormat::UnexpectedTrunk [GOOD] >> OldFormat::TooOld [GOOD] >> OldFormat::OldNbs [GOOD] >> VersionParser::Basic [GOOD] >> YdbVersion::DefaultSameVersion [GOOD] >> YdbVersion::DefaultPrevMajor [GOOD] >> YdbVersion::DefaultNextMajor [GOOD] >> YdbVersion::DefaultHotfix [GOOD] >> YdbVersion::DefaultCompatible [GOOD] >> YdbVersion::DefaultNextYear [GOOD] >> YdbVersion::DefaultPrevYear [GOOD] >> YdbVersion::DefaultNewMajor [GOOD] >> YdbVersion::DefaultOldMajor [GOOD] >> YdbVersion::DefaultDifferentBuild [GOOD] >> YdbVersion::DefaultDifferentBuildIncompatible [GOOD] >> YdbVersion::DefaulPatchTag [GOOD] >> YdbVersion::LimitOld [GOOD] |88.6%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/operation.h_serialized.cpp |88.6%| [TS] {RESULT} ydb/core/config/ut/unittest |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/viewer/json/ut/unittest |88.6%| [TS] {RESULT} ydb/core/tx/long_tx_service/public/ut/unittest |88.6%| [LD] {RESULT} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut ------- [TS] {asan, default-linux-x86_64, release} ydb/core/scheme/ut/unittest >> Scheme::UnsafeAppend [GOOD] Test command err: Serialize: 0.001489s Cells constructor: 0.003943s Parse: 0.000196s Copy: 0.000254s Move: 0.000134s >> YdbVersion::LimitNew [GOOD] >> YdbVersion::CurrentCanLoadFrom [GOOD] >> YdbVersion::CurrentCanLoadFromAllOlder [GOOD] >> YdbVersion::CurrentCanLoadFromIncompatible [GOOD] >> YdbVersion::CurrentStoresReadableBy [GOOD] >> YdbVersion::StoredReadableBy [GOOD] >> YdbVersion::StoredReadableByIncompatible [GOOD] >> YdbVersion::StoredWithRules [GOOD] >> YdbVersion::StoredWithRulesIncompatible [GOOD] >> TFunctionsMetadataTest::Serialization >> YdbVersion::OldNbsStored [GOOD] >> YdbVersion::OldNbsIncompatibleStored [GOOD] >> YdbVersion::NewNbsCurrent [GOOD] >> YdbVersion::NewNbsIncompatibleCurrent [GOOD] >> YdbVersion::OneAcceptedVersion [GOOD] >> YdbVersion::ForbiddenMinor [GOOD] >> YdbVersion::DefaultRulesWithExtraForbidden [GOOD] >> YdbVersion::ExtraAndForbidden [GOOD] >> YdbVersion::SomeRulesAndOtherForbidden [GOOD] >> YdbVersion::Component [GOOD] >> YdbVersion::OtherComponent [GOOD] >> YdbVersion::YDBAndNbs [GOOD] |88.6%| [TS] {RESULT} ydb/core/viewer/json/ut/unittest |88.6%| [TS] {RESULT} ydb/core/log_backend/ut/unittest |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/scheme/ut/unittest >> YdbVersion::DifferentYdbVersionsWithNBSRules [GOOD] >> YdbVersion::WithPatchAndWithoutPatch [GOOD] >> YdbVersion::AcceptSpecificHotfixWithoutPatch [GOOD] >> YdbVersion::TrunkYDBAndNbs [GOOD] >> TFunctionsMetadataTest::Serialization [GOOD] >> YdbVersion::TrunkAndStable [GOOD] >> YdbVersion::CompatibleWithSelf [GOOD] >> YdbVersion::PrintCurrentVersionProto [GOOD] |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/partition_key_range/ut/unittest >> TPartitionKeyRangeSequenceTest::ValidFivePartitions [GOOD] |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/persqueue/public/partition_key_range/ut/unittest >> AuthConfigValidation::AcceptValidPasswordComplexity [GOOD] >> AuthConfigValidation::CannotAcceptInvalidPasswordComplexity [GOOD] >> AuthConfigValidation::AcceptValidAccountLockoutConfig [GOOD] >> AuthConfigValidation::CannotAcceptInvalidAccountLockoutConfig [GOOD] |88.6%| [LD] {default-linux-x86_64, 
release, asan} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut |88.6%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp >> DoubleIndexedTests::TestUpsertBySingleKey [GOOD] >> DoubleIndexedTests::TestUpsertByBothKeys [GOOD] >> DoubleIndexedTests::TestMerge [GOOD] >> DoubleIndexedTests::TestFind [GOOD] >> DoubleIndexedTests::TestErase [GOOD] |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/metadata/ut/unittest >> TFunctionsMetadataTest::Serialization [GOOD] |88.6%| [TS] {RESULT} ydb/core/persqueue/public/partition_key_range/ut/unittest |88.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |88.6%| [LD] {RESULT} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/driver_lib/version/ut/unittest >> YdbVersion::PrintCurrentVersionProto [GOOD] Test command err: Application: "ydb" |88.6%| [TS] {RESULT} ydb/core/client/metadata/ut/unittest |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/client/metadata/ut/unittest |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/driver_lib/version/ut/unittest |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |88.6%| [LD] {RESULT} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/validation/auth_config_validator_ut/unittest >> AuthConfigValidation::CannotAcceptInvalidAccountLockoutConfig [GOOD] |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |88.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_double_indexed/unittest >> DoubleIndexedTests::TestErase [GOOD] |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/config/validation/auth_config_validator_ut/unittest |88.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_double_indexed/unittest |88.6%| [TS] {RESULT} ydb/core/config/validation/auth_config_validator_ut/unittest |88.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |88.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |88.6%| [TS] {RESULT} ydb/core/tx/scheme_board/ut_double_indexed/unittest >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> FormatCSV::Instants [GOOD] >> FormatCSV::EmptyData [GOOD] >> FormatCSV::Common |88.6%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |88.6%| [TA] $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> FormatCSV::Common [GOOD] >> FormatCSV::Strings [GOOD] >> FormatCSV::Nulls [GOOD] >> EncryptedFileSerializerTest::SerializeWholeFileAtATime [GOOD] >> EncryptedFileSerializerTest::WrongParametersForSerializer [GOOD] >> EncryptedFileSerializerTest::WrongParametersForDeserializer [GOOD] >> EncryptedFileSerializerTest::SplitOnBlocks |88.6%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |88.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> EncryptedFileSerializerTest::SplitOnBlocks [GOOD] >> EncryptedFileSerializerTest::EmptyFile [GOOD] >> EncryptedFileSerializerTest::ReadPartial [GOOD] >> EncryptedFileSerializerTest::DeleteLastByte [GOOD] >> EncryptedFileSerializerTest::AddByte [GOOD] >> EncryptedFileSerializerTest::RemoveLastBlock [GOOD] >> EncryptedFileSerializerTest::ChangeAnyByte |88.7%| [TM] {RESULT} ydb/core/driver_lib/version/ut/unittest |88.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut >> EncryptedFileSerializerTest::ChangeAnyByte [GOOD] >> EncryptedFileSerializerTest::BigHeaderSize [GOOD] >> EncryptedFileSerializerTest::BigBlockSize [GOOD] >> EncryptedFileSerializerTest::RestoreFromState [GOOD] >> EncryptedFileSerializerTest::IVSerialization [GOOD] >> PathsNormalizationTest::NormalizeItemPath [GOOD] >> PathsNormalizationTest::NormalizeItemPrefix [GOOD] >> PathsNormalizationTest::NormalizeExportPrefix [GOOD] |88.7%| [TA] {RESULT} $(B)/ydb/core/scheme/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/io_formats/arrow/scheme/ut/unittest >> FormatCSV::Nulls [GOOD] Test command err: 12000000 Cannot read CSV: no columns specified Cannot read CSV: Invalid: Empty CSV file d'Artagnan '"' Jeanne d'Arc "'" 'd'Artagnan' ''"'' 'Jeanne d'Arc' '"'"' d'Artagnan '"' Jeanne d'Arc "'" src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: \N,"","" \N,"\N","\N" \N,\N,\N parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,\N,\N ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: NULL,"","" NULL,"NULL","NULL" NULL,NULL,NULL parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,NULL,NULL ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ |88.7%| [TS] {BAZEL_UPLOAD} ydb/core/io_formats/arrow/scheme/ut/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/public/tools/lib/cmds/ut/py3test >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |88.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp |88.7%| [TS] {BAZEL_UPLOAD} ydb/public/tools/lib/cmds/ut/py3test |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/backup/common/ut/unittest >> PathsNormalizationTest::NormalizeExportPrefix [GOOD] |88.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/events.h_serialized.cpp >> ExternalDataSourceTest::ValidateName [GOOD] >> ExternalDataSourceTest::ValidatePack [GOOD] >> ExternalDataSourceTest::ValidateAuth [GOOD] >> ExternalDataSourceTest::ValidateParameters [GOOD] >> ExternalDataSourceTest::ValidateHasExternalTable [GOOD] >> ExternalDataSourceTest::ValidateProperties [GOOD] >> ExternalDataSourceTest::ValidateLocation [GOOD] >> ExternalSourceBuilderTest::ValidateName [GOOD] >> ExternalSourceBuilderTest::ValidateAuthWithoutCondition [GOOD] >> ExternalSourceBuilderTest::ValidateAuthWithCondition [GOOD] >> ExternalSourceBuilderTest::ValidateUnsupportedField [GOOD] >> ExternalSourceBuilderTest::ValidateNonRequiredField [GOOD] >> ExternalSourceBuilderTest::ValidateRequiredField [GOOD] >> ExternalSourceBuilderTest::ValidateNonRequiredFieldValues [GOOD] >> ExternalSourceBuilderTest::ValidateRequiredFieldValues [GOOD] >> ExternalSourceBuilderTest::ValidateRequiredFieldOnCondition [GOOD] >> IcebergDdlTest::HiveCatalogWithS3Test [GOOD] >> IcebergDdlTest::HadoopCatalogWithS3Test [GOOD] >> ObjectStorageTest::SuccessValidation [GOOD] >> ObjectStorageTest::FailedCreate [GOOD] >> ObjectStorageTest::FailedValidation [GOOD] >> ObjectStorageTest::FailedJsonListValidation [GOOD] >> ObjectStorageTest::FailedOptionalTypeValidation [GOOD] >> ObjectStorageTest::WildcardsValidation [GOOD] >> ObjectStorageTest::FailedPartitionedByValidation [GOOD] >> GroupStress::Test [GOOD] |88.7%| [TS] {BAZEL_UPLOAD} ydb/core/backup/common/ut/unittest |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/external_sources/ut/unittest >> ObjectStorageTest::FailedPartitionedByValidation [GOOD] |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_group/unittest >> GroupStress::Test [GOOD] >> Backpressure::MonteCarlo >> TCollectingS3ListingStrategyTests::IfNoIssuesOccursShouldReturnCollectedPaths [GOOD] >> TCollectingS3ListingStrategyTests::IfThereAreMoreRecordsThanSpecifiedByLimitShouldReturnError [GOOD] >> TCollectingS3ListingStrategyTests::IfAnyIterationReturnIssueThanWholeStrategyShouldReturnIt [GOOD] >> TCollectingS3ListingStrategyTests::IfExceptionIsReturnedFromIteratorThanItShouldCovertItToIssue [GOOD] |88.7%| [TS] {BAZEL_UPLOAD} ydb/core/external_sources/ut/unittest |88.7%| [TM] {BAZEL_UPLOAD} 
ydb/core/blobstorage/ut_group/unittest |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |88.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |88.7%| [TS] {RESULT} ydb/core/io_formats/arrow/scheme/ut/unittest |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |88.7%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/py3test |88.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |88.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |88.7%| [LD] {RESULT} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |88.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |88.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/provider/ut/unittest >> TCollectingS3ListingStrategyTests::IfExceptionIsReturnedFromIteratorThanItShouldCovertItToIssue [GOOD] |88.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |88.7%| [TM] {RESULT} ydb/core/blobstorage/ut_group/unittest |88.7%| [TS] {BAZEL_UPLOAD} ydb/library/yql/providers/s3/provider/ut/unittest |88.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |88.7%| [TS] {RESULT} ydb/library/yql/providers/s3/provider/ut/unittest |88.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |88.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/abstract.h_serialized.cpp |88.7%| [TS] {RESULT} ydb/core/backup/common/ut/unittest |88.7%| [TS] {RESULT} ydb/core/external_sources/ut/unittest |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/ut/ydb-core-persqueue-pqtablet-partition-ut |88.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/ut/ydb-core-persqueue-pqtablet-partition-ut |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/pqtablet/partition/ut/ydb-core-persqueue-pqtablet-partition-ut |88.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |88.7%| [TA] $(B)/ydb/core/kqp/compile_service/helpers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.7%| [LD] {RESULT} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |88.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |88.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |88.7%| [TA] {RESULT} $(B)/ydb/core/kqp/compile_service/helpers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |88.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/common/ut/ydb-core-persqueue-common-ut |88.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compile_service/helpers/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Mirror3of4::ReplicationSmall |88.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |88.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/manager/abstract.h_serialized.cpp |88.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |88.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp >> TImportantConsumerOffsetTrackerTest::EmptyConsumersList [GOOD] >> TImportantConsumerOffsetTrackerTest::Unbound [GOOD] >> TImportantConsumerOffsetTrackerTest::Expired [GOOD] >> TImportantConsumerOffsetTrackerTest::UnboundAndExpired [GOOD] >> TImportantConsumerOffsetTrackerTest::ReadUnboundAndExpired [GOOD] >> TImportantConsumerOffsetTrackerTest::ReadUnboundAndWaiting300 [GOOD] >> TImportantConsumerOffsetTrackerTest::ReadUnboundAndWaiting400 [GOOD] >> TImportantConsumerOffsetTrackerTest::ReadUnboundAndWaiting500 [GOOD] >> TImportantConsumerOffsetTrackerTest::Waiting400And500 [GOOD] >> TImportantConsumerOffsetTrackerTest::Waiting400AndRead500 [GOOD] >> TImportantConsumerOffsetTrackerTest::ExactMatchSingleConsumerMaxRetention [GOOD] >> TImportantConsumerOffsetTrackerTest::ExactMatchSingleConsumerFiniteRetention [GOOD] >> TImportantConsumerOffsetTrackerTest::ExactMatchMultipleConsumers [GOOD] >> TImportantConsumerOffsetTrackerTest::MultipleExactMatches [GOOD] >> TImportantConsumerOffsetTrackerTest::UnboundKeepNextBlob [GOOD] >> TDeduplicatorTest::AddMessage [GOOD] >> TDeduplicatorTest::AddTwoMessages [GOOD] >> TDeduplicatorTest::AddDeduplicatedMessages [GOOD] >> TDeduplicatorTest::AddTwoMessages_DifferentTime_OneBucket [GOOD] >> TDeduplicatorTest::AddManyMessages_SameTime_DifferentBucket [GOOD] |88.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp |88.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/description.h_serialized.cpp ------- [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/pqtablet/partition/ut/gtest >> TDeduplicatorTest::AddManyMessages_SameTime_DifferentBucket [GOOD] Test command err: Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000002 >> ArrowInferenceTest::csv_simple |88.7%| [TS] {BAZEL_UPLOAD} ydb/core/persqueue/pqtablet/partition/ut/gtest |88.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a >> ArrowInferenceTest::csv_simple [GOOD] >> ArrowInferenceTest::tsv_simple [GOOD] >> ArrowInferenceTest::tsv_empty [GOOD] >> ArrowInferenceTest::broken_json [GOOD] >> ArrowInferenceTest::empty_json_each_row [GOOD] >> ArrowInferenceTest::empty_json_list [GOOD] >> ArrowInferenceTest::broken_json_list [GOOD] |88.7%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |88.7%| [TS] {RESULT} ydb/core/persqueue/pqtablet/partition/ut/gtest |88.7%| 
[AR] {RESULT} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a ------- [TS] {asan, default-linux-x86_64, release} ydb/core/external_sources/object_storage/inference/ut/gtest >> ArrowInferenceTest::broken_json_list [GOOD] Test command err: {
: Error: couldn't open csv/tsv file, check format and compression parameters: empty file, code: 1001 } {
: Error: couldn't open json file, check format and compression parameters: empty file, code: 1001 } {
: Error: couldn't open json file, check format and compression parameters: empty file, code: 1001 } 2025-12-23T18:23:06.522775Z 1 00h00m00.000000s :OBJECT_STORAGE_INFERENCINATOR DEBUG: TArrowInferencinator: [1:6:6]. HandleFileError: {
: Error: couldn't run arrow json chunker for /path/is/neither/real: Invalid: straddling object straddles two block boundaries (try to increase block size?), code: 1001 } {
: Error: couldn't run arrow json chunker for /path/is/neither/real: Invalid: straddling object straddles two block boundaries (try to increase block size?), code: 1001 } {
: Error: couldn't open json file, check format and compression parameters: Invalid: JSON parse error: Invalid value. in row 0, code: 1001 } |88.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |88.7%| [TS] {BAZEL_UPLOAD} ydb/core/external_sources/object_storage/inference/ut/gtest |88.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/granule.h_serialized.cpp |88.7%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |88.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |88.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |88.7%| [TS] {RESULT} ydb/core/external_sources/object_storage/inference/ut/gtest |88.7%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |88.7%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |88.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |88.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/kqp_executer.h_serialized.cpp |88.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |88.7%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/read_metadata.h_serialized.cpp |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_large/unittest |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut_large/unittest |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |88.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |88.7%| [TM] {RESULT} ydb/core/tablet_flat/ut_large/unittest |88.7%| [LD] {RESULT} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |88.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |88.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |88.7%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |88.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |88.8%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |88.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqrb/read_balancer__balancing.h_serialized.cpp |88.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |88.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/backup/common/ut/ydb-core-backup-common-ut |88.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |88.8%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |88.8%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |88.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut >> ConsoleDumper::Basic [GOOD] >> ConsoleDumper::CoupleMerge [GOOD] >> ConsoleDumper::CoupleOverwrite [GOOD] >> ConsoleDumper::CoupleMergeOverwriteRepeated [GOOD] >> ConsoleDumper::ReverseMerge [GOOD] >> ConsoleDumper::ReverseOverwrite [GOOD] >> ConsoleDumper::ReverseMergeOverwriteRepeated [GOOD] >> ConsoleDumper::Different [GOOD] >> ConsoleDumper::SimpleNode [GOOD] >> ConsoleDumper::JoinSimilar [GOOD] >> ConsoleDumper::DontJoinDifferent >> ConsoleDumper::DontJoinDifferent [GOOD] >> ConsoleDumper::SimpleTenant [GOOD] >> ConsoleDumper::SimpleNodeTenant [GOOD] >> ConsoleDumper::SimpleHostId [GOOD] >> ConsoleDumper::SimpleNodeId [GOOD] >> ConsoleDumper::DontJoinNodeTenant [GOOD] >> ConsoleDumper::JoinMultipleSimple [GOOD] >> ConsoleDumper::MergeNode [GOOD] >> ConsoleDumper::MergeOverwriteRepeatedNode [GOOD] >> ConsoleDumper::Ordering [GOOD] >> ConsoleDumper::IgnoreUnmanagedItems [GOOD] >> IncompatibilityRules::BasicPatternMatching [GOOD] >> IncompatibilityRules::EmptyLabelMatching [GOOD] >> IncompatibilityRules::UnsetLabelMatching [GOOD] >> IncompatibilityRules::AddAndRemoveRules [GOOD] >> IncompatibilityRules::RuleOverride [GOOD] >> IncompatibilityRules::SimpleIncompatibilityCheck [GOOD] |88.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |88.8%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |88.8%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |88.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |88.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut >> IncompatibilityRules::DisableRules [GOOD] >> IncompatibilityRules::MergeRules [GOOD] >> IncompatibilityRules::ParseEmptyOverrides [GOOD] >> IncompatibilityRules::ParseDisableRules [GOOD] >> IncompatibilityRules::ParseCustomRules [GOOD] >> IncompatibilityRules::ParseUnsetMarker [GOOD] >> IncompatibilityRules::ParseEmptyMarker [GOOD] >> IncompatibilityRules::DeterministicOrdering [GOOD] >> IncompatibilityRules::ComplexMultiPatternRule [GOOD] >> IncompatibilityRules::IntegrationWithResolveAll [GOOD] >> IncompatibilityRules::CheckLabelsMapCompatibility [GOOD] >> IncompatibilityRules::CheckLabelsMapWithUnsetMarker [GOOD] >> IncompatibilityRules::ValueInOperator [GOOD] >> IncompatibilityRules::ValueInMultipleValues [GOOD] >> IncompatibilityRules::NegatedFlag [GOOD] >> IncompatibilityRules::NegatedWithValueIn [GOOD] >> IncompatibilityRules::ComplexRuleWithNegation [GOOD] >> IncompatibilityRules::MonostateMatchesAnyValue [GOOD] >> IncompatibilityRules::RealWorldScenario_DynamicNodesValidation [GOOD] >> IncompatibilityRules::ComplexValueInNegationCombination [GOOD] >> IncompatibilityRules::EdgeCase_EmptyValueIn [GOOD] >> IncompatibilityRules::EdgeCase_MultipleUnsetEmptyInValueIn [GOOD] >> IncompatibilityRules::BuiltInRules_RequiredLabels [GOOD] >> IncompatibilityRules::BuiltInRules_StaticNodes [GOOD] >> IncompatibilityRules::BuiltInRules_DynamicNodes [GOOD] >> 
IncompatibilityRules::BuiltInRules_CloudNodes [GOOD] >> IncompatibilityRules::BuiltInRules_SlotNodes [GOOD] >> IncompatibilityRules::BuiltInRules_DisableSpecificRules [GOOD] >> IncompatibilityRules::BuiltInRules_DisableAll [GOOD] >> YamlConfig::CollectLabels [GOOD] >> YamlConfig::MaterializeSpecificConfig [GOOD] >> YamlConfig::MaterializeAllConfigSimple [GOOD] >> YamlConfig::MaterializeAllConfigs |88.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/column_engine_logs.h_serialized.cpp |88.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |88.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp >> YamlConfig::MaterializeAllConfigs [GOOD] >> YamlConfig::AppendVolatileConfig [GOOD] >> YamlConfig::AppendAndResolve [GOOD] >> YamlConfig::GetMetadata [GOOD] >> YamlConfig::ReplaceMetadata [GOOD] >> YamlConfigParser::Iterate [GOOD] >> YamlConfigParser::ProtoBytesFieldDoesNotDecodeBase64 [GOOD] >> YamlConfigParser::PdiskCategoryFromString [GOOD] >> YamlConfigParser::AllowDefaultHostConfigId [GOOD] >> YamlConfigParser::IncorrectHostConfigIdFails [GOOD] >> YamlConfigParser::NoMixedHostConfigIds [GOOD] >> YamlConfigProto2Yaml::StorageConfig [GOOD] >> YamlConfigResolveUnique::NotUniqueSelectors [GOOD] >> YamlConfigResolveUnique::AllTestConfigs |88.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |88.8%| [AR] {RESULT} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |88.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |88.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |88.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/http_proxy/ut/http_ut.cpp >> YamlConfigResolveUnique::AllTestConfigs [GOOD] |88.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |88.8%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |88.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a >> TArrowPushDown::SimplePushDown [GOOD] >> TArrowPushDown::FilterEverything [GOOD] >> TArrowPushDown::MatchSeveralRowGroups [GOOD] |88.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yaml_config/ut/unittest >> YamlConfigResolveUnique::AllTestConfigs [GOOD] Test command err: host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" "\/dev\/disk\/by-partlabel\/kikimr_nvme_02" host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" host_configs: - host_config_id: 1 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: NVME expected_slot_count: 9 - path: /dev/disk/by-partlabel/kikimr_nvme_02 type: NVME expected_slot_count: 9 - host_config_id: 2 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: SSD expected_slot_count: 9 hosts: - host: sas8-6954.search.yandex.net 
port: 19000 host_config_id: 1 - host: sas8-6955.search.yandex.net port: 19000 host_config_id: 2 item_config_generation: 0 |88.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/actors/ut/unittest >> TArrowPushDown::MatchSeveralRowGroups [GOOD] |88.8%| [TS] {BAZEL_UPLOAD} ydb/library/yaml_config/ut/unittest |88.8%| [TS] {BAZEL_UPLOAD} ydb/library/yql/providers/s3/actors/ut/unittest |88.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |88.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |88.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |88.8%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |88.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |88.8%| [TS] {RESULT} ydb/library/yql/providers/s3/actors/ut/unittest |88.8%| [TS] {RESULT} ydb/library/yaml_config/ut/unittest |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |88.8%| [LD] {RESULT} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |88.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |88.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a >> TMemoryPoolTest::TransactionsWithAlignment [GOOD] >> TMemoryPoolTest::AllocOneByte [GOOD] >> TMemoryPoolTest::Transactions [GOOD] >> TMemoryPoolTest::LongRollback [GOOD] >> UtilString::ShrinkToFit [GOOD] >> TMemoryPoolTest::AppendString [GOOD] >> MdbEndpoingGenerator::Generic_WithTransformHost [GOOD] >> MdbEndpoingGenerator::Legacy [GOOD] >> MdbEndpoingGenerator::Generic_NoTransformHost [GOOD] |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |88.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest >> MdbEndpoingGenerator::Generic_NoTransformHost [GOOD] |88.8%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |88.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_util/unittest >> TMemoryPoolTest::AppendString [GOOD] |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |88.8%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest >> ConfigValidation::SameStaticGroup [GOOD] >> ConfigValidation::StaticGroupSizesGrow [GOOD] >> 
ConfigValidation::StaticGroupSizesShrink [GOOD] |88.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/source.h_serialized.cpp |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut_util/unittest |88.8%| [TS] {RESULT} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest |88.8%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut >> ConfigValidation::VDiskChanged [GOOD] >> ConfigValidation::TooManyVDiskChanged [GOOD] >> DatabaseConfigValidation::AllowedFields [GOOD] >> DatabaseConfigValidation::NotAllowedFields [GOOD] >> StateStorageConfigValidation::Empty [GOOD] >> StateStorageConfigValidation::Good [GOOD] >> StateStorageConfigValidation::NToSelect [GOOD] >> StateStorageConfigValidation::WriteOnly [GOOD] >> StateStorageConfigValidation::Disabled [GOOD] >> StateStorageConfigValidation::DisabledGood [GOOD] >> StateStorageConfigValidation::CanDisableAndChange [GOOD] >> StateStorageConfigValidation::CanChangeDisabled [GOOD] >> StateStorageConfigValidation::ChangesNotAllowed [GOOD] >> StateStorageConfigValidation::ValidateConfigSelfManagement [GOOD] >> StateStorageConfigValidation::ValidateConfigDomainEmpty [GOOD] >> StateStorageConfigValidation::ValidateConfigSSId [GOOD] >> StateStorageConfigValidation::ValidateConfigBad [GOOD] >> StateStorageConfigValidation::ValidateConfigValidatesStateStorage [GOOD] >> StateStorageConfigValidation::ValidateConfigGood [GOOD] >> StateStorageConfigValidation::ValidateConfigExplicitGood [GOOD] >> StateStorageConfigValidation::ValidateConfigExplicitBad [GOOD] |88.8%| [TM] {RESULT} ydb/core/tablet_flat/ut_util/unittest |88.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |88.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |88.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/validation/ut/unittest >> StateStorageConfigValidation::ValidateConfigExplicitBad [GOOD] |88.8%| [TS] {BAZEL_UPLOAD} ydb/core/config/validation/ut/unittest >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD] |88.8%| [TS] {RESULT} ydb/core/config/validation/ut/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/public/ut/unittest |88.8%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |88.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/sequenceshard/public/ut/unittest |88.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/defrag/ut/unittest >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD] |88.8%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |88.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |88.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |88.8%| [TS] {RESULT} ydb/core/tx/sequenceshard/public/ut/unittest |88.8%| [TS] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/defrag/ut/unittest |88.8%| [TS] {RESULT} ydb/core/blobstorage/vdisk/defrag/ut/unittest |88.8%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |88.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |88.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |88.8%| [LD] {RESULT} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |88.8%| [LD] {RESULT} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |88.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |88.8%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |88.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |88.9%| [LD] {RESULT} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |88.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |88.9%| [AR] {RESULT} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |88.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |88.9%| [AR] {RESULT} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a >> Mvp::TokenatorGetMetadataTokenGood [GOOD] >> Mvp::TokenatorRefreshMetadataTokenGood >> Mvp::OpenIdConnectRequestWithIamTokenYandex [GOOD] >> Mvp::OpenIdConnectRequestWithIamTokenNebius [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodYandex >> ColumnShardConfigValidation::AcceptDefaultCompression [GOOD] >> ColumnShardConfigValidation::NotAcceptDefaultCompression [GOOD] >> ColumnShardConfigValidation::CorrectPlainCompression [GOOD] >> ColumnShardConfigValidation::NotCorrectPlainCompression [GOOD] >> ColumnShardConfigValidation::CorrectLZ4Compression [GOOD] >> ColumnShardConfigValidation::NotCorrectLZ4Compression [GOOD] >> ColumnShardConfigValidation::CorrectZSTDCompression [GOOD] >> ColumnShardConfigValidation::NotCorrectZSTDCompression [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodYandex [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodNebius [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieYandex [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieNebius [GOOD] >> Mvp::OpenIdConnectProxyOnHttpsHost [GOOD] >> Mvp::OpenIdConnectFixLocationHeader >> MetaCache::BasicForwarding [GOOD] >> MetaCache::TimeoutFallback [GOOD] >> Mvp::OpenIdConnectFixLocationHeader [GOOD] >> Mvp::OpenIdConnectExchangeNebius [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckAuthorizationFail [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlow |88.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util >> Mvp::OpenIdConnectFullAuthorizationFlow [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlowAjax [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlow [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlowAjax [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAuthorizationFail |88.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/validation/column_shard_config_validator_ut/unittest >> ColumnShardConfigValidation::NotCorrectZSTDCompression [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAuthorizationFail [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalid [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalidAjax [GOOD] >> 
Mvp::OpenIdConnectSessionServiceCreateOpenIdScopeMissed [GOOD] >> Mvp::OpenIdConnectAllowedHostsList >> Mvp::OpenIdConnectAllowedHostsList [GOOD] >> Mvp::OpenIdConnectHandleNullResponseFromProtectedResource [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateNotFoundCookie [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateGetWrongStateAndWrongCookie [GOOD] >> Mvp::OidcImpersonationStartFlow [GOOD] >> Mvp::OidcImpersonationStartNeedServiceAccountId |88.9%| [TS] {BAZEL_UPLOAD} ydb/core/config/validation/column_shard_config_validator_ut/unittest >> Mvp::OidcImpersonationStartNeedServiceAccountId [GOOD] >> Mvp::OidcImpersonationStopFlow [GOOD] >> Mvp::OidcImpersonatedAccessToProtectedResource [GOOD] >> Mvp::OidcImpersonatedAccessNotAuthorized [GOOD] >> Mvp::OpenIdConnectStreamingRequestResponseYandex [GOOD] >> Mvp::OpenIdConnectStreamingRequestResponseNebius ------- [TS] {asan, default-linux-x86_64, release} ydb/mvp/meta/ut/unittest >> MetaCache::TimeoutFallback [GOOD] Test command err: 2025-12-23T18:23:17.261276Z :HTTP INFO: http_proxy_acceptor.cpp:89: Listening on http://[::]:3054 2025-12-23T18:23:17.261911Z :HTTP INFO: http_proxy_acceptor.cpp:89: Listening on http://[::]:5788 2025-12-23T18:23:17.262571Z :HTTP DEBUG: http_proxy.cpp:22: Connection created [1:14:2061] 2025-12-23T18:23:17.262655Z :HTTP DEBUG: http_proxy_outgoing.cpp:180: resolving 127.0.0.1:3054 2025-12-23T18:23:17.262881Z :HTTP DEBUG: http_proxy_outgoing.cpp:375: (#11,127.0.0.1:3054) connecting... 2025-12-23T18:23:17.263172Z :HTTP DEBUG: http_proxy_outgoing.cpp:321: (#11,127.0.0.1:3054) outgoing connection opened 2025-12-23T18:23:17.263350Z :HTTP DEBUG: http_proxy_outgoing.cpp:323: (#11,127.0.0.1:3054) <- (GET /server) 2025-12-23T18:23:17.263761Z :HTTP DEBUG: http_proxy_incoming.cpp:97: (#12,[::ffff:127.0.0.1]:57728) incoming connection opened 2025-12-23T18:23:17.263966Z :HTTP DEBUG: http_proxy_incoming.cpp:170: (#12,[::ffff:127.0.0.1]:57728) -> (GET /server) 2025-12-23T18:23:17.264188Z :HTTP DEBUG: meta_cache.cpp:231: Updating ownership http://127.0.0.1:5788 with deadline 2025-12-23T18:24:17.264117Z 2025-12-23T18:23:17.264318Z :HTTP DEBUG: meta_cache.cpp:237: SetRefreshTime "/server" to 2025-12-23T18:24:17.264117Z (+1766514257.264117s) 2025-12-23T18:23:17.264412Z :HTTP DEBUG: meta_cache.cpp:198: IncomingForward /server to http://127.0.0.1:5788 timeout 30.000000s 2025-12-23T18:23:17.264620Z :HTTP DEBUG: http_proxy.cpp:22: Connection created [1:16:2063] 2025-12-23T18:23:17.264675Z :HTTP DEBUG: http_proxy_outgoing.cpp:180: resolving 127.0.0.1:5788 2025-12-23T18:23:17.264892Z :HTTP DEBUG: http_proxy_outgoing.cpp:375: (#13,127.0.0.1:5788) connecting... 
2025-12-23T18:23:17.265096Z :HTTP DEBUG: http_proxy_outgoing.cpp:321: (#13,127.0.0.1:5788) outgoing connection opened 2025-12-23T18:23:17.265153Z :HTTP DEBUG: http_proxy_outgoing.cpp:323: (#13,127.0.0.1:5788) <- (GET /server) 2025-12-23T18:23:17.265397Z :HTTP DEBUG: http_proxy_incoming.cpp:97: (#14,[::ffff:127.0.0.1]:59492) incoming connection opened 2025-12-23T18:23:17.265519Z :HTTP DEBUG: http_proxy_incoming.cpp:170: (#14,[::ffff:127.0.0.1]:59492) -> (GET /server) 2025-12-23T18:23:17.265894Z :HTTP DEBUG: http_proxy_incoming.cpp:304: (#14,[::ffff:127.0.0.1]:59492) <- (200 Found, 6 bytes) 2025-12-23T18:23:17.266037Z :HTTP DEBUG: http_proxy_incoming.cpp:365: (#14,[::ffff:127.0.0.1]:59492) connection closed 2025-12-23T18:23:17.266451Z :HTTP DEBUG: http_proxy_outgoing.cpp:101: (#13,127.0.0.1:5788) -> (200 Found, 6 bytes) 2025-12-23T18:23:17.266567Z :HTTP DEBUG: http_proxy_outgoing.cpp:110: (#13,127.0.0.1:5788) connection closed 2025-12-23T18:23:17.266918Z :HTTP DEBUG: meta_cache.cpp:146: Cache received successfull (200) response for /server 2025-12-23T18:23:17.267188Z :HTTP DEBUG: http_proxy_incoming.cpp:304: (#12,[::ffff:127.0.0.1]:57728) <- (200 Found, 6 bytes) 2025-12-23T18:23:17.267308Z :HTTP DEBUG: http_proxy_incoming.cpp:365: (#12,[::ffff:127.0.0.1]:57728) connection closed 2025-12-23T18:23:17.267463Z :HTTP DEBUG: http_proxy.cpp:131: Connection closed [1:16:2063] 2025-12-23T18:23:17.267665Z :HTTP DEBUG: http_proxy_outgoing.cpp:101: (#11,127.0.0.1:3054) -> (200 Found, 6 bytes) 2025-12-23T18:23:17.267731Z :HTTP DEBUG: http_proxy_outgoing.cpp:110: (#11,127.0.0.1:3054) connection closed 2025-12-23T18:23:17.268087Z :HTTP DEBUG: http_proxy.cpp:131: Connection closed [1:14:2061] 2025-12-23T18:23:17.276191Z :HTTP INFO: http_proxy_acceptor.cpp:89: Listening on http://[::]:4696 2025-12-23T18:23:17.276673Z :HTTP INFO: http_proxy_acceptor.cpp:89: Listening on http://[::]:13457 2025-12-23T18:23:17.277098Z :HTTP DEBUG: http_proxy.cpp:22: Connection created [2:14:2061] 2025-12-23T18:23:17.277154Z :HTTP DEBUG: http_proxy_outgoing.cpp:180: resolving 127.0.0.1:4696 2025-12-23T18:23:17.277419Z :HTTP DEBUG: http_proxy_outgoing.cpp:375: (#11,127.0.0.1:4696) connecting... 2025-12-23T18:23:17.277623Z :HTTP DEBUG: http_proxy_outgoing.cpp:321: (#11,127.0.0.1:4696) outgoing connection opened 2025-12-23T18:23:17.277714Z :HTTP DEBUG: http_proxy_outgoing.cpp:323: (#11,127.0.0.1:4696) <- (GET /server) 2025-12-23T18:23:17.278036Z :HTTP DEBUG: http_proxy_incoming.cpp:97: (#12,[::ffff:127.0.0.1]:41742) incoming connection opened 2025-12-23T18:23:17.278196Z :HTTP DEBUG: http_proxy_incoming.cpp:170: (#12,[::ffff:127.0.0.1]:41742) -> (GET /server) 2025-12-23T18:23:17.278469Z :HTTP DEBUG: meta_cache.cpp:231: Updating ownership http://127.0.0.1:13457 with deadline 2025-12-23T18:33:17.278429Z 2025-12-23T18:23:17.278531Z :HTTP DEBUG: meta_cache.cpp:237: SetRefreshTime "/server" to 2025-12-23T18:33:17.278429Z (+1766514797.278429s) 2025-12-23T18:23:17.278598Z :HTTP DEBUG: meta_cache.cpp:198: IncomingForward /server to http://127.0.0.1:13457 timeout 30.000000s 2025-12-23T18:23:17.278838Z :HTTP DEBUG: http_proxy.cpp:22: Connection created [2:16:2063] 2025-12-23T18:23:17.278898Z :HTTP DEBUG: http_proxy_outgoing.cpp:180: resolving 127.0.0.1:13457 2025-12-23T18:23:17.279069Z :HTTP DEBUG: http_proxy_outgoing.cpp:375: (#13,127.0.0.1:13457) connecting... 
2025-12-23T18:23:17.279202Z :HTTP DEBUG: http_proxy_outgoing.cpp:321: (#13,127.0.0.1:13457) outgoing connection opened 2025-12-23T18:23:17.279249Z :HTTP DEBUG: http_proxy_outgoing.cpp:323: (#13,127.0.0.1:13457) <- (GET /server) 2025-12-23T18:23:17.279450Z :HTTP DEBUG: http_proxy_incoming.cpp:97: (#14,[::ffff:127.0.0.1]:57868) incoming connection opened 2025-12-23T18:23:17.279580Z :HTTP DEBUG: http_proxy_incoming.cpp:170: (#14,[::ffff:127.0.0.1]:57868) -> (GET /server) 2025-12-23T18:23:17.290041Z :HTTP ERROR: http_proxy_outgoing.cpp:124: (#13,127.0.0.1:13457) connection closed with error: Connection timed out 2025-12-23T18:23:17.290440Z :HTTP DEBUG: http_proxy_incoming.cpp:208: (#14,[::ffff:127.0.0.1]:57868) connection closed 2025-12-23T18:23:17.290768Z :HTTP WARN: meta_cache.cpp:151: Cache received failed response with error "Connection timed out" for /server - retrying locally 2025-12-23T18:23:17.290921Z :HTTP DEBUG: http_proxy.cpp:131: Connection closed [2:16:2063] 2025-12-23T18:23:17.301516Z :HTTP DEBUG: http_proxy_incoming.cpp:304: (#12,[::ffff:127.0.0.1]:41742) <- (200 Found, 6 bytes) 2025-12-23T18:23:17.301714Z :HTTP DEBUG: http_proxy_incoming.cpp:365: (#12,[::ffff:127.0.0.1]:41742) connection closed 2025-12-23T18:23:17.302024Z :HTTP DEBUG: http_proxy_outgoing.cpp:101: (#11,127.0.0.1:4696) -> (200 Found, 6 bytes) 2025-12-23T18:23:17.302098Z :HTTP DEBUG: http_proxy_outgoing.cpp:110: (#11,127.0.0.1:4696) connection closed 2025-12-23T18:23:17.302350Z :HTTP DEBUG: http_proxy.cpp:131: Connection closed [2:14:2061] |88.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |88.9%| [TS] {BAZEL_UPLOAD} ydb/mvp/meta/ut/unittest >> Mvp::OpenIdConnectStreamingRequestResponseNebius [GOOD] >> Mvp::OidcWhoami200 [GOOD] >> Mvp::OidcWhoamiServiceAccount200 [GOOD] >> Mvp::OidcWhoamiBadIam200 [GOOD] >> Mvp::OidcWhoamiBadYdb200 >> Mvp::OidcWhoamiBadYdb200 [GOOD] >> Mvp::OidcWhoamiBadYdbServiceAccount200 [GOOD] >> Mvp::OidcWhoamiNoInfo500 [GOOD] >> Mvp::OidcWhoamiForward307 |88.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |88.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |88.9%| [TS] {RESULT} ydb/core/config/validation/column_shard_config_validator_ut/unittest |88.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |88.9%| [TS] {RESULT} ydb/mvp/meta/ut/unittest >> Mvp::OidcWhoamiForward307 [GOOD] >> Mvp::OidcYandexIgnoresWhoamiExtension [GOOD] >> Mvp::GetAddressWithoutPort [GOOD] |88.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |88.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |88.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |88.9%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/blob/ut/ydb-core-persqueue-pqtablet-blob-ut |88.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/blob/ut/ydb-core-persqueue-pqtablet-blob-ut |88.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |88.9%| [CC] {BAZEL_UPLOAD} 
$(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |88.9%| [LD] {RESULT} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests ------- [TS] {asan, default-linux-x86_64, release} ydb/mvp/oidc_proxy/ut/unittest >> Mvp::GetAddressWithoutPort [GOOD] Test command err: 2025-12-23T18:23:17.120159Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.120504Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-12-23T18:23:17.127514Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.127682Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-12-23T18:23:17.133984Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.134339Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 204 2025-12-23T18:23:17.140715Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.141010Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 204 2025-12-23T18:23:17.162070Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.162467Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 204 2025-12-23T18:23:17.171372Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.171799Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 204 2025-12-23T18:23:17.202846Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-12-23T18:23:17.202962Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.203353Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 400 2025-12-23T18:23:17.203411Z :MVP DEBUG: oidc_protected_page.cpp:143: Try to send request to HTTPS port 2025-12-23T18:23:17.203483Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.203796Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-12-23T18:23:17.209272Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-12-23T18:23:17.209358Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.209615Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 400 2025-12-23T18:23:17.232144Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-12-23T18:23:17.232216Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.232425Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 307 2025-12-23T18:23:17.237191Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-12-23T18:23:17.237267Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.237431Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 302 2025-12-23T18:23:17.242259Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-12-23T18:23:17.242318Z :MVP DEBUG: 
oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.242503Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 302 2025-12-23T18:23:17.247015Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-12-23T18:23:17.247087Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.247287Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 302 2025-12-23T18:23:17.251724Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-12-23T18:23:17.251810Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.251970Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 302 2025-12-23T18:23:17.272950Z :MVP DEBUG: oidc_protected_page_nebius.cpp:24: Start OIDC process 2025-12-23T18:23:17.273423Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_79632E6F617574682E7964622D766965776572: c2Vz****aWU= (CE0CB168)) 2025-12-23T18:23:17.273463Z :MVP DEBUG: oidc_protected_page_nebius.cpp:96: Exchange session token 2025-12-23T18:23:17.273689Z :MVP DEBUG: oidc_protected_page_nebius.cpp:53: Getting access token: 200 OK 2025-12-23T18:23:17.273741Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.273880Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-12-23T18:23:17.296407Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 401 2025-12-23T18:23:17.319067Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2025-12-23T18:23:17.319681Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-12-23T18:23:17.319988Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2025-12-23T18:23:17.324285Z :MVP DEBUG: oidc_session_create_yandex.cpp:69: SessionService.Create(): OK 2025-12-23T18:23:17.328517Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-12-23T18:23:17.328571Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.328860Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-12-23T18:23:17.356239Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2025-12-23T18:23:17.357208Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-12-23T18:23:17.357755Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2025-12-23T18:23:17.363960Z :MVP DEBUG: oidc_session_create_yandex.cpp:69: SessionService.Create(): OK 2025-12-23T18:23:17.370370Z :MVP DEBUG: oidc_protected_page_yandex.cpp:25: SessionService.Check(): OK 2025-12-23T18:23:17.370483Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.370850Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-12-23T18:23:17.392084Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-12-23T18:23:17.392248Z :MVP DEBUG: oidc_session_create.cpp:51: Check state failed: Calculated digest is not equal expected digest 2025-12-23T18:23:17.410847Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-12-23T18:23:17.411016Z :MVP DEBUG: oidc_session_create.cpp:51: Check state failed: Calculated digest is not equal expected digest 2025-12-23T18:23:17.428601Z :MVP DEBUG: 
oidc_session_create.cpp:21: Restore oidc session 2025-12-23T18:23:17.429065Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2025-12-23T18:23:17.434211Z :MVP DEBUG: oidc_session_create_yandex.cpp:79: SessionService.Create(): 401 2025-12-23T18:23:17.451928Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-12-23T18:23:17.452316Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2025-12-23T18:23:17.456702Z :MVP DEBUG: oidc_session_create_yandex.cpp:79: SessionService.Create(): 400 2025-12-23T18:23:17.476431Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-12-23T18:23:17.477062Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2025-12-23T18:23:17.482315Z :MVP DEBUG: oidc_session_create_yandex.cpp:79: SessionService.Create(): 400 2025-12-23T18:23:17.502947Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-12-23T18:23:17.503398Z :MVP DEBUG: oidc_session_create.cpp:71: Incoming response from authorization server: 200 2025-12-23T18:23:17.508205Z :MVP DEBUG: oidc_session_create_yandex.cpp:79: SessionService.Create(): 412 2025-12-23T18:23:17.536678Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2025-12-23T18:23:17.542292Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2025-12-23T18:23:17.547442Z :MVP DEBUG: oidc_protected_page_yandex.cpp:33: SessionService.Check(): 400 2025-12-23T18:23:17.561656Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.561946Z :MVP DEBUG: extension.cpp:14: Can not process request to protected resource: GET /ydb.viewer.page/counters HTTP/1.1 Host: oidcproxy.net Authorization: 2025-12-23T18:23:17.573165Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-12-23T18:23:17.573300Z :MVP DEBUG: oidc_session_create.cpp:43: Restore oidc context failed: Cannot find cookie ydb_oidc_cookie 2025-12-23T18:23:17.593154Z :MVP DEBUG: oidc_session_create.cpp:21: Restore oidc session 2025-12-23T18:23:17.593351Z :MVP DEBUG: oidc_session_create.cpp:51: Check state failed: Calculated digest is not equal expected digest 2025-12-23T18:23:17.612957Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:23: Start impersonation process 2025-12-23T18:23:17.613077Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-12-23T18:23:17.613128Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:49: Request impersonated token 2025-12-23T18:23:17.613363Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:100: Incoming response from authorization server: 200 2025-12-23T18:23:17.613446Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:89: Set impersonated cookie: (__Host_impersonated_cookie_636C69656E745F6964: aW1w****bg== (B126DD61)) 2025-12-23T18:23:17.632398Z :MVP DEBUG: oidc_impersonate_start_page_nebius.cpp:23: Start impersonation process 2025-12-23T18:23:17.632489Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-12-23T18:23:17.653909Z :MVP DEBUG: oidc_cleanup_page.cpp:20: Clear cookie: (__Host_impersonated_cookie_636C69656E745F6964) 2025-12-23T18:23:17.674605Z :MVP DEBUG: oidc_protected_page_nebius.cpp:24: Start OIDC process 2025-12-23T18:23:17.674703Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 
2025-12-23T18:23:17.674772Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_impersonated_cookie_636C69656E745F6964: aW1w****ZQ== (1A20D8C0)) 2025-12-23T18:23:17.674808Z :MVP DEBUG: oidc_protected_page_nebius.cpp:107: Exchange impersonated token 2025-12-23T18:23:17.675117Z :MVP DEBUG: oidc_protected_page_nebius.cpp:53: Getting access token: 200 OK 2025-12-23T18:23:17.675202Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.675365Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-12-23T18:23:17.695763Z :MVP DEBUG: oidc_protected_page_nebius.cpp:24: Start OIDC process 2025-12-23T18:23:17.695834Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-12-23T18:23:17.695872Z :MVP DEBUG: openid_connect.cpp:260: Using cookie (__Host_impersonated_cookie_636C69656E745F6964: aW1w****ZQ== (1A20D8C0)) 2025-12-23T18:23:17.695896Z :MVP DEBUG: oidc_protected_page_nebius.cpp:107: Exchange impersonated token 2025-12-23T18:23:17.696193Z :MVP DEBUG: oidc_protected_page_nebius.cpp:53: Getting access token: 401 OK 2025-12-23T18:23:17.696220Z :MVP DEBUG: oidc_protected_page_nebius.cpp:65: Getting access token: {"error": "bad_token"} 2025-12-23T18:23:17.696249Z :MVP DEBUG: oidc_protected_page_nebius.cpp:121: Clear impersonated cookie (__Host_impersonated_cookie_636C69656E745F6964) and retry 2025-12-23T18:23:17.719487Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.719924Z :MVP DEBUG: oidc_protected_page.cpp:51: Incoming incomplete response for protected resource: 200 2025-12-23T18:23:17.720085Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 59 bytes 2025-12-23T18:23:17.720205Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 59 bytes 2025-12-23T18:23:17.720327Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 14 bytes 2025-12-23T18:23:17.720397Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 0 bytes 2025-12-23T18:23:17.738834Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.739105Z :MVP DEBUG: oidc_protected_page.cpp:51: Incoming incomplete response for protected resource: 200 2025-12-23T18:23:17.739208Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 59 bytes 2025-12-23T18:23:17.739260Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 59 bytes 2025-12-23T18:23:17.739309Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 14 bytes 2025-12-23T18:23:17.739372Z :MVP DEBUG: oidc_protected_page.cpp:71: Incoming data chunk for protected resource: 0 bytes 2025-12-23T18:23:17.758355Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.762239Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-12-23T18:23:17.765094Z :MVP DEBUG: extension_whoami.cpp:33: Whoami Extension Info: OK 2025-12-23T18:23:17.792152Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.795792Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 2025-12-23T18:23:17.797918Z :MVP DEBUG: extension_whoami.cpp:33: Whoami Extension Info: OK 2025-12-23T18:23:17.819969Z :MVP DEBUG: oidc_protected_page.cpp:119: 
Forward user request bypass OIDC 2025-12-23T18:23:17.823463Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 TProfileServiceMock Get: Invalid or missing token: Bearer bad-token 2025-12-23T18:23:17.826571Z :MVP DEBUG: extension_whoami.cpp:40: Whoami Extension Info 401: Invalid or missing token, 2025-12-23T18:23:17.847511Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.851160Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 403 2025-12-23T18:23:17.853428Z :MVP DEBUG: extension_whoami.cpp:33: Whoami Extension Info: OK 2025-12-23T18:23:17.877762Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.881477Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 403 2025-12-23T18:23:17.888217Z :MVP DEBUG: extension_whoami.cpp:33: Whoami Extension Info: OK 2025-12-23T18:23:17.908601Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.911315Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 403 TProfileServiceMock Get: Invalid or missing token: Bearer bad-token 2025-12-23T18:23:17.915421Z :MVP DEBUG: extension_whoami.cpp:40: Whoami Extension Info 401: Invalid or missing token, 2025-12-23T18:23:17.938195Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.942191Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 307 2025-12-23T18:23:17.968189Z :MVP DEBUG: oidc_protected_page.cpp:119: Forward user request bypass OIDC 2025-12-23T18:23:17.968504Z :MVP DEBUG: oidc_protected_page.cpp:38: Incoming response for protected resource: 200 |88.9%| [TS] {BAZEL_UPLOAD} ydb/mvp/oidc_proxy/ut/unittest |88.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |88.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |88.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |88.9%| [TS] {RESULT} ydb/mvp/oidc_proxy/ut/unittest |88.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut >> ClientBlobSerialization::SerializeAndDeserializeAllScenarios >> ClientBlobSerialization::EmptyPayload [GOOD] >> BlobTest::Flags_HasUncompressedSize [GOOD] >> BlobTest::Flags_HasWriteTimestamp [GOOD] >> BlobTest::Flags_HasKinesisData [GOOD] >> BlobTest::Flags_HasPartData [GOOD] >> BlobTest::Flags_HasCreateTimestamp [GOOD] |88.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |88.9%| [AR] {RESULT} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |88.9%| [LD] {RESULT} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |88.9%| [LD] {RESULT} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut >> TestS3UrlEscape::EscapeEscapedForce [GOOD] >> TestS3UrlEscape::EscapeUnescapeForceRet [GOOD] >> TestS3UrlEscape::EscapeAdditionalSymbols [GOOD] >> TestUrlBuilder::UriOnly 
[GOOD] >> TestUrlBuilder::Basic [GOOD] >> TestUrlBuilder::BasicWithEncoding [GOOD] >> TestUrlBuilder::BasicWithAdditionalEncoding [GOOD] |88.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |88.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |88.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |88.9%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |88.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/s3/common/ut/unittest >> TestUrlBuilder::BasicWithAdditionalEncoding [GOOD] |88.9%| [TS] {BAZEL_UPLOAD} ydb/library/yql/providers/s3/common/ut/unittest |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/ydb-tests-sql |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |88.9%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |88.9%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |88.9%| [TS] {RESULT} ydb/library/yql/providers/s3/common/ut/unittest |88.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/blob/ut/ydb-core-persqueue-pqtablet-blob-ut >> ClosedIntervalSet::Union >> TPGTest::TestLogin |88.9%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp >> TPGTest::TestLogin [GOOD] |88.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp ------- [TS] {asan, default-linux-x86_64, release} ydb/core/pgproxy/ut/unittest >> TPGTest::TestLogin [GOOD] Test command err: 2025-12-23T18:23:21.331239Z :PGWIRE INFO: sock_listener.cpp:66: Listening on [::]:22403 2025-12-23T18:23:21.332059Z :PGWIRE DEBUG: pg_connection.cpp:61: (#13,[::1]:57046) incoming connection opened 2025-12-23T18:23:21.332216Z :PGWIRE DEBUG: pg_connection.cpp:241: (#13,[::1]:57046) -> [1] 'i' "Initial" Size(15) protocol(0x00000300) user=user 2025-12-23T18:23:21.332389Z :PGWIRE DEBUG: pg_connection.cpp:241: (#13,[::1]:57046) <- [1] 'R' "Auth" Size(4) OK |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |88.9%| [TS] {RESULT} ydb/core/pgproxy/ut/unittest >> TBatchedVecTest::TestToStringInt [GOOD] >> TBatchedVecTest::TestOutputTOutputType [GOOD] >> BufferWithGaps::Basic [GOOD] >> BufferWithGaps::IsReadable [GOOD] >> PtrTest::Test1 [GOOD] |88.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |88.9%| [TS] {BAZEL_UPLOAD} ydb/core/pgproxy/ut/unittest |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |88.9%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut >> ClientBlobSerialization::SerializeAndDeserializeAllScenarios [GOOD] >> TTypeCodecsTest::TestBoolCodec 
[GOOD] >> TTypeCodecsTest::TestFixedLenCodec [GOOD] >> TTypeCodecsTest::TestVarLenCodec [GOOD] >> TTypeCodecsTest::TestVarIntCodec [GOOD] >> TTypeCodecsTest::TestZigZagCodec [GOOD] >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] |88.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/blobstorage/base/ut/gtest >> PtrTest::Test1 [GOOD] |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |88.9%| [TS] {BAZEL_UPLOAD} ydb/core/blobstorage/base/ut/gtest |88.9%| [TS] {RESULT} ydb/core/blobstorage/base/ut/gtest |88.9%| [LD] {RESULT} $(B)/ydb/core/metering/ut/ydb-core-metering-ut ------- [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/pqtablet/blob/ut/unittest >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] Test command err: payloadSize 0 totalSize 100 partsCount 100 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 100 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 0 
partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 10 partKeySize 256 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 100 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 255 partNo 0 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 1 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 0 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 100 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 255 partNo 1 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 0 hashSize 0 partNo 10 === payloadSize 10 totalSize 
100 partsCount 40 partKeySize 0 hashSize 0 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 1 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 1 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 0 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 0 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 100 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 100 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 255 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 100 hashSize 255 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 1 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 1 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 0 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 0 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 100 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 100 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 255 partNo 10 === payloadSize 10 totalSize 100 partsCount 40 partKeySize 256 hashSize 255 partNo 10 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 100 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 100 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 255 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 255 partNo 0 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 0 hashSize 0 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 1 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 1 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 0 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 0 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 
hashSize 100 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 100 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 255 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 100 hashSize 255 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 1 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 1 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 0 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 0 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 100 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 100 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 255 partNo 1 === payloadSize 10 totalSize 524288 partsCount 10 partKeySize 256 hashSize 255 partNo 1 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 0 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 100 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 100 hashSize 255 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 256 hashSize 1 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount 40 partKeySize 256 hashSize 0 partNo 0 === payloadSize 10 totalSize 524288 partsCount ... 
Size 1048576 partsCount 40 partKeySize 256 hashSize 1 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 1 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 0 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 0 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 100 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 100 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 255 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 255 partNo 1 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 0 hashSize 0 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 0 hashSize 0 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 1 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 1 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 0 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 0 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 100 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 100 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 255 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 100 hashSize 255 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 1 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 1 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 0 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 0 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 100 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 100 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 255 partNo 10 === payloadSize 524288 totalSize 1048576 partsCount 40 partKeySize 256 hashSize 255 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 0 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 0 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 
256 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 0 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 0 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 100 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 10 partKeySize 256 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 0 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 0 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 1 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 0 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 100 partNo 0 === 
payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 100 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 255 partNo 0 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 0 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 0 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 1 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 0 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 100 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 255 partNo 1 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 0 hashSize 0 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 0 hashSize 0 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 1 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 1 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 0 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 0 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 100 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 100 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 255 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 100 hashSize 255 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 1 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 1 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 0 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 0 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 100 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 100 partNo 10 === payloadSize 524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 255 partNo 10 === payloadSize 
524288 totalSize 10485760 partsCount 40 partKeySize 256 hashSize 255 partNo 10 === Size: 128 Create chunk: 0.000018s Read by index: 0.000010s Iterate: 0.000009s Size: 252 Create chunk: 0.000030s Read by index: 0.000010s Iterate: 0.000011s Size: 8002 Create chunk: 0.000072s Read by index: 0.000013s Iterate: 0.000055s Size: 8256 Create chunk: 0.000113s Read by index: 0.000021s Iterate: 0.000060s Size: 8532 Create chunk: 0.000065s Read by index: 0.000038s Iterate: 0.000030s Size: 7769 Create chunk: 0.000078s Read by index: 0.000030s Iterate: 0.000029s Size: 2853 Create chunk: 0.000059s Read by index: 0.000064s Iterate: 0.000029s Size: 2419 Create chunk: 0.000060s Read by index: 0.000065s Iterate: 0.000031s Size: 2929 Create chunk: 0.000050s Read by index: 0.000050s Iterate: 0.000026s Size: 2472 Create chunk: 0.000070s Read by index: 0.000081s Iterate: 0.000030s Size: 1887 Create chunk: 0.000058s Read by index: 0.000079s Iterate: 0.000030s Size: 1658 Create chunk: 0.000093s Read by index: 0.000077s Iterate: 0.000031s Size: 1889 Create chunk: 0.000051s Read by index: 0.000060s Iterate: 0.000029s Size: 1660 Create chunk: 0.000060s Read by index: 0.000058s Iterate: 0.000031s Size: 2407 Create chunk: 0.000063s Read by index: 0.000078s Iterate: 0.000052s Size: 2061 Create chunk: 0.000059s Read by index: 0.000068s Iterate: 0.000035s |88.9%| [TS] {BAZEL_UPLOAD} ydb/core/persqueue/pqtablet/blob/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/mvp/core/ut/unittest >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] Test command err: 2025-12-23T18:23:16.993967Z :MVP DEBUG: mvp_tokens.cpp:77: Refreshing token metadataTokenName 2025-12-23T18:23:16.994255Z :MVP DEBUG: mvp_tokens.cpp:217: Updating metadata token 2025-12-23T18:23:17.000971Z :MVP DEBUG: mvp_tokens.cpp:77: Refreshing token metadataTokenName 2025-12-23T18:23:17.001214Z :MVP DEBUG: mvp_tokens.cpp:217: Updating metadata token 2025-12-23T18:23:22.001510Z :MVP DEBUG: mvp_tokens.cpp:77: Refreshing token metadataTokenName 2025-12-23T18:23:22.001971Z :MVP DEBUG: mvp_tokens.cpp:217: Updating metadata token |88.9%| [TS] {BAZEL_UPLOAD} ydb/mvp/core/ut/unittest |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/ut/ydb-core-kqp-federated_query-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/ut/ydb-core-kqp-federated_query-ut |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/util/ut/ydb-core-util-ut |88.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |88.9%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/ut/ydb-core-kqp-federated_query-ut |89.0%| [LD] {RESULT} $(B)/ydb/core/util/ut/ydb-core-util-ut |89.0%| [TS] {RESULT} ydb/core/persqueue/pqtablet/blob/ut/unittest |89.0%| [AR] {RESULT} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |89.0%| [TS] {RESULT} ydb/mvp/core/ut/unittest >> TTimeGridTest::TimeGrid [GOOD] >> TStreamRequestUnitsCalculatorTest::Basic [GOOD] >> ArrowTest::BatchBuilder |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut >> Mirror3of4::ReplicationSmall [GOOD] >> Mirror3of4::ReplicationHuge >> ArrowTest::BatchBuilder [GOOD] >> ArrowTest::ArrowToYdbConverter >> ArrowTest::ArrowToYdbConverter [GOOD] >> ArrowTest::SortWithCompositeKey [GOOD] >> ArrowTest::MergingSortedInputStream [GOOD] >> ArrowTest::MergingSortedInputStreamReversed [GOOD] >> ArrowTest::MergingSortedInputStreamReplace [GOOD] >> ArrowTest::MaxVersionFilter 
[GOOD] >> ArrowTest::EqualKeysVersionFilter [GOOD] >> ColumnFilter::MergeFilters [GOOD] >> ColumnFilter::CombineFilters [GOOD] >> ColumnFilter::ApplyFilterToFilter [GOOD] >> ColumnFilter::FilterSlice [GOOD] >> ColumnFilter::FilterCheckSlice [GOOD] >> ColumnFilter::FilterSlice1 [GOOD] >> ColumnFilter::CutFilter1 [GOOD] >> ColumnFilter::CutFilter2 [GOOD] >> Dictionary::Simple |89.0%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |89.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |89.0%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |89.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |89.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |89.0%| [LD] {RESULT} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |89.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/metering/ut/unittest >> TStreamRequestUnitsCalculatorTest::Basic [GOOD] |89.0%| [TS] {BAZEL_UPLOAD} ydb/core/metering/ut/unittest >> TCowBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TCowBTreeTest::SnapshotRollback >> TCowBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::SnapshotCascade [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight10 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue10 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight1 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight3 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue100 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue3 [GOOD] >> TCircularQueueTest::ShouldPush [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight2 [GOOD] >> TCircularQueueTest::ShouldNextSingleItem >> TCircularQueueTest::Empty [GOOD] >> TCircularOperationQueueTest::ShouldTryToStartAnotherOneWhenStartFails [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenHasWaitingAndStart [GOOD] >> TCircularOperationQueueTest::ShouldTolerateInaccurateTimer [GOOD] >> TCircularOperationQueueTest::ShouldStartInflight100 [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue2 [GOOD] >> TCircularOperationQueueTest::ShouldStartEmpty [GOOD] >> TCircularQueueTest::ShouldNotPushTwice [GOOD] >> TCircularOperationQueueTest::ShouldShuffle [GOOD] >> TCircularOperationQueueTest::ShouldScheduleWakeupWhenNothingStarted [GOOD] >> TCircularOperationQueueTest::ShouldReturnExecTime [GOOD] >> TCircularOperationQueueTest::UseMinOperationRepeatDelayWhenTimeout [GOOD] >> TCircularOperationQueueTest::ShouldStartInflightEnqueue1 [GOOD] >> TBTreeTest::ClearAndReuse [GOOD] >> TBitsTest::TestNaiveClz [GOOD] >> AddressClassifierTest::TestLabeledClassifier [GOOD] >> AddressClassifierTest::TestLabeledClassifierFromNetData [GOOD] >> AddressClassifierTest::TestClassfierWithAllIpTypes [GOOD] >> 
AddressClassifierTest::TestAddressExtraction [GOOD] >> TBTreeTest::SeekForwardPermutationsInplace >> TBTreeTest::Basics [GOOD] >> AddressClassifierTest::TestAddressParsing [GOOD] >> TIntervalSetTest::IntervalSetTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalSetTestEmpty [GOOD] >> TIntervalSetTest::IntervalSetTestAdd >> TPriorityOperationQueueTest::ShouldUpdatePriorityReadyQueue >> THyperLogCounterTest::TestGetSet [GOOD] >> THazardTest::AutoProtectedPointers [GOOD] >> THyperLogCounterTest::TestIncrement [GOOD] >> THazardTest::CachedPointers [GOOD] >> THyperLogCounterTest::TestAddRandom >> TCircularQueueTest::ShouldNextSingleItem [GOOD] >> TCircularQueueTest::ShouldNextMulti [GOOD] >> TCircularQueueTest::ShouldNotRemoveMissing [GOOD] >> TCircularQueueTest::ShouldGetQueue [GOOD] >> TCowBTreeTest::Empty [GOOD] >> TCircularQueueTest::ShouldRemoveCurrentLast [GOOD] >> TConcurrentRWHashTest::TEraseTest [GOOD] >> TCircularQueueTest::ShouldRemove [GOOD] >> TConcurrentRWHashTest::TEmptyGetTest [GOOD] >> TConcurrentRWHashTest::TRemoveTest [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTestFunc [GOOD] >> TConcurrentRWHashTest::TInsertIfAbsentTest [GOOD] >> TCowBTreeTest::Basics [GOOD] >> TCowBTreeTest::ClearAndReuse >> TConcurrentRWHashTest::TInsertTest [GOOD] >> TCircularQueueTest::ShouldRemoveCurrent [GOOD] >> TBTreeTest::SeekForwardPermutationsInplace [GOOD] >> TBTreeTest::SeekForwardPermutationsThreadSafe [GOOD] >> TBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TBTreeTest::RandomInsertInplace >> PushdownTest::NoFilter [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty |89.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp >> TIntervalSetTest::IntervalVecTestEmpty [GOOD] >> TIntervalSetTest::IntervalVecTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalVecTestAdd >> TCacheTest::Test2QCache [GOOD] >> TPriorityOperationQueueTest::ShouldUpdatePriorityReadyQueue [GOOD] >> TCircularOperationQueueTest::CheckRemoveWaiting [GOOD] >> TPriorityOperationQueueTest::ShouldUpdatePriorityWaitingQueue [GOOD] >> TCacheTest::TestUpdateItemSize [GOOD] >> TCircularOperationQueueTest::CheckWakeupAfterStop [GOOD] >> TCircularOperationQueueTest::CheckRemoveRunning [GOOD] >> TCacheTest::TestSizeBasedOverflowCallback [GOOD] >> TCircularOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> TCircularOperationQueueTest::BasicRPSCheckWithRound [GOOD] >> TCircularOperationQueueTest::BasicRPSCheck [GOOD] >> TCircularOperationQueueTest::RemoveNonExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotExisting [GOOD] >> TCacheTest::EnsureNoLeakAfterQ2CacheDtor [GOOD] >> TCircularOperationQueueTest::CheckOnDoneNotExisting [GOOD] >> TCircularOperationQueueTest::CheckOnDoneInflight2 [GOOD] >> TCacheTest::EnsureNoLeakAfterUnboundedCacheOnMapDtor [GOOD] >> TCircularOperationQueueTest::RemoveExistingWhenShuffle [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted2 [GOOD] >> TCacheTest::TestUnboundedMapCache [GOOD] >> TCircularOperationQueueTest::CheckWakeupWhenRPSExhausted [GOOD] >> TCircularOperationQueueTest::CheckTimeout [GOOD] >> TCacheTest::TestLruCache [GOOD] >> TCircularOperationQueueTest::CheckOnDoneInflight1 [GOOD] >> TCircularOperationQueueTest::CheckRemoveNotRunning [GOOD] >> TCircularOperationQueueTest::CheckStartAfterStop [GOOD] >> TCircularOperationQueueTest::CheckTimeoutWhenFirstItemRemoved [GOOD] >> 
TPriorityOperationQueueTest::UpdateNonExistingShouldReturnFalse [GOOD] >> TPriorityQueueTest::TestOrder [GOOD] >> TQueueInplaceTests::TestSimpleInplace [GOOD] >> TQueueInplaceTests::DestroyInDestructor [GOOD] >> TQueueInplaceTests::EmplacePopDefault [GOOD] >> TQueueInplaceTests::PopTooManyTimes [GOOD] >> TQueueInplaceTests::MoveConstructor [GOOD] >> TQueueInplaceTests::MoveAssignment [GOOD] >> TSimpleCacheTest::TestSimpleCache [GOOD] >> TSimpleCacheTest::TestNotSoSimpleCache [GOOD] >> TStrongTypeTest::DefaultConstructorDeleted [GOOD] >> TStrongTypeTest::DefaultConstructorValue [GOOD] >> TTokenBucketTest::Unlimited [GOOD] >> TTokenBucketTest::Limited [GOOD] >> TTokenBucketTest::DelayCalculation [GOOD] >> TULID::ParseAndFormat [GOOD] >> TULID::HeadByteOrder [GOOD] >> TULID::TailByteOrder [GOOD] >> TULID::EveryBitOrder [GOOD] >> TULID::Generate [GOOD] >> TWildcardTest::TestWildcard [GOOD] >> TWildcardTest::TestWildcards [GOOD] >> TCowBTreeTest::ClearAndReuse [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsInplace [GOOD] >> TCowBTreeTest::SeekBackwardPermutationsThreadSafe [GOOD] >> TCowBTreeTest::RandomInsertInplace >> PushdownTest::Equal [GOOD] >> TIntervalSetTest::IntervalSetTestAdd [GOOD] >> TIntervalSetTest::IntervalSetTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalSetTestSubtract [GOOD] >> TIntervalSetTest::IntervalSetTestSubtractAgainstReference >> TIntervalSetTest::IntervalVecTestAdd [GOOD] >> TIntervalSetTest::IntervalVecTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalVecTestSubtract [GOOD] >> TIntervalSetTest::IntervalVecTestSubtractAgainstReference >> THyperLogCounterTest::TestAddRandom [GOOD] >> THyperLogCounterTest::TestAddFixed >> PushdownTest::NotEqualInt32Int64 [GOOD] >> TIntervalSetTest::IntervalSetTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestIsSubsetOfAgainstReference >> TIntervalSetTest::IntervalVecTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecUnion >> THyperLogCounterTest::TestAddFixed [GOOD] >> THyperLogCounterTest::TestHybridIncrement [GOOD] >> THyperLogCounterTest::TestHybridAdd [GOOD] >> TIntervalSetTest::IntervalMapTestEmpty [GOOD] >> TIntervalSetTest::IntervalMapTestSpecificAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAdd |89.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |89.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |89.0%| [TS] {RESULT} ydb/core/metering/ut/unittest >> PushdownTest::TrueCoalesce [GOOD] >> TIntervalSetTest::IntervalSetTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetUnion >> TestFederatedQueryHelpers::TestCheckNestingDepth [GOOD] >> TestFederatedQueryHelpers::TestTruncateIssues [GOOD] >> TestFederatedQueryHelpers::TestValidateResultSetColumns [GOOD] >> PushdownTest::CmpInt16AndInt32 [GOOD] >> TIntervalSetTest::IntervalMapTestAdd [GOOD] >> TIntervalSetTest::IntervalMapTestAddSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtract [GOOD] >> TIntervalSetTest::IntervalMapTestSubtractAgainstReference |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TCircularQueueTest::ShouldGetQueue [GOOD] |89.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/util/ut/unittest >> TCircularOperationQueueTest::CheckTimeoutWhenFirstItemRemoved [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TWildcardTest::TestWildcards [GOOD] >> PushdownTest::PartialAnd [GOOD] >> TIntervalSetTest::IntervalVecUnion [GOOD] >> TIntervalSetTest::IntervalMapTestSubtractAgainstReference [GOOD] >> TIntervalSetTest::IntervalVecUnionInplace >> TIntervalSetTest::IntervalMapTestAddAgainstReference |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/util/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/util/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/util/ut/unittest |89.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut >> PushdownTest::PartialAndOneBranchPushdownable [GOOD] >> PushdownTest::NotNull [GOOD] >> TIntervalSetTest::IntervalSetUnion [GOOD] >> TIntervalSetTest::IntervalMapTestAddAgainstReference [GOOD] >> TIntervalSetTest::IntervalSetUnionInplace >> TIntervalSetTest::IntervalMapTestIsSubsetOfAgainstReference >> PushdownTest::NotNullForDatetime [GOOD] >> TIntervalSetTest::IntervalVecUnionInplace [GOOD] >> TIntervalSetTest::IntervalMapTestIsSubsetOfAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapTestToStringAgainstReference [GOOD] >> TIntervalSetTest::IntervalMapUnion >> TIntervalSetTest::IntervalVecUnionInplaceSelf [GOOD] >> TIntrusiveFixedHashSetTest::TestEmptyFind [GOOD] >> TIntrusiveFixedHashSetTest::TestPushFindClear [GOOD] >> TIntrusiveHeapTest::TestEmpty [GOOD] >> TIntrusiveHeapTest::TestAddRemove [GOOD] >> TIntrusiveHeapTest::TestUpdateNoChange [GOOD] >> TIntrusiveHeapTest::TestUpdateIncrease [GOOD] >> TIntrusiveHeapTest::TestUpdateDecrease [GOOD] >> TIntrusiveStackTest::TestEmptyPop [GOOD] >> TIntrusiveStackTest::TestPushPop [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention |89.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/metering/ut/ydb-core-metering-ut >> PushdownTest::IsNull [GOOD] >> PushdownTest::StringFieldsNotSupported |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |89.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut >> PushdownTest::StringFieldsNotSupported [GOOD] >> PushdownTest::StringFieldsNotSupported2 |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |89.0%| [LD] {RESULT} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/federated_query/ut/unittest >> TestFederatedQueryHelpers::TestValidateResultSetColumns [GOOD] >> Init::TWithDefaultParser [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorForActorSystem >> PushdownTest::StringFieldsNotSupported2 [GOOD] >> PushdownTest::RegexpPushdown >> TIntervalSetTest::IntervalSetUnionInplace [GOOD] >> TIntervalSetTest::IntervalMapUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetUnionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecIntersection |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/federated_query/ut/unittest >> StaticNodeSelectorsInit::TestStaticNodeSelectorForActorSystem [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorWithAnotherLabel >> PushdownTest::RegexpPushdown [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorWithAnotherLabel [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorInheritance [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeId [GOOD] >> 
PushdownTest::DecimalPushdownPrecision10Scale0 [GOOD] >> TIntervalSetTest::IntervalVecIntersection [GOOD] >> TIntervalSetTest::IntervalSetIntersection |89.0%| [TM] {RESULT} ydb/core/kqp/federated_query/ut/unittest >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeHost [GOOD] >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeKind >> PushdownTest::DecimalPushdownPrecesion4Scale2 [GOOD] |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |89.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut >> StaticNodeSelectorsInit::TestStaticNodeSelectorByNodeKind [GOOD] >> XdsBootstrapConfig::CanSetHostnameAndDataCenterFromYdbNode [GOOD] >> XdsBootstrapConfig::CanSetDataCenterFromYdbNodeArgument |89.0%| [LD] {RESULT} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut >> XdsBootstrapConfig::CanSetDataCenterFromYdbNodeArgument [GOOD] >> XdsBootstrapConfig::CanCheckThatXdsBootstrapIsAbsent >> TIntervalSetTest::IntervalMapUnion [GOOD] >> TIntervalSetTest::IntervalMapUnionInplace >> XdsBootstrapConfig::CanCheckThatXdsBootstrapIsAbsent [GOOD] >> XdsBootstrapConfig::CanUseNodeIdFromYamlConfig [GOOD] >> TIntervalSetTest::IntervalSetIntersection [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplace >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] |89.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a >> TIntervalSetTest::IntervalVecIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplace |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |89.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/util/ut/ydb-core-util-ut ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest >> PushdownTest::DecimalPushdownPrecesion4Scale2 [GOOD] Test command err: Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (Bool '"true") $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (Bool '"true") $4))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (Bool '"true") $4))) '('('type)))) (return (Commit! $3 $2)) ) discovered cluster name: test_clusterDescribe table for: ``test_cluster`.`test_table``Expr: ( (let $1 (GenReadTable! 
world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (Bool '"true")) (let $2 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) $1))) (let $3 (DataSink '"result")) (let $4 (ResWrite! (Left! $2) $3 (Key) (FlatMap (Right! $2) (lambda '($6) (OptionalIf $1 $6))) '('('type)))) (return (Commit! $4 $3)) ) OptionalIf over Bool 'trueExpr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) FlatMap with JustExpr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (Right! $1) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (Right! $1) '('('type)))) (return (Commit! $3 $2)) ) PhysicalOptimizer-TrimReadWorldExpr: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)))) (return (Commit! $2 $1)) ) Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)))) (return (Commit! $2 $1)) ) ResPullExpr: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! 
$2 $1)) ) Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) Optimized expr: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) Filling source settings: cluster: test_cluster, table: test_table, endpoint: host: "host" port: 42BuildGenericDqSourceSettingsBuilt settings: ( (let $1 (DataSink '"result")) (let $2 '('"col_bool" '"col_date" '"col_datetime" '"col_decimal_precision10_scale0" '"col_decimal_precision4_scale2" '"col_double" '"col_dynumber" '"col_float" '"col_int16" '"col_int32" '"col_int64" '"col_int8" '"col_interval" '"col_json" '"col_json_document" '"col_optional_bool" '"col_optional_date" '"col_optional_datetime" '"col_optional_double" '"col_optional_dynumber" '"col_optional_float" '"col_optional_int16" '"col_optional_int32" '"col_optional_int64" '"col_optional_int8" '"col_optional_interval" '"col_optional_json" '"col_optional_json_document" '"col_optional_string" '"col_optional_timestamp" '"col_optional_tz_date" '"col_optional_tz_datetime" '"col_optional_tz_timestamp" '"col_optional_uint16" '"col_optional_uint32" '"col_optional_uint64" '"col_optional_uint8" '"col_optional_utf8" '"col_optional_uuid" '"col_optional_yson" '"col_string" '"col_timestamp" '"col_tz_date" '"col_tz_datetime" '"col_tz_timestamp" '"col_uint16" '"col_uint32" '"col_uint64" '"col_uint8" '"col_utf8" '"col_uuid" '"col_yson")) (let $3 (GenSourceSettings world '"test_cluster" '"test_table" (SecureParam '"cluster:default_test_cluster") $2 (lambda '($34) (Bool '"true")))) (let $4 (DataType 'Bool)) (let $5 (DataType 'Date)) (let $6 (DataType 'Datetime)) (let $7 (DataType 'Decimal '10 '0)) (let $8 (DataType 'Decimal '4 '2)) (let $9 (DataType 'Double)) (let $10 (DataType 'DyNumber)) (let $11 (DataType 'Float)) (let $12 (DataType 'Int16)) (let $13 (DataType 'Int32)) (let $14 (DataType 'Int64)) (let $15 (DataType 'Int8)) (let $16 (DataType 'Interval)) (let $17 (DataType 'Json)) (let $18 (DataType 'JsonDocument)) (let $19 (DataType 'String)) (let $20 (DataType 'Timestamp)) (let $21 (DataType 'TzDate)) (let $22 (DataType 'TzDatetime)) (let $23 (DataType 'TzTimestamp)) (let $24 (DataType 'Uint16)) (let $25 (DataType 'Uint32)) (let $26 (DataType 'Uint64)) (let $27 (DataType 'Uint8)) (let $28 (DataType 'Utf8)) (let $29 (DataType 'Uuid)) (let $30 (DataType 'Yson)) (let $31 (StructType '('"col_bool" $4) '('"col_date" $5) '('"col_datetime" $6) '('"col_decimal_precision10_scale0" $7) '('"col_decimal_precision4_scale2" $8) '('"col_double" $9) '('"col_dynumber" $10) '('"col_float" $11) '('"col_int16" $12) '('"col_int32" $13) '('"col_int64" $14) '('"col_int8" $15) '('"col_interval" $16) '('"col_json" $17) '('"col_json_document" $18) '('"col_optional_bool" (OptionalType $4)) '('"col_optional_date" (OptionalType $5)) '('"col_optional_datetime" (OptionalType $6)) '('"col_optional_double" (OptionalType $9)) '('"col_optional_dynumber" (OptionalType $10)) '('"col_optional_float" (OptionalType $11)) '('"col_optional_int16" (OptionalType $12)) '('"col_optional_int32" (OptionalType $13)) '('"col_optional_int64" (OptionalType $14)) '('"col_optional_int8" (OptionalType $15)) '('"col_optional_interval" (OptionalType $16)) 
'('"col_optional_json" (OptionalType $17)) '('"col_optional_json_document" (OptionalType $18)) '('"col_optional_string" (OptionalType $19)) '('"col_optional_timestamp" (OptionalType $20)) '('"col_optional_tz_date" (OptionalType $21)) '('"col_optional_tz_datetime" (OptionalType $22)) '('"col_optional_tz_timestamp" (OptionalType $23)) '('"col_optional_uint16" (OptionalType $24)) '('"col_optional_uint32" (OptionalType $25)) '('"col_optional_uint64" (OptionalType $26)) '('"col_optional_uint8" (OptionalType $27)) '('"col_optional_utf8" (OptionalType $28)) '('"col_optional_uuid" (OptionalType $29)) '('"col_optional_yson" (OptionalType $30)) '('"col_string" $19) '('"col_timestamp" $20) '('"col_tz_date" $21) '('"col_tz_datetime" $22) '('"col_tz_timestamp" $23) '('"col_uint16" $24) '('"col_uint32" $25) '('"col_uint64" $26) '('"col_uint8" $27) '('"col_utf8" $28) '('"col_uuid" $29) '('"col_yson" $30))) (let $32 (DqSourceWrap $3 (DataSource '"generic" '"test_cluster") $31)) (let $33 (ResPull! world $1 (Key) $32 '('('type)) '"generic")) (return (Commit! $33 $1)) ) Dq source filter settings: GenericConfiguration::AddCluster: name = test_cluster, kind = POSTGRESQL, database name = database, database id = , endpoint = { host: "host" port: 42 }, use tls = 0, protocol = NATIVE Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (== (Member $row '"col_int16") (Int16 '42)) $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (== (Member $4 '"col_int16") (Int16 '42)) $4))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (== (Member $4 '"col_int16") (Int16 '42)) $4))) '('('type)))) (return (Commit! $3 $2)) ) discovered cluster name: test_clusterDescribe table for: ``test_cluster`.`test_table``Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (== (Member $5 '"col_int16") (Int16 '42)) $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (== (Member $5 '"col_int16") (Int16 '42)) $5))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! 
$1) (lambda '($5) (OptionalIf (== (Member $5 '"col_int ... ) '('"col_optional_datetime" (OptionalType $8)) '('"col_optional_double" (OptionalType $11)) '('"col_optional_dynumber" (OptionalType $12)) '('"col_optional_float" (OptionalType $13)) '('"col_optional_int16" (OptionalType $14)) '('"col_optional_int32" (OptionalType $15)) '('"col_optional_int64" (OptionalType $16)) '('"col_optional_int8" (OptionalType $17)) '('"col_optional_interval" (OptionalType $18)) '('"col_optional_json" (OptionalType $19)) '('"col_optional_json_document" (OptionalType $20)) '('"col_optional_string" (OptionalType $21)) '('"col_optional_timestamp" (OptionalType $22)) '('"col_optional_tz_date" (OptionalType $23)) '('"col_optional_tz_datetime" (OptionalType $24)) '('"col_optional_tz_timestamp" (OptionalType $25)) '('"col_optional_uint16" (OptionalType $26)) '('"col_optional_uint32" (OptionalType $27)) '('"col_optional_uint64" (OptionalType $28)) '('"col_optional_uint8" (OptionalType $29)) '('"col_optional_utf8" (OptionalType $30)) '('"col_optional_uuid" (OptionalType $31)) '('"col_optional_yson" (OptionalType $32)) '('"col_string" $21) '('"col_timestamp" $22) '('"col_tz_date" $23) '('"col_tz_datetime" $24) '('"col_tz_timestamp" $25) '('"col_uint16" $26) '('"col_uint32" $27) '('"col_uint64" $28) '('"col_uint8" $29) '('"col_utf8" $30) '('"col_uuid" $31) '('"col_yson" $32))) (let $34 (DqSourceWrap $5 (DataSource '"generic" '"test_cluster") $33)) (let $35 (ResWrite! world $1 (Key) (FlatMap $34 (lambda '($37) (OptionalIf (== (Member $37 $3) $4) $37))) '('('type)))) (return (Commit! $35 $1)) ) Dq source filter settings: filter_typed { comparison { operation: EQ left_value { column: "col_decimal_precision10_scale0" } right_value { typed_value { type { decimal_type { precision: 10 } } value { bytes_value: "\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" } } } } } GenericConfiguration::AddCluster: name = test_cluster, kind = POSTGRESQL, database name = database, database id = , endpoint = { host: "host" port: 42 }, use tls = 0, protocol = NATIVE Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (== (Member $row '"col_decimal_precision4_scale2") (Decimal '"-22.22" '"4" '"2") ) $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (block '( (let $5 (Decimal '"-22.22" '"4" '"2")) (return (OptionalIf (== (Member $4 '"col_decimal_precision4_scale2") $5) $4)) )))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (block '( (let $5 (Decimal '"-22.22" '"4" '"2")) (return (OptionalIf (== (Member $4 '"col_decimal_precision4_scale2") $5) $4)) )))) '('('type)))) (return (Commit! 
$3 $2)) ) discovered cluster name: test_clusterDescribe table for: ``test_cluster`.`test_table``Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (block '( (let $6 (Decimal '"-22.22" '"4" '"2")) (return (OptionalIf (== (Member $5 '"col_decimal_precision4_scale2") $6) $5)) )))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (block '( (let $6 (Decimal '"-22.22" '"4" '"2")) (return (OptionalIf (== (Member $5 '"col_decimal_precision4_scale2") $6) $5)) )))) '('('type)))) (return (Commit! $3 $2)) ) Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (block '( (let $6 (Decimal '"-22.22" '"4" '"2")) (return (OptionalIf (== (Member $5 '"col_decimal_precision4_scale2") $6) $5)) )))) '('('type)))) (return (Commit! $3 $2)) ) Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (block '( (let $6 (Decimal '"-22.22" '"4" '"2")) (return (OptionalIf (== (Member $5 '"col_decimal_precision4_scale2") $6) $5)) )))) '('('type)))) (return (Commit! $3 $2)) ) PhysicalOptimizer-TrimReadWorldPush filter lambda: ( (return (lambda '($1) (block '( (let $2 (Decimal '"-22.22" '"4" '"2")) (return (== (Member $1 '"col_decimal_precision4_scale2") $2)) )))) ) PhysicalOptimizer-PushFilterToReadTableExpr: ( (let $1 (DataSink '"result")) (let $2 '"col_decimal_precision4_scale2") (let $3 (Decimal '"-22.22" '"4" '"2")) (let $4 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) (== (Member $5 $2) $3)))) (lambda '($6) (OptionalIf (== (Member $6 $2) $3) $6))) '('('type)))) (return (Commit! $4 $1)) ) Expr: ( (let $1 (DataSink '"result")) (let $2 '"col_decimal_precision4_scale2") (let $3 (Decimal '"-22.22" '"4" '"2")) (let $4 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) (== (Member $5 $2) $3)))) (lambda '($6) (OptionalIf (== (Member $6 $2) $3) $6))) '('('type)))) (return (Commit! $4 $1)) ) Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 '"col_decimal_precision4_scale2") (let $3 (Decimal '"-22.22" '"4" '"2")) (let $4 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) (== (Member $5 $2) $3)))) (lambda '($6) (OptionalIf (== (Member $6 $2) $3) $6))) '('('type)))) (return (Commit! $4 $1)) ) Push filter. Lambda is already not emptyOptimized expr: ( (let $1 (DataSink '"result")) (let $2 '"col_decimal_precision4_scale2") (let $3 (Decimal '"-22.22" '"4" '"2")) (let $4 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! 
world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) (== (Member $5 $2) $3)))) (lambda '($6) (OptionalIf (== (Member $6 $2) $3) $6))) '('('type)))) (return (Commit! $4 $1)) ) Filling source settings: cluster: test_cluster, table: test_table, endpoint: host: "host" port: 42BuildGenericDqSourceSettingsBuilt settings: ( (let $1 (DataSink '"result")) (let $2 '('"col_bool" '"col_date" '"col_datetime" '"col_decimal_precision10_scale0" '"col_decimal_precision4_scale2" '"col_double" '"col_dynumber" '"col_float" '"col_int16" '"col_int32" '"col_int64" '"col_int8" '"col_interval" '"col_json" '"col_json_document" '"col_optional_bool" '"col_optional_date" '"col_optional_datetime" '"col_optional_double" '"col_optional_dynumber" '"col_optional_float" '"col_optional_int16" '"col_optional_int32" '"col_optional_int64" '"col_optional_int8" '"col_optional_interval" '"col_optional_json" '"col_optional_json_document" '"col_optional_string" '"col_optional_timestamp" '"col_optional_tz_date" '"col_optional_tz_datetime" '"col_optional_tz_timestamp" '"col_optional_uint16" '"col_optional_uint32" '"col_optional_uint64" '"col_optional_uint8" '"col_optional_utf8" '"col_optional_uuid" '"col_optional_yson" '"col_string" '"col_timestamp" '"col_tz_date" '"col_tz_datetime" '"col_tz_timestamp" '"col_uint16" '"col_uint32" '"col_uint64" '"col_uint8" '"col_utf8" '"col_uuid" '"col_yson")) (let $3 '"col_decimal_precision4_scale2") (let $4 (Decimal '"-22.22" '"4" '"2")) (let $5 (GenSourceSettings world '"test_cluster" '"test_table" (SecureParam '"cluster:default_test_cluster") $2 (lambda '($36) (== (Member $36 $3) $4)))) (let $6 (DataType 'Bool)) (let $7 (DataType 'Date)) (let $8 (DataType 'Datetime)) (let $9 (DataType 'Decimal '10 '0)) (let $10 (DataType 'Decimal '4 '2)) (let $11 (DataType 'Double)) (let $12 (DataType 'DyNumber)) (let $13 (DataType 'Float)) (let $14 (DataType 'Int16)) (let $15 (DataType 'Int32)) (let $16 (DataType 'Int64)) (let $17 (DataType 'Int8)) (let $18 (DataType 'Interval)) (let $19 (DataType 'Json)) (let $20 (DataType 'JsonDocument)) (let $21 (DataType 'String)) (let $22 (DataType 'Timestamp)) (let $23 (DataType 'TzDate)) (let $24 (DataType 'TzDatetime)) (let $25 (DataType 'TzTimestamp)) (let $26 (DataType 'Uint16)) (let $27 (DataType 'Uint32)) (let $28 (DataType 'Uint64)) (let $29 (DataType 'Uint8)) (let $30 (DataType 'Utf8)) (let $31 (DataType 'Uuid)) (let $32 (DataType 'Yson)) (let $33 (StructType '('"col_bool" $6) '('"col_date" $7) '('"col_datetime" $8) '('"col_decimal_precision10_scale0" $9) '('"col_decimal_precision4_scale2" $10) '('"col_double" $11) '('"col_dynumber" $12) '('"col_float" $13) '('"col_int16" $14) '('"col_int32" $15) '('"col_int64" $16) '('"col_int8" $17) '('"col_interval" $18) '('"col_json" $19) '('"col_json_document" $20) '('"col_optional_bool" (OptionalType $6)) '('"col_optional_date" (OptionalType $7)) '('"col_optional_datetime" (OptionalType $8)) '('"col_optional_double" (OptionalType $11)) '('"col_optional_dynumber" (OptionalType $12)) '('"col_optional_float" (OptionalType $13)) '('"col_optional_int16" (OptionalType $14)) '('"col_optional_int32" (OptionalType $15)) '('"col_optional_int64" (OptionalType $16)) '('"col_optional_int8" (OptionalType $17)) '('"col_optional_interval" (OptionalType $18)) '('"col_optional_json" (OptionalType $19)) '('"col_optional_json_document" (OptionalType $20)) '('"col_optional_string" (OptionalType $21)) '('"col_optional_timestamp" (OptionalType $22)) '('"col_optional_tz_date" (OptionalType $23)) 
'('"col_optional_tz_datetime" (OptionalType $24)) '('"col_optional_tz_timestamp" (OptionalType $25)) '('"col_optional_uint16" (OptionalType $26)) '('"col_optional_uint32" (OptionalType $27)) '('"col_optional_uint64" (OptionalType $28)) '('"col_optional_uint8" (OptionalType $29)) '('"col_optional_utf8" (OptionalType $30)) '('"col_optional_uuid" (OptionalType $31)) '('"col_optional_yson" (OptionalType $32)) '('"col_string" $21) '('"col_timestamp" $22) '('"col_tz_date" $23) '('"col_tz_datetime" $24) '('"col_tz_timestamp" $25) '('"col_uint16" $26) '('"col_uint32" $27) '('"col_uint64" $28) '('"col_uint8" $29) '('"col_utf8" $30) '('"col_uuid" $31) '('"col_yson" $32))) (let $34 (DqSourceWrap $5 (DataSource '"generic" '"test_cluster") $33)) (let $35 (ResWrite! world $1 (Key) (FlatMap $34 (lambda '($37) (OptionalIf (== (Member $37 $3) $4) $37))) '('('type)))) (return (Commit! $35 $1)) ) Dq source filter settings: filter_typed { comparison { operation: EQ left_value { column: "col_decimal_precision4_scale2" } right_value { typed_value { type { decimal_type { precision: 4 scale: 2 } } value { bytes_value: "R\367\377\377\377\377\377\377\377\377\377\377\377\377\377\377" } } } } } |89.0%| [TS] {BAZEL_UPLOAD} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest |89.0%| [TS] {RESULT} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest >> ClosedIntervalSet::Union [GOOD] >> ClosedIntervalSet::Difference >> TBlobStorageIngressMatrix::VectorTestBitwiseAnd [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement1 [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitsBefore2 [GOOD] >> TBlobStorageIngress::Ingress [GOOD] >> TBlobStorageIngress::IngressCacheMirror3 [GOOD] >> TBlobStorageIngress::IngressCache4Plus2 [GOOD] >> TBlobStorageIngress::BarrierIngressQuorumBasicMirror3_4_2 [GOOD] >> TBlobStorageIngress::BarrierIngressQuorumBasic4Plus2_8_1 [GOOD] >> TBlobStorageIngress::BarrierIngressQuorumMirror3 [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator1 [GOOD] >> TIntervalSetTest::IntervalMapUnionInplace [GOOD] >> TIntervalSetTest::IntervalMapIntersection >> TBlobStorageIngressMatrix::VectorTestIterator2 [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalVecIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalVecDifference >> TBlobStorageIngressMatrix::VectorTestMinus [GOOD] >> TBlobStorageIngressMatrix::VectorTestIterator3 [GOOD] |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |89.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |89.0%| [LD] {RESULT} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |89.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/config/init/ut/unittest >> XdsBootstrapConfig::CanUseNodeIdFromYamlConfig [GOOD] >> TCowBTreeTest::SnapshotRollback [GOOD] >> TCowBTreeTest::SnapshotRollbackEarlyErase >> TBlobStorageIngressMatrix::ShiftedMainBitVec [GOOD] >> TBlobStorageIngressMatrix::VectorTest [GOOD] >> TBlobStorageIngressMatrix::VectorTestBitsBefore1 [GOOD] >> TBlobStorageIngress::IngressPartsWeMustHaveLocally [GOOD] >> TBlobStorageIngress::IngressLocalParts [GOOD] >> TBlobStorageIngress::IngressPrintDistribution [GOOD] >> TBlobStorageIngressMatrix::MatrixTest [GOOD] >> TBlobStorageIngressMatrix::ShiftedBitVecBase [GOOD] >> TBlobStorageIngressMatrix::ShiftedHandoffBitVec [GOOD] >> TBlobStorageIngressMatrix::VectorTestEmpty [GOOD] >> 
TBlobStorageIngressMatrix::VectorTestBitwiseComplement2 [GOOD] |89.0%| [TS] {BAZEL_UPLOAD} ydb/core/config/init/ut/unittest |89.0%| [TS] {RESULT} ydb/core/config/init/ut/unittest >> TBlobStorageIngress::IngressCreateFromRepl [GOOD] >> TBlobStorageIngress::IngressGetMainReplica [GOOD] >> TBlobStorageIngress::IngressHandoffPartsDelete [GOOD] >> TDqHashCombineTest::TestBlockModeNoInput |89.0%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/sys_view/ut_common.cpp |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressCache4Plus2 [GOOD] >> TIntervalSetTest::IntervalVecDifference [GOOD] >> TIntervalSetTest::IntervalSetDifference >> TCowBTreeTest::RandomInsertInplace [GOOD] >> TCowBTreeTest::RandomInsertThreadSafe >> TDqHashCombineTest::TestBlockModeNoInput [GOOD] >> TDqHashCombineTest::TestBlockModeSingleRow >> TBTreeTest::RandomInsertInplace [GOOD] >> TBTreeTest::RandomInsertThreadSafe |89.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitsBefore2 [GOOD] |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/ingress/ut/unittest |89.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut >> TDqHashCombineTest::TestBlockModeSingleRow [GOOD] >> TDqHashCombineTest::TestBlockModeMultiBlocks |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/ingress/ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::BarrierIngressQuorumMirror3 [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestIterator2 [GOOD] |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TIntervalSetTest::IntervalMapIntersection [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplace |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/ingress/ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressPrintDistribution [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestIterator3 [GOOD] |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/ingress/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/ingress/ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngress::IngressHandoffPartsDelete [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitsBefore1 [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::ShiftedHandoffBitVec [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/ingress/ut/unittest >> TBlobStorageIngressMatrix::VectorTestBitwiseComplement2 [GOOD] >> ActionParsingTest::ToAndFromStringAreConsistent [GOOD] >> ActionParsingTest::ActionsForQueueTest [GOOD] >> ActionParsingTest::BatchActionTest [GOOD] >> ActionParsingTest::ActionsForMessageTest [GOOD] >> ActionParsingTest::FastActionsTest [GOOD] >> HttpCountersTest::CountersAggregationTest [GOOD] >> LazyCounterTest::LazyCounterTest 
[GOOD] >> LazyCounterTest::AggregationLazyTest [GOOD] >> LazyCounterTest::AggregationNonLazyTest [GOOD] >> LazyCounterTest::HistogramAggregationTest [GOOD] >> MessageAttributeValidationTest::MessageAttributeValidationTest [GOOD] >> MessageBodyValidationTest::MessageBodyValidationTest [GOOD] >> MeteringCountersTest::CountersAggregationTest [GOOD] >> NameValidationTest::NameValidationTest [GOOD] >> QueueAttributes::BasicStdTest [GOOD] >> QueueAttributes::BasicFifoTest [GOOD] >> QueueAttributes::BasicClampTest [GOOD] >> QueueCountersTest::InsertCountersTest [GOOD] >> QueueCountersTest::RemoveQueueCountersNonLeaderWithoutFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithoutFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersNonLeaderWithFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithFolderTest >> TIntervalSetTest::IntervalSetDifference [GOOD] >> TIntervalSetTest::IntervalVecDifferenceInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetDifferenceInplaceSelf [GOOD] >> TIntervalSetTest::IntervalSetTestIterator [GOOD] |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/ingress/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/ingress/ut/unittest |89.0%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/ingress/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/ingress/ut/unittest |89.1%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp >> QueueCountersTest::RemoveQueueCountersLeaderWithFolderTest [GOOD] >> QueueCountersTest::CountersAggregationTest [GOOD] >> QueueCountersTest::CountersAggregationCloudTest [GOOD] >> RedrivePolicy::RedrivePolicyValidationTest [GOOD] >> RedrivePolicy::RedrivePolicyToJsonTest [GOOD] >> RedrivePolicy::RedrivePolicyArnValidationTest [GOOD] >> SecureProtobufPrinterTest::MessageBody [GOOD] >> SecureProtobufPrinterTest::Tokens [GOOD] >> StringValidationTest::IsAlphaNumAndPunctuationTest [GOOD] >> UserCountersTest::DisableCountersTest [GOOD] >> UserCountersTest::RemoveUserCountersTest [GOOD] >> UserCountersTest::CountersAggregationTest [GOOD] |89.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |89.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut >> TDqHashCombineTest::TestBlockModeMultiBlocks [GOOD] >> TDqHashCombineTest::TestWideModeNoInput [GOOD] >> TDqHashCombineTest::TestWideModeSingleRow >> TDqHashCombineTest::TestWideModeSingleRow [GOOD] >> TDqHashCombineTest::TestWideModeMultiRows >> TIntervalSetTest::IntervalMapIntersectionInplace [GOOD] >> TIntervalSetTest::IntervalMapIntersectionInplaceSelf [GOOD] >> TIntervalSetTest::IntervalMapDifference |89.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/base/ut/unittest >> UserCountersTest::CountersAggregationTest [GOOD] |89.1%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/base/ut/unittest |89.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |89.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TS] {RESULT} ydb/core/ymq/base/ut/unittest |89.1%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |89.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp >> integrations_test.py::test_read_jtest_results[o/OK] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed1] [GOOD] >> TCowBTreeTest::SnapshotRollbackEarlyErase [GOOD] >> TCowBTreeTest::ShouldCallDtorsInplace [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed2] [GOOD] >> TCowBTreeTest::ShouldCallDtorsThreadSafe >> integrations_test.py::test_read_jtest_results[f/error1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped2] [GOOD] >> integrations_test.py::test_read_jtest_with_one_result [GOOD] >> TCowBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TEventPriorityQueueTest::TestPriority [GOOD] >> TFastTlsTest::IterationAfterThreadDeath |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalSetTestIterator [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/util/ut/unittest |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut/ydb-core-base-ut >> TDqHashCombineTest::TestWideModeMultiRows [GOOD] >> TDqHashJoinBasicTest::TestBasicPassthrough-BlockJoin >> TFastTlsTest::IterationAfterThreadDeath [GOOD] >> TFastTlsTest::ManyThreadLocals [GOOD] >> TFastTlsTest::ManyConcurrentKeys |89.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut/ydb-core-base-ut |89.1%| [LD] {RESULT} $(B)/ydb/core/base/ut/ydb-core-base-ut >> TDqHashJoinBasicTest::TestBasicPassthrough-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestBasicPassthrough+BlockJoin >> TDqHashJoinBasicTest::TestBasicPassthrough+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestCrossPassthrough-BlockJoin >> TIntervalSetTest::IntervalMapDifference [GOOD] >> TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] |89.1%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp >> TDqHashJoinBasicTest::TestCrossPassthrough-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestCrossPassthrough+BlockJoin |89.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/oltp_workload >> TDqHashJoinBasicTest::TestCrossPassthrough+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestMixedKeysPassthrough-BlockJoin >> TFastTlsTest::ManyConcurrentKeys [GOOD] >> TFifoQueueTest::ShouldPushPop [GOOD] >> TFragmentedBufferTest::TestWriteRead [GOOD] >> TFragmentedBufferTest::TestOverwriteRead [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead2 [GOOD] >> TFragmentedBufferTest::TestIntersectedWriteRead3 [GOOD] >> TFragmentedBufferTest::Test3WriteRead [GOOD] >> TFragmentedBufferTest::Test5WriteRead [GOOD] >> TFragmentedBufferTest::TestIsNotMonolith [GOOD] >> 
TFragmentedBufferTest::TestGetMonolith [GOOD] >> TFragmentedBufferTest::TestSetMonolith [GOOD] >> TFragmentedBufferTest::TestReplaceWithSetMonolith [GOOD] >> TFragmentedBufferTest::CopyFrom [GOOD] >> TFragmentedBufferTest::ReadWriteRandom >> TLockFreeIntrusiveStackTest::ConcurrentRefCountNeverEmpty [GOOD] >> TLogPriorityMuteTests::MuteUntilTest [GOOD] >> TLogPriorityMuteTests::AtomicMuteUntilTest [GOOD] >> TLogPriorityMuteTests::UnmuteTest [GOOD] >> TLogPriorityMuteTests::AtomicUnmuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteTest [GOOD] >> TLogPriorityMuteTests::CheckPriorityWithSetMuteDurationTest [GOOD] >> TLogPriorityMuteTests::AtomicCheckPriorityWithSetMuteDurationTest [GOOD] >> TOneOneQueueTests::TestSimpleEnqueueDequeue [GOOD] >> TOneOneQueueTests::CleanInDestructor [GOOD] >> TOneOneQueueTests::DeleteInDestructor [GOOD] >> TOneOneQueueTests::ReadIterator [GOOD] >> TPageMapTest::TestResize [GOOD] >> TPageMapTest::TestRandom >> TDqHashJoinBasicTest::TestMixedKeysPassthrough-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestMixedKeysPassthrough+BlockJoin |89.1%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |89.1%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api >> TBlobStorageHullHugeChain::HeapAllocSmall [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllFromOneChunk [GOOD] >> TDqHashJoinBasicTest::TestMixedKeysPassthrough+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestEmptyFlows-BlockJoin >> TBlobStorageHullHugeChain::AllocFreeAllocTest [GOOD] >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] >> TBlobStorageHullHugeHeap::BorderValues [GOOD] >> TBlobStorageHullHugeHeap::LockChunks [GOOD] >> THugeHeapCtxTests::Basic [GOOD] >> TopTest::Test1 [GOOD] >> TBlobStorageHullHugeChain::HeapAllocLargeStandard [GOOD] >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard [GOOD] |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |89.1%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import ------- [TS] {asan, default-linux-x86_64, release} ydb/tests/postgres_integrations/library/ut/py3test >> integrations_test.py::test_read_jtest_with_one_result [GOOD] Test command err: /home/runner/.ya/build/build_root/8ptz/006276/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:26: PytestCollectionWarning: cannot collect test class 'TestCase' because it has a __init__ constructor (from: integrations_test.py) /home/runner/.ya/build/build_root/8ptz/006276/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:20: PytestCollectionWarning: cannot collect test class 'TestState' because it has a __init__ constructor (from: integrations_test.py) |89.1%| [TS] {BAZEL_UPLOAD} ydb/tests/postgres_integrations/library/ut/py3test >> TDqHashJoinBasicTest::TestEmptyFlows-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestEmptyFlows+BlockJoin >> TBlobStorageHullHugeHeap::AllocateAllReleaseAll [GOOD] >> 
TBlobStorageHullHugeHeap::AllocateAllSerializeDeserializeReleaseAll [GOOD] >> TBlobStorageHullHugeKeeperPersState::SerializeParse [GOOD] >> TChainLayoutBuilder::TestBucketsV2 [GOOD] >> TopTest::Test2 [GOOD] >> TDqHashJoinBasicTest::TestEmptyFlows+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestEmptyLeft-BlockJoin |89.1%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/oltp_workload |89.1%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/oltp_workload |89.1%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/py3test >> TBlobStorageHullHugeHeap::RecoveryMode [GOOD] >> TBlobStorageHullHugeHeap::WriteRestore [GOOD] >> TDqHashJoinBasicTest::TestEmptyLeft-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestEmptyLeft+BlockJoin >> TChainLayoutBuilder::TestProdConf [GOOD] >> TChainLayoutBuilder::TestMilestoneId [GOOD] |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |89.1%| [LD] {RESULT} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf >> TDqHashJoinBasicTest::TestEmptyLeft+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestEmptyRight-BlockJoin |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ydb-tests-olap >> TDqHashJoinBasicTest::TestEmptyRight-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestEmptyRight+BlockJoin >> TBTreeTest::RandomInsertThreadSafe [GOOD] >> TBTreeTest::DuplicateKeysInplace |89.1%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllFromOneChunk [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test1 [GOOD] >> TDqHashJoinBasicTest::TestEmptyRight+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestInnerRenamesKind-BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestInnerRenamesKind+BlockJoin |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::LockChunks [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllSerializeDeserializeReleaseAll [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::WriteRestore [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TLockFreeIntrusiveStackTest::ConcurrentRefCountHeavyContention [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty >> TDqHashJoinBasicTest::TestInnerRenamesKind+BlockJoin [GOOD] >> TDqHashJoinBasicTest::TestBlockSpilling >> TLogoBlobIdHashTest::SimpleTestWithDifferentTabletId [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentSteps [GOOD] >> TLogoBlobIdHashTest::SimpleTestWithDifferentChannel [GOOD] >> TLogoBlobTest::LogoBlobParse [GOOD] >> TLogoBlobTest::LogoBlobCompare >> TMemoryStatsAggregator::Aggregate_Empty [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/huge/ut/unittest |89.1%| [TM] 
{BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/huge/ut/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test2 [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TChainLayoutBuilder::TestMilestoneId [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/huge/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/huge/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/huge/ut/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TChainLayoutBuilder::TestBucketsV2 [GOOD] |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive >> TMemoryStatsAggregator::Aggregate_Single [GOOD] >> NFulltext::ValidateColumnsMatches [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_ExternalConsumption_DifferentHosts [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_DifferentHosts [GOOD] >> TMemoryStatsAggregator::Aggregate_Summarize_ExternalConsumption_OneHost [GOOD] >> TMemoryStatsAggregator::Aggregate_ExternalConsumption_CollidingHosts [GOOD] >> NFulltext::ValidateSettings [GOOD] >> NFulltext::FillSetting [GOOD] >> NFulltext::FillSettingInvalid [GOOD] >> NKMeans::ValidateSettings >> TGuardianImpl::FollowerTracker [GOOD] >> TGuardianImpl::FollowerTrackerDuplicates [GOOD] >> TLocalDbTest::BackupTaskNameChangedAtLoadTime [GOOD] >> TLogoBlobIdHashTest::SimpleTest [GOOD] >> TLogoBlobIdHashTest::SimpleTestPartIdDoesNotMatter [GOOD] >> TLogoBlobIdHashTest::SimpleTestBlobSizeDoesNotMatter [GOOD] >> TLogoBlobTest::LogoBlobCompare [GOOD] >> TLogoBlobTest::LogoBlobSort [GOOD] >> NKMeans::ValidateSettings [GOOD] >> Path::CanonizeOld [GOOD] >> Path::CanonizeFast [GOOD] |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TIntervalSetTest::IntervalMapDifferenceInplaceSelf [GOOD] |89.1%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |89.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |89.1%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/huge/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/huge/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TMemoryStatsAggregator::Aggregate_Summarize_NoExternalConsumption_OneHost [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TMemoryStatsAggregator::Compaction_Single [GOOD] >> TStateStorageConfig::SameConfigurationTest [GOOD] >> TStateStorageConfig::Tablet72075186224040026Test [GOOD] >> TStateStorageConfig::NonDuplicatedNodesTest >> TBlobStorageGroupTypeTest::TestCorrectLayout [GOOD] >> TBlobStorageGroupTypeTest::OutputInfoAboutErasureSpecies [GOOD] >> Path::Name_RussianAlphabet_SetLocale_C [GOOD] >> Path::Name_RussianAlphabet_SetLocale_C_UTF8 [GOOD] >> Path::Name_WeirdLocale_RegularName [GOOD] >> Path::Name_WeirdLocale_WeirdName [GOOD] >> NFulltext::Analyze [GOOD] >> NFulltext::AnalyzeRu [GOOD] >> NFulltext::AnalyzeInvalid [GOOD] >> NFulltext::AnalyzeFilterLength 
[GOOD] >> NFulltext::AnalyzeFilterLengthRu [GOOD] >> NFulltext::AnalyzeFilterNgram [GOOD] >> NFulltext::AnalyzeFilterSnowball [GOOD] >> TBTreeTest::DuplicateKeysInplace [GOOD] >> TBTreeTest::DuplicateKeysThreadSafe >> TableIndex::CompatibleSecondaryIndex [GOOD] >> TableIndex::NotCompatibleSecondaryIndex [GOOD] >> TableIndex::CompatibleVectorIndex [GOOD] >> TableIndex::NotCompatibleVectorIndex [GOOD] >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame3 >> TStateStorageConfig::TestReplicaSelection |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/util/ut/unittest >> Path::CanonizedStringIsSame1 [GOOD] >> Path::CanonizedStringIsSame2 [GOOD] >> Path::Name_EnglishAlphabet [GOOD] >> Path::Name_RussianAlphabet [GOOD] >> Path::Name_AllSymbols [GOOD] >> Path::Name_ExtraSymbols [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] >> TBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TBTreeTest::ShouldCallDtorsInplace [GOOD] >> TBTreeTest::ShouldCallDtorsThreadSafe |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TLogoBlobIdHashTest::SimpleTestBlobSizeDoesNotMatter [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> Path::CanonizeFast [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut/unittest >> TBTreeTest::ShouldCallDtorsThreadSafe [GOOD] >> TBTreeTest::Concurrent |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TLogoBlobTest::LogoBlobSort [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TMemoryStatsAggregator::Aggregate_ExternalConsumption_CollidingHosts [GOOD] Test command err: AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 
AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 ExternalConsumption: 306 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 80 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 
63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 65 MemAvailable: 85 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 145 SoftLimit: 165 TargetUtilization: 185 ExternalConsumption: 194 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> Path::Name_WeirdLocale_WeirdName [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> NFulltext::AnalyzeFilterSnowball [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut/unittest |89.1%| [TA] $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut/unittest |89.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> Path::Name_ExtraSymbols [GOOD] |89.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.2%| [LD] {RESULT} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut/unittest |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut >> TCowBTreeTest::RandomInsertThreadSafe [GOOD] >> TCowBTreeTest::MultipleSnapshots |89.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut >> TBTreeTest::Concurrent [GOOD] >> TBTreeTest::IteratorDestructor [GOOD] >> TCacheTest::EnsureNoLeakAfterLruCacheDtor [GOOD] >> CompressionTest::lz4_generator_basic [GOOD] >> CompressionTest::lz4_generator_deflates [GOOD] >> FastLookupUniqueList::Stress >> TDqHashJoinBasicTest::TestBlockSpilling [GOOD] |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |89.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/driver/nemesis |89.2%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/driver/nemesis |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/driver/nemesis |89.2%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/nemesis |89.2%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/nemesis >> AuthDatabaseAdmin::PassOnOwnerMatchGroupSid [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSid [GOOD] >> AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSid [GOOD] >> AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthTokenAllowed::FailOnListAndNoToken [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndEmptyToken [GOOD] >> 
AuthDatabaseAdmin::FailOnEmptyOwnerAndTokenWithEmptyUserSid [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndNoToken [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchUserSid [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchUserSidWithGroup [GOOD] >> AuthTokenAllowed::FailOnListAndEmptyToken [GOOD] >> AuthTokenAllowed::PassOnListMatchUserSidWithGroup [GOOD] >> AuthTokenAllowed::PassOnListMatchUserSid [GOOD] |89.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthTokenAllowed::PassOnListMatchGroupSid [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSid [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndEmptyToken [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthTokenAllowed::FailOnListMatchGroupSid [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndToken [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndEmptyToken [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndNoToken [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/comp_nodes/ut/unittest >> TDqHashJoinBasicTest::TestBlockSpilling [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndInvalidTokenSerialized [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndNoToken [GOOD] |89.2%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/comp_nodes/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/dstool/ydb-dstool |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSid [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::FailOnListMatchGroupSid [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnEmptyOwnerAndNoToken [GOOD] |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/dstool/ydb-dstool |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::FailOnListAndEmptyToken [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::FailOnListAndNoToken [GOOD] >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] >> TQueryResultSizeTrackerTest::CheckAll [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_auth/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_auth/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_auth/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_auth/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne >> TDelayedResponsesTests::Test [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnListMatchGroupSid [GOOD] |89.2%| [TM] {RESULT} ydb/library/yql/dq/comp_nodes/ut/unittest |89.2%| [LD] {RESULT} $(B)/ydb/apps/dstool/ydb-dstool |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndToken [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnOwnerAndNoToken [GOOD] |89.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_auth/unittest |89.2%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnListMatchUserSid [GOOD] >> TBlobStorageHullCompactDeferredQueueTest::Basic |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndNoToken [GOOD] |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_auth/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_auth/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_auth/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TFragmentedBufferTest::ReadWriteRandom [GOOD] |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |89.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/query/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> ReadBatcher::Range |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_auth/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckAll [GOOD] |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_auth/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/query/ut/unittest >> ReadBatcher::ReadBatcher |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/query/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |89.2%| [CC] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/query/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/query/ut/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/query/ut/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/query/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TDelayedResponsesTests::Test [GOOD] |89.2%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dq/service_node/service_node |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |89.2%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/service_node/service_node |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/service_node/service_node 
|89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hullop/ut/unittest |89.2%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hullop/ut/unittest |89.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] [GOOD] |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] |89.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hullop/ut/unittest |89.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |89.2%| [LD] {RESULT} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hullop/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame3 [GOOD] >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame4 [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TFragmentedBufferTest::ReadWriteRandom [GOOD] |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut >> TQueueBackpressureTest::CreateDelete [GOOD] >> TBlobStorageQueueTest::TMessageLost [GOOD] |89.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame4 [GOOD] >> TQueueBackpressureTest::PerfTrivial |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/util/ut/unittest |89.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut/unittest >> TQueueBackpressureTest::IncorrectMessageId [GOOD] >> FastLookupUniqueList::Stress [GOOD] >> StLog::Basic [GOOD] >> TQueueBackpressureTest::PerfInFlight |89.2%| [TA] $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::CreateDelete [GOOD] |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/knn/ydb-tests-datashard-knn |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/knn/ydb-tests-datashard-knn |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TBlobStorageQueueTest::TMessageLost [GOOD] |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |89.3%| [LD] {RESULT} $(B)/ydb/tests/datashard/knn/ydb-tests-datashard-knn |89.3%| [LD] {RESULT} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/backpressure/ut/unittest >> Dictionary::Simple [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoNeverEmpty [GOOD] >> Dictionary::ComparePayloadAndFull >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention |89.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... results_accumulator.log} |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::IncorrectMessageId [GOOD] >> ReadBatcher::ReadBatcher [GOOD] >> TBlobStorageHullSstIt::TestSeekToFirst [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndPrev [GOOD] >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex >> TBlobStorageHullOrderedSstsIt::TestSeekToFirst [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekToLast [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndNext [GOOD] >> TBlobStorageHullSstIt::TestSeekBefore [GOOD] >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [TA] {RESULT} $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageHullSstIt::TestSstIndexSeekAndIterate [GOOD] >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexWithSmallWriteBlocks [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound [GOOD] >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] >> TBlobStorageHullSstIt::TestSeekToLast [GOOD] >> TBlobStorageHullSstIt::TestSstIndexSaveLoad [GOOD] |89.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> StLog::Basic [GOOD] Test command err: Producer 0 worked for 0.1129838611 seconds Producer 1 worked for 0.1671682616 seconds Consumer 0 worked for 0.4226291784 seconds Consumer 1 worked for 0.4266261249 seconds Consumer 2 worked for 0.5249284798 seconds Consumer 3 worked for 0.4568536472 seconds |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::ReadBatcher [GOOD] >> TFreshAppendixTest::IterateForwardAll [GOOD] >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] >> TBlobStorageHullFresh::SimpleBackwardEnd [GOOD] >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times [GOOD] >> TQueueBackpressureTest::PerfTrivial [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/util/ut/unittest >> TFreshAppendixTest::IterateBackwardAll [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSstIndexSaveLoad [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound 
[GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] >> TBlobStorageHullFresh::SimpleBackWardEnd2Times [GOOD] >> TBlobStorageHullFresh::Perf >> TBlobStorageHullFresh::SimpleForward [GOOD] >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] >> TBlobStorageHullFresh::AppendixPerf >> TSTreeTest::Basic [GOOD] >> TBlobStorageHullFreshSegment::PerfAppendix |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TSVecTest::Basic [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hullop/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullFresh::SolomonStandCrash [GOOD] >> TBlobStorageHullFreshSegment::IteratorTest >> TCowBTreeTest::MultipleSnapshots [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithGc >> TFreshAppendixTest::IterateForwardIncluding [GOOD] >> TFreshAppendixTest::IterateForwardExcluding [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfTrivial [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/backpressure/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateForwardExcluding [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TSVecTest::Basic [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] >> Dictionary::ComparePayloadAndFull [GOOD] >> Hash::ScalarBinaryHash [GOOD] >> Hash::ScalarCTypeHash [GOOD] >> Hash::ScalarCompositeHash [GOOD] >> ProgramStep::Round0 [GOOD] >> ProgramStep::Round1 [GOOD] >> ProgramStep::Filter [GOOD] >> ProgramStep::Add |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/local_cluster/local_cluster >> ProgramStep::Add 
[GOOD] >> ProgramStep::Substract [GOOD] >> ProgramStep::Multiply [GOOD] >> ProgramStep::Divide [GOOD] >> ProgramStep::Gcd [GOOD] >> ProgramStep::Lcm [GOOD] >> ProgramStep::Mod [GOOD] >> ProgramStep::ModOrZero [GOOD] >> ProgramStep::Abs [GOOD] >> ProgramStep::Negate [GOOD] >> ProgramStep::Compares [GOOD] >> ProgramStep::Logic0 [GOOD] >> ProgramStep::Logic1 [GOOD] >> ProgramStep::StartsWith [GOOD] >> ProgramStep::EndsWith [GOOD] >> ProgramStep::MatchSubstring [GOOD] >> ProgramStep::StartsWithIgnoreCase [GOOD] >> ProgramStep::EndsWithIgnoreCase [GOOD] >> ProgramStep::MatchSubstringIgnoreCase [GOOD] >> ProgramStep::ScalarTest [GOOD] >> ProgramStep::TestValueFromNull [GOOD] >> ProgramStep::MergeFilterSimple |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/local_cluster/local_cluster |89.3%| [LD] {RESULT} $(B)/ydb/tests/tools/local_cluster/local_cluster |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |89.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/services/ydb/ydb_table_ut.cpp |89.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |89.3%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |89.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |89.3%| [LD] {RESULT} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table >> ProgramStep::MergeFilterSimple [GOOD] >> ProgramStep::Projection [GOOD] >> ProgramStep::MinMax [GOOD] >> ProgramStep::Sum [GOOD] >> ProgramStep::SumGroupBy [GOOD] >> ProgramStep::SumGroupByNotNull [GOOD] >> ProgramStep::MinMaxSomeGroupBy [GOOD] >> ProgramStep::MinMaxSomeGroupByNotNull [GOOD] >> Slicer::SplitBySizes [GOOD] >> SortableBatchPosition::FindPosition [GOOD] >> SortableBatchPosition::MergingSortedInputStreamReversedWithOneSearchPoint [GOOD] >> SortableBatchPosition::MergingSortedInputStreamReversedWithRangeSearch [GOOD] |89.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |89.3%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPageMapTest::TestRandom [GOOD] >> TPageMapTest::TestIntrusive [GOOD] >> TPageMapTest::TestSimplePointer [GOOD] >> TPageMapTest::TestSharedPointer [GOOD] >> TPageMapTest::TestSimplePointerFull |89.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TPageMapTest::TestSimplePointerFull [GOOD] >> TPriorityOperationQueueTest::ShouldStartEmpty [GOOD] >> TPriorityOperationQueueTest::ShouldNotStartUntilStart [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriority [GOOD] >> TPriorityOperationQueueTest::ShouldStartByPriorityWithRemove [GOOD] >> TPriorityOperationQueueTest::ShouldReturnExecTimeWhenUpdateRunningPriority [GOOD] >> TBlobStorageHullFresh::Perf [GOOD] |89.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] [GOOD] >> TQueueBackpressureTest::PerfInFlight [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |89.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut ------- [TS] {asan, default-linux-x86_64, release} ydb/core/formats/arrow/ut/unittest >> SortableBatchPosition::MergingSortedInputStreamReversedWithRangeSearch [GOOD] Test command err: Process: 100000d;/100000; 10000d;/10000; NO_CODEC(poolsize=1024;keylen=1) 0.2021203448 0.2210911404 NO_CODEC(poolsize=1024;keylen=10) 0.1534132783 0.2482180533 NO_CODEC(poolsize=1024;keylen=16) 0.1104676508 0.2045372848 NO_CODEC(poolsize=1024;keylen=32) 0.06592569055 0.1591802296 NO_CODEC(poolsize=1024;keylen=64) 0.03972180035 0.1324717476 NO_CODEC(poolsize=128;keylen=1) 0.2016566193 0.2164784476 NO_CODEC(poolsize=128;keylen=10) 0.07304169975 0.08752922393 NO_CODEC(poolsize=128;keylen=16) 0.05151637558 0.06514358749 NO_CODEC(poolsize=128;keylen=32) 0.02919093319 0.04189888314 NO_CODEC(poolsize=128;keylen=64) 0.01605694811 0.02821124922 NO_CODEC(poolsize=16;keylen=1) 0.2010010074 0.2099570542 NO_CODEC(poolsize=16;keylen=10) 0.0719219365 0.07635285397 NO_CODEC(poolsize=16;keylen=16) 0.05039654131 0.05396013899 NO_CODEC(poolsize=16;keylen=32) 0.02807102527 0.03070808446 NO_CODEC(poolsize=16;keylen=64) 0.01493699686 0.01701612239 NO_CODEC(poolsize=1;keylen=1) 0.2008730831 0.2086845872 NO_CODEC(poolsize=1;keylen=10) 0.07177339648 0.07487027428 NO_CODEC(poolsize=1;keylen=16) 0.0502445638 0.05244238527 NO_CODEC(poolsize=1;keylen=32) 0.02791992658 0.0291982148 NO_CODEC(poolsize=1;keylen=64) 0.01478641518 0.01551089526 NO_CODEC(poolsize=512;keylen=1) 0.2021203448 0.2210911404 NO_CODEC(poolsize=512;keylen=10) 0.1482943606 0.1971260763 NO_CODEC(poolsize=512;keylen=16) 0.1053484084 0.1534129488 NO_CODEC(poolsize=512;keylen=32) 0.0608061115 0.1080222928 NO_CODEC(poolsize=512;keylen=64) 0.03460202321 0.08129402495 NO_CODEC(poolsize=64;keylen=1) 0.2013687897 0.2136153969 NO_CODEC(poolsize=64;keylen=10) 0.07240183504 0.08114272681 NO_CODEC(poolsize=64;keylen=16) 0.05087647028 0.05875304549 NO_CODEC(poolsize=64;keylen=32) 0.02855098581 0.03550414104 NO_CODEC(poolsize=64;keylen=64) 0.01541697597 0.02181403389 lz4(poolsize=1024;keylen=1) 0.006629768257 0.05541610349 lz4(poolsize=1024;keylen=10) 0.04233951498 0.3344832994 lz4(poolsize=1024;keylen=16) 0.05657489465 0.404264214 lz4(poolsize=1024;keylen=32) 0.09037137941 0.5318074361 lz4(poolsize=1024;keylen=64) 0.01074936154 0.1063492063 lz4(poolsize=128;keylen=1) 
0.003831111821 0.02881389382 lz4(poolsize=128;keylen=10) 0.00718182175 0.06087121933 lz4(poolsize=128;keylen=16) 0.008735936466 0.07523964551 lz4(poolsize=128;keylen=32) 0.01375268158 0.117441454 lz4(poolsize=128;keylen=64) 0.02262360212 0.1850289108 lz4(poolsize=16;keylen=1) 0.00273442178 0.01820340324 lz4(poolsize=16;keylen=10) 0.003078137332 0.02169239789 lz4(poolsize=16;keylen=16) 0.003266503667 0.02356577168 lz4(poolsize=16;keylen=32) 0.003742685614 0.02844311377 lz4(poolsize=16;keylen=64) 0.004937163375 0.03979647465 lz4(poolsize=1;keylen=1) 0.00251497006 0.01603325416 lz4(poolsize=1;keylen=10) 0.002531395234 0.01628089447 lz4(poolsize=1;keylen=16) 0.002515970516 0.01617933723 lz4(poolsize=1;keylen=32) 0.00251450677 0.01630226314 lz4(poolsize=1;keylen=64) 0.002511620933 0.01653353149 lz4(poolsize=512;keylen=1) 0.005362411291 0.04359726295 lz4(poolsize=512;keylen=10) 0.02347472854 0.1933066062 lz4(poolsize=512;keylen=16) 0.03056053336 0.2426853056 lz4(poolsize=512;keylen=32) 0.04856356058 0.3467897492 lz4(poolsize=512;keylen=64) 0.04102771881 0.3228658321 lz4(poolsize=64;keylen=1) 0.003312844256 0.02372010279 lz4(poolsize=64;keylen=10) 0.004839661617 0.03863241259 lz4(poolsize=64;keylen=16) 0.005715507689 0.04687204687 lz4(poolsize=64;keylen=32) 0.007821957352 0.06669044223 lz4(poolsize=64;keylen=64) 0.01258912656 0.1073551894 zstd(poolsize=1024;keylen=1) 0.007324840764 0.0754840827 zstd(poolsize=1024;keylen=10) 0.04506846012 0.3776978417 zstd(poolsize=1024;keylen=16) 0.0655640205 0.4694540288 zstd(poolsize=1024;keylen=32) 0.1110720087 0.6098141264 zstd(poolsize=1024;keylen=64) 0.1914108287 0.7447345433 zstd(poolsize=128;keylen=1) 0.003769847609 0.04002713704 zstd(poolsize=128;keylen=10) 0.007456731695 0.07809798271 zstd(poolsize=128;keylen=16) 0.0102539786 0.1029455519 zstd(poolsize=128;keylen=32) 0.01677217062 0.1578947368 zstd(poolsize=128;keylen=64) 0.03005940945 0.2517949988 zstd(poolsize=16;keylen=1) 0.002620896858 0.02794819359 zstd(poolsize=16;keylen=10) 0.002816201441 0.03048416019 zstd(poolsize=16;keylen=16) 0.003368308096 0.03570300158 zstd(poolsize=16;keylen=32) 0.004159808469 0.0434375 zstd(poolsize=16;keylen=64) 0.005779996974 0.05875115349 zstd(poolsize=1;keylen=1) 0.002461243407 0.02626193724 zstd(poolsize=1;keylen=10) 0.002154636612 0.0234375 zstd(poolsize=1;keylen=16) 0.002356872222 0.02519132653 zstd(poolsize=1;keylen=32) 0.002427911996 0.02573879886 zstd(poolsize=1;keylen=64) 0.00258021431 0.02699269609 zstd(poolsize=512;keylen=1) 0.005583027596 0.05848930481 zstd(poolsize=512;keylen=10) 0.0236929438 0.2237078941 zstd(poolsize=512;keylen=16) 0.03443366072 0.2936507937 zstd(poolsize=512;keylen=32) 0.05917328099 0.4212765957 zstd(poolsize=512;keylen=64) 0.1058929843 0.5749553837 zstd(poolsize=64;keylen=1) 0.00319560285 0.03401360544 zstd(poolsize=64;keylen=10) 0.004852093844 0.05176470588 zstd(poolsize=64;keylen=16) 0.00633344236 0.06557881773 zstd(poolsize=64;keylen=32) 0.009647738439 0.09619952494 zstd(poolsize=64;keylen=64) 0.01626771323 0.1514644351 NO_CODEC --1000 ----1 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----16 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----64 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----128 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----512 ------1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----1024 ------1 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% --10000 ---- ... 4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"1,2\",\"p\":{\"options\":[\"{3(Min):[1]}\",\"{4(Min):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[1]},\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"1,2\",\"p\":{\"options\":[\"{3(Max):[1]}\",\"{4(Max):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[1]},\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"1,2\",\"p\":{\"options\":[\"{3(Some):[1]}\",\"{4(Some):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[1]},\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, 
label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=272;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=248;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=248;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:141;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"2222"},{"name":"value","value":"8888"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"2222"}],"fields":["id1: int32 not null"]},"reverse":true,"position":1,"records_count":2}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:157;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not 
null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"1111"},{"name":"value","value":"8888"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":2}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:157;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"2222"},{"name":"value","value":"7777"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"2222"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:141;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"3333"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"4444"},{"name":"value","value":"8888"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"4444"}],"fields":["id1: int32 not null"]},"reverse":true,"position":3,"records_count":4}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:157;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"3333"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"3333"},{"name":"value","value":"8888"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"3333"}],"fields":["id1: int32 not null"]},"reverse":true,"position":2,"records_count":4}; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=merger.cpp:157;pos={"sorting":{"sorting_columns":[{"name":"id1","value":"3333"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":1};heap={"data":{"sorting_columns":[{"name":"id1","value":"1111"},{"name":"value","value":"7777"}],"fields":["id1: int32 not null","value: int32 not null"]},"sorting":{"sorting_columns":[{"name":"id1","value":"1111"}],"fields":["id1: int32 not null"]},"reverse":true,"position":0,"records_count":2}; |89.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::Perf [GOOD] |89.3%| [LD] {RESULT} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |89.4%| [TS] {BAZEL_UPLOAD} ydb/core/formats/arrow/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest |89.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut/unittest >> TQueueBackpressureTest::PerfInFlight [GOOD] |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |89.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/backpressure/ut/unittest |89.4%| [TS] {RESULT} ydb/core/formats/arrow/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TPriorityOperationQueueTest::ShouldReturnExecTimeWhenUpdateRunningPriority [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/util/ut/unittest |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/tools/decrypt/decrypt |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |89.4%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |89.4%| [LD] {RESULT} $(B)/ydb/core/backup/tools/decrypt/decrypt |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |89.4%| [LD] {RESULT} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf >> SysViewQueryHistory::TopDurationAdd [GOOD] >> SysViewQueryHistory::ScanQueryHistoryMerge [GOOD] >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] >> SysViewQueryHistory::AggrMerge [GOOD] >> SysViewQueryHistory::StableMerge [GOOD] >> SysViewQueryHistory::AddDedupRandom >> SysViewQueryHistory::StableMerge2 [GOOD] >> SysViewQueryHistory::ServiceQueryHistoryAdd [GOOD] >> SysViewQueryHistory::TopReadBytesAdd [GOOD] >> SysViewQueryHistory::AddDedupRandom [GOOD] >> SysViewQueryHistory::AddDedup [GOOD] >> SysViewQueryHistory::AddDedup2 [GOOD] >> TBlobStorageBarriersTreeTest::Tree [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> SysViewQueryHistory::AggrMergeDedup [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AggrMerge [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::ScanQueryHistoryMerge [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TA] 
$(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::StableMerge [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::TopDurationAdd [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::StableMerge2 [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::ServiceQueryHistoryAdd [GOOD] >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AggrMergeDedup [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AddDedup2 [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::TopReadBytesAdd [GOOD] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/service/ut/unittest >> SysViewQueryHistory::AddDedupRandom [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/service/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::Tree [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/service/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageCompStrat::Test1 |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/service/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/service/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/service/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/service/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/backpressure/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/service/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/service/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/service/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/service/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TLockFreeIntrusiveStackTest::ConcurrentAutoHeavyContention [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TCowBTreeTest::MultipleSnapshotsWithGc [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClear >> TBlobStorageCompStrat::Test1 [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/util/ut/unittest |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |89.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |89.4%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |89.4%| [TA] $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TStateStorageConfig::NonDuplicatedNodesTest [GOOD] >> TStateStorageConfig::DuplicatedNodesTest |89.4%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TBlobStorageCompStrat::Test1 [GOOD] |89.4%| [TA] {RESULT} $(B)/ydb/core/sys_view/service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |89.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |89.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests |89.5%| [LD] {RESULT} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests >> TFlatDatabasePgTest::BasicTypes >> TFlatDatabasePgTest::BasicTypes [GOOD] |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TS] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming |89.5%| [LD] {RESULT} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TS] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut_pg/unittest >> TBlobStorageSyncLogData::SerializeParseEmpty1_Proto [GOOD] >> TBlobStorageSyncLogData::SerializeParseEmpty2_Proto [GOOD] >> CodecsTest::Basic [GOOD] >> CodecsTest::NaturalNumbersAndZero [GOOD] >> CodecsTest::LargeAndRepeated [GOOD] >> NaiveFragmentWriterTest::Basic [GOOD] >> RunLengthCodec::Random32 |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TS] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TS] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TS] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut_pg/unittest >> SemiSortedDeltaCodec::Random32 >> VarLengthIntCodec::BasicTest64 [GOOD] >> TBlobStorageSyncLogDsk::AddByOne [GOOD] >> VarLengthIntCodec::Random32 >> TBlobStorageSyncLogDsk::AddFive [GOOD] >> TBlobStorageSyncLogDsk::ComplicatedSerializeWithOverlapping [GOOD] >> TBlobStorageSyncLogDsk::DeleteChunks [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClear [GOOD] >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc >> SemiSortedDeltaAndVarLengthCodec::Random32 >> RunLengthCodec::Random32 [GOOD] >> RunLengthCodec::Random64 |89.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut_pg/unittest >> TFlatDatabasePgTest::BasicTypes [GOOD] |89.5%| [TS] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TS] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TS] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut_pg/unittest >> SemiSortedDeltaCodec::Random32 [GOOD] >> TBlobStorageSyncLogKeeper::CutLog_EntryPointNewFormat [GOOD] >> TBlobStorageSyncLogMem::EmptyMemRecLog [GOOD] >> TBlobStorageSyncLogMem::FilledIn1 [GOOD] >> TBlobStorageSyncLogMem::EmptyMemRecLogPutAfterSnapshot [GOOD] >> SemiSortedDeltaCodec::Random64 [GOOD] >> TBlobStorageSyncLogDsk::SeveralChunks [GOOD] >> TBlobStorageSyncLogDsk::OverlappingPages_OnePageIndexed [GOOD] >> TBlobStorageSyncLogDsk::OverlappingPages_SeveralPagesIndexed [GOOD] >> TBlobStorageSyncLogDsk::TrimLog [GOOD] >> VarLengthIntCodec::Random32 [GOOD] >> VarLengthIntCodec::Random64 >> RunLengthCodec::Random64 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::BasicTest32 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::BasicTest64 [GOOD] >> NaiveFragmentWriterTest::Long >> ReadBatcher::Range [GOOD] >> TBlobStorageSyncLogMem::FilledIn1PutAfterSnapshot [GOOD] >> TBlobStorageSyncLogMem::ManyLogoBlobsPerf >> VarLengthIntCodec::Random64 [GOOD] |89.5%| [TS] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut_pg/unittest |89.5%| [TS] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut_pg/unittest >> SemiSortedDeltaAndVarLengthCodec::Random32 [GOOD] >> SemiSortedDeltaAndVarLengthCodec::Random64 >> TBlobStorageHullFreshSegment::PerfAppendix [GOOD] >> TBlobStorageHullFreshSegment::PerfSkipList |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> NaiveFragmentWriterTest::Basic [GOOD] >> SemiSortedDeltaAndVarLengthCodec::Random64 [GOOD] >> SemiSortedDeltaCodec::BasicTest32 [GOOD] >> SemiSortedDeltaCodec::BasicTest64 [GOOD] >> NaiveFragmentWriterTest::Long [GOOD] >> ReorderCodecTest::Basic [GOOD] >> RunLengthCodec::BasicTest32 [GOOD] >> RunLengthCodec::BasicTest64 [GOOD] |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |89.5%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogDsk::DeleteChunks [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/synclog/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaAndVarLengthCodec::BasicTest64 [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/synclog/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogMem::EmptyMemRecLogPutAfterSnapshot [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaCodec::Random64 [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> VarLengthIntCodec::Random64 [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> TBlobStorageSyncLogDsk::TrimLog [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/synclog/ut/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/synclog/ut/unittest |89.5%| [TM] 
{BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/synclog/ut/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/synclog/ut/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/synclog/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> SemiSortedDeltaCodec::BasicTest64 [GOOD] |89.5%| [TA] $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageHullCompactDeferredQueueTest::Basic [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/synclog/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> RunLengthCodec::BasicTest64 [GOOD] |89.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/synclog/ut/unittest |89.5%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut_pg/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> ReadBatcher::Range [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hullop/ut/unittest |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/compatibility/streaming/ydb-tests-compatibility-streaming |89.5%| [LD] {RESULT} $(B)/ydb/tests/compatibility/streaming/ydb-tests-compatibility-streaming |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/compatibility/streaming/ydb-tests-compatibility-streaming |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |89.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |89.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |89.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/backup/tools/decrypt/decrypt |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |89.5%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |89.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/security/ydb-tests-functional-security |89.5%| [LD] {RESULT} $(B)/ydb/tests/functional/security/ydb-tests-functional-security |89.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/security/ydb-tests-functional-security ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TBlobStorageHullCompactDeferredQueueTest::Basic [GOOD] Test command err: STEP 1 STEP 2 StringToId# 63 numItems# 110271 >> TCowBTreeTest::MultipleSnapshotsWithClearWithGc [GOOD] >> TCowBTreeTest::DuplicateKeysInplace |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hullop/ut/unittest >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] |89.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut >> TCowBTreeTest::DuplicateKeysInplace [GOOD] >> TCowBTreeTest::DuplicateKeysThreadSafe |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |89.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut >> Mirror3of4::ReplicationHuge [GOOD] |89.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] [GOOD] |89.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] |89.5%| [AR] {RESULT} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |89.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/blobsan/blobsan |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] |89.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [LD] {RESULT} $(B)/ydb/tools/blobsan/blobsan |89.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageSyncLogMem::ManyLogoBlobsPerf [GOOD] >> TBlobStorageSyncLogMem::ManyLogoBlobsBuildSwapSnapshot [GOOD] >> VarLengthIntCodec::BasicTest32 [GOOD] |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |89.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |89.6%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |89.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_mirror3of4/unittest >> Mirror3of4::ReplicationHuge [GOOD] Test command err: 2025-12-23T18:23:05.465808Z 1 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:0:0]: (0) SKELETON START Marker# BSVS37 2025-12-23T18:23:05.466100Z 2 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:1:0]: (0) SKELETON START Marker# BSVS37 2025-12-23T18:23:05.466254Z 3 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:2:0]: (0) SKELETON START Marker# BSVS37 2025-12-23T18:23:05.466405Z 4 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:3:0]: (0) SKELETON START Marker# BSVS37 2025-12-23T18:23:05.466580Z 5 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:4:0]: (0) SKELETON START Marker# BSVS37 2025-12-23T18:23:05.466787Z 6 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:5:0]: (0) SKELETON START Marker# BSVS37 2025-12-23T18:23:05.466961Z 7 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:6:0]: (0) SKELETON START Marker# BSVS37 2025-12-23T18:23:05.467122Z 8 00h00m00.000000s :BS_SKELETON INFO: PDiskId# 1 VDISK[0:_:0:7:0]: (0) SKELETON START Marker# BSVS37 2025-12-23T18:23:05.467565Z 1 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery START 2025-12-23T18:23:05.467661Z 1 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:0:0]: (0) Sending TEvYardInit: pdiskGuid# 
3901202759931496043 skeletonid# [1:139:13] selfid# [1:155:22] delay 0.000000 sec 2025-12-23T18:23:05.467727Z 2 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:1:0]: (0) LocalRecovery START 2025-12-23T18:23:05.467764Z 2 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) Sending TEvYardInit: pdiskGuid# 10817611948347073339 skeletonid# [2:140:11] selfid# [2:156:12] delay 0.000000 sec 2025-12-23T18:23:05.467802Z 3 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:2:0]: (0) LocalRecovery START 2025-12-23T18:23:05.467839Z 3 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) Sending TEvYardInit: pdiskGuid# 262525593318849325 skeletonid# [3:141:11] selfid# [3:157:12] delay 0.000000 sec 2025-12-23T18:23:05.467870Z 4 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:3:0]: (0) LocalRecovery START 2025-12-23T18:23:05.467905Z 4 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:3:0]: (0) Sending TEvYardInit: pdiskGuid# 2859280866125988918 skeletonid# [4:142:11] selfid# [4:158:12] delay 0.000000 sec 2025-12-23T18:23:05.467937Z 5 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:4:0]: (0) LocalRecovery START 2025-12-23T18:23:05.467993Z 5 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:4:0]: (0) Sending TEvYardInit: pdiskGuid# 381843792507398368 skeletonid# [5:143:11] selfid# [5:159:12] delay 0.000000 sec 2025-12-23T18:23:05.468051Z 6 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:5:0]: (0) LocalRecovery START 2025-12-23T18:23:05.468087Z 6 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) Sending TEvYardInit: pdiskGuid# 12179568241364008638 skeletonid# [6:144:11] selfid# [6:160:12] delay 0.000000 sec 2025-12-23T18:23:05.468118Z 7 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:6:0]: (0) LocalRecovery START 2025-12-23T18:23:05.468165Z 7 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:6:0]: (0) Sending TEvYardInit: pdiskGuid# 14077462363762599708 skeletonid# [7:145:11] selfid# [7:161:12] delay 0.000000 sec 2025-12-23T18:23:05.468204Z 8 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:7:0]: (0) LocalRecovery START 2025-12-23T18:23:05.468251Z 8 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: PDiskId# 1 VDISK[0:_:0:7:0]: (0) Sending TEvYardInit: pdiskGuid# 6653596026819848730 skeletonid# [8:146:11] selfid# [8:162:12] delay 0.000000 sec 2025-12-23T18:23:05.468612Z 1 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:463} PDiskMock[1:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:0:0] PDiskGuid# 3901202759931496043 CutLogID# [1:139:13] WhiteboardProxyId# [1:122:10] SlotId# 0 GroupSizeInUnits# 0} 2025-12-23T18:23:05.469327Z 1 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:500} PDiskMock[1:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {}} Created# true 2025-12-23T18:23:05.469429Z 2 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:463} PDiskMock[2:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# 
[0:4294967295:0:1:0] PDiskGuid# 10817611948347073339 CutLogID# [2:140:11] WhiteboardProxyId# [2:124:10] SlotId# 0 GroupSizeInUnits# 0} 2025-12-23T18:23:05.469485Z 2 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:500} PDiskMock[2:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {}} Created# true 2025-12-23T18:23:05.469539Z 3 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:463} PDiskMock[3:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:2:0] PDiskGuid# 262525593318849325 CutLogID# [3:141:11] WhiteboardProxyId# [3:126:10] SlotId# 0 GroupSizeInUnits# 0} 2025-12-23T18:23:05.469621Z 3 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:500} PDiskMock[3:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {}} Created# true 2025-12-23T18:23:05.469681Z 4 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:463} PDiskMock[4:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:3:0] PDiskGuid# 2859280866125988918 CutLogID# [4:142:11] WhiteboardProxyId# [4:128:10] SlotId# 0 GroupSizeInUnits# 0} 2025-12-23T18:23:05.469744Z 4 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:500} PDiskMock[4:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {}} Created# true 2025-12-23T18:23:05.469814Z 5 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:463} PDiskMock[5:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:4:0] PDiskGuid# 381843792507398368 CutLogID# [5:143:11] WhiteboardProxyId# [5:130:10] SlotId# 0 GroupSizeInUnits# 0} 2025-12-23T18:23:05.469870Z 5 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:500} PDiskMock[5:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {}} Created# true 2025-12-23T18:23:05.469928Z 6 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:463} PDiskMock[6:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:5:0] PDiskGuid# 
12179568241364008638 CutLogID# [6:144:11] WhiteboardProxyId# [6:132:10] SlotId# 0 GroupSizeInUnits# 0} 2025-12-23T18:23:05.469984Z 6 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:500} PDiskMock[6:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {}} Created# true 2025-12-23T18:23:05.470030Z 7 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:463} PDiskMock[7:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:6:0] PDiskGuid# 14077462363762599708 CutLogID# [7:145:11] WhiteboardProxyId# [7:134:10] SlotId# 0 GroupSizeInUnits# 0} 2025-12-23T18:23:05.470083Z 7 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:500} PDiskMock[7:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {}} Created# true 2025-12-23T18:23:05.470147Z 8 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:463} PDiskMock[8:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:7:0] PDiskGuid# 6653596026819848730 CutLogID# [8:146:11] WhiteboardProxyId# [8:136:10] SlotId# 0 GroupSizeInUnits# 0} 2025-12-23T18:23:05.470194Z 8 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:500} PDiskMock[8:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 SlotSizeInUnits# 0 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943 IsTinyDisk# 0}} OwnedChunks# {}} Created# true 2025-12-23T18:23:05.471860Z 1 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:0:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-12-23T18:23:05.472917Z 2 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:1:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-12-23T18:23:05.473941Z 3 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:2:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-12-23T18:23:05.474620Z 4 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: PDiskId# 1 VDISK[0:_:0:3:0]: (0) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# 
[ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet ... g Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 581 Lsn# 25 LsnSegmentStart# 25 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:7:0] 2025-12-23T18:23:45.563171Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:671} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 21 Cookie# 9}{Lsn# 22 Cookie# 10}{Lsn# 23 Cookie# 11}{Lsn# 24 Cookie# 12}} Recipient# [8:146:11] 2025-12-23T18:23:45.563194Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:671} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 25 Cookie# 0}} Recipient# [8:363:30] 2025-12-23T18:23:45.565702Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:609} PDiskMock[7:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 583 Lsn# 26 LsnSegmentStart# 26 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:6:0] 2025-12-23T18:23:45.565749Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:671} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 26 Cookie# 0}} Recipient# [7:353:30] 2025-12-23T18:23:45.565823Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:609} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 581 Lsn# 26 LsnSegmentStart# 26 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:7:0] 2025-12-23T18:23:45.565852Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:671} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 26 Cookie# 0}} Recipient# [8:363:30] 2025-12-23T18:23:45.566069Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-12-23T18:23:45.566302Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:609} PDiskMock[7:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 583 Lsn# 27 LsnSegmentStart# 27 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:6:0] 2025-12-23T18:23:45.566336Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:671} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 27 Cookie# 0}} Recipient# [7:353:30] 2025-12-23T18:23:45.566373Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) GLUEREAD(0x7cbbed53d8c0): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 
137284019206208} 2025-12-23T18:23:45.566419Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:609} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 581 Lsn# 27 LsnSegmentStart# 27 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:7:0] 2025-12-23T18:23:45.566449Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:671} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 27 Cookie# 0}} Recipient# [8:363:30] 2025-12-23T18:23:45.566505Z 2 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:761} PDiskMock[2:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 137284019206208} VDiskId# [0:4294967295:0:1:0] 2025-12-23T18:23:45.567297Z 2 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:814} PDiskMock[2:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 137284019206208 StatusFlags# None} 2025-12-23T18:23:45.567417Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) GLUEREAD FINISHED(0x7cbbed53d8c0): actualReadN# 1 origReadN# 1 2025-12-23T18:23:45.567653Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:1:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:2] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 1369701526376808448} BlockedGeneration# 0} 2025-12-23T18:23:45.570445Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-12-23T18:23:45.571041Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) GLUEREAD(0x7cbbed3f4f40): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 137284019265600} 2025-12-23T18:23:45.571308Z 3 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:761} PDiskMock[3:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 137284019265600} VDiskId# [0:4294967295:0:2:0] 2025-12-23T18:23:45.572142Z 3 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:814} PDiskMock[3:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 137284019265600 StatusFlags# None} 2025-12-23T18:23:45.572239Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) GLUEREAD FINISHED(0x7cbbed3f4f40): actualReadN# 1 origReadN# 1 2025-12-23T18:23:45.572305Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:2:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:1] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 2522623030983655424} BlockedGeneration# 0} 2025-12-23T18:23:45.574184Z 4 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:3:0]: (0) TEvVGet: 
{ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-12-23T18:23:45.574346Z 4 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:3:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 793240774073384960} BlockedGeneration# 0} 2025-12-23T18:23:45.574910Z 5 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:4:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-12-23T18:23:45.575032Z 5 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:4:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 793240774073384960} BlockedGeneration# 0} 2025-12-23T18:23:45.575505Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-12-23T18:23:45.575640Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) GLUEREAD(0x7cbbed5abb40): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 137284019084864} 2025-12-23T18:23:45.575691Z 6 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:761} PDiskMock[6:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 137284019084864} VDiskId# [0:4294967295:0:5:0] 2025-12-23T18:23:45.576466Z 6 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:814} PDiskMock[6:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 137284019084864 StatusFlags# None} 2025-12-23T18:23:45.576518Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) GLUEREAD FINISHED(0x7cbbed5abb40): actualReadN# 1 origReadN# 1 2025-12-23T18:23:45.576596Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:5:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:2] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 1946162278680231936} {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 1946162278680231936} BlockedGeneration# 0} 2025-12-23T18:23:45.578328Z 7 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:6:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} 
{MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-12-23T18:23:45.578448Z 7 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:6:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:0] NODATA Ingress# 216780021769961472} BlockedGeneration# 0} 2025-12-23T18:23:45.578924Z 8 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:7:0]: (0) TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-12-23T18:23:45.579029Z 8 00h00m00.000000s :BS_VDISK_GET DEBUG: PDiskId# 1 VDISK[0:_:0:7:0]: (0) TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:0] NODATA Ingress# 216780021769961472} BlockedGeneration# 0} >> TCowBTreeTest::DuplicateKeysThreadSafe [GOOD] >> TCowBTreeTest::IteratorDestructor [GOOD] >> TCowBTreeTest::Concurrent >> TCowBTreeTest::Concurrent [GOOD] |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk >> TCowBTreeTest::Alignment [GOOD] |89.6%| [TM] {RESULT} ydb/core/blobstorage/ut_mirror3of4/unittest |89.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/synclog/ut/unittest >> VarLengthIntCodec::BasicTest32 [GOOD] |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/synclog/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_mirror3of4/unittest |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |89.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |89.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/compatibility/s3_backups/ydb-tests-compatibility-s3_backups |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/compatibility/s3_backups/ydb-tests-compatibility-s3_backups |89.6%| [LD] {RESULT} $(B)/ydb/tests/compatibility/s3_backups/ydb-tests-compatibility-s3_backups |89.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |89.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/synclog/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |89.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |89.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/util/ut/unittest >> TCowBTreeTest::Alignment [GOOD] Test command err: Producer 0 worked for 0.1708110806 seconds Producer 1 worked for 0.1366132933 seconds Consumer 0 worked for 0.1199811917 seconds on a snapshot of size 20000 Consumer 1 worked for 0.1709676981 seconds on a snapshot of size 40000 Consumer 2 worked for 0.2312991598 seconds on a snapshot of size 60000 Consumer 3 worked for 0.2630422367 seconds on a snapshot of size 80000 Consumers had 1199917 successful seeks >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh >> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh >> TBsVDiskExtremeHuge::Simple3Put3GetFresh |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/util/ut/unittest >> TBsVDiskExtremeHandoffHuge::SimpleHndPut1SeqGetFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh >> TBsVDiskBadBlobId::PutBlobWithBadId >> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh >> TBsVDiskRepl1::ReplProxyKeepBits >> TBsVDiskRepl3::SyncLogTest >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh >> TBsVDiskExtreme::Simple3Put3GetFresh >> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh >> TBsLocalRecovery::WriteRestartReadHuge >> TBsVDiskOutOfSpace::WriteUntilOrangeZone [GOOD] >> TBsVDiskOutOfSpace::WriteUntilYellowZone >> TBsDbStat::ChaoticParallelWrite_DbStat >> TBsVDiskGC::GCPutKeepIntoEmptyDB >> TBsVDiskManyPutGet::ManyPutGetWaitCompaction >> TBsLocalRecovery::StartStopNotEmptyDB >> TBsVDiskExtreme::SimpleGetFromEmptyDB >> TBsVDiskGC::TGCManyVPutsDelTabletTest |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |89.6%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |89.6%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds >> TBlobStorageReplRecoveryMachine::BasicFunctionality >> TBlobStorageBlocksCacheTest::Repeat [GOOD] >> HullReplWriteSst::Basic >> TBlobStorageBlocksCacheTest::LegacyAndModern [GOOD] >> TBlobStorageBlocksCacheTest::PutDeepIntoPast [GOOD] >> TBlobStorageBlocksCacheTest::PutIntoPast [GOOD] >> TStateStorageConfig::DuplicatedNodesTest [GOOD] |89.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageBlocksCacheTest::MultipleTables [GOOD] >> TBlobStorageBlocksCacheTest::DeepInFlight [GOOD] |89.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump] |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] |89.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::Repeat [GOOD] |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::PutDeepIntoPast [GOOD] |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::PutIntoPast [GOOD] |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::MultipleTables [GOOD] |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::LegacyAndModern [GOOD] |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBlobStorageBlocksCacheTest::DeepInFlight [GOOD] |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/repl/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TM] 
{BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TA] $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBsVDiskBadBlobId::PutBlobWithBadId [GOOD] >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath >> TBsVDiskGC::GCPutKeepIntoEmptyDB [GOOD] >> TBsVDiskGC::GCPutBarrierVDisk0NoSync >> TBsVDiskExtreme::SimpleGetFromEmptyDB [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetFresh >> TBsVDiskExtreme::Simple3Put3GetFresh [GOOD] >> TBsVDiskExtreme::Simple3Put3GetCompaction >> TBsVDiskExtremeHuge::Simple3Put3GetFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction >> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHndPut1SeqGetFresh [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction >> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction |89.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/repl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TStateStorageConfig::DuplicatedNodesTest [GOOD] Test command err: AnonRss: 11 CGroupLimit: 21 MemTotal: 31 MemAvailable: 41 AllocatedMemory: 51 AllocatorCachesMemory: 61 HardLimit: 71 SoftLimit: 81 TargetUtilization: 91 ExternalConsumption: 101 SharedCacheConsumption: 111 SharedCacheLimit: 121 MemTableConsumption: 131 MemTableLimit: 141 QueryExecutionConsumption: 151 QueryExecutionLimit: 161 AnonRss: 12 CGroupLimit: 22 MemTotal: 32 MemAvailable: 42 AllocatedMemory: 52 AllocatorCachesMemory: 62 HardLimit: 72 SoftLimit: 82 TargetUtilization: 92 ExternalConsumption: 102 SharedCacheConsumption: 112 SharedCacheLimit: 122 MemTableConsumption: 132 MemTableLimit: 142 QueryExecutionConsumption: 152 QueryExecutionLimit: 162 AnonRss: 13 CGroupLimit: 23 MemTotal: 33 MemAvailable: 43 AllocatedMemory: 53 AllocatorCachesMemory: 63 HardLimit: 73 SoftLimit: 83 TargetUtilization: 93 ExternalConsumption: 103 SharedCacheConsumption: 113 SharedCacheLimit: 123 MemTableConsumption: 133 MemTableLimit: 143 QueryExecutionConsumption: 153 QueryExecutionLimit: 163 AnonRss: 36 CGroupLimit: 66 MemTotal: 96 MemAvailable: 126 AllocatedMemory: 156 AllocatorCachesMemory: 186 HardLimit: 216 SoftLimit: 246 TargetUtilization: 276 SharedCacheConsumption: 336 SharedCacheLimit: 366 MemTableConsumption: 396 MemTableLimit: 426 QueryExecutionConsumption: 456 QueryExecutionLimit: 486 |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |89.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |89.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |89.6%| [LD] {RESULT} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |89.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/ydb_serializable/replay/replay >> TBsVDiskManyPutGet::ManyPutGetWaitCompaction [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut/unittest |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics >> TBsVDiskRepl3::SyncLogTest [GOOD] >> THugeMigration::ExtendMap_HugeBlobs |89.6%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |89.7%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |89.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 >> ParseStats::ParseWithSources [GOOD] >> ParseStats::ParseJustOutput [GOOD] >> ParseStats::ParseMultipleGraphsV1 [GOOD] >> ParseStats::ParseMultipleGraphsV2 [GOOD] |89.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 >> TBsVDiskRepl1::ReplProxyKeepBits [GOOD] >> TBsVDiskRepl2::ReplEraseDiskRestoreWOOneDisk |89.7%| [LD] {RESULT} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest >> ParseStats::ParseMultipleGraphsV2 [GOOD] >> TBsVDiskGC::TGCManyVPutsDelTabletTest [GOOD] >> TBsVDiskManyPutGet::ManyPutGet |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |89.7%| [LD] {RESULT} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath [GOOD] >> TBsVDiskDefrag::DefragEmptyDB >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetFresh [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetCompaction |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest |89.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRepl1::ReplProxyData >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh |89.7%| [TM] {RESULT} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/test_connection/ut/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh >> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction >> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh >> TBsVDiskGC::GCPutBarrierVDisk0NoSync [GOOD] >> TBsVDiskGC::GCPutBarrierSync >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly >> TBsVDiskExtreme::Simple3Put3GetCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh |89.7%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/test_connection/ut/unittest >> VDiskTest::HugeBlobRecompaction >> VDiskTest::HugeBlobWrite |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> TopicNameConverterTest::Paths [GOOD] >> TopicNameConverterTest::PathFromDiscoveryConverter [GOOD] >> TopicNameConverterForCPTest::BadLegacyTopics [GOOD] >> TopicNameConverterForCPTest::BadModernTopics [GOOD] >> DiscoveryConverterTest::FullLegacyNames [GOOD] >> DiscoveryConverterTest::FullLegacyPath [GOOD] >> DiscoveryConverterTest::FullLegacyNamesWithRootDatabase [GOOD] >> DiscoveryConverterTest::DiscoveryConverter [GOOD] >> DiscoveryConverterTest::EmptyModern [GOOD] >> DiscoveryConverterTest::FirstClass [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk2/unittest >> DiscoveryConverterTest::AccountDatabase [GOOD] >> DiscoveryConverterTest::CmWay [GOOD] >> TopicNameConverterForCPTest::CorrectLegacyTopics [GOOD] >> TopicNameConverterForCPTest::CorrectModernTopics [GOOD] >> TopicNameConverterTest::LegacyStyleDoubleName [GOOD] >> TopicNameConverterTest::LegacyStyle [GOOD] >> TopicNameConverterTest::FirstClass [GOOD] >> TopicNameConverterTest::NoTopicName [GOOD] >> DiscoveryConverterTest::MinimalName [GOOD] >> DiscoveryConverterTest::WithLogbrokerPath [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump] [GOOD] >> TBsVDiskDefrag::DefragEmptyDB [GOOD] >> TBsVDiskDefrag::Defrag50PercentGarbage >> TVDiskConfigTest::RtmrProblem1 [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario >> TVDiskConfigTest::RtmrProblem2 [GOOD] >> TVDiskConfigTest::ThreeLevels [GOOD] >> TResizableCircleBufTest::Test1 [GOOD] >> TResizableCircleBufTest::Test2 [GOOD] >> TTrackable::TBuffer [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump_ds_init] |89.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |89.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [TS] {RESULT} ydb/core/fq/libs/test_connection/ut/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterForCPTest::BadModernTopics [GOOD] |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::PathFromDiscoveryConverter [GOOD] |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::FirstClass [GOOD] >> 
TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction >> TBlobStorageSyncNeighborsTest::IterateOverAllDisks [GOOD] >> TBlobStorageSyncNeighborsTest::SerDes [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskIterators [GOOD] >> TCircleBufStringStreamTest::TestAligned [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh [GOOD] |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::EmptyModern [GOOD] |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::FirstClass [GOOD] |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::FullLegacyNamesWithRootDatabase [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction >> THugeMigration::ExtendMap_HugeBlobs [GOOD] >> THugeMigration::ExtendMap_SmallBlobsBecameHuge >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetCompaction [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetFresh |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterForCPTest::CorrectModernTopics [GOOD] |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::CmWay [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::ThreeLevels [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskRepl1::ReplProxyData [GOOD] >> TBsVDiskRepl1::ReplEraseDiskRestore >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> DiscoveryConverterTest::WithLogbrokerPath [GOOD] |89.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/persqueue/topic_parser/ut/unittest >> TopicNameConverterTest::NoTopicName [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction >> ClosedIntervalSet::Difference [GOOD] >> ClosedIntervalSet::Contains |89.7%| [TS] {BAZEL_UPLOAD} ydb/library/persqueue/topic_parser/ut/unittest |89.7%| [TS] {BAZEL_UPLOAD} ydb/library/persqueue/topic_parser/ut/unittest |89.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/blobsan/blobsan |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TBuffer [GOOD] >> ClosedIntervalSet::Contains [GOOD] >> ClosedIntervalSet::EnumInRange >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh >> TPDiskErrorStateTests::Basic2 [GOOD] >> TPDiskErrorStateTests::BasicErrorReason >> TPDiskErrorStateTests::Basic [GOOD] >> VDiskTest::HugeBlobRecompaction [GOOD] >> TCircleBufStringStreamTest::TestNotAligned [GOOD] >> TVDiskConfigTest::JustConfig [GOOD] >> TCircleBufStringStreamTest::TestOverflow [GOOD] >> TCircleBufTest::EmptyTest [GOOD] >> TCircleBufTest::OverflowTest [GOOD] >> TVDiskConfigTest::Basic [GOOD] >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] >> TPDiskErrorStateTests::BasicErrorReason [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse2Threads >> TBlobStorageSyncNeighborsTest::CheckRevLookup [GOOD] >> TBlobStorageSyncNeighborsTest::CheckIsMyDomain [GOOD] >> 
TBlobStorageSyncNeighborsTest::CheckFailDomainsIterators [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] |89.7%| [TS] {BAZEL_UPLOAD} ydb/library/persqueue/topic_parser/ut/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufStringStreamTest::TestAligned [GOOD] |89.7%| [TS] {BAZEL_UPLOAD} ydb/library/persqueue/topic_parser/ut/unittest |89.7%| [TS] {BAZEL_UPLOAD} ydb/library/persqueue/topic_parser/ut/unittest >> TTrackable::TVector [GOOD] >> TTrackable::TList [GOOD] >> TTrackable::TString [GOOD] >> TCircleBufTest::SimpleTest [GOOD] >> TCircleBufTest::PtrTest [GOOD] >> TLsnAllocTrackerTests::Test1 [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse |89.7%| [TS] {BAZEL_UPLOAD} ydb/library/persqueue/topic_parser/ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/common/ut/unittest |89.7%| [TS] {BAZEL_UPLOAD} ydb/library/persqueue/topic_parser/ut/unittest |89.7%| [TS] {BAZEL_UPLOAD} ydb/library/persqueue/topic_parser/ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/common/ut/unittest |89.7%| [TS] {BAZEL_UPLOAD} ydb/library/persqueue/topic_parser/ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/common/ut/unittest >> TBsVDiskManyPutGet::ManyPutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiSinglePutGet |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TPDiskErrorStateTests::BasicErrorReason [GOOD] |89.7%| [TS] {BAZEL_UPLOAD} ydb/library/persqueue/topic_parser/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> VDiskTest::HugeBlobRecompaction [GOOD] Test command err: Change MinHugeBlobSize# 512000 Change MinHugeBlobSize# 307200 |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufTest::OverflowTest [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TString [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/common/ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk2/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/common/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/common/ut/unittest >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetFresh [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetCompaction |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/common/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/common/ut/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/secrets/ydb-tests-functional-secrets >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction >> 
TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh |89.8%| [LD] {RESULT} $(B)/ydb/tests/functional/secrets/ydb-tests-functional-secrets |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/secrets/ydb-tests-functional-secrets >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartFresh |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh >> TBsVDiskGC::GCPutBarrierSync [GOOD] >> TBsVDiskGC::GCPutKeepBarrierSync |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |89.8%| [LD] {RESULT} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning >> THugeMigration::ExtendMap_SmallBlobsBecameHuge [GOOD] >> THugeMigration::RollbackMap_HugeBlobs |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/common/ut/unittest |89.8%| [TA] $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump_ds_init] [GOOD] |89.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ClosedIntervalSet::EnumInRange [GOOD] >> ClosedIntervalSet::EnumInRangeReverse >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh [GOOD] |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init >> TLsnMngrTests::AllocLsnForLocalUse2Threads [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse10Threads >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction >> TBsVDiskExtreme::Simple3Put1GetMissingPartFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetCompaction [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_domains_config[dump_ds_init] [GOOD] |89.8%| [TA] {RESULT} $(B)/ydb/library/persqueue/topic_parser/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh [GOOD] |89.8%| [LD] {RESULT} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init |89.8%| [TA] {RESULT} $(B)/ydb/core/util/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBsVDiskManyPutGet::ManyMultiSinglePutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGet |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/library/yaml_config/ut_transform/py3test |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] |89.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] >> TBsLocalRecovery::WriteRestartReadHuge [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeIncreased >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] >> ClosedIntervalSet::EnumInRangeReverse [GOOD] >> GivenIdRange::IssueNewRange [GOOD] >> GivenIdRange::Trim |89.8%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/query/ut/unittest >> THugeMigration::RollbackMap_HugeBlobs [GOOD] >> TMonitoring::ReregisterTest [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/partcheck/partcheck |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest |89.8%| [LD] {RESULT} $(B)/ydb/tools/partcheck/partcheck >> TBlobStorageGroupInfoBlobMapTest::CheckCorrectBehaviourWithHashOverlow [GOOD] >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction [GOOD] >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 >> TBlobStorageGroupInfoIterTest::IteratorForward [GOOD] >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] >> TBlobStorageGroupInfoIterTest::PerRealmIterator [GOOD] >> TBlobStorageGroupInfoIterTest::WalkFailRealms [GOOD] >> TBlobStorageGroupInfoIterTest::IteratorForwardAndBackward [GOOD] >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/query/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark >> TBlobStorageGroupInfoTest::TestBelongsToSubgroup [GOOD] >> TBlobStorageGroupInfoTest::SubgroupPartLayout >> TBlobStorageGroupInfoTest::GroupQuorumCheckerOrdinary [GOOD] >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] >> GivenIdRange::Trim [GOOD] >> GivenIdRange::Subtract >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 >> TBlobStorageGroupInfoIterTest::Domains [GOOD] >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] |89.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp >> GivenIdRange::Subtract [GOOD] >> GivenIdRange::Points |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TMonitoring::ReregisterTest [GOOD] Test command err: RUN TEST SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::IteratorBackward [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::WalkFailRealms [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/query/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |89.8%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |89.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tools/partcheck/partcheck |89.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/groupinfo/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/groupinfo/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::PerFailDomainRange [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoIterTest::Indexes [GOOD] |89.8%| 
[TM] {BAZEL_UPLOAD} ydb/core/blobstorage/groupinfo/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/groupinfo/ut/unittest >> TBsVDiskManyPutGet::ManyMultiPutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::GroupQuorumCheckerMirror3dc [GOOD] |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |89.8%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test |89.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut >> GivenIdRange::Points [GOOD] >> GivenIdRange::Runs |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/groupinfo/ut/unittest >> GivenIdRange::Runs [GOOD] >> GivenIdRange::Allocate |89.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut >> TLsnMngrTests::AllocLsnForLocalUse10Threads [GOOD] >> TOutOfSpaceStateTests::TestLocal [GOOD] >> TOutOfSpaceStateTests::TestGlobal [GOOD] >> GivenIdRange::Allocate [GOOD] >> TBsVDiskRepl2::ReplEraseDiskRestoreWOOneDisk [GOOD] >> TBsVDiskRepl3::ReplEraseDiskRestoreMultipart |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/common/ut/unittest >> TOutOfSpaceStateTests::TestGlobal [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/common/ut/unittest |89.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] |89.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut >> TBsVDiskGC::GCPutKeepBarrierSync [GOOD] >> TBsVDiskGC::GCPutManyBarriersNoSync |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TBlobStorageAnubisAlgo::Mirror3 [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest |89.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blob_depot/ut/unittest >> GivenIdRange::Allocate [GOOD] |89.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/util/btree_benchmark/btree_benchmark ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::Mirror3dcMapper [GOOD] Test command err: [0:1:0:3:1]# 173 184 157 167 152 185 195 192 144 [0:1:1:1:1]# 189 195 192 171 157 161 167 155 196 [0:1:3:3:1]# 184 157 182 152 185 157 192 144 189 [0:1:3:4:0]# 148 154 155 158 194 160 156 163 140 [0:1:2:3:2]# 152 177 174 176 154 146 161 170 168 [0:1:1:2:1]# 157 167 152 189 195 192 171 157 161 [0:1:1:0:2]# 158 150 131 167 177 161 177 174 173 [0:1:3:0:1]# 161 155 171 196 154 167 184 157 182 [0:1:0:3:2]# 174 173 152 146 184 176 168 157 161 [0:1:2:2:0]# 163 140 161 148 162 159 168 178 190 [0:1:0:2:0]# 161 156 163 159 196 148 190 162 168 [0:1:3:2:1]# 152 185 157 192 144 189 161 155 171 [0:1:2:3:1]# 157 182 173 185 157 167 144 189 195 [0:1:3:1:2]# 157 161 170 131 190 158 161 178 167 [0:1:2:0:1]# 155 171 157 154 167 155 157 182 173 [0:1:3:0:2]# 131 190 158 161 178 167 173 152 177 [0:1:2:0:2]# 190 158 150 178 167 177 152 177 174 [0:1:2:4:1]# 154 167 155 157 182 173 185 157 167 [0:1:2:1:2]# 161 170 168 190 158 150 178 167 177 [0:1:2:4:2]# 178 167 177 152 177 174 176 154 146 [0:1:0:2:1]# 167 152 185 195 192 144 157 161 155 [0:1:0:0:0]# 190 162 168 174 148 154 177 158 194 [0:1:3:2:0]# 156 163 140 196 148 162 162 168 178 [0:1:1:0:1]# 171 157 161 167 155 196 182 173 184 [0:1:0:2:2]# 146 184 176 168 157 161 150 131 190 [0:1:1:0:0]# 178 190 162 155 174 148 160 177 158 [0:1:2:3:0]# 194 160 177 163 140 161 148 162 159 [0:1:2:4:0]# 154 155 174 194 160 177 163 140 161 [0:1:1:3:2]# 177 174 173 154 146 184 170 168 157 [0:1:2:1:1]# 144 189 195 155 171 157 154 167 155 [0:1:1:1:0]# 162 159 196 178 190 162 155 174 148 [0:1:1:3:1]# 182 173 184 157 167 152 189 195 192 [0:1:3:4:1]# 196 154 167 184 157 182 152 185 157 [0:1:1:4:2]# 167 177 161 177 174 173 154 146 184 [0:1:0:1:0]# 159 196 148 190 162 168 174 148 154 [0:1:3:4:2]# 161 178 167 173 152 177 184 176 154 [0:1:0:0:1]# 157 161 155 155 196 154 173 184 157 [0:1:1:4:0]# 155 174 148 160 177 158 140 161 156 [0:1:2:1:0]# 148 162 159 168 178 190 154 155 174 [0:1:2:0:0]# 168 178 190 154 155 174 194 160 177 [0:1:3:3:2]# 173 152 177 184 176 154 157 161 170 [0:1:0:4:0]# 174 148 154 177 158 194 161 156 163 [0:1:1:2:0]# 140 161 156 162 159 196 178 190 162 [0:1:0:1:1]# 195 192 144 157 161 155 155 196 154 [0:1:3:0:0]# 162 168 178 148 154 155 158 194 160 [0:1:3:1:1]# 192 144 189 161 155 171 196 154 167 [0:1:0:4:1]# 155 196 154 173 184 157 167 152 185 [0:1:2:2:1]# 185 157 167 144 189 195 155 171 157 [0:1:3:1:0]# 196 148 162 162 168 178 148 154 155 [0:1:2:2:2]# 176 154 146 161 170 
168 190 158 150 [0:1:0:3:0]# 177 158 194 161 156 163 159 196 148 [0:1:3:3:0]# 158 194 160 156 163 140 196 148 162 [0:1:0:1:2]# 168 157 161 150 131 190 177 161 178 [0:1:3:2:2]# 184 176 154 157 161 170 131 190 158 [0:1:1:3:0]# 160 177 158 140 161 156 162 159 196 [0:1:1:2:2]# 154 146 184 170 168 157 158 150 131 [0:1:1:4:1]# 167 155 196 182 173 184 157 167 152 [0:1:1:1:2]# 170 168 157 158 150 131 167 177 161 [0:1:0:0:2]# 150 131 190 177 161 178 174 173 152 [0:1:0:4:2]# 177 161 178 174 173 152 146 184 176 mean# 166.6666667 dev# 15.11254078 |89.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/blob_depot/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> TBlobStorageAnubisAlgo::Mirror3 [GOOD] >> TBsVDiskGC::GCPutManyBarriersNoSync [GOOD] >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/groupinfo/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/anubis_osiris/ut/unittest >> Backpressure::MonteCarlo [GOOD] |89.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [LD] {RESULT} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |89.9%| [TM] {RESULT} ydb/core/blob_depot/ut/unittest |89.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/util/btree_benchmark/btree_benchmark >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly [GOOD] >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize >> TBsVDiskRepl1::ReplEraseDiskRestore [GOOD] >> TBsVDiskRepl1::ReadOnly |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |89.9%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/local_ydb/local_ydb |89.9%| [LD] {RESULT} $(B)/ydb/public/tools/local_ydb/local_ydb |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/local_ydb/local_ydb |89.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/backpressure/ut_client/unittest >> Backpressure::MonteCarlo [GOOD] Test command err: Clock# 1970-01-01T00:00:00.000000Z elapsed# 0.000038s EventsProcessed# 0 clients.size# 0 Clock# 1970-01-01T00:00:14.456368Z elapsed# 0.000181s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:31.018646Z elapsed# 0.000204s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:50.010312Z elapsed# 0.000231s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:02.135071Z elapsed# 0.000257s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:22.110671Z elapsed# 0.021987s EventsProcessed# 2336 clients.size# 1 Clock# 1970-01-01T00:01:32.648627Z elapsed# 0.029905s EventsProcessed# 3544 clients.size# 1 Clock# 1970-01-01T00:01:49.593968Z elapsed# 0.042199s EventsProcessed# 5507 clients.size# 1 Clock# 1970-01-01T00:02:05.995478Z elapsed# 0.054329s EventsProcessed# 7435 clients.size# 1 Clock# 1970-01-01T00:02:16.663760Z elapsed# 0.061839s EventsProcessed# 8657 clients.size# 1 Clock# 1970-01-01T00:02:27.703388Z elapsed# 0.074731s EventsProcessed# 10026 clients.size# 1 Clock# 1970-01-01T00:02:44.876881Z elapsed# 0.091936s EventsProcessed# 12024 clients.size# 1 Clock# 1970-01-01T00:03:01.003998Z elapsed# 0.107794s EventsProcessed# 13923 clients.size# 1 Clock# 1970-01-01T00:03:19.870607Z elapsed# 0.136289s EventsProcessed# 18348 clients.size# 2 Clock# 1970-01-01T00:03:36.383896Z elapsed# 0.161910s EventsProcessed# 22358 clients.size# 2 Clock# 1970-01-01T00:03:52.797616Z elapsed# 0.196205s EventsProcessed# 26177 clients.size# 2 Clock# 1970-01-01T00:04:09.162726Z elapsed# 0.221560s EventsProcessed# 30006 clients.size# 2 Clock# 1970-01-01T00:04:28.380446Z elapsed# 0.252892s EventsProcessed# 34637 clients.size# 2 Clock# 1970-01-01T00:04:43.487576Z elapsed# 0.276952s EventsProcessed# 38230 clients.size# 2 Clock# 1970-01-01T00:05:02.931808Z elapsed# 0.309028s EventsProcessed# 42743 clients.size# 2 Clock# 1970-01-01T00:05:22.895237Z elapsed# 0.347652s EventsProcessed# 47718 clients.size# 2 Clock# 1970-01-01T00:05:41.118734Z elapsed# 0.395356s EventsProcessed# 52053 clients.size# 2 Clock# 1970-01-01T00:05:53.516434Z elapsed# 0.425703s EventsProcessed# 55032 clients.size# 2 Clock# 1970-01-01T00:06:06.631931Z elapsed# 0.468143s EventsProcessed# 58098 clients.size# 2 Clock# 1970-01-01T00:06:18.445731Z elapsed# 0.509857s EventsProcessed# 60943 clients.size# 2 Clock# 1970-01-01T00:06:35.772958Z elapsed# 0.557348s EventsProcessed# 65121 clients.size# 2 Clock# 1970-01-01T00:06:49.349019Z elapsed# 0.593437s EventsProcessed# 68327 clients.size# 2 Clock# 1970-01-01T00:07:01.382268Z elapsed# 0.629420s EventsProcessed# 71243 clients.size# 2 Clock# 1970-01-01T00:07:19.899621Z elapsed# 0.671404s EventsProcessed# 75459 clients.size# 2 Clock# 1970-01-01T00:07:39.335788Z elapsed# 0.708368s EventsProcessed# 80077 clients.size# 2 Clock# 1970-01-01T00:07:56.445319Z elapsed# 0.752326s EventsProcessed# 86160 clients.size# 3 Clock# 1970-01-01T00:08:14.807137Z elapsed# 0.800244s EventsProcessed# 92747 clients.size# 3 Clock# 1970-01-01T00:08:33.960787Z elapsed# 0.854818s EventsProcessed# 99538 clients.size# 3 Clock# 1970-01-01T00:08:47.749156Z elapsed# 0.892998s EventsProcessed# 104579 clients.size# 3 Clock# 1970-01-01T00:09:03.173477Z elapsed# 0.936834s EventsProcessed# 110234 clients.size# 3 Clock# 1970-01-01T00:09:14.317883Z elapsed# 0.965737s EventsProcessed# 114237 clients.size# 3 Clock# 1970-01-01T00:09:25.237337Z elapsed# 
0.993932s EventsProcessed# 118064 clients.size# 3 Clock# 1970-01-01T00:09:36.948533Z elapsed# 1.025515s EventsProcessed# 122396 clients.size# 3 Clock# 1970-01-01T00:09:49.824612Z elapsed# 1.059972s EventsProcessed# 127070 clients.size# 3 Clock# 1970-01-01T00:10:01.712795Z elapsed# 1.081703s EventsProcessed# 129995 clients.size# 2 Clock# 1970-01-01T00:10:11.755725Z elapsed# 1.098634s EventsProcessed# 132361 clients.size# 2 Clock# 1970-01-01T00:10:26.824941Z elapsed# 1.140544s EventsProcessed# 137913 clients.size# 3 Clock# 1970-01-01T00:10:39.317151Z elapsed# 1.173456s EventsProcessed# 142221 clients.size# 3 Clock# 1970-01-01T00:10:57.403224Z elapsed# 1.220576s EventsProcessed# 148690 clients.size# 3 Clock# 1970-01-01T00:11:09.791913Z elapsed# 1.253875s EventsProcessed# 153038 clients.size# 3 Clock# 1970-01-01T00:11:27.442663Z elapsed# 1.302909s EventsProcessed# 159213 clients.size# 3 Clock# 1970-01-01T00:11:47.367294Z elapsed# 1.356777s EventsProcessed# 166228 clients.size# 3 Clock# 1970-01-01T00:11:59.826695Z elapsed# 1.387683s EventsProcessed# 170535 clients.size# 3 Clock# 1970-01-01T00:12:09.939180Z elapsed# 1.415082s EventsProcessed# 174186 clients.size# 3 Clock# 1970-01-01T00:12:20.431003Z elapsed# 1.441361s EventsProcessed# 177817 clients.size# 3 Clock# 1970-01-01T00:12:36.671926Z elapsed# 1.491702s EventsProcessed# 183472 clients.size# 3 Clock# 1970-01-01T00:12:54.922169Z elapsed# 1.540442s EventsProcessed# 189988 clients.size# 3 Clock# 1970-01-01T00:13:06.727355Z elapsed# 1.582611s EventsProcessed# 195550 clients.size# 4 Clock# 1970-01-01T00:13:17.149957Z elapsed# 1.621791s EventsProcessed# 200557 clients.size# 4 Clock# 1970-01-01T00:13:27.331511Z elapsed# 1.658937s EventsProcessed# 205312 clients.size# 4 Clock# 1970-01-01T00:13:42.980259Z elapsed# 1.702122s EventsProcessed# 210978 clients.size# 3 Clock# 1970-01-01T00:13:53.048024Z elapsed# 1.728547s EventsProcessed# 214390 clients.size# 3 Clock# 1970-01-01T00:14:10.142030Z elapsed# 1.776821s EventsProcessed# 220625 clients.size# 3 Clock# 1970-01-01T00:14:20.575282Z elapsed# 1.806693s EventsProcessed# 224310 clients.size# 3 Clock# 1970-01-01T00:14:39.154296Z elapsed# 1.864322s EventsProcessed# 231010 clients.size# 3 Clock# 1970-01-01T00:14:57.661700Z elapsed# 1.919407s EventsProcessed# 237680 clients.size# 3 Clock# 1970-01-01T00:15:11.916999Z elapsed# 1.954331s EventsProcessed# 242625 clients.size# 3 Clock# 1970-01-01T00:15:31.184179Z elapsed# 2.001492s EventsProcessed# 249383 clients.size# 3 Clock# 1970-01-01T00:15:48.868213Z elapsed# 2.030691s EventsProcessed# 253557 clients.size# 2 Clock# 1970-01-01T00:16:03.504195Z elapsed# 2.055655s EventsProcessed# 257178 clients.size# 2 Clock# 1970-01-01T00:16:22.348285Z elapsed# 2.086254s EventsProcessed# 261585 clients.size# 2 Clock# 1970-01-01T00:16:33.079491Z elapsed# 2.104033s EventsProcessed# 264084 clients.size# 2 Clock# 1970-01-01T00:16:48.745932Z elapsed# 2.130323s EventsProcessed# 267797 clients.size# 2 Clock# 1970-01-01T00:17:06.523822Z elapsed# 2.159688s EventsProcessed# 271915 clients.size# 2 Clock# 1970-01-01T00:17:21.274240Z elapsed# 2.184857s EventsProcessed# 275313 clients.size# 2 Clock# 1970-01-01T00:17:33.298828Z elapsed# 2.206768s EventsProcessed# 278088 clients.size# 2 Clock# 1970-01-01T00:17:47.531129Z elapsed# 2.232433s EventsProcessed# 281350 clients.size# 2 Clock# 1970-01-01T00:17:59.033588Z elapsed# 2.242721s EventsProcessed# 282645 clients.size# 1 Clock# 1970-01-01T00:18:09.276354Z elapsed# 2.252048s EventsProcessed# 283806 clients.size# 1 Clock# 
1970-01-01T00:18:24.691229Z elapsed# 2.266382s EventsProcessed# 285670 clients.size# 1 Clock# 1970-01-01T00:18:35.159275Z elapsed# 2.275970s EventsProcessed# 286938 clients.size# 1 Clock# 1970-01-01T00:18:54.542398Z elapsed# 2.293217s EventsProcessed# 289155 clients.size# 1 Clock# 1970-01-01T00:19:14.473503Z elapsed# 2.312104s EventsProcessed# 291451 clients.size# 1 Clock# 1970-01-01T00:19:29.061968Z elapsed# 2.326953s EventsProcessed# 293251 clients.size# 1 Clock# 1970-01-01T00:19:42.002123Z elapsed# 2.339085s EventsProcessed# 294797 clients.size# 1 Clock# 1970-01-01T00:19:55.729768Z elapsed# 2.339227s EventsProcessed# 294799 clients.size# 0 Clock# 1970-01-01T00:20:14.513939Z elapsed# 2.339244s EventsProcessed# 294799 clients.size# 0 Clock# 1970-01-01T00:20:25.892155Z elapsed# 2.339258s EventsProcessed# 294799 clients.size# 0 Clock# 1970-01-01T00:20:42.638856Z elapsed# 2.355398s EventsProcessed# 296738 clients.size# 1 Clock# 1970-01-01T00:20:56.096075Z elapsed# 2.380009s EventsProcessed# 299921 clients.size# 2 Clock# 1970-01-01T00:21:06.294471Z elapsed# 2.397758s EventsProcessed# 302244 clients.size# 2 Clock# 1970-01-01T00:21:20.505178Z elapsed# 2.438468s EventsProcessed# 307357 clients.size# 3 Clock# 1970-01-01T00:21:33.927412Z elapsed# 2.488338s EventsProcessed# 313709 clients.size# 4 Clock# 1970-01-01T00:21:51.164425Z elapsed# 2.575858s EventsProcessed# 324361 clients.size# 5 Clock# 1970-01-01T00:22:06.023477Z elapsed# 2.649898s EventsProcessed# 333277 clients.size# 5 Clock# 1970-01-01T00:22:22.726313Z elapsed# 2.730258s EventsProcessed# 343211 clients.size# 5 Clock# 1970-01-01T00:22:37.220085Z elapsed# 2.813021s EventsProcessed# 351748 clients.size# 5 Clock# 1970-01-01T00:22:53.884114Z elapsed# 2.883835s EventsProcessed# 361604 clients.size# 5 Clock# 1970-01-01T00:23:08.786028Z elapsed# 2.946742s EventsProcessed# 370318 clients.size# 5 Clock# 1970-01-01T00:23:26.779852Z elapsed# 3.040343s EventsProcessed# 381057 clients.size# 5 Clock# 1970-01-01T00:23:38.194758Z elapsed# 3.086276s EventsProcessed# 387828 clients.size# 5 Clock# 1970-01-01T00:23:55.524944Z elapsed# 3.156696s EventsProcessed# 398233 clients.size# 5 Clock# 1970-01-01T00:24:15.095961Z elapsed# 3.233509s EventsProcessed# 409939 clients.size# 5 Clock# 1970-01-01T00:24:33.185696Z elapsed# 3.320416s EventsProcessed# 420496 clients.size# 5 Clock# 1970-01-01T00:24:50.643533Z elapsed# 3.384695s EventsProcessed# 430980 clients.size# 5 Clock# 1970-01-01T00:25:02.595427Z elapsed# 3.439946s EventsProcessed# 439762 clients.size# 6 Clock# 1970-01-01T00:25:19.501509Z elapsed# 3.522696s EventsProcessed# 451739 clients.size# 6 Clock# 1970-01-01T00:25:30.080157Z elapsed# 3.599543s EventsProcessed# 459258 clients.size# 6 Clock# 1970-01-01T00:25:47.039198Z elapsed# 3.680275s EventsProcessed# 471178 clients.size# 6 Clock# 1970-01-01T00:25:59.919333Z elapsed# 3.743609s EventsProcessed# 480236 clients.size# 6 Clock# 1970-01-01T00:26:12.630918Z elapsed# 3.821287s EventsProcessed# 489063 clients.size# 6 Clock# 1970-01-01T00:26:28.877542Z elapsed# 3.904324s EventsProcessed# 500608 clients.size# 6 Clock# 1970-01-01T00:26:48.203178Z elapsed# 3.991036s EventsProcessed# 514341 clients.size# 6 Clock# 1970-01-01T00:27:05.968556Z elapsed# 4.077174s EventsProcessed# 524751 clients.size# 5 Clock# 1970-01-01T00:27:22.379408Z elapsed# 4.139062s EventsProcessed# 534285 clients.size# 5 Clock# 1970-01-01T00:27:38.331697Z elapsed# 4.185586s EventsProcessed# 541855 clients.size# 4 Clock# 1970-01-01T00:27:51.921451Z elapsed# 4.224140s EventsProcessed# 548050 
clients.size# 4 Clock# 1970-01-01T00:28:09.096766Z elapsed# 4.284891s EventsProcessed# 556317 clients.size# 4 Clock# 1970-01-01T00:28:23.804235Z elapsed# 4.344964s EventsProcessed# 563531 clients.size# 4 Clock# 1970-01-01T00:28:37.574482Z elapsed# 4.379914s EventsProcessed# 570131 clients.size# 4 Clock# 1970-01-01T00:28:52.531121Z elapsed# 4.426419s EventsProcessed# 577311 clients.size# 4 Clock# 1970-01-01T00:29:06.123331Z elapsed# 4.478204s EventsProcessed# 583911 clients.size# 4 Clock# 1970-01-01T00:29:21.253900Z elapsed# 4.517860s EventsProcessed# 589252 clients.size# 3 Clock# 1970-01-01T00:29:35.342756Z elapsed# 4.560183s EventsProcessed# 594335 clients.size# 3 Clock# 1970-01-01T00:29:46.728804Z elapsed# 4.611143s EventsProcessed# 598374 clients.size# 3 Clock# 1970-01-01T00:29:59.258532Z elapsed# 4.636565s EventsProcessed# 602828 clients.size# 3 Clock# 1970-01-01T00:30:10.124498Z elapsed# 4.658167s EventsProcessed# 606631 clients.size# 3 Clock# 1970-01-01T00:30:29.600875Z elapsed# 4.704076s EventsProcessed# 613495 clients.size# 3 Clock# 1970-01-01T00:30:40.822871Z elapsed# 4.734692s EventsProcessed# 617397 clients.size# 3 Clock# 1970-01-01T00:30:53.533687Z elapsed# 4.760840s EventsProcessed# 622020 clients.size# 3 Clock# 1970-01-01T00:31:12.419944Z elapsed# 4.797777s EventsProcessed# 628714 clients.size# 3 Clock# 1970-01-01T00:31:28.186435Z elapsed# 4.862211s EventsProcessed# 634313 clients.size# 3 Clock# 1970-01-01T00:31:40.940419Z elapsed# 4.886996s EventsProcessed# 638888 clients.size# 3 Clock# 1970-01-01T00:31:53 ... 5:28:37.385954Z elapsed# 41.358124s EventsProcessed# 4439813 clients.size# 6 Clock# 1970-01-01T05:28:50.105527Z elapsed# 41.419751s EventsProcessed# 4448802 clients.size# 6 Clock# 1970-01-01T05:29:03.973163Z elapsed# 41.533265s EventsProcessed# 4458664 clients.size# 6 Clock# 1970-01-01T05:29:21.513009Z elapsed# 41.655277s EventsProcessed# 4471211 clients.size# 6 Clock# 1970-01-01T05:29:34.618730Z elapsed# 41.732486s EventsProcessed# 4480412 clients.size# 6 Clock# 1970-01-01T05:29:51.794350Z elapsed# 41.822220s EventsProcessed# 4492525 clients.size# 6 Clock# 1970-01-01T05:30:05.256395Z elapsed# 41.944385s EventsProcessed# 4503555 clients.size# 7 Clock# 1970-01-01T05:30:16.403394Z elapsed# 42.036446s EventsProcessed# 4512860 clients.size# 7 Clock# 1970-01-01T05:30:31.955976Z elapsed# 42.132183s EventsProcessed# 4523804 clients.size# 6 Clock# 1970-01-01T05:30:42.702593Z elapsed# 42.224982s EventsProcessed# 4531260 clients.size# 6 Clock# 1970-01-01T05:31:02.321782Z elapsed# 42.318060s EventsProcessed# 4545416 clients.size# 6 Clock# 1970-01-01T05:31:19.011537Z elapsed# 42.391427s EventsProcessed# 4557197 clients.size# 6 Clock# 1970-01-01T05:31:35.236501Z elapsed# 42.494711s EventsProcessed# 4568704 clients.size# 6 Clock# 1970-01-01T05:31:47.894417Z elapsed# 42.552750s EventsProcessed# 4577895 clients.size# 6 Clock# 1970-01-01T05:32:05.491993Z elapsed# 42.663024s EventsProcessed# 4592718 clients.size# 7 Clock# 1970-01-01T05:32:23.798123Z elapsed# 42.803272s EventsProcessed# 4605726 clients.size# 6 Clock# 1970-01-01T05:32:43.362674Z elapsed# 42.907480s EventsProcessed# 4619635 clients.size# 6 Clock# 1970-01-01T05:33:01.138243Z elapsed# 43.004265s EventsProcessed# 4632085 clients.size# 6 Clock# 1970-01-01T05:33:16.556100Z elapsed# 43.144824s EventsProcessed# 4643041 clients.size# 6 Clock# 1970-01-01T05:33:33.349668Z elapsed# 43.277946s EventsProcessed# 4655144 clients.size# 6 Clock# 1970-01-01T05:33:43.815791Z elapsed# 43.352543s EventsProcessed# 4662520 clients.size# 6 Clock# 
1970-01-01T05:34:03.704279Z elapsed# 43.464186s EventsProcessed# 4676615 clients.size# 6 Clock# 1970-01-01T05:34:18.793863Z elapsed# 43.534853s EventsProcessed# 4687317 clients.size# 6 Clock# 1970-01-01T05:34:32.223213Z elapsed# 43.616755s EventsProcessed# 4698483 clients.size# 7 Clock# 1970-01-01T05:34:44.730585Z elapsed# 43.736770s EventsProcessed# 4708540 clients.size# 7 Clock# 1970-01-01T05:35:01.063663Z elapsed# 43.928483s EventsProcessed# 4721987 clients.size# 7 Clock# 1970-01-01T05:35:17.874498Z elapsed# 44.062321s EventsProcessed# 4735956 clients.size# 7 Clock# 1970-01-01T05:35:34.927109Z elapsed# 44.224215s EventsProcessed# 4748200 clients.size# 6 Clock# 1970-01-01T05:35:52.434450Z elapsed# 44.380707s EventsProcessed# 4760601 clients.size# 6 Clock# 1970-01-01T05:36:03.722154Z elapsed# 44.472947s EventsProcessed# 4768782 clients.size# 6 Clock# 1970-01-01T05:36:15.605700Z elapsed# 44.541116s EventsProcessed# 4777163 clients.size# 6 Clock# 1970-01-01T05:36:33.465411Z elapsed# 44.724113s EventsProcessed# 4789745 clients.size# 6 Clock# 1970-01-01T05:36:48.100194Z elapsed# 44.856323s EventsProcessed# 4800297 clients.size# 6 Clock# 1970-01-01T05:37:04.796741Z elapsed# 45.064951s EventsProcessed# 4812043 clients.size# 6 Clock# 1970-01-01T05:37:21.529619Z elapsed# 45.255293s EventsProcessed# 4823891 clients.size# 6 Clock# 1970-01-01T05:37:40.820158Z elapsed# 45.380114s EventsProcessed# 4837608 clients.size# 6 Clock# 1970-01-01T05:37:56.841222Z elapsed# 45.494125s EventsProcessed# 4848992 clients.size# 6 Clock# 1970-01-01T05:38:15.600381Z elapsed# 45.683650s EventsProcessed# 4862224 clients.size# 6 Clock# 1970-01-01T05:38:34.197296Z elapsed# 45.852748s EventsProcessed# 4877864 clients.size# 7 Clock# 1970-01-01T05:38:50.429352Z elapsed# 45.980031s EventsProcessed# 4891630 clients.size# 7 Clock# 1970-01-01T05:39:06.053537Z elapsed# 46.160218s EventsProcessed# 4904234 clients.size# 7 Clock# 1970-01-01T05:39:18.403002Z elapsed# 46.289134s EventsProcessed# 4914607 clients.size# 7 Clock# 1970-01-01T05:39:31.514946Z elapsed# 46.422909s EventsProcessed# 4925640 clients.size# 7 Clock# 1970-01-01T05:39:46.281090Z elapsed# 46.595473s EventsProcessed# 4937969 clients.size# 7 Clock# 1970-01-01T05:39:58.673794Z elapsed# 46.730268s EventsProcessed# 4950003 clients.size# 8 Clock# 1970-01-01T05:40:13.222958Z elapsed# 46.909550s EventsProcessed# 4963825 clients.size# 8 Clock# 1970-01-01T05:40:29.365829Z elapsed# 47.111287s EventsProcessed# 4978972 clients.size# 8 Clock# 1970-01-01T05:40:41.139380Z elapsed# 47.262289s EventsProcessed# 4991454 clients.size# 9 Clock# 1970-01-01T05:40:54.643219Z elapsed# 47.451544s EventsProcessed# 5005600 clients.size# 9 Clock# 1970-01-01T05:41:12.029814Z elapsed# 47.599584s EventsProcessed# 5024273 clients.size# 9 Clock# 1970-01-01T05:41:29.645178Z elapsed# 47.779577s EventsProcessed# 5043502 clients.size# 9 Clock# 1970-01-01T05:41:48.335594Z elapsed# 47.982418s EventsProcessed# 5063560 clients.size# 9 Clock# 1970-01-01T05:42:07.636364Z elapsed# 48.239206s EventsProcessed# 5082027 clients.size# 8 Clock# 1970-01-01T05:42:18.658454Z elapsed# 48.338894s EventsProcessed# 5091007 clients.size# 7 Clock# 1970-01-01T05:42:37.423814Z elapsed# 48.505305s EventsProcessed# 5106602 clients.size# 7 Clock# 1970-01-01T05:42:54.432106Z elapsed# 48.720696s EventsProcessed# 5120612 clients.size# 7 Clock# 1970-01-01T05:43:08.249876Z elapsed# 48.850917s EventsProcessed# 5132201 clients.size# 7 Clock# 1970-01-01T05:43:20.530578Z elapsed# 48.960178s EventsProcessed# 5143801 clients.size# 8 Clock# 
1970-01-01T05:43:38.841274Z elapsed# 49.159123s EventsProcessed# 5161204 clients.size# 8 Clock# 1970-01-01T05:43:52.275244Z elapsed# 49.288594s EventsProcessed# 5174025 clients.size# 8 Clock# 1970-01-01T05:44:08.106522Z elapsed# 49.455165s EventsProcessed# 5189207 clients.size# 8 Clock# 1970-01-01T05:44:20.761469Z elapsed# 49.566980s EventsProcessed# 5201063 clients.size# 8 Clock# 1970-01-01T05:44:36.433704Z elapsed# 49.683333s EventsProcessed# 5213998 clients.size# 7 Clock# 1970-01-01T05:44:56.274137Z elapsed# 49.852828s EventsProcessed# 5230595 clients.size# 7 Clock# 1970-01-01T05:45:12.839941Z elapsed# 49.980298s EventsProcessed# 5244322 clients.size# 7 Clock# 1970-01-01T05:45:23.618874Z elapsed# 50.076183s EventsProcessed# 5253100 clients.size# 7 Clock# 1970-01-01T05:45:35.295363Z elapsed# 50.239284s EventsProcessed# 5262792 clients.size# 7 Clock# 1970-01-01T05:45:49.953324Z elapsed# 50.360353s EventsProcessed# 5274783 clients.size# 7 Clock# 1970-01-01T05:46:02.510857Z elapsed# 50.466787s EventsProcessed# 5285164 clients.size# 7 Clock# 1970-01-01T05:46:17.748618Z elapsed# 50.656604s EventsProcessed# 5297948 clients.size# 7 Clock# 1970-01-01T05:46:36.328569Z elapsed# 50.830223s EventsProcessed# 5315537 clients.size# 8 Clock# 1970-01-01T05:46:53.096572Z elapsed# 50.975172s EventsProcessed# 5331402 clients.size# 8 Clock# 1970-01-01T05:47:06.701148Z elapsed# 51.099204s EventsProcessed# 5344122 clients.size# 8 Clock# 1970-01-01T05:47:22.572161Z elapsed# 51.229825s EventsProcessed# 5361104 clients.size# 9 Clock# 1970-01-01T05:47:41.059562Z elapsed# 51.466074s EventsProcessed# 5380928 clients.size# 9 Clock# 1970-01-01T05:47:52.457332Z elapsed# 51.568922s EventsProcessed# 5393074 clients.size# 9 Clock# 1970-01-01T05:48:07.480018Z elapsed# 51.757171s EventsProcessed# 5409108 clients.size# 9 Clock# 1970-01-01T05:48:26.456740Z elapsed# 51.956886s EventsProcessed# 5429441 clients.size# 9 Clock# 1970-01-01T05:48:36.712757Z elapsed# 52.134819s EventsProcessed# 5441751 clients.size# 10 Clock# 1970-01-01T05:48:52.004204Z elapsed# 52.339913s EventsProcessed# 5459806 clients.size# 10 Clock# 1970-01-01T05:49:02.574992Z elapsed# 52.499478s EventsProcessed# 5472241 clients.size# 10 Clock# 1970-01-01T05:49:17.908915Z elapsed# 52.669058s EventsProcessed# 5490941 clients.size# 10 Clock# 1970-01-01T05:49:30.328619Z elapsed# 52.771400s EventsProcessed# 5505382 clients.size# 10 Clock# 1970-01-01T05:49:49.390198Z elapsed# 52.970695s EventsProcessed# 5528072 clients.size# 10 Clock# 1970-01-01T05:50:03.587599Z elapsed# 53.109470s EventsProcessed# 5544968 clients.size# 10 Clock# 1970-01-01T05:50:16.061300Z elapsed# 53.217968s EventsProcessed# 5559647 clients.size# 10 Clock# 1970-01-01T05:50:31.682752Z elapsed# 53.361876s EventsProcessed# 5578738 clients.size# 10 Clock# 1970-01-01T05:50:48.074996Z elapsed# 53.549044s EventsProcessed# 5598179 clients.size# 10 Clock# 1970-01-01T05:50:59.404928Z elapsed# 53.654231s EventsProcessed# 5610219 clients.size# 9 Clock# 1970-01-01T05:51:10.196716Z elapsed# 53.770445s EventsProcessed# 5621544 clients.size# 9 Clock# 1970-01-01T05:51:20.550643Z elapsed# 53.849887s EventsProcessed# 5632456 clients.size# 9 Clock# 1970-01-01T05:51:38.348487Z elapsed# 54.024945s EventsProcessed# 5651624 clients.size# 9 Clock# 1970-01-01T05:51:48.672009Z elapsed# 54.147744s EventsProcessed# 5662555 clients.size# 9 Clock# 1970-01-01T05:52:06.105403Z elapsed# 54.308672s EventsProcessed# 5680870 clients.size# 9 Clock# 1970-01-01T05:52:16.465693Z elapsed# 54.449016s EventsProcessed# 5691935 clients.size# 9 
Clock# 1970-01-01T05:52:29.518684Z elapsed# 54.590647s EventsProcessed# 5705936 clients.size# 9 Clock# 1970-01-01T05:52:43.032684Z elapsed# 54.739847s EventsProcessed# 5720273 clients.size# 9 Clock# 1970-01-01T05:53:02.681974Z elapsed# 55.016893s EventsProcessed# 5743575 clients.size# 10 Clock# 1970-01-01T05:53:21.527707Z elapsed# 55.248289s EventsProcessed# 5763500 clients.size# 9 Clock# 1970-01-01T05:53:36.091150Z elapsed# 55.370964s EventsProcessed# 5779073 clients.size# 9 Clock# 1970-01-01T05:53:53.599007Z elapsed# 55.556655s EventsProcessed# 5797993 clients.size# 9 Clock# 1970-01-01T05:54:03.601587Z elapsed# 55.675266s EventsProcessed# 5808695 clients.size# 9 Clock# 1970-01-01T05:54:18.066283Z elapsed# 55.848505s EventsProcessed# 5824228 clients.size# 9 Clock# 1970-01-01T05:54:37.122003Z elapsed# 56.074887s EventsProcessed# 5844619 clients.size# 9 Clock# 1970-01-01T05:54:48.183601Z elapsed# 56.201777s EventsProcessed# 5857595 clients.size# 10 Clock# 1970-01-01T05:55:04.070972Z elapsed# 56.425577s EventsProcessed# 5876145 clients.size# 10 Clock# 1970-01-01T05:55:15.720746Z elapsed# 56.547323s EventsProcessed# 5889770 clients.size# 10 Clock# 1970-01-01T05:55:34.715410Z elapsed# 56.811636s EventsProcessed# 5912640 clients.size# 10 Clock# 1970-01-01T05:55:49.066087Z elapsed# 56.946253s EventsProcessed# 5929709 clients.size# 10 Clock# 1970-01-01T05:56:03.801715Z elapsed# 57.112504s EventsProcessed# 5947047 clients.size# 10 Clock# 1970-01-01T05:56:15.049228Z elapsed# 57.274004s EventsProcessed# 5960323 clients.size# 10 Clock# 1970-01-01T05:56:33.032711Z elapsed# 57.745717s EventsProcessed# 5981448 clients.size# 10 Clock# 1970-01-01T05:56:50.827072Z elapsed# 58.207116s EventsProcessed# 6000466 clients.size# 9 Clock# 1970-01-01T05:57:03.863228Z elapsed# 58.550816s EventsProcessed# 6014234 clients.size# 9 Clock# 1970-01-01T05:57:21.228117Z elapsed# 59.024847s EventsProcessed# 6030612 clients.size# 8 Clock# 1970-01-01T05:57:37.528725Z elapsed# 59.455893s EventsProcessed# 6045835 clients.size# 8 Clock# 1970-01-01T05:57:47.961415Z elapsed# 59.872355s EventsProcessed# 6055902 clients.size# 8 Clock# 1970-01-01T05:58:05.442362Z elapsed# 60.363500s EventsProcessed# 6074448 clients.size# 9 Clock# 1970-01-01T05:58:17.140270Z elapsed# 60.763091s EventsProcessed# 6087128 clients.size# 9 Clock# 1970-01-01T05:58:36.279488Z elapsed# 61.321625s EventsProcessed# 6107636 clients.size# 9 Clock# 1970-01-01T05:58:52.836695Z elapsed# 61.957720s EventsProcessed# 6125562 clients.size# 9 Clock# 1970-01-01T05:59:10.715698Z elapsed# 62.346093s EventsProcessed# 6144359 clients.size# 9 Clock# 1970-01-01T05:59:24.636136Z elapsed# 62.691227s EventsProcessed# 6159438 clients.size# 9 Clock# 1970-01-01T05:59:37.693985Z elapsed# 63.173935s EventsProcessed# 6173635 clients.size# 9 Clock# 1970-01-01T05:59:56.249323Z elapsed# 63.768080s EventsProcessed# 6191251 clients.size# 8 |89.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/backpressure/ut_client/unittest |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |89.9%| [TM] {RESULT} ydb/core/blobstorage/backpressure/ut_client/unittest |89.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |89.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |89.9%| [LD] {RESULT} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice |89.9%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |89.9%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |89.9%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |89.9%| [LD] {RESULT} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication >> TBsVDiskRepl1::ReadOnly [GOOD] >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest [GOOD] |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |89.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |89.9%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl1::ReadOnly [GOOD] Test command err: 2025-12-23T18:24:05.528592Z :BS_SYNCER ERROR: guid_recovery.cpp:714: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:24:05.662955Z :BS_SYNCER ERROR: guid_recovery.cpp:767: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 670147381020480351] 2025-12-23T18:24:06.763384Z :BS_SYNCER ERROR: blobstorage_osiris.cpp:203: PDiskId# 4 VDISK[0:_:0:1:1]: (0) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> TBsVDiskDefrag::Defrag50PercentGarbage [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::WriteRestartReadHugeIncreased [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeDecreased >> TBsVDiskRepl3::ReplEraseDiskRestoreMultipart [GOOD] >> TBsVDiskRepl3::AnubisTest [GOOD] >> TBsVDiskRepl3::ReplPerf |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/compatibility/federated_queries/ydb-tests-compatibility-federated_queries |89.9%| [LD] {RESULT} 
$(B)/ydb/tests/compatibility/federated_queries/ydb-tests-compatibility-federated_queries |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/compatibility/federated_queries/ydb-tests-compatibility-federated_queries >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |89.9%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest [GOOD] |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest |89.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |89.9%| [LD] {RESULT} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |89.9%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |89.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] |89.9%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |89.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |89.9%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/tstool/tstool |89.9%| [LD] {RESULT} $(B)/ydb/tools/tstool/tstool |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/tstool/tstool |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch |90.0%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/compatibility/olap/ydb-tests-compatibility-olap |90.0%| [LD] {RESULT} $(B)/ydb/tests/compatibility/olap/ydb-tests-compatibility-olap >> TSubgroupPartLayoutTest::CountEffectiveReplicas1of4 [GOOD] |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/compatibility/result_set_format/ydb-tests-compatibility-result_set_format |89.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/compatibility/olap/ydb-tests-compatibility-olap |90.0%| [LD] {RESULT} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/compatibility/result_set_format/ydb-tests-compatibility-result_set_format |90.0%| [LD] {RESULT} $(B)/ydb/tests/compatibility/result_set_format/ydb-tests-compatibility-result_set_format >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] 
Test command err: 2025-12-23T18:23:50.241805Z :BS_VDISK_PUT ERROR: blobstorage_skeleton.cpp:569: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVPut: TabletID cannot be empty; id# [0:1:10:0:0:10:1] Marker# BSVS43 2025-12-23T18:23:52.029398Z :BS_VDISK_OTHER ERROR: vdisk_context.h:143: PDiskId# 1 VDISK[0:_:0:0:0]: (0) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2025-12-23T18:23:52.029467Z :BS_SKELETON ERROR: blobstorage_skeletonfront.cpp:1755: PDiskId# 1 VDISK[0:_:0:0:0]: (0) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |90.0%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |90.0%| [LD] {RESULT} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |90.0%| [LD] {RESULT} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut |90.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForHostPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForResolveHostPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyResolveHost [GOOD] >> NameserviceConfigValidatorTests::TestModifyPort [GOOD] >> NameserviceConfigValidatorTests::TestEmptyConfig [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingId [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingResolveHostPort [GOOD] >> NameserviceConfigValidatorTests::TestEmptyAddresses [GOOD] >> TRegistryTests::TestLock [GOOD] >> TRegistryTests::TestClasses [GOOD] >> TRegistryTests::TestDisableEnable [GOOD] >> ResourceBrokerConfigValidatorTests::TestMinConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestRepeatedQueueName [GOOD] >> ResourceBrokerConfigValidatorTests::TestNoDefaultQueue [GOOD] >> ResourceBrokerConfigValidatorTests::TestNoUnknownTask [GOOD] >> NameserviceConfigValidatorTests::TestRemoveTooMany [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestEmptyQueueName [GOOD] >> 
ResourceBrokerConfigValidatorTests::TestEmptyTaskName [GOOD] >> TRegistryTests::TestAddGet [GOOD] >> TRegistryTests::TestCheckConfig [GOOD] >> ResourceBrokerConfigValidatorTests::TestZeroQueueWeight [GOOD] >> ResourceBrokerConfigValidatorTests::TestZeroDefaultDuration [GOOD] >> ResourceBrokerConfigValidatorTests::TestRepeatedTaskName [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnknownQueue [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnlimitedResource [GOOD] >> ResourceBrokerConfigValidatorTests::TestUnusedQueue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas2of4 [GOOD] Test command err: testing erasure none main# 0 main# 1 Checked 2 cases, took 5 us testing erasure block-4-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 main# 32 main# 33 main# 34 main# 35 main# 36 main# 37 main# 38 main# 39 main# 40 main# 41 main# 42 main# 43 main# 44 main# 45 main# 46 main# 47 main# 48 main# 49 main# 50 main# 51 main# 52 main# 53 main# 54 main# 55 main# 56 main# 57 main# 58 main# 59 main# 60 main# 61 main# 62 main# 63 Checked 262144 cases, took 368059 us testing erasure mirror-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 Checked 512 cases, took 315 us testing erasure block-2-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 4096 cases, took 2504 us testing erasure mirror-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 Checked 64 cases, took 9 us testing erasure block-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 32768 cases, took 22050 us testing erasure stripe-2-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 4096 cases, took 921 us |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestEmptyAddresses [GOOD] >> BootstrapTabletsValidatorTests::TestNoNodeForTablet [GOOD] >> BootstrapTabletsValidatorTests::TestRequiredTablet [GOOD] >> BootstrapTabletsValidatorTests::TestImportantTablet [GOOD] >> BootstrapTabletsValidatorTests::TestCompactionBroker [GOOD] >> NameserviceConfigValidatorTests::TestLongWalleDC [GOOD] >> NameserviceConfigValidatorTests::TestModifyClusterUUID [GOOD] >> NameserviceConfigValidatorTests::TestModifyIdForAddrPort [GOOD] >> NameserviceConfigValidatorTests::TestModifyHost [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/groupinfo/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestModifyPort [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/validators/ut/unittest >> TBsLocalRecovery::WriteRestartReadHugeDecreased [GOOD] >> TBsOther1::PoisonPill |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/validators/ut/unittest >> TBlobStorageDiskBlob::CreateFromDistinctParts |90.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestUnusedQueue [GOOD] >> TBlobStorageLinearTrackBar::TestLinearTrackBarDouble [GOOD] >> TBlobStorageLinearTrackBar::TestLinearTrackBarWithDecimal [GOOD] >> TBlobStorageHullStorageRatio::Test [GOOD] >> TBlobStorageKeyBarrierTest::ParseTest [GOOD] >> TBlobStorageHullDecimal::TestMkRatio [GOOD] >> TBlobStorageHullDecimal::TestMult [GOOD] >> THullDsHeapItTest::HeapAppendixTreeForwardIteratorBenchmark >> TBlobStorageHullDecimal::TestRoundToInt [GOOD] >> THullDsHeapItTest::HeapLevelSliceForwardIteratorBenchmark >> TBlobStorageHullDecimal::TestToUi64 [GOOD] >> TBlobStorageDiskBlob::CreateFromDistinctParts [GOOD] >> TBlobStorageDiskBlob::CreateIterate [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestEmptyTaskName [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestZeroDefaultDuration [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> TRegistryTests::TestDisableEnable [GOOD] >> BootstrapTabletsValidatorTests::TestUnknownNodeForTablet [GOOD] >> NameserviceConfigValidatorTests::TestAddNewNode [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingHostPort [GOOD] >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/validators/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/validators/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/validators/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> ResourceBrokerConfigValidatorTests::TestNoUnknownTask [GOOD] >> THullDsHeapItTest::HeapForwardIteratorAllEntities [GOOD] >> THullDsHeapItTest::HeapBackwardIteratorAllEntities [GOOD] >> THullDsHeapItTest::HeapLevelSliceForwardIteratorBenchmark [GOOD] >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark >> THullDsGenericNWayIt::ForwardIteration [GOOD] >> THullDsGenericNWayIt::BackwardIteration [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestModifyHost [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/validators/ut/unittest >> TBlobStorageDiskBlob::Merge [GOOD] >> TBlobStorageHullDecimal::TestMkDecimal [GOOD] |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> BootstrapTabletsValidatorTests::TestCompactionBroker [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/validators/ut/unittest |90.0%| [LD] {RESULT} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/validators/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestMult [GOOD] >> THullDsHeapItTest::HeapAppendixTreeForwardIteratorBenchmark [GOOD] >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark |90.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageLinearTrackBar::TestLinearTrackBarWithDecimal [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapBackwardIteratorAllEntities [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageDiskBlob::CreateIterate [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestToUi64 [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsGenericNWayIt::BackwardIteration [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/validators/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageHullDecimal::TestMkDecimal [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/validators/ut/unittest >> NameserviceConfigValidatorTests::TestDuplicatingAddrPort [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> TBlobStorageKeyBarrierTest::ParseTest [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/validators/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapLevelSliceBackwardIteratorBenchmark [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest >> THullDsHeapItTest::HeapAppendixTreeBackwardIteratorBenchmark [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/base/ut/unittest |90.0%| [TA] $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBsOther1::PoisonPill [GOOD] >> TBsOther1::ChaoticParallelWrite |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |90.0%| [TA] {RESULT} $(B)/ydb/core/cms/console/validators/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard >> BlobDepotWithTestShard::PlainGroup [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_testshard/unittest >> BlobDepotWithTestShard::PlainGroup [GOOD] >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_testshard/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoTest::SubgroupPartLayout [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/groupinfo/ut/unittest |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |90.0%| [TM] {RESULT} ydb/core/blobstorage/ut_testshard/unittest |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |90.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot >> TBsVDiskRepl3::ReplPerf [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl3::ReplPerf [GOOD] Test command err: 2025-12-23T18:24:00.695415Z :BS_SYNCER ERROR: guid_recovery.cpp:714: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:24:00.737116Z :BS_SYNCER ERROR: guid_recovery.cpp:767: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7695987167055385267] 2025-12-23T18:24:00.934351Z :BS_SYNCER ERROR: blobstorage_osiris.cpp:203: PDiskId# 4 VDISK[0:_:0:1:1]: (0) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:24:11.306992Z :BS_SYNCER ERROR: guid_recovery.cpp:714: PDiskId# 4 VDISK[0:_:0:3:0]: (0) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:24:11.500668Z :BS_SYNCER ERROR: guid_recovery.cpp:767: PDiskId# 4 VDISK[0:_:0:3:0]: (0) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7017763127727070610] 2025-12-23T18:24:12.551035Z :BS_SYNCER ERROR: blobstorage_osiris.cpp:203: PDiskId# 4 VDISK[0:_:0:3:0]: (0) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:24:30.079273Z :BS_SYNCER ERROR: guid_recovery.cpp:714: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:24:30.127682Z :BS_SYNCER ERROR: guid_recovery.cpp:767: PDiskId# 4 VDISK[0:_:0:1:1]: (0) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15057756634591995283] 2025-12-23T18:24:30.243107Z :BS_SYNCER ERROR: blobstorage_osiris.cpp:203: PDiskId# 4 VDISK[0:_:0:1:1]: (0) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest >> BlobDepot::BasicPutAndGet |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> 
TBsLocalRecovery::StartStopNotEmptyDB [GOOD] >> TBsLocalRecovery::WriteRestartRead >> TStateStorageConfig::TestReplicaSelection [GOOD] >> TStateStorageConfig::TestMultiReplicaFailDomains >> TBlobStorageHullFresh::AppendixPerf [GOOD] >> TBlobStorageHullFresh::AppendixPerf_Tune >> BlobDepot::BasicPutAndGet [GOOD] >> BlobDepot::TestBlockedEvGetRequest >> TRUCalculatorTests::TestReadTable [GOOD] >> TRUCalculatorTests::TestBulkUpsert [GOOD] >> BlobDepot::TestBlockedEvGetRequest [GOOD] >> BlobDepot::BasicRange |90.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TRUCalculatorTests::TestBulkUpsert [GOOD] |90.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TBsDbStat::ChaoticParallelWrite_DbStat [GOOD] >> TBsHuge::Simple >> TBsLocalRecovery::WriteRestartRead [GOOD] >> TBsLocalRecovery::MultiPutWriteRestartRead |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |90.0%| [TS] {RESULT} ydb/core/tx/schemeshard/ut_ru_calculator/unittest |90.0%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load >> BlobDepot::BasicRange [GOOD] >> BlobDepot::BasicDiscover >> TBsHuge::Simple [GOOD] >> TBsHuge::SimpleErasureNone >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBsLocalRecovery::MultiPutWriteRestartRead [GOOD] >> TBsLocalRecovery::MultiPutWriteRestartReadHuge >> BlobDepot::BasicDiscover [GOOD] >> BlobDepot::BasicBlock |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence >> TBsHuge::SimpleErasureNone [GOOD] >> TBsLocalRecovery::ChaoticWriteRestart |90.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |90.0%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBsOther1::ChaoticParallelWrite [GOOD] >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload >> GroupedMemoryLimiter::Simplest [GOOD] >> GroupedMemoryLimiter::Simple [GOOD] >> GroupedMemoryLimiter::CommonUsage [GOOD] >> GroupedMemoryLimiter::Update [GOOD] >> BlobDepot::BasicBlock [GOOD] >> BlobDepot::BasicCollectGarbage |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |90.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/limiter/grouped_memory/ut/unittest >> GroupedMemoryLimiter::Update [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=1;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=1;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=1;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=1;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=50;delta=50; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=2;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=2;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=100;delta=50; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=2;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=2;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=3;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=50;delta=50; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=1;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=1;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=1;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:43;event=common_forced_group;count=1;external_group_id=2; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=3;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=100;delta=50; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=3;wait=0;external_group_id=2;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=3;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=2;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=2;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=1;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=1;min_group=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=2;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=2;min_group=NO_VALUE_OPTIONAL; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:73;event=scope_cleaned;process_id=0;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=50;delta=50; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=50; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=1;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=1;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=4;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=4;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=10;delta=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=2;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=2;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=5;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=3;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=3;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=6;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=7;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=7;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=1010;delta=1000; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=7;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=7;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=1;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=1;min_group=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:40;event=forced_group;count=1;external_group_id=2; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=5;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=2010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=4;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=4;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=2;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=2;min_group=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=3;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:40;event=forced_group;count=1;external_group_id=3; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=6;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=3010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=5;wait=0;external_group_id=2;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=5;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=3;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=3;min_group=NO_VALUE_OPTIONAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=6;wait=0;external_group_id=3;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=6;stage=GLOBAL; 
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:73;event=scope_cleaned;process_id=0;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=2010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=1010;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=10;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=1;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=1;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=8;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=8;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=1000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=9;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=9;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=2000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=2;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=2;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=10;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=3;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=3;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=11;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=1;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=1;min_group=2; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:40;event=forced_group;count=1;external_group_id=2; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=10;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=3000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=9;wait=0;external_group_id=1;allocation_status=Allocated; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=9;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=2;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=2;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=2;min_group=3; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=3;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:40;event=forced_group;count=1;external_group_id=3; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=11;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=4000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=10;wait=0;external_group_id=2;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=10;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=3;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=3;min_group=NO_VALUE_OPTIONAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=11;wait=0;external_group_id=3;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=11;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=8;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=8;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:73;event=scope_cleaned;process_id=0;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=3000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=2000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=1000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=1;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=1;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=12;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=12;stage=GLOBAL; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=1000;delta=1000; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:16;event=register_group;external_process_id=0;external_group_id=3;size=1;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:157;event=register_group;external_group_id=3;min_group=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=allocation.cpp:17;event=add;id=13;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:100;name=GLOBAL;event=update;usage=1000;waiting=10;allocated=1;from=1000;to=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=0;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:27;event=try_allocation;limit=1;external_process_id=0;external_scope_id=0;forced_external_group_id=1;is_priority_process=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=group.cpp:43;event=common_forced_group;count=1;external_group_id=3; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:27;event=allocated;allocation_id=13;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:66;name=GLOBAL;event=allocate;usage=20;delta=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=3;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=3;min_group=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=13;wait=0;external_group_id=3;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=13;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=manager.cpp:26;event=unregister_group;external_process_id=0;external_group_id=1;size=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:145;event=remove_group;external_group_id=1;min_group=NO_VALUE_OPTIONAL; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=process.h:135;event=allocation_unregister;allocation_id=12;wait=0;external_group_id=1;allocation_status=Allocated; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=12;stage=GLOBAL; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2700;fline=process.h:73;event=scope_cleaned;process_id=0;external_scope_id=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=10;delta=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=10; >> TBsLocalRecovery::MultiPutWriteRestartReadHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX |90.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/limiter/grouped_memory/ut/unittest |90.0%| [TS] {RESULT} ydb/core/tx/limiter/grouped_memory/ut/unittest >> NodeWardenDsProxyConfigRetrieval::Disconnect >> BlobDepot::BasicCollectGarbage [GOOD] >> BlobDepot::VerifiedRandom >> RangeOps::Intersection [GOOD] >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |90.0%| [LD] 
{RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut >> TBsVDiskOutOfSpace::WriteUntilYellowZone [GOOD] >> TBsVDiskRange::RangeGetFromEmptyDB ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_range_ops/unittest >> RangeOps::Intersection [GOOD] Test command err: first [(Uint64 : NULL, Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 5)] result [(Uint64 : 10) ; (Uint64 : 5)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 17)] result [(Uint64 : 15) ; (Uint64 : 17)] correct [(Uint64 : 15) ; (Uint64 : 17)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 20)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 30)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 20)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 30)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 25) ; (Uint64 : 30)] result [(Uint64 : 25) ; (Uint64 : 20)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result ((Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result ((Uint64 : 10) 
; (Uint64 : 15)] correct ((Uint64 : 10) ; (Uint64 : 15)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)) result ((Uint64 : 10) ; (Uint64 : 15)) correct ((Uint64 : 10) ; (Uint64 : 15)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; (Uint64 : 20)) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 20)) correct [(Uint64 : 1) ; (Uint64 : 20)) first [(Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 10) ; (Uint64 : 20)) correct [(Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; (Uint64 : 10)] second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 10)] correct [(Uint64 : 1) ; (Uint64 : 10)] first [(Uint64 : NULL) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 1) ; (Uint64 : 10)) correct [(Uint64 : 1) ; (Uint64 : 10)) |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_range_ops/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] Test command err: Caught NodeWarden registration actorId# [1:11:2058] 2025-12-23T18:24:53.350350Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:24:53.380038Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/8ptz/0050d6/r3tmp/tmpZV7mN0/static.dat" PDiskGuid: 9153998141671857446 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 9153998141671857446 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 9153998141671857446 } } } } AvailabilityDomains: 0 } 2025-12-23T18:24:53.380333Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:/home/runner/.ya/build/build_root/8ptz/0050d6/r3tmp/tmpZV7mN0/static.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:24:53.381060Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:24:53.381406Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 9153998141671857446 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-12-23T18:24:53.382441Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 9153998141671857446 2025-12-23T18:24:53.382498Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true 
GroupInfoGeneration# 1 2025-12-23T18:24:53.383651Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:29:2076] ControllerId# 72057594037932033 2025-12-23T18:24:53.383705Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:24:53.383798Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:24:53.383969Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:24:53.400181Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:24:53.400783Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:24:53.486959Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:24:53.488611Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:24:53.488670Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-12-23T18:24:53.508429Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:24:53.517481Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-12-23T18:24:53.517538Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2025-12-23T18:24:53.517608Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2025-12-23T18:24:53.535590Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\374\026\377\364S\374L\305-\226\302\244t\327P7#\321\036%" } 2025-12-23T18:24:53.542928Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-12-23T18:24:53.543003Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2025-12-23T18:24:53.553811Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/8ptz/0050d6/r3tmp/tmpZV7mN0/static.dat" PDiskGuid: 9153998141671857446 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 9153998141671857446 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 9153998141671857446 } } } } AvailabilityDomains: 0 } 2025-12-23T18:24:53.554006Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-12-23T18:24:53.563660Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 
0 2025-12-23T18:24:53.607585Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2025-12-23T18:24:53.608691Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2025-12-23T18:24:53.668607Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:24:53.693364Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:24:53.693772Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:24:53.694470Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:24:53.695880Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:24:53.696489Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:24:53.696527Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:24:53.696730Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:24:53.771166Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:24:53.771466Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:24:53.771634Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:24:53.771763Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:24:53.771862Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:24:53.772066Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:24:53.823378Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:24:53.823559Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:24:53.843251Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:24:53.843367Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:24:53.843482Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:24:53.843571Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:24:53.843671Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:24:53.843750Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:24:53.843790Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:24:53.843833Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:24:53.863219Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:24:53.863330Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:24:53.887245Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:24:53.887380Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:24:53.888632Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:24:53.888668Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:24:53.932858Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:24:53.932948Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:24:53.933350Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639257 Sender# [1:92:2123] SessionId# [0:0:0] Cookie# 0 Pipe connected clientId# [1:29:2076] 2025-12-23T18:24:53.933644Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:53} TEvTabletPipe::TEvClientConnected OK ClientId# [1:29:2076] ServerId# [1:125:2147] TabletId# 72057594037932033 PipeClientId# [1:29:2076] 2025-12-23T18:24:53.934128Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 9153998141671857446 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-12-23T18:24:53.934219Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-12-23T18:24:53.943013Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:/home/runner/.ya/build/build_root/8ptz/0050d6/r ... 
Cookie# 0 === Waiting for pipe to establish === === Breaking pipe === === Sending put === Pipe disconnected clientId# [1:29:2076] 2025-12-23T18:24:53.988725Z node 1 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [1:29:2076] ServerId# [1:125:2147] TabletId# 72057594037932033 PipeClientId# [1:29:2076] 2025-12-23T18:24:53.988807Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:139:2160] ControllerId# 72057594037932033 2025-12-23T18:24:53.988839Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:24:53.989220Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:142} HandleForwarded GroupId# 2147483648 EnableProxyMock# false NoGroup# false 2025-12-23T18:24:53.989281Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 2147483648 HasGroupInfo# false GroupInfoGeneration# 2025-12-23T18:24:53.989315Z node 1 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:285} RequestGroupConfig GroupId# 2147483648 2025-12-23T18:24:53.989533Z node 1 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2147483648 2025-12-23T18:24:53.989631Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2025-12-23T18:24:53.989867Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2025-12-23T18:24:54.000636Z node 1 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2147483648 Pipe connected clientId# [1:139:2160] 2025-12-23T18:24:54.001244Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:53} TEvTabletPipe::TEvClientConnected OK ClientId# [1:139:2160] ServerId# [1:150:2169] TabletId# 72057594037932033 PipeClientId# [1:139:2160] 2025-12-23T18:24:54.001454Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 9153998141671857446 Status: READY OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-12-23T18:24:54.001532Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-12-23T18:24:54.001827Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-12-23T18:24:54.001992Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:11:2058] Cookie# 0 Recipient# [1:150:2169] RecipientRewrite# [1:92:2123] Request# {NodeID: 1 GroupIDs: 2147483648 } StopGivingGroups# false 2025-12-23T18:24:54.002070Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 2147483648 } 2025-12-23T18:24:54.002243Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-12-23T18:24:54.002367Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 0 } } 
2025-12-23T18:24:54.035976Z node 1 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:845} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/8ptz/0050d6/r3tmp/tmpZV7mN0/static.dat" PDiskGuid: 9153998141671857446 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 9153998141671857446 } VDiskKind: Default StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 9153998141671857446 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "ad29f747-a9fddbf7-c61c8423-adf5f12" Comprehensive: true AvailDomain: 0 UpdateSyncers: true } 2025-12-23T18:24:54.036225Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:863} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/8ptz/0050d6/r3tmp/tmpZV7mN0/static.dat" PDiskGuid: 9153998141671857446 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 9153998141671857446 } VDiskKind: Default StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 9153998141671857446 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "ad29f747-a9fddbf7-c61c8423-adf5f12" Comprehensive: true AvailDomain: 0 UpdateSyncers: true } 2025-12-23T18:24:54.036397Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# true Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/8ptz/0050d6/r3tmp/tmpZV7mN0/static.dat" PDiskGuid: 9153998141671857446 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 9153998141671857446 } VDiskKind: Default StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 9153998141671857446 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-12-23T18:24:54.036559Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 
OldSlotSizeInUnits# 0 2025-12-23T18:24:54.036634Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-12-23T18:24:54.036731Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 9153998141671857446 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-12-23T18:24:54.037580Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 9153998141671857446 2025-12-23T18:24:54.048425Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-12-23T18:24:54.055023Z node 1 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:845} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 9153998141671857446 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2025-12-23T18:24:54.055202Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:863} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 9153998141671857446 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2025-12-23T18:24:54.055301Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 9153998141671857446 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-12-23T18:24:54.055445Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-12-23T18:24:54.055518Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-12-23T18:24:54.056821Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 9153998141671857446 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2025-12-23T18:24:54.066925Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2025-12-23T18:24:54.087128Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) 
Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } } 2025-12-23T18:24:54.128748Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2025-12-23T18:24:54.129604Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 9153998141671857446 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-12-23T18:24:54.130291Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2025-12-23T18:24:54.130536Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 9153998141671857446 Status: READY OnlyPhantomsRemain: false } } 2025-12-23T18:24:54.851276Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34189869056 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 17041457152 State: Normal SlotCount: 2 SlotSizeInUnits: 0 PDiskUsage: 0.10449320794148381 } } |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas3of4 [GOOD] >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD] >> TBsVDiskRange::RangeGetFromEmptyDB [GOOD] |90.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |90.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |90.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_range_ops/unittest |90.0%| [TM] {RESULT} ydb/core/blobstorage/nodewarden/ut_sequence/unittest |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD] Test command err: 2025-12-23T18:24:38.439143Z :BS_SYNCLOG ERROR: blobstorage_synclog.cpp:161: PDiskId# 1 VDISK[0:_:0:0:0]: (0) Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:1:1] targetVDisk# [0:1:0:0:0] 2025-12-23T18:24:38.439215Z :BS_SYNCLOG ERROR: blobstorage_synclog.cpp:161: PDiskId# 1 VDISK[0:_:0:0:0]: (0) Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:2:0] targetVDisk# [0:1:0:0:0] 2025-12-23T18:24:38.439274Z :BS_SYNCLOG ERROR: blobstorage_synclog.cpp:161: PDiskId# 1 VDISK[0:_:0:0:0]: (0) Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:2:1] targetVDisk# [0:1:0:0:0] 2025-12-23T18:24:38.439323Z :BS_SYNCLOG ERROR: blobstorage_synclog.cpp:161: PDiskId# 1 VDISK[0:_:0:0:0]: (0) Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:3:0] targetVDisk# [0:1:0:0:0] 2025-12-23T18:24:38.439363Z :BS_SYNCLOG ERROR: blobstorage_synclog.cpp:161: PDiskId# 1 VDISK[0:_:0:0:0]: (0) Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:3:1] targetVDisk# [0:1:0:0:0] 2025-12-23T18:24:38.439407Z :BS_SYNCLOG ERROR: blobstorage_synclog.cpp:161: PDiskId# 1 VDISK[0:_:0:0:0]: (0) Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:0:1] targetVDisk# [0:1:0:0:0] 2025-12-23T18:24:38.439431Z :BS_SYNCLOG ERROR: blobstorage_synclog.cpp:161: PDiskId# 1 
VDISK[0:_:0:0:0]: (0) Handle(TEvSyncLogRead): locked; sourceVDisk# [0:1:0:1:0] targetVDisk# [0:1:0:0:0] |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh |90.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest >> TMemoryController::Counters >> BlobDepot::VerifiedRandom [GOOD] >> BlobDepot::LoadPutAndRead >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum >> TFetchRequestTests::HappyWay >> DataShardDiskQuotas::DiskQuotaExceeded >> TBsLocalRecovery::ChaoticWriteRestart [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased >> DataShardReplication::SimpleApplyChanges >> SequenceProxy::Basics >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> Vacuum::Vacuum >> TMemoryController::Counters [GOOD] >> TMemoryController::Counters_HardLimit |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |90.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> SequenceProxy::Basics [GOOD] >> SequenceProxy::DropRecreate >> BlobDepot::LoadPutAndRead [GOOD] >> 
BlobDepot::DecommitPutAndRead >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased >> TTxDataShardBuildFulltextDictScan::BadRequest >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize [GOOD] |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |90.1%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut >> TSequence::CreateTableWithDefaultFromSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize [GOOD] Test command err: 2025-12-23T18:25:10.180651Z :BS_VDISK_GET CRIT: query_base.h:102: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVGetResult: Result message is too large; size# 67108001 orig# {ExtrQuery# [5000:1:0:0:0:100000:1] sh# 257 sz# 99743 c# 0}{ExtrQuery# [5000:1:1:0:0:100000:1] sh# 257 sz# 99743 c# 1}{ExtrQuery# [5000:1:2:0:0:100000:1] sh# 257 sz# 99743 c# 2}{ExtrQuery# [5000:1:3:0:0:100000:1] sh# 257 sz# 99743 c# 3}{ExtrQuery# [5000:1:4:0:0:100000:1] sh# 257 sz# 99743 c# 4}{ExtrQuery# [5000:1:5:0:0:100000:1] sh# 257 sz# 99743 c# 5}{ExtrQuery# [5000:1:6:0:0:100000:1] sh# 257 sz# 99743 c# 6}{ExtrQuery# [5000:1:7:0:0:100000:1] sh# 257 sz# 99743 c# 7}{ExtrQuery# [5000:1:8:0:0:100000:1] sh# 257 sz# 99743 c# 8}{ExtrQuery# [5000:1:9:0:0:100000:1] sh# 257 sz# 99743 c# 9}{ExtrQuery# [5000:1:10:0:0:100000:1] sh# 257 sz# 99743 c# 10}{ExtrQuery# [5000:1:11:0:0:100000:1] sh# 257 sz# 99743 c# 11}{ExtrQuery# [5000:1:12:0:0:100000:1] sh# 257 sz# 99743 c# 12}{ExtrQuery# [5000:1:13:0:0:100000:1] sh# 257 sz# 99743 c# 13}{ExtrQuery# [5000:1:14:0:0:100000:1] sh# 257 sz# 99743 c# 14}{ExtrQuery# [5000:1:15:0:0:100000:1] sh# 257 sz# 99743 c# 15}{ExtrQuery# [5000:1:16:0:0:100000:1] sh# 257 sz# 99743 c# 16}{ExtrQuery# [5000:1:17:0:0:100000:1] sh# 257 sz# 99743 c# 17}{ExtrQuery# [5000:1:18:0:0:100000:1] sh# 257 sz# 99743 c# 18}{ExtrQuery# [5000:1:19:0:0:100000:1] sh# 257 sz# 99743 c# 19}{ExtrQuery# [5000:1:20:0:0:100000:1] sh# 257 sz# 99743 c# 20}{ExtrQuery# [5000:1:21:0:0:100000:1] sh# 257 sz# 99743 c# 21}{ExtrQuery# [5000:1:22:0:0:100000:1] sh# 257 sz# 99743 c# 22}{ExtrQuery# [5000:1:23:0:0:100000:1] sh# 257 sz# 99743 c# 23}{ExtrQuery# [5000:1:24:0:0:100000:1] sh# 257 sz# 99743 c# 24}{ExtrQuery# [5000:1:25:0:0:100000:1] sh# 257 sz# 99743 c# 25}{ExtrQuery# [5000:1:26:0:0:100000:1] sh# 257 sz# 99743 c# 26}{ExtrQuery# [5000:1:27:0:0:100000:1] sh# 257 sz# 99743 c# 27}{ExtrQuery# [5000:1:28:0:0:100000:1] sh# 257 sz# 99743 c# 28}{ExtrQuery# [5000:1:29:0:0:100000:1] sh# 257 sz# 99743 c# 29}{ExtrQuery# [5000:1:30:0:0:100000:1] sh# 257 sz# 99743 c# 30}{ExtrQuery# [5000:1:31:0:0:100000:1] sh# 257 sz# 99743 c# 31}{ExtrQuery# [5000:1:32:0:0:100000:1] sh# 257 sz# 99743 c# 32}{ExtrQuery# [5000:1:33:0:0:100000:1] sh# 257 sz# 99743 c# 33}{ExtrQuery# [5000:1:34:0:0:100000:1] sh# 257 sz# 99743 c# 34}{ExtrQuery# [5000:1:35:0:0:100000:1] sh# 257 sz# 99743 c# 35}{ExtrQuery# [5000:1:36:0:0:100000:1] sh# 257 sz# 99743 c# 36}{ExtrQuery# [5000:1:37:0:0:100000:1] sh# 257 sz# 99743 c# 37}{ExtrQuery# [5000:1:38:0:0:100000:1] sh# 257 sz# 99743 c# 38}{ExtrQuery# [5000:1:39:0:0:100000:1] sh# 257 sz# 99743 c# 39}{ExtrQuery# [5000:1:40:0:0:100000:1] sh# 257 sz# 99743 c# 40}{ExtrQuery# [5000:1:41:0:0:100000:1] sh# 257 sz# 99743 c# 41}{ExtrQuery# [5000:1:42:0:0:100000:1] sh# 257 sz# 99743 c# 42}{ExtrQuery# [5000:1:43:0:0:100000:1] sh# 257 sz# 99743 
c# 43}{ExtrQuery# [5000:1:44:0:0:100000:1] sh# 257 sz# 99743 c# 44}{ExtrQuery# [5000:1:45:0:0:100000:1] sh# 257 sz# 99743 c# 45}{ExtrQuery# [5000:1:46:0:0:100000:1] sh# 257 sz# 99743 c# 46}{ExtrQuery# [5000:1:47:0:0:100000:1] sh# 257 sz# 99743 c# 47}{ExtrQuery# [5000:1:48:0:0:100000:1] sh# 257 sz# 99743 c# 48}{ExtrQuery# [5000:1:49:0:0:100000:1] sh# 257 sz# 99743 c# 49}{ExtrQuery# [5000:1:50:0:0:100000:1] sh# 257 sz# 99743 c# 50}{ExtrQuery# [5000:1:51:0:0:100000:1] sh# 257 sz# 99743 c# 51}{ExtrQuery# [5000:1:52:0:0:100000:1] sh# 257 sz# 99743 c# 52}{ExtrQuery# [5000:1:53:0:0:100000:1] sh# 257 sz# 99743 c# 53}{ExtrQuery# [5000:1:54:0:0:100000:1] sh# 257 sz# 99743 c# 54}{ExtrQuery# [5000:1:55:0:0:100000:1] sh# 257 sz# 99743 c# 55}{ExtrQuery# [5000:1:56:0:0:100000:1] sh# 257 sz# 99743 c# 56}{ExtrQuery# [5000:1:57:0:0:100000:1] sh# 257 sz# 99743 c# 57}{ExtrQuery# [5000:1:58:0:0:100000:1] sh# 257 sz# 99743 c# 58}{ExtrQuery# [5000:1:59:0:0:100000:1] sh# 257 sz# 99743 c# 59}{ExtrQuery# [5000:1:60:0:0:100000:1] sh# 257 sz# 99743 c# 60}{ExtrQuery# [5000:1:61:0:0:100000:1] sh# 257 sz# 99743 c# 61}{ExtrQuery# [5000:1:62:0:0:100000:1] sh# 257 sz# 99743 c# 62}{ExtrQuery# [5000:1:63:0:0:100000:1] sh# 257 sz# 99743 c# 63}{ExtrQuery# [5000:1:64:0:0:100000:1] sh# 257 sz# 99743 c# 64}{ExtrQuery# [5000:1:65:0:0:100000:1] sh# 257 sz# 99743 c# 65}{ExtrQuery# [5000:1:66:0:0:100000:1] sh# 257 sz# 99743 c# 66}{ExtrQuery# [5000:1:67:0:0:100000:1] sh# 257 sz# 99743 c# 67}{ExtrQuery# [5000:1:68:0:0:100000:1] sh# 257 sz# 99743 c# 68}{ExtrQuery# [5000:1:69:0:0:100000:1] sh# 257 sz# 99743 c# 69}{ExtrQuery# [5000:1:70:0:0:100000:1] sh# 257 sz# 99743 c# 70}{ExtrQuery# [5000:1:71:0:0:100000:1] sh# 257 sz# 99743 c# 71}{ExtrQuery# [5000:1:72:0:0:100000:1] sh# 257 sz# 99743 c# 72}{ExtrQuery# [5000:1:73:0:0:100000:1] sh# 257 sz# 99743 c# 73}{ExtrQuery# [5000:1:74:0:0:100000:1] sh# 257 sz# 99743 c# 74}{ExtrQuery# [5000:1:75:0:0:100000:1] sh# 257 sz# 99743 c# 75}{ExtrQuery# [5000:1:76:0:0:100000:1] sh# 257 sz# 99743 c# 76}{ExtrQuery# [5000:1:77:0:0:100000:1] sh# 257 sz# 99743 c# 77}{ExtrQuery# [5000:1:78:0:0:100000:1] sh# 257 sz# 99743 c# 78}{ExtrQuery# [5000:1:79:0:0:100000:1] sh# 257 sz# 99743 c# 79}{ExtrQuery# [5000:1:80:0:0:100000:1] sh# 257 sz# 99743 c# 80}{ExtrQuery# [5000:1:81:0:0:100000:1] sh# 257 sz# 99743 c# 81}{ExtrQuery# [5000:1:82:0:0:100000:1] sh# 257 sz# 99743 c# 82}{ExtrQuery# [5000:1:83:0:0:100000:1] sh# 257 sz# 99743 c# 83}{ExtrQuery# [5000:1:84:0:0:100000:1] sh# 257 sz# 99743 c# 84}{ExtrQuery# [5000:1:85:0:0:100000:1] sh# 257 sz# 99743 c# 85}{ExtrQuery# [5000:1:86:0:0:100000:1] sh# 257 sz# 99743 c# 86}{ExtrQuery# [5000:1:87:0:0:100000:1] sh# 257 sz# 99743 c# 87}{ExtrQuery# [5000:1:88:0:0:100000:1] sh# 257 sz# 99743 c# 88}{ExtrQuery# [5000:1:89:0:0:100000:1] sh# 257 sz# 99743 c# 89}{ExtrQuery# [5000:1:90:0:0:100000:1] sh# 257 sz# 99743 c# 90}{ExtrQuery# [5000:1:91:0:0:100000:1] sh# 257 sz# 99743 c# 91}{ExtrQuery# [5000:1:92:0:0:100000:1] sh# 257 sz# 99743 c# 92}{ExtrQuery# [5000:1:93:0:0:100000:1] sh# 257 sz# 99743 c# 93}{ExtrQuery# [5000:1:94:0:0:100000:1] sh# 257 sz# 99743 c# 94}{ExtrQuery# [5000:1:95:0:0:100000:1] sh# 257 sz# 99743 c# 95}{ExtrQuery# [5000:1:96:0:0:100000:1] sh# 257 sz# 99743 c# 96}{ExtrQuery# [5000:1:97:0:0:100000:1] sh# 257 sz# 99743 c# 97}{ExtrQuery# [5000:1:98:0:0:100000:1] sh# 257 sz# 99743 c# 98}{ExtrQuery# [5000:1:99:0:0:100000:1] sh# 257 sz# 99743 c# 99}{ExtrQuery# [5000:1:100:0:0:100000:1] sh# 257 sz# 99743 c# 100}{ExtrQuery# [5000:1:101:0:0:100000:1] sh# 257 sz# 99743 c# 
101}{ExtrQuery# [5000:1:102:0:0:100000:1] sh# 257 sz# 99743 c# 102}{ExtrQuery# [5000:1:103:0:0:100000:1] sh# 257 sz# 99743 c# 103}{ExtrQuery# [5000:1:104:0:0:100000:1] sh# 257 sz# 99743 c# 104}{ExtrQuery# [5000:1:105:0:0:100000:1] sh# 257 sz# 99743 c# 105}{ExtrQuery# [5000:1:106:0:0:100000:1] sh# 257 sz# 99743 c# 106}{ExtrQuery# [5000:1:107:0:0:100000:1] sh# 257 sz# 99743 c# 107}{ExtrQuery# [5000:1:108:0:0:100000:1] sh# 257 sz# 99743 c# 108}{ExtrQuery# [5000:1:109:0:0:100000:1] sh# 257 sz# 99743 c# 109}{ExtrQuery# [5000:1:110:0:0:100000:1] sh# 257 sz# 99743 c# 110}{ExtrQuery# [5000:1:111:0:0:100000:1] sh# 257 sz# 99743 c# 111}{ExtrQuery# [5000:1:112:0:0:100000:1] sh# 257 sz# 99743 c# 112}{ExtrQuery# [5000:1:113:0:0:100000:1] sh# 257 sz# 99743 c# 113}{ExtrQuery# [5000:1:114:0:0:100000:1] sh# 257 sz# 99743 c# 114}{ExtrQuery# [5000:1:115:0:0:100000:1] sh# 257 sz# 99743 c# 115}{ExtrQuery# [5000:1:116:0:0:100000:1] sh# 257 sz# 99743 c# 116}{ExtrQuery# [5000:1:117:0:0:100000:1] sh# 257 sz# 99743 c# 117}{ExtrQuery# [5000:1:118:0:0:100000:1] sh# 257 sz# 99743 c# 118}{ExtrQuery# [5000:1:119:0:0:100000:1] sh# 257 sz# 99743 c# 119}{ExtrQuery# [5000:1:120:0:0:100000:1] sh# 257 sz# 99743 c# 120}{ExtrQuery# [5000:1:121:0:0:100000:1] sh# 257 sz# 99743 c# 121}{ExtrQuery# [5000:1:122:0:0:100000:1] sh# 257 sz# 99743 c# 122}{ExtrQuery# [5000:1:123:0:0:100000:1] sh# 257 sz# 99743 c# 123}{ExtrQuery# [5000:1:124:0:0:100000:1] sh# 257 sz# 99743 c# 124}{ExtrQuery# [5000:1:125:0:0:100000:1] sh# 257 sz# 99743 c# 125}{ExtrQuery# [5000:1:126:0:0:100000:1] sh# 257 sz# 99743 c# 126}{ExtrQuery# [5000:1:127:0:0:100000:1] sh# 257 sz# 99743 c# 127}{ExtrQuery# [5000:1:128:0:0:100000:1] sh# 257 sz# 99743 c# 128}{ExtrQuery# [5000:1:129:0:0:100000:1] sh# 257 sz# 99743 c# 129}{ExtrQuery# [5000:1:130:0:0:100000:1] sh# 257 sz# 99743 c# 130}{ExtrQuery# [5000:1:131:0:0:100000:1] sh# 257 sz# 99743 c# 131}{ExtrQuery# [5000:1:132:0:0:100000:1] sh# 257 sz# 99743 c# 132}{ExtrQuery# [5000:1:133:0:0:100000:1] sh# 257 sz# 99743 c# 133}{ExtrQuery# [5000:1:134:0:0:100000:1] sh# 257 sz# 99743 c# 134}{ExtrQuery# [5000:1:135:0:0:100000:1] sh# 257 sz# 99743 c# 135}{ExtrQuery# [5000:1:136:0:0:100000:1] sh# 257 sz# 99743 c# 136}{ExtrQuery# [5000:1:137:0:0:100000:1] sh# 257 sz# 99743 c# 137}{ExtrQuery# [5000:1:138:0:0:100000:1] sh# 257 sz# 99743 c# 138}{ExtrQuery# [5000:1:139:0:0:100000:1] sh# 257 sz# 99743 c# 139}{ExtrQuery# [5000:1:140:0:0:100000:1] sh# 257 sz# 99743 c# 140}{ExtrQuery# [5000:1:141:0:0:100000:1] sh# 257 sz# 99743 c# 141}{ExtrQuery# [5000:1:142:0:0:100000:1] sh# 257 sz# 99743 c# 142}{ExtrQuery# [5000:1:143:0:0:100000:1] sh# 257 sz# 99743 c# 143}{ExtrQuery# [5000:1:144:0:0:100000:1] sh# 257 sz# 99743 c# 144}{ExtrQuery# [5000:1:145:0:0:100000:1] sh# 257 sz# 99743 c# 145}{ExtrQuery# [5000:1:146:0:0:100000:1] sh# 257 sz# 99743 c# 146}{ExtrQuery# [5000:1:147:0:0:100000:1] sh# 257 sz# 99743 c# 147}{ExtrQuery# [5000:1:148:0:0:100000:1] sh# 257 sz# 99743 c# 148}{ExtrQuery# [5000:1:149:0:0:100000:1] sh# 257 sz# 99743 c# 149}{ExtrQuery# [5000:1:150:0:0:100000:1] sh# 257 sz# 99743 c# 150}{ExtrQuery# [5000:1:151:0:0:100000:1] sh# 257 sz# 99743 c# 151}{ExtrQuery# [5000:1:152:0:0:100000:1] sh# 257 sz# 99743 c# 152}{ExtrQuery# [5000:1:153:0:0:100000:1] sh# 257 sz# 99743 c# 153}{ExtrQuery# [5000:1:154:0:0:100000:1] sh# 257 sz# 99743 c# 154}{ExtrQuery# [5000:1:155:0:0:100000:1] sh# 257 sz# 99743 c# 155}{ExtrQuery# [5000:1:156:0:0:100000:1] sh# 257 sz# 99743 c# 156}{ExtrQuery# [5000:1:157:0:0:100000:1] sh# 257 sz# 99743 c# 157}{ExtrQuery# 
[5000:1:158:0:0:100000:1] sh# 257 sz# 99743 c# 158}{ExtrQuery# [5000:1:159:0:0:100000:1] sh# 257 sz# 99743 c# 159}{ExtrQuery# [5000:1:160:0:0:100000:1] sh# 257 sz# 99743 c# 160}{ExtrQuery# [5000:1:161:0:0:100000:1] sh# 257 sz# 99743 c# 161}{ExtrQuery# [5000:1:162:0:0:100000:1] sh# 257 sz# 99743 c# 162}{ExtrQuery# [5000:1:163:0:0:100000:1] sh# 257 sz# 99743 c# 163}{ExtrQuery# [5000:1:164:0:0:100000:1] sh# 257 sz# 99743 c# 164}{ExtrQuery# [5000:1:165:0:0:100000:1] sh# 257 sz# 99743 c# 165}{ExtrQuery# [5000:1:166:0:0:100000:1] sh# 257 sz# 99743 c# 166}{ExtrQuery# [5000:1:167:0:0:100000:1] sh# 257 sz# 99743 c# 167}{ExtrQuery# [5000:1:168:0:0:100000:1] sh# 257 sz# 99743 c# 168}{ExtrQuery# [5000:1:169:0:0:100000:1] sh# 257 sz# 99743 c# 169}{ExtrQuery# [5000:1:170:0:0:100000:1] sh# 257 sz# 99743 c# 170}{ExtrQuery# [5000:1:171:0:0:100000:1] sh# 257 sz# 99743 c# 171}{ExtrQuery# [5000:1:172:0:0:100000:1] sh# 257 sz# 99743 c# 172}{ExtrQuery# [5000:1:173:0:0:100000:1] sh# 257 sz# 99743 c# 173}{ExtrQuery# [5000:1:174:0:0:100000:1] sh# 257 sz# 99743 c# 174}{ExtrQuery# [5000:1:175:0:0:100000:1] sh# 257 sz# 99743 c# 175}{ExtrQuery# [5000:1:176:0:0:100000:1] sh# 257 sz# 99743 c# 176}{ExtrQuery# [5000:1:177:0:0:100000:1] sh# 257 sz# 99743 c# 177}{ExtrQuery# [5000:1:178:0:0:100000:1] sh# 257 sz# 99743 c# 178}{ExtrQuery# [5000:1:179:0:0:100000:1] sh# 257 sz# 99743 c# 179}{ExtrQuery# [5000:1:180:0:0:100000:1] sh# 257 sz# 99743 c# 180}{ExtrQuery# [5000:1:181:0:0:100000:1] sh# 257 sz# 99743 c# 181}{ExtrQuery# [5000:1:182:0:0:100000:1] sh# 257 sz# 99743 c# 182}{ExtrQuery# [5000:1:183:0:0:100000:1] sh# 257 sz# 99743 c# 183}{ExtrQuery# [5000:1:184:0:0:100000:1] sh# 257 sz# 99743 c# 184}{ExtrQuery# [5000:1:185:0:0:100000:1] sh# 257 sz# 99743 c# 185}{ExtrQuery# [5000:1:186:0:0:100000:1] sh# 257 sz# 99743 c# 186}{ExtrQuery# [5000:1:187:0:0:100000:1] sh# 257 sz# 99743 c# 187}{ExtrQuery# [5000:1:188:0:0:100000:1] sh# 257 sz# 99743 c# 188}{ExtrQuery# [5000:1:189:0:0:100000:1] sh# 257 sz# 99743 c# 189}{ExtrQuery# [5000:1:190:0:0:100000:1] sh# 257 sz# 99743 c# 190}{ExtrQuery# [5000:1:191 ... 
sz# 99743 c# 484}{ExtrQuery# [5000:1:485:0:0:100000:1] sh# 257 sz# 99743 c# 485}{ExtrQuery# [5000:1:486:0:0:100000:1] sh# 257 sz# 99743 c# 486}{ExtrQuery# [5000:1:487:0:0:100000:1] sh# 257 sz# 99743 c# 487}{ExtrQuery# [5000:1:488:0:0:100000:1] sh# 257 sz# 99743 c# 488}{ExtrQuery# [5000:1:489:0:0:100000:1] sh# 257 sz# 99743 c# 489}{ExtrQuery# [5000:1:490:0:0:100000:1] sh# 257 sz# 99743 c# 490}{ExtrQuery# [5000:1:491:0:0:100000:1] sh# 257 sz# 99743 c# 491}{ExtrQuery# [5000:1:492:0:0:100000:1] sh# 257 sz# 99743 c# 492}{ExtrQuery# [5000:1:493:0:0:100000:1] sh# 257 sz# 99743 c# 493}{ExtrQuery# [5000:1:494:0:0:100000:1] sh# 257 sz# 99743 c# 494}{ExtrQuery# [5000:1:495:0:0:100000:1] sh# 257 sz# 99743 c# 495}{ExtrQuery# [5000:1:496:0:0:100000:1] sh# 257 sz# 99743 c# 496}{ExtrQuery# [5000:1:497:0:0:100000:1] sh# 257 sz# 99743 c# 497}{ExtrQuery# [5000:1:498:0:0:100000:1] sh# 257 sz# 99743 c# 498}{ExtrQuery# [5000:1:499:0:0:100000:1] sh# 257 sz# 99743 c# 499}{ExtrQuery# [5000:1:500:0:0:100000:1] sh# 257 sz# 99743 c# 500}{ExtrQuery# [5000:1:501:0:0:100000:1] sh# 257 sz# 99743 c# 501}{ExtrQuery# [5000:1:502:0:0:100000:1] sh# 257 sz# 99743 c# 502}{ExtrQuery# [5000:1:503:0:0:100000:1] sh# 257 sz# 99743 c# 503}{ExtrQuery# [5000:1:504:0:0:100000:1] sh# 257 sz# 99743 c# 504}{ExtrQuery# [5000:1:505:0:0:100000:1] sh# 257 sz# 99743 c# 505}{ExtrQuery# [5000:1:506:0:0:100000:1] sh# 257 sz# 99743 c# 506}{ExtrQuery# [5000:1:507:0:0:100000:1] sh# 257 sz# 99743 c# 507}{ExtrQuery# [5000:1:508:0:0:100000:1] sh# 257 sz# 99743 c# 508}{ExtrQuery# [5000:1:509:0:0:100000:1] sh# 257 sz# 99743 c# 509}{ExtrQuery# [5000:1:510:0:0:100000:1] sh# 257 sz# 99743 c# 510}{ExtrQuery# [5000:1:511:0:0:100000:1] sh# 257 sz# 99743 c# 511}{ExtrQuery# [5000:1:512:0:0:100000:1] sh# 257 sz# 99743 c# 512}{ExtrQuery# [5000:1:513:0:0:100000:1] sh# 257 sz# 99743 c# 513}{ExtrQuery# [5000:1:514:0:0:100000:1] sh# 257 sz# 99743 c# 514}{ExtrQuery# [5000:1:515:0:0:100000:1] sh# 257 sz# 99743 c# 515}{ExtrQuery# [5000:1:516:0:0:100000:1] sh# 257 sz# 99743 c# 516}{ExtrQuery# [5000:1:517:0:0:100000:1] sh# 257 sz# 99743 c# 517}{ExtrQuery# [5000:1:518:0:0:100000:1] sh# 257 sz# 99743 c# 518}{ExtrQuery# [5000:1:519:0:0:100000:1] sh# 257 sz# 99743 c# 519}{ExtrQuery# [5000:1:520:0:0:100000:1] sh# 257 sz# 99743 c# 520}{ExtrQuery# [5000:1:521:0:0:100000:1] sh# 257 sz# 99743 c# 521}{ExtrQuery# [5000:1:522:0:0:100000:1] sh# 257 sz# 99743 c# 522}{ExtrQuery# [5000:1:523:0:0:100000:1] sh# 257 sz# 99743 c# 523}{ExtrQuery# [5000:1:524:0:0:100000:1] sh# 257 sz# 99743 c# 524}{ExtrQuery# [5000:1:525:0:0:100000:1] sh# 257 sz# 99743 c# 525}{ExtrQuery# [5000:1:526:0:0:100000:1] sh# 257 sz# 99743 c# 526}{ExtrQuery# [5000:1:527:0:0:100000:1] sh# 257 sz# 99743 c# 527}{ExtrQuery# [5000:1:528:0:0:100000:1] sh# 257 sz# 99743 c# 528}{ExtrQuery# [5000:1:529:0:0:100000:1] sh# 257 sz# 99743 c# 529}{ExtrQuery# [5000:1:530:0:0:100000:1] sh# 257 sz# 99743 c# 530}{ExtrQuery# [5000:1:531:0:0:100000:1] sh# 257 sz# 99743 c# 531}{ExtrQuery# [5000:1:532:0:0:100000:1] sh# 257 sz# 99743 c# 532}{ExtrQuery# [5000:1:533:0:0:100000:1] sh# 257 sz# 99743 c# 533}{ExtrQuery# [5000:1:534:0:0:100000:1] sh# 257 sz# 99743 c# 534}{ExtrQuery# [5000:1:535:0:0:100000:1] sh# 257 sz# 99743 c# 535}{ExtrQuery# [5000:1:536:0:0:100000:1] sh# 257 sz# 99743 c# 536}{ExtrQuery# [5000:1:537:0:0:100000:1] sh# 257 sz# 99743 c# 537}{ExtrQuery# [5000:1:538:0:0:100000:1] sh# 257 sz# 99743 c# 538}{ExtrQuery# [5000:1:539:0:0:100000:1] sh# 257 sz# 99743 c# 539}{ExtrQuery# [5000:1:540:0:0:100000:1] sh# 257 sz# 99743 c# 
540}{ExtrQuery# [5000:1:541:0:0:100000:1] sh# 257 sz# 99743 c# 541}{ExtrQuery# [5000:1:542:0:0:100000:1] sh# 257 sz# 99743 c# 542}{ExtrQuery# [5000:1:543:0:0:100000:1] sh# 257 sz# 99743 c# 543}{ExtrQuery# [5000:1:544:0:0:100000:1] sh# 257 sz# 99743 c# 544}{ExtrQuery# [5000:1:545:0:0:100000:1] sh# 257 sz# 99743 c# 545}{ExtrQuery# [5000:1:546:0:0:100000:1] sh# 257 sz# 99743 c# 546}{ExtrQuery# [5000:1:547:0:0:100000:1] sh# 257 sz# 99743 c# 547}{ExtrQuery# [5000:1:548:0:0:100000:1] sh# 257 sz# 99743 c# 548}{ExtrQuery# [5000:1:549:0:0:100000:1] sh# 257 sz# 99743 c# 549}{ExtrQuery# [5000:1:550:0:0:100000:1] sh# 257 sz# 99743 c# 550}{ExtrQuery# [5000:1:551:0:0:100000:1] sh# 257 sz# 99743 c# 551}{ExtrQuery# [5000:1:552:0:0:100000:1] sh# 257 sz# 99743 c# 552}{ExtrQuery# [5000:1:553:0:0:100000:1] sh# 257 sz# 99743 c# 553}{ExtrQuery# [5000:1:554:0:0:100000:1] sh# 257 sz# 99743 c# 554}{ExtrQuery# [5000:1:555:0:0:100000:1] sh# 257 sz# 99743 c# 555}{ExtrQuery# [5000:1:556:0:0:100000:1] sh# 257 sz# 99743 c# 556}{ExtrQuery# [5000:1:557:0:0:100000:1] sh# 257 sz# 99743 c# 557}{ExtrQuery# [5000:1:558:0:0:100000:1] sh# 257 sz# 99743 c# 558}{ExtrQuery# [5000:1:559:0:0:100000:1] sh# 257 sz# 99743 c# 559}{ExtrQuery# [5000:1:560:0:0:100000:1] sh# 257 sz# 99743 c# 560}{ExtrQuery# [5000:1:561:0:0:100000:1] sh# 257 sz# 99743 c# 561}{ExtrQuery# [5000:1:562:0:0:100000:1] sh# 257 sz# 99743 c# 562}{ExtrQuery# [5000:1:563:0:0:100000:1] sh# 257 sz# 99743 c# 563}{ExtrQuery# [5000:1:564:0:0:100000:1] sh# 257 sz# 99743 c# 564}{ExtrQuery# [5000:1:565:0:0:100000:1] sh# 257 sz# 99743 c# 565}{ExtrQuery# [5000:1:566:0:0:100000:1] sh# 257 sz# 99743 c# 566}{ExtrQuery# [5000:1:567:0:0:100000:1] sh# 257 sz# 99743 c# 567}{ExtrQuery# [5000:1:568:0:0:100000:1] sh# 257 sz# 99743 c# 568}{ExtrQuery# [5000:1:569:0:0:100000:1] sh# 257 sz# 99743 c# 569}{ExtrQuery# [5000:1:570:0:0:100000:1] sh# 257 sz# 99743 c# 570}{ExtrQuery# [5000:1:571:0:0:100000:1] sh# 257 sz# 99743 c# 571}{ExtrQuery# [5000:1:572:0:0:100000:1] sh# 257 sz# 99743 c# 572}{ExtrQuery# [5000:1:573:0:0:100000:1] sh# 257 sz# 99743 c# 573}{ExtrQuery# [5000:1:574:0:0:100000:1] sh# 257 sz# 99743 c# 574}{ExtrQuery# [5000:1:575:0:0:100000:1] sh# 257 sz# 99743 c# 575}{ExtrQuery# [5000:1:576:0:0:100000:1] sh# 257 sz# 99743 c# 576}{ExtrQuery# [5000:1:577:0:0:100000:1] sh# 257 sz# 99743 c# 577}{ExtrQuery# [5000:1:578:0:0:100000:1] sh# 257 sz# 99743 c# 578}{ExtrQuery# [5000:1:579:0:0:100000:1] sh# 257 sz# 99743 c# 579}{ExtrQuery# [5000:1:580:0:0:100000:1] sh# 257 sz# 99743 c# 580}{ExtrQuery# [5000:1:581:0:0:100000:1] sh# 257 sz# 99743 c# 581}{ExtrQuery# [5000:1:582:0:0:100000:1] sh# 257 sz# 99743 c# 582}{ExtrQuery# [5000:1:583:0:0:100000:1] sh# 257 sz# 99743 c# 583}{ExtrQuery# [5000:1:584:0:0:100000:1] sh# 257 sz# 99743 c# 584}{ExtrQuery# [5000:1:585:0:0:100000:1] sh# 257 sz# 99743 c# 585}{ExtrQuery# [5000:1:586:0:0:100000:1] sh# 257 sz# 99743 c# 586}{ExtrQuery# [5000:1:587:0:0:100000:1] sh# 257 sz# 99743 c# 587}{ExtrQuery# [5000:1:588:0:0:100000:1] sh# 257 sz# 99743 c# 588}{ExtrQuery# [5000:1:589:0:0:100000:1] sh# 257 sz# 99743 c# 589}{ExtrQuery# [5000:1:590:0:0:100000:1] sh# 257 sz# 99743 c# 590}{ExtrQuery# [5000:1:591:0:0:100000:1] sh# 257 sz# 99743 c# 591}{ExtrQuery# [5000:1:592:0:0:100000:1] sh# 257 sz# 99743 c# 592}{ExtrQuery# [5000:1:593:0:0:100000:1] sh# 257 sz# 99743 c# 593}{ExtrQuery# [5000:1:594:0:0:100000:1] sh# 257 sz# 99743 c# 594}{ExtrQuery# [5000:1:595:0:0:100000:1] sh# 257 sz# 99743 c# 595}{ExtrQuery# [5000:1:596:0:0:100000:1] sh# 257 sz# 99743 c# 596}{ExtrQuery# 
[5000:1:597:0:0:100000:1] sh# 257 sz# 99743 c# 597}{ExtrQuery# [5000:1:598:0:0:100000:1] sh# 257 sz# 99743 c# 598}{ExtrQuery# [5000:1:599:0:0:100000:1] sh# 257 sz# 99743 c# 599}{ExtrQuery# [5000:1:600:0:0:100000:1] sh# 257 sz# 99743 c# 600}{ExtrQuery# [5000:1:601:0:0:100000:1] sh# 257 sz# 99743 c# 601}{ExtrQuery# [5000:1:602:0:0:100000:1] sh# 257 sz# 99743 c# 602}{ExtrQuery# [5000:1:603:0:0:100000:1] sh# 257 sz# 99743 c# 603}{ExtrQuery# [5000:1:604:0:0:100000:1] sh# 257 sz# 99743 c# 604}{ExtrQuery# [5000:1:605:0:0:100000:1] sh# 257 sz# 99743 c# 605}{ExtrQuery# [5000:1:606:0:0:100000:1] sh# 257 sz# 99743 c# 606}{ExtrQuery# [5000:1:607:0:0:100000:1] sh# 257 sz# 99743 c# 607}{ExtrQuery# [5000:1:608:0:0:100000:1] sh# 257 sz# 99743 c# 608}{ExtrQuery# [5000:1:609:0:0:100000:1] sh# 257 sz# 99743 c# 609}{ExtrQuery# [5000:1:610:0:0:100000:1] sh# 257 sz# 99743 c# 610}{ExtrQuery# [5000:1:611:0:0:100000:1] sh# 257 sz# 99743 c# 611}{ExtrQuery# [5000:1:612:0:0:100000:1] sh# 257 sz# 99743 c# 612}{ExtrQuery# [5000:1:613:0:0:100000:1] sh# 257 sz# 99743 c# 613}{ExtrQuery# [5000:1:614:0:0:100000:1] sh# 257 sz# 99743 c# 614}{ExtrQuery# [5000:1:615:0:0:100000:1] sh# 257 sz# 99743 c# 615}{ExtrQuery# [5000:1:616:0:0:100000:1] sh# 257 sz# 99743 c# 616}{ExtrQuery# [5000:1:617:0:0:100000:1] sh# 257 sz# 99743 c# 617}{ExtrQuery# [5000:1:618:0:0:100000:1] sh# 257 sz# 99743 c# 618}{ExtrQuery# [5000:1:619:0:0:100000:1] sh# 257 sz# 99743 c# 619}{ExtrQuery# [5000:1:620:0:0:100000:1] sh# 257 sz# 99743 c# 620}{ExtrQuery# [5000:1:621:0:0:100000:1] sh# 257 sz# 99743 c# 621}{ExtrQuery# [5000:1:622:0:0:100000:1] sh# 257 sz# 99743 c# 622}{ExtrQuery# [5000:1:623:0:0:100000:1] sh# 257 sz# 99743 c# 623}{ExtrQuery# [5000:1:624:0:0:100000:1] sh# 257 sz# 99743 c# 624}{ExtrQuery# [5000:1:625:0:0:100000:1] sh# 257 sz# 99743 c# 625}{ExtrQuery# [5000:1:626:0:0:100000:1] sh# 257 sz# 99743 c# 626}{ExtrQuery# [5000:1:627:0:0:100000:1] sh# 257 sz# 99743 c# 627}{ExtrQuery# [5000:1:628:0:0:100000:1] sh# 257 sz# 99743 c# 628}{ExtrQuery# [5000:1:629:0:0:100000:1] sh# 257 sz# 99743 c# 629}{ExtrQuery# [5000:1:630:0:0:100000:1] sh# 257 sz# 99743 c# 630}{ExtrQuery# [5000:1:631:0:0:100000:1] sh# 257 sz# 99743 c# 631}{ExtrQuery# [5000:1:632:0:0:100000:1] sh# 257 sz# 99743 c# 632}{ExtrQuery# [5000:1:633:0:0:100000:1] sh# 257 sz# 99743 c# 633}{ExtrQuery# [5000:1:634:0:0:100000:1] sh# 257 sz# 99743 c# 634}{ExtrQuery# [5000:1:635:0:0:100000:1] sh# 257 sz# 99743 c# 635}{ExtrQuery# [5000:1:636:0:0:100000:1] sh# 257 sz# 99743 c# 636}{ExtrQuery# [5000:1:637:0:0:100000:1] sh# 257 sz# 99743 c# 637}{ExtrQuery# [5000:1:638:0:0:100000:1] sh# 257 sz# 99743 c# 638}{ExtrQuery# [5000:1:639:0:0:100000:1] sh# 257 sz# 99743 c# 639}{ExtrQuery# [5000:1:640:0:0:100000:1] sh# 257 sz# 99743 c# 640}{ExtrQuery# [5000:1:641:0:0:100000:1] sh# 257 sz# 99743 c# 641}{ExtrQuery# [5000:1:642:0:0:100000:1] sh# 257 sz# 99743 c# 642}{ExtrQuery# [5000:1:643:0:0:100000:1] sh# 257 sz# 99743 c# 643}{ExtrQuery# [5000:1:644:0:0:100000:1] sh# 257 sz# 99743 c# 644}{ExtrQuery# [5000:1:645:0:0:100000:1] sh# 257 sz# 99743 c# 645}{ExtrQuery# [5000:1:646:0:0:100000:1] sh# 257 sz# 99743 c# 646}{ExtrQuery# [5000:1:647:0:0:100000:1] sh# 257 sz# 99743 c# 647}{ExtrQuery# [5000:1:648:0:0:100000:1] sh# 257 sz# 99743 c# 648}{ExtrQuery# [5000:1:649:0:0:100000:1] sh# 257 sz# 99743 c# 649}{ExtrQuery# [5000:1:650:0:0:100000:1] sh# 257 sz# 99743 c# 650}{ExtrQuery# [5000:1:651:0:0:100000:1] sh# 257 sz# 99743 c# 651}{ExtrQuery# [5000:1:652:0:0:100000:1] sh# 257 sz# 99743 c# 652}{ExtrQuery# [5000:1:653:0:0:100000:1] 
sh# 257 sz# 99743 c# 653}{ExtrQuery# [5000:1:654:0:0:100000:1] sh# 257 sz# 99743 c# 654}{ExtrQuery# [5000:1:655:0:0:100000:1] sh# 257 sz# 99743 c# 655}{ExtrQuery# [5000:1:656:0:0:100000:1] sh# 257 sz# 99743 c# 656}{ExtrQuery# [5000:1:657:0:0:100000:1] sh# 257 sz# 99743 c# 657}{ExtrQuery# [5000:1:658:0:0:100000:1] sh# 257 sz# 99743 c# 658}{ExtrQuery# [5000:1:659:0:0:100000:1] sh# 257 sz# 99743 c# 659}{ExtrQuery# [5000:1:660:0:0:100000:1] sh# 257 sz# 99743 c# 660}{ExtrQuery# [5000:1:661:0:0:100000:1] sh# 257 sz# 99743 c# 661}{ExtrQuery# [5000:1:662:0:0:100000:1] sh# 257 sz# 99743 c# 662}{ExtrQuery# [5000:1:663:0:0:100000:1] sh# 257 sz# 99743 c# 663}{ExtrQuery# [5000:1:664:0:0:100000:1] sh# 257 sz# 99743 c# 664}{ExtrQuery# [5000:1:665:0:0:100000:1] sh# 257 sz# 99743 c# 665}{ExtrQuery# [5000:1:666:0:0:100000:1] sh# 257 sz# 99743 c# 666}{ExtrQuery# [5000:1:667:0:0:100000:1] sh# 257 sz# 99743 c# 667}{ExtrQuery# [5000:1:668:0:0:100000:1] sh# 257 sz# 99743 c# 668}{ExtrQuery# [5000:1:669:0:0:100000:1] sh# 257 sz# 99743 c# 669}{ExtrQuery# [5000:1:670:0:0:100000:1] sh# 257 sz# 99743 c# 670}{ExtrQuery# [5000:1:671:0:0:100000:1] sh# 257 sz# 99743 c# 671}{ExtrQuery# [5000:1:672:0:0:17027:1] sh# 257 sz# 16770 c# 672} {MsgQoS} Notify# 0 Internals# 0 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0}; VDISK CAN NOT REPLY ON TEvVGet REQUEST |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest >> SequenceProxy::DropRecreate [GOOD] >> TMemoryController::Counters_HardLimit [GOOD] >> TMemoryController::Counters_NoHardLimit >> HttpRouter::Basic [GOOD] >> DataShardReplication::SimpleApplyChanges [GOOD] >> DataShardReplication::SplitMergeChanges ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceproxy/ut/unittest >> SequenceProxy::DropRecreate [GOOD] Test command err: 2025-12-23T18:25:06.260814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:25:06.260901Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:06.747303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:07.590772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) 2025-12-23T18:25:07.911339Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:25:07.911838Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/0060d8/r3tmp/tmpJb7Ipy/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:25:07.912347Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/0060d8/r3tmp/tmpJb7Ipy/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/0060d8/r3tmp/tmpJb7Ipy/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17969129348919840048 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:25:09.703296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:25:09.703377Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:09.848131Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:10.597575Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) 2025-12-23T18:25:10.881877Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:25:10.882285Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/0060d8/r3tmp/tmpisaVHU/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:25:10.882454Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/0060d8/r3tmp/tmpisaVHU/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/0060d8/r3tmp/tmpisaVHU/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12844004693489148804 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:25:11.163309Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715658:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp:353) 2025-12-23T18:25:11.469642Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715659:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) |90.1%| [TS] {BAZEL_UPLOAD} ydb/core/tx/sequenceproxy/ut/unittest >> Vacuum::Vacuum [GOOD] >> Vacuum::VacuumWithoutCompaction |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/public_http/ut/unittest >> HttpRouter::Basic [GOOD] |90.1%| [TS] {RESULT} ydb/core/tx/sequenceproxy/ut/unittest |90.1%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |90.1%| [TS] {RESULT} ydb/core/public_http/ut/unittest |90.1%| [TS] {BAZEL_UPLOAD} ydb/core/public_http/ut/unittest |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> DataShardDiskQuotas::DiskQuotaExceeded [GOOD] >> DataShardDiskQuotas::ShardRestartOnCreateTable >> XmlBuilderTest::WritesProperly [GOOD] >> 
XmlBuilderTest::MacroBuilder [GOOD] >> BlobDepot::DecommitPutAndRead [GOOD] >> BlobDepot::DecommitVerifiedRandom |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/http/ut/unittest >> XmlBuilderTest::MacroBuilder [GOOD] |90.1%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/http/ut/unittest >> TTxDataShardBuildFulltextDictScan::BadRequest [GOOD] >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst-SkipLast >> TFetchRequestTests::HappyWay [GOOD] >> TFetchRequestTests::CDC >> Splitter::Simple >> Splitter::Simple [GOOD] >> Splitter::Small >> Splitter::Small [GOOD] >> Splitter::Minimal [GOOD] >> Splitter::Trivial >> Splitter::Trivial [GOOD] >> Splitter::BigAndSmall >> Splitter::BigAndSmall [GOOD] >> Splitter::CritSmallPortions >> TMemoryController::Counters_NoHardLimit [GOOD] >> TMemoryController::Config_ConsumerLimits |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |90.1%| [TS] {RESULT} ydb/core/ymq/http/ut/unittest |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::SequencesIndex >> Splitter::CritSmallPortions [GOOD] >> Splitter::Crit |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |90.1%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut >> Vacuum::VacuumWithoutCompaction [GOOD] >> Vacuum::MultipleVacuums |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |90.1%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst-SkipLast [GOOD] >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst-SkipLast >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD] >> DataShardReplication::SplitMergeChanges [GOOD] >> DataShardReplication::SplitMergeChangesReboots |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export >> TDataShardRSTest::TestCleanupInRS+UseSink |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD] Test command err: 2025-12-23T18:24:51.372897Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:390:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.372935Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:871:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.372966Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:380:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.372984Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:511:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373005Z 
:BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:376:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373026Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:64:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373048Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:837:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373072Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:652:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373091Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:939:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373113Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:701:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373604Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:404:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373637Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:653:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373665Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:618:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373696Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:191:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373728Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:162:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373748Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:45:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373768Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:891:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373787Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:346:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373813Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:750:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.373834Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:706:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374216Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:924:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374238Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:137:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374257Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) 
TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:108:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374276Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:584:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374292Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:861:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374310Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:361:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374328Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:274:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374347Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:667:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374366Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:696:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374383Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:842:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374890Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:929:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374909Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:594:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374925Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:200:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374942Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:681:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374961Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:98:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.374983Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:890:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375000Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:648:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375019Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:521:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375037Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:176:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375056Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:482:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375536Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:65:0:0:66560:1] Marker# BSVS08 
2025-12-23T18:24:51.375555Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:492:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375575Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:244:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375596Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:438:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375615Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:6:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375633Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:794:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375652Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:472:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375674Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:225:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375694Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:847:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.375715Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:531:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.376231Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:234:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.376250Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:963:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.376270Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:862:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.376288Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:419:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.376309Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:973:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.376326Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:784:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.376343Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:205:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.376363Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:512:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.376388Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:195:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.376404Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: 
PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:983:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.376994Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:239:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.377014Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:322:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.377033Z :BS_VDISK_PU ... b# [5000:1:720:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.377658Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:857:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.377674Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:866:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.377710Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:516:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.377730Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:118:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.377752Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:89:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.377770Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:502:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.377787Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:774:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.377808Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:36:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.377828Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:171:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.378327Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:1:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.378345Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:30:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.378366Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:210:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.378390Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:21:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.378408Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:585:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.378425Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:852:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.378446Z :BS_VDISK_PUT CRIT: 
blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:370:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.378464Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:541:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.378481Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:229:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.378499Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:103:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.387476Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:157:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.387502Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:424:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.387519Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:264:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.387537Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:915:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.387557Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:308:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.387575Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:716:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.387590Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:463:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.387608Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:613:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.387628Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:337:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.387644Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:954:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.388131Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:99:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.388162Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:920:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.388188Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:371:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.388211Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:682:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.388230Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: 
TEvVMultiPut has huge blob# [5000:1:900:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.388251Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:672:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.388273Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:579:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.388294Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:609:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.388312Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:16:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.388332Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:779:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389091Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:448:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389110Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:832:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389129Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:133:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389157Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:341:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389180Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:74:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389205Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:725:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389224Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:614:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389248Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:439:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389268Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:434:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389285Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:949:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389891Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:895:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389916Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:409:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389937Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:147:0:0:66560:1] Marker# BSVS08 
2025-12-23T18:24:51.389955Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:113:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.389981Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:161:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.390004Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:278:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.390023Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:997:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.390042Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:506:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.390060Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:764:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.390087Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:395:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.390691Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:298:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.390710Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:551:0:0:66560:1] Marker# BSVS08 2025-12-23T18:24:51.390732Z :BS_VDISK_PUT CRIT: blobstorage_skeleton.cpp:634: PDiskId# 1 VDISK[0:_:0:0:0]: (0) TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:823:0:0:66560:1] Marker# BSVS08 |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |90.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest >> MetadataConversion::MakeAuthTest [GOOD] >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] >> KeyValueGRPCService::TestSimpleExecuteTransactionV1 |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/gateway/ut/gtest >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] |90.1%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/gateway/ut/gtest >> TMemoryController::Config_ConsumerLimits [GOOD] >> TMemoryController::SharedCache |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |90.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |90.1%| [TS] {RESULT} ydb/core/kqp/gateway/ut/gtest |90.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut >> BlobDepot::DecommitVerifiedRandom [GOOD] >> BlobDepot::CheckIntegrity >> DataShardDiskQuotas::ShardRestartOnCreateTable [GOOD] >> DataShardDiskQuotas::ShardRestartOnSplitDst >> Splitter::Crit [GOOD] >> Splitter::CritSimple ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] Test command err: 2025-12-23T18:24:46.426168Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:3072} PDiskId# 1 ownerId# 5 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 5 ownerRound# 101 lsn# 13 PDiskId# 1 2025-12-23T18:24:48.865217Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:3072} PDiskId# 1 ownerId# 3 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 3 ownerRound# 101 lsn# 12 PDiskId# 1 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk/unittest >> ExportS3BufferTest::MinBufferSize [GOOD] >> ExportS3BufferTest::MinBufferSizeWithCompression [GOOD] >> ExportS3BufferTest::MinBufferSizeWithCompressionAndEncryption [GOOD] >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst-SkipLast [GOOD] >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst+SkipLast >> BlobDepot::CheckIntegrity [GOOD] |90.1%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> StatisticsScan::RunScanOnShard |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_export/unittest >> ExportS3BufferTest::MinBufferSizeWithCompressionAndEncryption [GOOD] |90.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_export/unittest >> TListAllTopicsTests::PlainList >> TSequence::SequencesIndex [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest >> BlobDepot::CheckIntegrity [GOOD] Test command err: Mersenne random seed 601032270 RandomSeed# 15872132443557689978 Mersenne random seed 1789542088 Mersenne random seed 740663731 Mersenne random seed 4280151402 Mersenne random seed 1771777544 2025-12-23T18:24:50.532510Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.532672Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.532732Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.532787Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.532840Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.532892Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:2] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.532957Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 
2025-12-23T18:24:50.533012Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:1:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.533316Z 1 00h00m25.012048s :BS_PROXY_PUT ERROR: [da34aa26b53fdf83] Result# TEvPutResult {Id# [15:1:1:0:1:100:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-12-23T18:24:50.534428Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:2] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.534590Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.534646Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.534699Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.534768Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.534832Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.534887Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.534942Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [15:3:1:0:1:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.552475Z 1 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.552693Z 5 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.552752Z 6 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.552815Z 4 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:2] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.552871Z 3 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.552928Z 8 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: 
(2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.552981Z 7 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:1] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.553035Z 2 00h00m25.012048s :BS_VDISK_PUT ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) TEvVPut: failed to pass the Hull check; id# [16:2:2:0:2:100:3] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:24:50.553274Z 1 00h00m25.012048s :BS_PROXY_PUT ERROR: [fc8c35fafc426cb8] Result# TEvPutResult {Id# [16:2:2:0:2:100:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Mersenne random seed 133342520 Read over the barrier, blob id# [15:1:1:0:1:100:0] Read over the barrier, blob id# [15:1:2:0:1:100:0] 2025-12-23T18:24:52.199662Z 1 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-12-23T18:24:52.200135Z 2 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-12-23T18:24:52.200256Z 3 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-12-23T18:24:52.200364Z 4 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-12-23T18:24:52.200469Z 5 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-12-23T18:24:52.200570Z 6 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-12-23T18:24:52.200673Z 7 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 2025-12-23T18:24:52.200776Z 8 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 1 soft] barrier# 1:2 new key# [15 0 2 2 soft] barrier# 1:1 Put over the barrier, blob id# [15:1:1:0:99:100:0] Put over the barrier, blob id# [15:1:3:0:99:100:0] 2025-12-23T18:24:52.318507Z 1 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-12-23T18:24:52.327116Z 2 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 
2 4 hard] barrier# 1:1 2025-12-23T18:24:52.327279Z 3 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-12-23T18:24:52.327384Z 4 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-12-23T18:24:52.327486Z 5 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-12-23T18:24:52.327594Z 6 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-12-23T18:24:52.327695Z 7 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 2025-12-23T18:24:52.327793Z 8 00h00m25.012048s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 2 3 hard] barrier# 1:3 new key# [15 0 2 4 hard] barrier# 1:1 Read over the barrier, blob id# [15:1:5:0:1:100:0] Read over the barrier, blob id# [15:1:6:0:1:100:0] Read over the barrier, blob id# [15:1:19:0:1:100:0] Read over the barrier, blob id# [15:2:1:0:1:100:0] Read over the barrier, blob id# [15:2:2:0:1:100:0] TEvRange returned collected blob with id# [15:1:17:0:1:100:0] TEvRange returned collected blob with id# [15:1:19:0:1:100:0] TEvRange returned collected blob with id# [15:2:1:0:1:100:0] TEvRange returned collected blob with id# [15:2:2:0:1:100:0] TEvRange returned collected blob with id# [15:2:3:0:1:100:0] TEvRange returned collected blob with id# [15:2:4:0:1:100:0] TEvRange returned collected blob with id# [15:2:5:0:1:100:0] TEvRange returned collected blob with id# [15:2:6:0:1:100:0] Read over the barrier, blob id# [100:1:3:0:1:100:0] Read over the barrier, blob id# [100:1:5:0:1:100:0] Read over the barrier, blob id# [100:1:6:0:1:100:0] Read over the barrier, blob id# [100:2:1:0:1:100:0] Read over the barrier, blob id# [100:2:2:0:1:100:0] TEvRange returned collected blob with id# [100:2:2:0:1:100:0] TEvRange returned collected blob with id# [100:2:3:0:1:100:0] TEvRange returned collected blob with id# [100:2:4:0:1:100:0] TEvRange returned collected blob with id# [100:2:5:0:1:100:0] TEvRange returned collected blob with id# [100:2:6:0:1:100:0] Mersenne random seed 1084782413 TEvRange returned collected blob with id# [102:1:2:2:14740704:649:0] Read over the barrier, blob id# [100:1:1:1:7495738:191:0] Read over the barrier, blob id# [102:1:2:2:14740704:649:0] Read over the barrier, blob id# [102:1:2:2:14740704:649:0] Read over the barrier, blob id# [102: ... 
ob id# [16:1:2:0:1174890:61:0] Read over the barrier, blob id# [16:1:2:0:1174890:61:0] Read over the barrier, blob id# [16:1:3:0:5623365:473:0] 2025-12-23T18:25:21.159376Z 7 00h00m25.012048s :BS_HULLRECS ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [15 2 11 4 hard] new key# [15 2 11 0 hard] new barrier# 1:0 2025-12-23T18:25:21.159721Z 1 00h00m25.012048s :BS_HULLRECS ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [15 2 11 4 hard] new key# [15 2 11 0 hard] new barrier# 1:0 2025-12-23T18:25:21.159896Z 2 00h00m25.012048s :BS_HULLRECS ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [15 2 11 4 hard] new key# [15 2 11 0 hard] new barrier# 1:0 2025-12-23T18:25:21.160050Z 3 00h00m25.012048s :BS_HULLRECS ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [15 2 11 4 hard] new key# [15 2 11 0 hard] new barrier# 1:0 2025-12-23T18:25:21.160202Z 4 00h00m25.012048s :BS_HULLRECS ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [15 2 11 4 hard] new key# [15 2 11 0 hard] new barrier# 1:0 2025-12-23T18:25:21.160343Z 5 00h00m25.012048s :BS_HULLRECS ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [15 2 11 4 hard] new key# [15 2 11 0 hard] new barrier# 1:0 2025-12-23T18:25:21.160491Z 6 00h00m25.012048s :BS_HULLRECS ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [15 2 11 4 hard] new key# [15 2 11 0 hard] new barrier# 1:0 2025-12-23T18:25:21.160678Z 8 00h00m25.012048s :BS_HULLRECS ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: out-of-order requests: existing key# [15 2 11 4 hard] new key# [15 2 11 0 hard] new barrier# 1:0 Read over the barrier, blob id# [17:3:9:2:14153431:528:0] 2025-12-23T18:25:23.013242Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 6 0 soft] barrier# 1:0 new key# [15 2 14 1 soft] barrier# 0:1 2025-12-23T18:25:23.014603Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 6 0 soft] barrier# 1:0 new key# [15 2 14 1 soft] barrier# 0:1 2025-12-23T18:25:23.014812Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 6 0 soft] barrier# 1:0 new key# [15 2 14 1 soft] barrier# 0:1 2025-12-23T18:25:23.015019Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 6 0 soft] barrier# 1:0 new key# [15 2 14 1 soft] barrier# 0:1 2025-12-23T18:25:23.015220Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 6 0 soft] barrier# 1:0 new key# [15 2 14 1 soft] barrier# 0:1 2025-12-23T18:25:23.015491Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing 
barrier: existing key# [15 2 6 0 soft] barrier# 1:0 new key# [15 2 14 1 soft] barrier# 0:1 2025-12-23T18:25:23.015701Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 6 0 soft] barrier# 1:0 new key# [15 2 14 1 soft] barrier# 0:1 2025-12-23T18:25:23.015924Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 2 6 0 soft] barrier# 1:0 new key# [15 2 14 1 soft] barrier# 0:1 Read over the barrier, blob id# [16:1:3:0:12035650:732:0] Read over the barrier, blob id# [16:1:2:0:10959273:599:0] Read over the barrier, blob id# [16:1:2:0:10959273:599:0] Read over the barrier, blob id# [16:1:3:0:12035650:732:0] Read over the barrier, blob id# [16:1:2:0:10959273:599:0] Read over the barrier, blob id# [16:1:3:0:5623365:473:0] Read over the barrier, blob id# [17:1:7:0:12902003:676:0] Read over the barrier, blob id# [17:1:4:0:5144611:917:0] Read over the barrier, blob id# [17:1:4:0:5144611:917:0] Read over the barrier, blob id# [17:1:4:0:5144611:917:0] Read over the barrier, blob id# [17:1:7:2:13430585:275:0] Read over the barrier, blob id# [17:1:7:2:13430585:275:0] Read over the barrier, blob id# [17:3:9:2:14153431:528:0] Read over the barrier, blob id# [17:1:4:0:5144611:917:0] Read over the barrier, blob id# [17:1:4:0:5144611:917:0] Read over the barrier, blob id# [15:1:1:2:9523620:492:0] Read over the barrier, blob id# [15:1:1:2:9523620:492:0] Read over the barrier, blob id# [16:3:4:2:9937924:122:0] Read over the barrier, blob id# [16:1:3:0:5623365:473:0] Read over the barrier, blob id# [16:1:3:0:12035650:732:0] Read over the barrier, blob id# [15:3:27:0:2911157:509:0] Read over the barrier, blob id# [15:3:27:0:2911157:509:0] Read over the barrier, blob id# [15:3:27:0:2911157:509:0] Read over the barrier, blob id# [15:3:27:0:2911157:509:0] Read over the barrier, blob id# [16:1:2:0:1174890:61:0] Read over the barrier, blob id# [16:1:2:0:1174890:61:0] Read over the barrier, blob id# [16:5:5:2:14216294:218:0] Read over the barrier, blob id# [15:1:1:2:9523620:492:0] 2025-12-23T18:25:25.548702Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 23 0 hard] barrier# 2:1 new key# [15 0 25 1 hard] barrier# 1:2 2025-12-23T18:25:25.549537Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 23 0 hard] barrier# 2:1 new key# [15 0 25 1 hard] barrier# 1:2 2025-12-23T18:25:25.549702Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 23 0 hard] barrier# 2:1 new key# [15 0 25 1 hard] barrier# 1:2 2025-12-23T18:25:25.549905Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 23 0 hard] barrier# 2:1 new key# [15 0 25 1 hard] barrier# 1:2 2025-12-23T18:25:25.550079Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 23 0 hard] barrier# 2:1 new key# [15 0 25 1 hard] barrier# 1:2 2025-12-23T18:25:25.550249Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: 
(2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 23 0 hard] barrier# 2:1 new key# [15 0 25 1 hard] barrier# 1:2 2025-12-23T18:25:25.550439Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 23 0 hard] barrier# 2:1 new key# [15 0 25 1 hard] barrier# 1:2 2025-12-23T18:25:25.550593Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [15 0 23 0 hard] barrier# 2:1 new key# [15 0 25 1 hard] barrier# 1:2 Read over the barrier, blob id# [16:5:5:2:14216294:218:0] Read over the barrier, blob id# [17:1:4:0:5144611:917:0] Read over the barrier, blob id# [17:3:9:2:14153431:528:0] Read over the barrier, blob id# [17:3:9:2:14153431:528:0] Read over the barrier, blob id# [15:3:27:0:2911157:509:0] 2025-12-23T18:25:25.796228Z 7 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 0 23 0 soft] barrier# 4:0 new key# [16 0 23 1 soft] barrier# 3:0 2025-12-23T18:25:25.796552Z 1 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 0 23 0 soft] barrier# 4:0 new key# [16 0 23 1 soft] barrier# 3:0 2025-12-23T18:25:25.796738Z 2 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 0 23 0 soft] barrier# 4:0 new key# [16 0 23 1 soft] barrier# 3:0 2025-12-23T18:25:25.796918Z 3 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 0 23 0 soft] barrier# 4:0 new key# [16 0 23 1 soft] barrier# 3:0 2025-12-23T18:25:25.797086Z 4 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 0 23 0 soft] barrier# 4:0 new key# [16 0 23 1 soft] barrier# 3:0 2025-12-23T18:25:25.797246Z 5 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 0 23 0 soft] barrier# 4:0 new key# [16 0 23 1 soft] barrier# 3:0 2025-12-23T18:25:25.797418Z 6 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 0 23 0 soft] barrier# 4:0 new key# [16 0 23 1 soft] barrier# 3:0 2025-12-23T18:25:25.797607Z 8 00h00m25.013072s :BS_HULLRECS CRIT: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Db# Barriers ValidateGCCmd: decreasing barrier: existing key# [16 0 23 0 soft] barrier# 4:0 new key# [16 0 23 1 soft] barrier# 3:0 Read over the barrier, blob id# [17:1:7:0:12902003:676:0] Read over the barrier, blob id# [17:1:7:2:13430585:275:0] Read over the barrier, blob id# [15:4:28:0:3648840:849:0] Read over the barrier, blob id# [15:3:27:0:2911157:509:0] Read over the barrier, blob id# [17:1:4:0:5144611:917:0] Read over the barrier, blob id# [17:3:9:2:14153431:528:0] Read over the barrier, blob id# [17:1:7:0:12902003:676:0] Read over the barrier, blob id# [15:3:27:0:2911157:509:0] Read over the barrier, blob id# [17:1:4:0:5144611:917:0] Read over the barrier, blob id# [17:1:7:0:12902003:676:0] Read over the barrier, blob id# 
[17:1:7:0:5143356:129:0] Read over the barrier, blob id# [17:1:7:0:5143356:129:0] Mersenne random seed 2545262559 ErrorReason DataInfo Disks: 0: [82000000:1:0:2:0] 1: [82000000:1:0:3:0] 2: [82000000:1:0:4:0] 3: [82000000:1:0:5:0] 4: [82000000:1:0:6:0] 5: [82000000:1:0:7:0] 6: [82000000:1:0:0:0] 7: [82000000:1:0:1:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ErrorReason DataInfo [72075186224037888:1:1:3:1:100:0] Disks: 0: [82000000:1:0:7:0] 1: [82000000:1:0:0:0] 2: [82000000:1:0:1:0] 3: [82000000:1:0:2:0] 4: [82000000:1:0:3:0] 5: [82000000:1:0:4:0] 6: [82000000:1:0:5:0] 7: [82000000:1:0:6:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest |90.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |90.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.2%| [TS] {RESULT} ydb/core/tx/datashard/ut_export/unittest |90.2%| [TM] {RESULT} ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/unittest |90.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut >> Vacuum::MultipleVacuums [GOOD] >> Vacuum::MultipleVacuumsWithOldGenerations >> Splitter::CritSimple [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/splitter/ut/unittest >> Splitter::CritSimple [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=seria ... 
82944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964800;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964800;columns=1; |90.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/columnshard/splitter/ut/unittest >> TFetchRequestTests::CDC [GOOD] >> TFetchRequestTests::SmallBytesRead |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |90.2%| [TS] {RESULT} ydb/core/tx/columnshard/splitter/ut/unittest |90.2%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut >> DataShardReplication::SplitMergeChangesReboots [GOOD] >> DataShardReplication::ReplicatedTable+UseSink >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery >> KeyValueGRPCService::TestSimpleExecuteTransactionV1 [GOOD] >> KeyValueGRPCService::TestSimpleExecuteTransactionV2 >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst+SkipLast [GOOD] >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst+SkipLast >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus >> StatisticsScan::RunScanOnShard [GOOD] >> Graph::CreateGraphShard >> TSequence::CreateTableWithDefaultFromSequenceFromSelect [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |90.2%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/groupinfo/ut/unittest >> TSubgroupPartLayoutTest::CountEffectiveReplicas4of4 [GOOD] Test command err: testing erasure block-3-1 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 256 cases, took 93 us testing erasure stripe-4-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 main# 32 main# 33 main# 34 main# 35 main# 36 main# 37 main# 38 main# 39 main# 40 main# 41 main# 42 main# 43 main# 44 main# 45 main# 46 main# 47 main# 48 main# 49 main# 50 main# 51 main# 52 main# 53 main# 54 main# 55 main# 56 main# 57 main# 58 main# 59 main# 60 main# 61 main# 62 main# 63 Checked 262144 cases, took 356372 us testing erasure block-2-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 1048576 cases, took 1036314 us testing erasure stripe-3-1 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 Checked 256 cases, took 177 us testing erasure stripe-3-2 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 32768 cases, took 25192 us testing erasure stripe-2-3 main# 0 main# 1 main# 2 main# 3 main# 4 main# 5 main# 6 main# 7 main# 8 main# 9 main# 10 main# 11 main# 12 main# 13 main# 14 main# 15 main# 16 main# 17 main# 18 main# 19 main# 20 main# 21 main# 22 main# 23 main# 24 main# 25 main# 26 main# 27 main# 28 main# 29 main# 30 main# 31 Checked 1048576 cases, took 1124329 us |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/groupinfo/ut/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_column_stats/unittest >> StatisticsScan::RunScanOnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:25:35.501907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:35.725368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:35.738793Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:35.739155Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:35.739235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:36.159620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:36.159752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:36.246538Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514330214907 != 1766514330214911 2025-12-23T18:25:36.277553Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:36.343836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:36.445241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004095/r3tmp/tmpHsCir1/pdisk_1.dat 2025-12-23T18:25:36.846642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:36.863970Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:36.986169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:37.587636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:37.587753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:37.587836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:37.588826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:37.588909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:37.601322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:37.768351Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:25:37.869075Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_column_stats/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData >> Graph::CreateGraphShard [GOOD] >> Graph::UseGraphShard >> Vacuum::MultipleVacuumsWithOldGenerations [GOOD] >> Vacuum::VacuumWithRestart >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst+SkipLast [GOOD] >> TTxDataShardBuildFulltextIndexScan::BadRequest >> KeyValueGRPCService::TestSimpleExecuteTransactionV2 [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV1 >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob >> GraphShard::NormalizeAndDownsample1 [GOOD] >> GraphShard::NormalizeAndDownsample2 [GOOD] >> GraphShard::NormalizeAndDownsample3 [GOOD] >> GraphShard::NormalizeAndDownsample4 [GOOD] >> GraphShard::NormalizeAndDownsample5 [GOOD] >> GraphShard::NormalizeAndDownsample6 [GOOD] >> GraphShard::CheckHistogramToPercentileConversions [GOOD] >> GraphShard::CreateGraphShard >> DataShardDiskQuotas::ShardRestartOnSplitDst [GOOD] |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |90.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_column_stats/unittest |90.2%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections >> DataShardReplication::ReplicatedTable+UseSink [GOOD] >> DataShardReplication::ReplicatedTable-UseSink >> Graph::UseGraphShard [GOOD] >> Graph::MemoryBackendFullCycle >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_disk_quotas/unittest >> DataShardDiskQuotas::ShardRestartOnSplitDst [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:25:07.597877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:07.916981Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:07.938801Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:07.939112Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:07.939180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:08.503673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:08.503810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:08.851607Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514302285648 != 1766514302285652 2025-12-23T18:25:08.924794Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:09.023850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:09.201340Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00362d/r3tmp/tmpFWs3rr/pdisk_1.dat ... Setting hard disk quota to 1 byte 2025-12-23T18:25:09.645152Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:832:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:25:09.645224Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:25:09.645263Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046644480 2025-12-23T18:25:09.645379Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:25:09.645411Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5258: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-12-23T18:25:09.958303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } DatabaseQuotas { data_size_hard_quota: 1 } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:25:09.958515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:25:09.958711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 
2025-12-23T18:25:09.966769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:25:09.966988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:25:09.967072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:09.967165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:25:09.967742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:25:09.967867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:25:09.967911Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:25:09.967944Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2025-12-23T18:25:09.968125Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:25:09.968157Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:25:09.968230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:25:09.968286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:25:09.968329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-12-23T18:25:09.968364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2025-12-23T18:25:09.968440Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:25:09.968898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [1:832:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:25:09.968934Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:25:09.968964Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 
72057594046644480 2025-12-23T18:25:09.969081Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:25:09.969114Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2025-12-23T18:25:09.969205Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:25:09.969233Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:25:09.969281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:25:09.969331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:25:09.969370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2025-12-23T18:25:09.969431Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:25:09.969702Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:25:09.969742Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2025-12-23T18:25:09.969825Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:25:09.969849Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:25:09.969896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:25:09.969940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:25:09.969980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-12-23T18:25:09.970029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:25:09.970067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-12-23T18:25:09.988712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:25:09.989307Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:25:09.989358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-12-23T18:25:09.989464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:25:09.989682Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvPropos ... .h:3153: StateWork, received event# 268829696, Sender [3:914:2776], Recipient [3:923:2782]: NKikimr::TEvTablet::TEvTabletDead 2025-12-23T18:25:41.144427Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2025-12-23T18:25:41.144616Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-12-23T18:25:41.146282Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435084, Sender [3:398:2397], Recipient [3:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-12-23T18:25:41.146348Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5438: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-12-23T18:25:41.146440Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:25:41.146500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2025-12-23T18:25:41.146657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2025-12-23T18:25:41.148490Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [3:1243:3043], Recipient [3:398:2397]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:25:41.148541Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:25:41.148572Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 72057594046644480 2025-12-23T18:25:41.148977Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877763, Sender [3:920:2780], Recipient [3:398:2397]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037888 ClientId: [3:920:2780] ServerId: [3:925:2783] } 2025-12-23T18:25:41.149016Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-12-23T18:25:41.149065Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6176: Client pipe, to tablet: 72075186224037888, from:72057594046644480 is reset 2025-12-23T18:25:41.149293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2025-12-23T18:25:41.149378Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 
72057594046644480:1 tabletId 72075186224037888 2025-12-23T18:25:41.149679Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877763, Sender [3:1537:3268], Recipient [3:398:2397]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [3:1537:3268] ServerId: [3:1538:3269] } 2025-12-23T18:25:41.149715Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-12-23T18:25:41.149743Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6176: Client pipe, to tablet: 72057594037968897, from:72057594046644480 is reset 2025-12-23T18:25:41.149935Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:25:41.150205Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-12-23T18:25:41.451605Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:25:41.451677Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:25:41.451768Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [3:398:2397], Recipient [3:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:25:41.451802Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... 
Inserting the 4th row 2025-12-23T18:25:41.652334Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [3:1570:3283], Recipient [3:1391:3155]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2025-12-23T18:25:41.652412Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037890 2025-12-23T18:25:41.652540Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [3:1391:3155], Recipient [3:1391:3155]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:25:41.652575Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:25:41.652648Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037890 2025-12-23T18:25:41.652777Z node 3 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037890, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2025-12-23T18:25:41.652861Z node 3 :TX_DATASHARD TRACE: datashard_write_operation.cpp:256: Table /Root/table2, shard: 72075186224037890, write point (Uint32 : 4) 2025-12-23T18:25:41.652921Z node 3 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:39:1] 2025-12-23T18:25:41.653014Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit CheckWrite 2025-12-23T18:25:41.653071Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2025-12-23T18:25:41.653113Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit CheckWrite 2025-12-23T18:25:41.653158Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-12-23T18:25:41.653192Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit BuildAndWaitDependencies 2025-12-23T18:25:41.653233Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2500/281474976715659 IncompleteEdge# v{min} UnprotectedReadEdge# v23000/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T18:25:41.653295Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037890 2025-12-23T18:25:41.653336Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2025-12-23T18:25:41.653363Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-12-23T18:25:41.653385Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit BlockFailPoint 2025-12-23T18:25:41.653408Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit BlockFailPoint 2025-12-23T18:25:41.653431Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2025-12-23T18:25:41.653451Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit BlockFailPoint 2025-12-23T18:25:41.653474Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit ExecuteWrite 2025-12-23T18:25:41.653495Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit ExecuteWrite 2025-12-23T18:25:41.653532Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037890 2025-12-23T18:25:41.653590Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2500/281474976715659 IncompleteEdge# v{min} UnprotectedReadEdge# v23000/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T18:25:41.653714Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037890, row count=1 2025-12-23T18:25:41.653790Z node 3 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2025-12-23T18:25:41.653864Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is ExecutedNoMoreRestarts 2025-12-23T18:25:41.653891Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit ExecuteWrite 2025-12-23T18:25:41.653932Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit FinishProposeWrite 2025-12-23T18:25:41.653972Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit FinishProposeWrite 2025-12-23T18:25:41.654058Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is DelayCompleteNoMoreRestarts 2025-12-23T18:25:41.654087Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit FinishProposeWrite 2025-12-23T18:25:41.654125Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit CompletedOperations 2025-12-23T18:25:41.654163Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit CompletedOperations 2025-12-23T18:25:41.654225Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2025-12-23T18:25:41.654263Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit CompletedOperations 2025-12-23T18:25:41.654296Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037890 has finished 2025-12-23T18:25:41.665176Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037890 2025-12-23T18:25:41.665253Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037890 on unit FinishProposeWrite 2025-12-23T18:25:41.665309Z node 3 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction 
complete txid 2 at tablet 72075186224037890 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-12-23T18:25:41.665405Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dqrun/dqrun >> GraphShard::CreateGraphShard [GOOD] |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_disk_quotas/unittest |90.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_disk_quotas/unittest |90.2%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> DescribeSchemaSecretsService::GetNewValue ------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/shard/ut/unittest >> GraphShard::CreateGraphShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:25:42.021502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:25:42.021621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:25:42.021663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:25:42.021709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:25:42.021766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:25:42.021799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:25:42.021861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:25:42.021936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:25:42.022888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:25:42.023173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:25:42.113280Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:25:42.113355Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:42.119049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:25:42.119276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:25:42.119441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:25:42.122533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:25:42.122792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:25:42.123588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:25:42.123842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:25:42.125866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:25:42.126037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:25:42.127136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:25:42.127204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:25:42.127357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:25:42.127410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:25:42.127463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:25:42.127597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:25:42.256051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.256814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.256900Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.256970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.257037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.257084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.257140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.257194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.257255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.257302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.257341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.257402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.257448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.257484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:42.257552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
7462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409546, cookie: 281474976720657 2025-12-23T18:25:43.367651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-12-23T18:25:43.367805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:25:43.367957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409546, cookie: 281474976720657 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2025-12-23T18:25:43.368193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409546, cookie: 281474976720658 2025-12-23T18:25:43.368304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:25:43.368444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:25:43.368510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:715: TTxOperationPlanStep Execute operation part is already done, operationId: 102:0 2025-12-23T18:25:43.368563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2025-12-23T18:25:43.368922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:1 128 -> 240 2025-12-23T18:25:43.368995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2025-12-23T18:25:43.369168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2025-12-23T18:25:43.369273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[1:818:2781], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 72075186234409549, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:25:43.375973Z node 
1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:25:43.376030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:25:43.376236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:25:43.376283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:25:43.376367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-12-23T18:25:43.376414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 102:1, ProgressState, NeedSyncHive: 0 2025-12-23T18:25:43.376473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:1 240 -> 240 2025-12-23T18:25:43.377807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:25:43.377921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:25:43.377957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:25:43.377988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:25:43.378024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2025-12-23T18:25:43.378110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/2, is published: true 2025-12-23T18:25:43.380786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-12-23T18:25:43.380843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:1 ProgressState 2025-12-23T18:25:43.380933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:1 progress is 2/2 2025-12-23T18:25:43.380965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-12-23T18:25:43.381002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:1 progress is 2/2 2025-12-23T18:25:43.381030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready 
parts: 2/2 2025-12-23T18:25:43.381064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-12-23T18:25:43.381101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-12-23T18:25:43.381171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:25:43.381213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:25:43.381376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2025-12-23T18:25:43.381424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2025-12-23T18:25:43.381448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:1 2025-12-23T18:25:43.381553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2025-12-23T18:25:43.382021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:25:43.383697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:25:43.383746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:25:43.384154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:25:43.384251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:25:43.384304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1013:2945] TestWaitNotification: OK eventTxId 102 2025-12-23T18:25:43.384807Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/db1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:25:43.385049Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/db1" took 235us result status StatusSuccess 2025-12-23T18:25:43.385555Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/db1" PathDescription { Self { Name: "db1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 
SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 GraphShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.2%| [TS] {BAZEL_UPLOAD} ydb/core/graph/shard/ut/unittest >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> TListAllTopicsTests::PlainList [GOOD] >> TListAllTopicsTests::RecursiveList >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV1 [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV2 >> TTxDataShardBuildFulltextIndexScan::BadRequest [GOOD] >> TTxDataShardBuildFulltextIndexScan::Build >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding >> test.py::test[solomon-BadDownsamplingAggregation-] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:25:16.822513Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:16.932770Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:16.942611Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:16.942939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:16.943010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:17.396141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:17.396327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:17.563084Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514312374906 != 1766514312374910 2025-12-23T18:25:17.591472Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:17.643892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:17.777320Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0037e2/r3tmp/tmpsBnoqI/pdisk_1.dat 2025-12-23T18:25:18.222807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:18.240178Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:18.366397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:18.928083Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:982:2831], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:18.928196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:992:2836], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:18.928620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:18.929268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:998:2841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:18.934471Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:18.939017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:19.107860Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:996:2839], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:25:19.229486Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1054:2878] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } ... waiting for SysViewsRoster update finished 2025-12-23T18:25:26.006088Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:26.032670Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:26.033056Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:26.033147Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:26.398238Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:26.398399Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:26.422800Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766514322178893 != 1766514322178896 2025-12-23T18:25:26.430160Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:26.479886Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:26.557541Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0037e2/r3tmp/tmpDd1NOW/pdisk_1.dat 2025-12-23T18:25:26.972757Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:26.987690Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:27.111621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:27.505541Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1030:2868], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:27.505648Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1040:2873], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:27.505721Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:27.506534Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1045:2877], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:27.506688Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:27.518916Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:27.697664Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1044:2876], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:25:27.746519Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1103:2916] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_va ... . waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0037e2/r3tmp/tmpkc4wyf/pdisk_1.dat 2025-12-23T18:25:36.368855Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:36.383157Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:36.501693Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:36.903204Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:982:2831], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:36.903319Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:992:2836], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:36.903712Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:36.904342Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:998:2841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:36.904475Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:36.923226Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:37.084006Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:996:2839], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:25:37.126172Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1054:2878] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 303 } } { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } } { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } }, { items { int64_value: 3 } items { uint32_value: 303 } } ... waiting for SysViewsRoster update finished 2025-12-23T18:25:42.710346Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:42.717983Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:42.723501Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:314:2357], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:42.723803Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:42.723876Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:43.050072Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:43.050225Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:43.078905Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1766514339351202 != 1766514339351206 2025-12-23T18:25:43.084292Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:43.131042Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:43.216950Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0037e2/r3tmp/tmpuLrgOh/pdisk_1.dat 2025-12-23T18:25:43.495553Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:43.511158Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:43.624889Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:43.943032Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:982:2831], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:43.943234Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:992:2836], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:43.943353Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:43.944915Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:998:2841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:43.945477Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:43.953292Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:44.092340Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:996:2839], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:25:44.128448Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1054:2878] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:25:44.230490Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:1063:2886], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017 2025-12-23T18:25:44.234499Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=4&id=OTA1MDdmNTYtODU3NTdlNWEtNmI0NjdjMDMtZTgyMDQxZDc=, ActorId: [4:979:2828], ActorState: ExecuteState, TraceId: 01kd67bg047rb4aprmaxwtbff2, ReplyQueryCompileError, status: BAD_REQUEST, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 98 } message: "Key columns are not specified." end_position { row: 1 column: 98 } issue_code: 2017 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 98 } message: "Key columns are not specified." end_position { row: 1 column: 98 } issue_code: 2017 severity: 1 } }, remove tx with tx_id: 2025-12-23T18:25:44.298317Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:1086:2903], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017 2025-12-23T18:25:44.302320Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=4&id=YmQ1YTEwNzctOWNlMTE4MDQtMTI5MGZjMTYtNTc4YzZjNzU=, ActorId: [4:1078:2895], ActorState: ExecuteState, TraceId: 01kd67bg9gcccqccnrhaw2c7jm, ReplyQueryCompileError, status: BAD_REQUEST, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 103 } message: "Key columns are not specified." end_position { row: 1 column: 103 } issue_code: 2017 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 103 } message: "Key columns are not specified." end_position { row: 1 column: 103 } issue_code: 2017 severity: 1 } }, remove tx with tx_id: |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_sequence/unittest >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |90.2%| [TS] {RESULT} ydb/core/graph/shard/ut/unittest |90.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_sequence/unittest |90.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |90.2%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |90.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys >> Vacuum::VacuumWithRestart [GOOD] >> Vacuum::OutReadSetsCleanedAfterCopyTable |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |90.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |90.2%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery >> DataShardReplication::ReplicatedTable-UseSink [GOOD] >> DataShardReplication::ApplyChangesToReplicatedTable >> 
TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries >> DataShardReassign::AutoReassignOnYellowFlag >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected [GOOD] >> KesusProxyTest::ReconnectsWithKesusWhenPipeDestroyed [GOOD] >> KesusProxyTest::ReconnectsWithKesusAfterSeveralRetries >> KesusProxyTest::ReconnectsWithKesusAfterSeveralRetries [GOOD] >> KesusProxyTest::RejectsNotCanonizedResourceName [GOOD] >> KesusProxyTest::SubscribesOnResource [GOOD] >> KesusProxyTest::SubscribesOnResourcesWhenReconnected [GOOD] >> KesusProxyTest::ProxyRequestDuringDisconnection [GOOD] >> KesusProxyTest::DeactivateSessionWhenResourceClosed [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnSuccess [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnFailure [GOOD] >> KesusProxyTest::AnswersWithSessionWhenResourceIsAlreadyKnown [GOOD] >> KesusProxyTest::SendsBrokenUpdateWhenKesusPassesError [GOOD] >> KesusProxyTest::AllocatesResourceWithKesus [GOOD] >> KesusProxyTest::DisconnectsDuringActiveSession >> KesusProxyTest::DisconnectsDuringActiveSession [GOOD] >> KesusProxyTest::AllocatesResourceOffline [GOOD] >> KesusProxyTest::ConnectsDuringOfflineAllocation [GOOD] >> KesusResourceAllocationStatisticsTest::ReturnsDefaultValues [GOOD] >> KesusResourceAllocationStatisticsTest::CalculatesAverage [GOOD] >> KesusResourceAllocationStatisticsTest::TakesBestStat [GOOD] >> TQuoterServiceTest::StaticRateLimiter >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |90.2%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut >> TTxDataShardBuildFulltextIndexScan::Build [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithData >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource >> TxKeys::ComparePointKeys >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel [GOOD] >> TCheckpointCoordinatorTests::ShouldAllSnapshots >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV2 [GOOD] >> KeyValueGRPCService::SimpleAcquireLockV1 >> TCheckpointCoordinatorTests::ShouldAllSnapshots [GOOD] >> TCheckpointCoordinatorTests::Should2Increments1Snapshot >> DescribeSchemaSecretsService::GetNewValue [GOOD] >> DescribeSchemaSecretsService::GetUpdatedValue >> TCheckpointCoordinatorTests::Should2Increments1Snapshot [GOOD] >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks >> KqpExecuter::TestSuddenAbortAfterReady >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery >> Graph::MemoryBackendFullCycle 
[GOOD] >> Graph::LocalBackendFullCycle >> TxKeys::ComparePointKeys [GOOD] >> TxKeys::ComparePointKeysWithNull >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |90.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery >> TxKeys::ComparePointKeysWithNull [GOOD] >> TxKeys::ComparePointAndRange >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData >> TMLPChangerTests::TopicNotExists >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs >> TFetchRequestTests::SmallBytesRead [GOOD] >> TFetchRequestTests::EmptyTopic >> TMemoryController::SharedCache [GOOD] >> TMemoryController::SharedCache_ConfigLimit >> TxKeys::ComparePointAndRange [GOOD] >> TxKeys::ComparePointAndRangeWithNull >> TDataShardRSTest::TestCleanupInRS+UseSink [GOOD] >> TDataShardRSTest::TestCleanupInRS-UseSink >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob >> TStateStorageConfig::TestMultiReplicaFailDomains [GOOD] >> TStateStorageConfig::TestReplicaSelectionUniqueCombinations >> TQuoterServiceTest::StaticRateLimiter [GOOD] >> TQuoterServiceTest::StaticMultipleAndResources >> TTxDataShardTestInit::TestGetShardStateAfterInitialization >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection >> TxKeys::ComparePointAndRangeWithNull [GOOD] >> TxKeys::ComparePointAndRangeWithInf >> TTxDataShardTestInit::TestGetShardStateAfterInitialization [GOOD] >> TTxDataShardTestInit::TestTableHasPath >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> DataShardReplication::ApplyChangesToReplicatedTable [GOOD] >> DataShardReplication::ApplyChangesToCommonTable >> Vacuum::OutReadSetsCleanedAfterCopyTable [GOOD] >> Vacuum::BorrowerDataCleanedAfterCopyTable >> KeyValueGRPCService::SimpleAcquireLockV1 [GOOD] >> KeyValueGRPCService::SimpleAcquireLockV2 >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections >> TxKeys::ComparePointAndRangeWithInf [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithData [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithTextData >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_keys/unittest >> TxKeys::ComparePointAndRangeWithInf 
[GOOD] Test command err: 2025-12-23T18:25:51.126251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:25:51.126323Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:51.127895Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:25:51.142871Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:25:51.143228Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:25:51.143483Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:25:51.192522Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:25:51.199646Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:25:51.199759Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:25:51.201459Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:25:51.201547Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:25:51.201607Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:25:51.202040Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:25:51.202114Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:25:51.202191Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:25:51.284458Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:25:51.320732Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:25:51.321002Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:25:51.321132Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:25:51.321170Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:25:51.321205Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:25:51.321241Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:25:51.321393Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:25:51.321443Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:25:51.321797Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 
2025-12-23T18:25:51.321909Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:25:51.322063Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:25:51.322118Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:25:51.322156Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:25:51.322197Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:25:51.322234Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:25:51.322268Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:25:51.322307Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:25:51.322392Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:25:51.322435Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:25:51.322484Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:25:51.329449Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:25:51.329531Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:25:51.329654Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:25:51.329858Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:25:51.329916Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:25:51.329975Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:25:51.330031Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:25:51.330068Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:25:51.330105Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:25:51.330139Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:25:51.330528Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:25:51.330593Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:25:51.330634Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:25:51.330671Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:25:51.330722Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:25:51.330773Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:25:51.330806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:25:51.330854Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:25:51.330883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:25:51.343241Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:25:51.343333Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:25:51.343371Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:25:51.343412Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:25:51.343506Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:25:51.344043Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:25:51.344097Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:25:51.344162Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:25:51.344293Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:25:51.344330Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:25:51.344470Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:25:51.344523Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:25:51.344592Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:25:51.344629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 
2025-12-23T18:25:51.348494Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:25:51.348576Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:25:51.348823Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:25:51.348871Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:25:51.348933Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:25:51.348974Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:25:51.349014Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:25:51.349056Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:25:51.349113Z node 1 :TX_DATASHARD TRACE: dat ... ode 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:25:54.572542Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [5:26:2073], Recipient [5:134:2156]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-12-23T18:25:54.572571Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-12-23T18:25:54.572602Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-12-23T18:25:54.572631Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:25:54.574140Z node 5 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000001 txid# 1} 2025-12-23T18:25:54.574188Z node 5 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000001} 2025-12-23T18:25:54.574229Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:25:54.574935Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:25:54.574969Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000001:1] at 9437184 on unit CreateTable 2025-12-23T18:25:54.575001Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:25:54.575039Z node 5 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 9437184 2025-12-23T18:25:54.575061Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000001:1] at 9437184 on unit CompleteOperation 2025-12-23T18:25:54.575101Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000001 : 1] from 9437184 at tablet 9437184 send result to client [5:103:2137], exec latency: 0 ms, propose latency: 2 ms 2025-12-23T18:25:54.575137Z node 5 :TX_DATASHARD INFO: 
datashard.cpp:1599: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-12-23T18:25:54.575192Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:25:54.575670Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5936: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 134 RawX2: 21474838636 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 2 2025-12-23T18:25:54.575746Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [5:233:2229], Recipient [5:134:2156]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:235:2230] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-12-23T18:25:54.575772Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-12-23T18:25:54.575823Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [5:127:2152], Recipient [5:134:2156]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-12-23T18:25:54.575849Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-12-23T18:25:54.575875Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-12-23T18:25:54.575915Z node 5 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-12-23T18:25:54.576144Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 65543, Sender [5:103:2137], Recipient [5:134:2156]: NActors::TEvents::TEvPoison 2025-12-23T18:25:54.576504Z node 5 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 9437184 2025-12-23T18:25:54.576581Z node 5 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 9437184 2025-12-23T18:25:54.583493Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [5:238:2231], Recipient [5:240:2232]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:25:54.585543Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [5:238:2231], Recipient [5:240:2232]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:25:54.585598Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828684, Sender [5:238:2231], Recipient [5:240:2232]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:25:54.592033Z node 5 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [5:240:2232] 2025-12-23T18:25:54.592247Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:25:54.595782Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:704: TxInitSchema.Execute Persist Sys_SubDomainInfo 2025-12-23T18:25:54.620926Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:25:54.621016Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:25:54.622381Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:25:54.622434Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:25:54.622476Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:25:54.622734Z node 5 :TX_DATASHARD DEBUG: 
datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:25:54.622863Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:25:54.622900Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [5:283:2232] in generation 3 2025-12-23T18:25:54.634308Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:25:54.634396Z node 5 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 9437184 2025-12-23T18:25:54.634465Z node 5 :TX_DATASHARD INFO: datashard.cpp:1599: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-12-23T18:25:54.634544Z node 5 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2025-12-23T18:25:54.634704Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [5:288:2271] 2025-12-23T18:25:54.634734Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:25:54.634790Z node 5 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 9437184 2025-12-23T18:25:54.634816Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:25:54.634985Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2025-12-23T18:25:54.635065Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2025-12-23T18:25:54.635161Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [5:240:2232], Recipient [5:240:2232]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:25:54.635199Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:25:54.635467Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5936: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 240 RawX2: 21474838712 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2025-12-23T18:25:54.635510Z node 5 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:25:54.635577Z node 5 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:25:54.635627Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [5:26:2073], Recipient [5:240:2232]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-12-23T18:25:54.635652Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-12-23T18:25:54.635682Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-12-23T18:25:54.635710Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:25:54.635768Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [5:26:2073], Recipient [5:240:2232]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 
2025-12-23T18:25:54.635793Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-12-23T18:25:54.635826Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-12-23T18:25:54.635874Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:25:54.635904Z node 5 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:25:54.635939Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:25:54.635965Z node 5 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:25:54.635993Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:25:54.636018Z node 5 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:25:54.636046Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:25:54.636106Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [5:286:2269], Recipient [5:240:2232]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:290:2273] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-12-23T18:25:54.636128Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-12-23T18:25:54.636187Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [5:127:2152], Recipient [5:240:2232]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-12-23T18:25:54.636211Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-12-23T18:25:54.636239Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-12-23T18:25:54.636278Z node 5 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-12-23T18:25:54.647566Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [5:286:2269], Recipient [5:240:2232]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [5:286:2269] ServerId: [5:290:2273] } 2025-12-23T18:25:54.647605Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_keys/unittest |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |90.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_keys/unittest |90.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection >> DescribeSchemaSecretsService::GetUpdatedValue [GOOD] >> DescribeSchemaSecretsService::GetUnexistingValue >> TListAllTopicsTests::RecursiveList 
[GOOD] >> TListAllTopicsTests::ListLimitAndPaging >> TQuoterServiceTest::StaticMultipleAndResources [GOOD] >> TQuoterServiceTest::StaticDeadlines >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reassign/unittest >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:25:52.232073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:52.356515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:52.370446Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:52.370980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:52.371088Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:52.683795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:52.683963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:52.750907Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514349860970 != 1766514349860974 2025-12-23T18:25:52.763364Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:52.806170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:52.907893Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005200/r3tmp/tmpMFIFVx/pdisk_1.dat 2025-12-23T18:25:53.185289Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2025-12-23T18:25:53.185381Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:53.187200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:53.187489Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{189, redo 997b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2025-12-23T18:25:53.187571Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:53.188358Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:190:1:24576:527:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:53.188452Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:190:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:53.188533Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} commited cookie 1 for step 190 
2025-12-23T18:25:53.188822Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-12-23T18:25:53.188876Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:53.189083Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{190, redo 175b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-12-23T18:25:53.189137Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:53.189418Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:191:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:53.189472Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:191:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:53.189545Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} commited cookie 1 for step 191 2025-12-23T18:25:53.189654Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-12-23T18:25:53.189686Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:53.189796Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{191, redo 175b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-12-23T18:25:53.189835Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:53.190029Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:192:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:53.190077Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:192:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:53.190116Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} commited cookie 1 for step 192 2025-12-23T18:25:53.190192Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-12-23T18:25:53.190223Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:53.190322Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{192, redo 121b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-12-23T18:25:53.190346Z 
node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:53.190482Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:193:1:24576:97:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:53.190534Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:193:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:53.190579Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} commited cookie 1 for step 193 2025-12-23T18:25:53.191180Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2025-12-23T18:25:53.191224Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:53.191284Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{193, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:25:53.191326Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:53.202672Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:53.296754Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-12-23T18:25:53.296856Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:53.297103Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{8, redo 264b alter 0b annex 0, ~{ 0, 4, 2 } -{ }, 0 gb} 2025-12-23T18:25:53.297160Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:53.297550Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:9:1:24576:165:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:53.297625Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:53.297695Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:10} commited cookie 1 for step 9 2025-12-23T18:25:53.298146Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep 2025-12-23T18:25:53.298194Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} took 
4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:53.298611Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} hope 1 -> done Change{193, redo 819b alter 0b annex 0, ~{ 1, 18, 33, 27, 35, 42, 48, 4 } -{ }, 0 gb} 2025-12-23T18:25:53.298665Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:53.298976Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:194:1:24576:427:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:53.2990 ... Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} commited cookie 1 for step 25 2025-12-23T18:25:56.099963Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-12-23T18:25:56.100090Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:56.100327Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{25, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-12-23T18:25:56.100393Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:56.100880Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:26:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:56.100964Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:56.101060Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} commited cookie 1 for step 26 2025-12-23T18:25:56.246839Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-12-23T18:25:56.246954Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:56.247132Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{26, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-12-23T18:25:56.247230Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:56.247690Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:27:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:56.247799Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
2025-12-23T18:25:56.247908Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} commited cookie 1 for step 27 2025-12-23T18:25:56.397140Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-12-23T18:25:56.397229Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:56.397391Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{27, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-12-23T18:25:56.397437Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:56.397798Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:28:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:56.397854Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:56.397927Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} commited cookie 1 for step 28 2025-12-23T18:25:56.411170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:25:56.411234Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:56.435055Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics 2025-12-23T18:25:56.435160Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:56.435254Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:25:56.435326Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:56.577936Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-12-23T18:25:56.578075Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:56.578285Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{28, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-12-23T18:25:56.578384Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:56.578972Z node 1 
:TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:29:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:56.579086Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:56.579197Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} commited cookie 1 for step 29 2025-12-23T18:25:56.689726Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-12-23T18:25:56.689841Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-12-23T18:25:56.689948Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxCleanupTransaction 2025-12-23T18:25:56.690006Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:56.690096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 16000 last cleanup 0 2025-12-23T18:25:56.690174Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:25:56.690219Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-12-23T18:25:56.690256Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:25:56.690304Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:25:56.690391Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:25:56.690473Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:56.690647Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T18:25:56.759380Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-12-23T18:25:56.759480Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:56.759669Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{29, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-12-23T18:25:56.759735Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 
2025-12-23T18:25:56.760196Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:30:1:24576:87:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:56.760317Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:56.760464Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} commited cookie 1 for step 30 2025-12-23T18:25:56.782878Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2025-12-23T18:25:56.782968Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:25:56.783262Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{13, redo 490b alter 0b annex 0, ~{ 16, 1, 4 } -{ }, 0 gb} 2025-12-23T18:25:56.783324Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:25:56.793985Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037968897:2:11:0:0:262:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:25:56.794094Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} commited cookie 1 for step 11 --- Captured TEvCheckBlobstorageStatusResult event --- Waiting for TEvReassignTablet event... 2025-12-23T18:25:56.908074Z node 1 :TABLET_EXECUTOR NOTICE: Leader{72075186224037888:1:10} CheckYellow current light yellow move channels: [ 0 1 ] 2025-12-23T18:25:56.908162Z node 1 :TABLET_EXECUTOR NOTICE: Leader{72075186224037888:1:10} CheckYellow reassign channels: [ 0 1 ] tablet# 72075186224037888 hive# 72057594037968897 --- Captured TEvReassignTablet event |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_reassign/unittest >> TTxDataShardTestInit::TestTableHasPath [GOOD] >> TTxDataShardTestInit::TestResolvePathAfterRestart >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> TMemoryController::SharedCache_ConfigLimit [GOOD] >> TMemoryController::MemTable |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |90.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_reassign/unittest |90.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> TTxDataShardBuildFulltextIndexScan::BuildWithTextData [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithTextFromKey >> Graph::LocalBackendFullCycle [GOOD] >> Graph::MemoryBordersOnGet >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings >> 
DataShardReplication::ApplyChangesToCommonTable [GOOD] >> DataShardReplication::ApplyChangesWithConcurrentTx >> KeyValueGRPCService::SimpleAcquireLockV2 [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKeyV1 >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding >> TQuoterServiceTest::StaticDeadlines [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding >> Graph::MemoryBordersOnGet [GOOD] >> Graph::LocalBordersOnGet >> TDqSolomonWriteActorTest::TestWriteFormat >> KqpExecuter::TestSuddenAbortAfterReady [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery >> DescribeSchemaSecretsService::GetUnexistingValue [GOOD] >> DescribeSchemaSecretsService::GetDroppedValue >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries >> Vacuum::BorrowerDataCleanedAfterCopyTable [GOOD] >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> KqpExecuter::TestSuddenAbortAfterReady [GOOD] Test command err: Trying to start YDB, gRPC: 19832, MsgBus: 32458 ... waiting for SysViewsRoster update finished 2025-12-23T18:25:53.407058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:53.531350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:53.541851Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:53.542194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:53.542267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:53.841811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:53.841944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:53.920969Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514351177620 != 1766514351177624 2025-12-23T18:25:53.937058Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:53.983154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:54.166474Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003293/r3tmp/tmpMXNEOV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19832, node 1 2025-12-23T18:25:54.451930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:25:54.451986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:25:54.452025Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:25:54.452380Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:25:54.464340Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:32458 TClient is connected to server localhost:32458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:25:54.783408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:25:54.830837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:25:55.120968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:25:55.473796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:25:55.842443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:25:56.654424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1901:3510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:56.654824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:56.655954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3529], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:56.656191Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:56.695276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:56.910417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:57.145688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:57.458787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:57.710864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:57.980060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:58.260802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:58.567245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:58.867100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4170], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:58.867194Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:58.867515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:58.867570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:58.867605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4177], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't hav ... 4446] Got NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:2071:3611] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:24339059:0] [1:2889:4242] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3143:4445] [1:940:2799] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3144:4446] [1:940:2799] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2891:4242] [1:2887:4242] Got NKikimr::TEvBlobStorage::TEvPut [0:34190892238664546:8519680] [1:3145:4447] Got NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:2621:4045] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:1099535966835:0] [1:2891:4242] Got NKikimr::TEvTabletBase::TEvWriteLogResult [1:2594:4029] [1:3143:4445] Got NKikimr::TEvTabletBase::TEvWriteLogResult [1:2597:4031] [1:3144:4446] Got NKikimr::TEvBlobStorage::TEvPut [0:34190892238664546:8519680] [1:3146:4448] Got NKikimr::TEvTablet::TEvCommitResult [1:2668:4047] [1:2594:4029] Got NKikimr::TEvTablet::TEvCommitResult [1:2670:4050] [1:2597:4031] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3145:4447] [1:940:2799] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3146:4448] [1:940:2799] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2893:4242] [1:2888:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2889:4242] [1:2:2049] Got NKikimr::TEvBlobStorage::TEvPut [0:34190892238664546:8519680] [1:3147:4449] Got NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:2628:4047] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2199047594611:0] [1:2893:4242] Got NKikimr::TEvTabletBase::TEvWriteLogResult [1:2599:4033] [1:3145:4447] Got NKikimr::TEvTabletBase::TEvWriteLogResult [1:1045:2878] [1:3146:4448] Got NKikimr::TEvBlobStorage::TEvPut [0:34190892238664546:8519680] [1:3148:4450] Got NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:2631:4050] Got NKikimr::TEvTablet::TEvCommitResult [1:2685:4054] [1:2599:4033] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3147:4449] [1:940:2799] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3148:4450] [1:940:2799] Got NKikimr::TEvTablet::TEvCommitResult [1:1134:2905] [1:1045:2878] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2891:4242] [1:5:2052] Got NKikimr::TEvBlobStorage::TEvPut [0:34190892238664546:8519680] [1:3149:4451] Got NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:2637:4054] Got NKikimr::TEvTabletBase::TEvWriteLogResult [1:2125:3656] [1:3147:4449] Got NKikimr::TEvTabletBase::TEvWriteLogResult [1:1048:2880] [1:3148:4450] Got NKikimr::TEvTablet::TEvCommitResult [1:2141:3661] [1:2125:3656] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3149:4451] [1:940:2799] Got NKikimr::TEvTablet::TEvCommitResult [1:1161:2907] [1:1048:2880] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2886:4242] [1:2889:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2893:4242] [1:8:2055] Got NKikimr::TEvBlobStorage::TEvPut [0:34190892238664546:8519680] [1:3150:4452] Got NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:1094:2905] Got NKikimr::TEvTabletBase::TEvWriteLogResult [1:1050:2882] [1:3149:4451] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3150:4452] [1:940:2799] Got NKikimr::TEvTablet::TEvCommitResult [1:1162:2912] [1:1050:2882] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2887:4242] [1:2891:4242] Got NKikimr::TEvBlobStorage::TEvPut [0:34190892238664546:8519680] [1:3151:4453] Got 
NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:2133:3661] Got NKikimr::TEvTabletBase::TEvWriteLogResult [1:1053:2884] [1:3150:4452] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3151:4453] [1:940:2799] Got NKikimr::TEvTablet::TEvCommitResult [1:1182:2915] [1:1053:2884] Got NKikimr::TEvBlobStorage::TEvPut [0:34190892238664546:8519680] [1:3152:4454] Got NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:1099:2907] Got NKikimr::TEvTabletBase::TEvWriteLogResult [1:1055:2886] [1:3151:4453] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3152:4454] [1:940:2799] Got NKikimr::TEvTablet::TEvCommitResult [1:1190:2917] [1:1055:2886] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2883:4242] [1:2886:4242] Got NKikimr::TEvBlobStorage::TEvPut [0:34190892238664546:8519680] [1:3153:4455] Got NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:1105:2912] Got NKikimr::TEvTabletBase::TEvWriteLogResult [1:1058:2888] [1:3152:4454] Got NKikimr::TEvBlobStorage::TEvPutResult [1:3153:4455] [1:940:2799] Got NKikimr::TEvTablet::TEvCommitResult [1:1201:2919] [1:1058:2888] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2888:4242] [1:2893:4242] Got NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:1108:2915] Got NKikimr::TEvTabletBase::TEvWriteLogResult [1:1059:2889] [1:3153:4455] Got NKikimr::TEvTablet::TEvCommitResult [1:1202:2921] [1:1059:2889] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2883:4242] [1:2887:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse [1:100:2147] [1:2883:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2883:4242] [1:2888:4242] Got NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:1111:2917] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2886:4242] [1:2883:4242] Got NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:1114:2919] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2887:4242] [1:2883:4242] Got NKikimr::TEvTxProcessing::TEvReadSetAck [1:942:2800] [1:1116:2921] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2888:4242] [1:2883:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2889:4242] [1:2886:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2891:4242] [1:2887:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:24339059:0] [1:2889:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:1099535966835:0] [1:2891:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2893:4242] [1:2888:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2889:4242] [1:2:2049] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2199047594611:0] [1:2893:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2891:4242] [1:5:2052] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2886:4242] [1:2889:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2893:4242] [1:8:2055] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2887:4242] [1:2891:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2883:4242] [1:2886:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2888:4242] [1:2893:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2883:4242] [1:2887:4242] Got 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse [1:100:2147] [1:2883:4242] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2883:4242] [1:2888:4242] Got NActors::IEventHandle [1:3164:4466] [1:100:2147] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:3163:4465] [1:3164:4466] Got NActors::IEventHandle [1:3165:4467] [1:3163:4465] Got NKikimr::NStat::TEvStatistics::TEvGetStatistics [1:8534995652929746003:6644585] [1:3165:4467] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:76:2123] Got NActors::IEventHandle [1:3166:4468] [1:100:2147] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:76:2123] [1:3166:4468] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:76:2123] Got NActors::IEventHandle [1:3167:4469] [1:100:2147] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:76:2123] [1:3167:4469] Got NKikimr::NStat::TEvStatistics::TEvGetStatisticsResult [1:3165:4467] [1:76:2123] Got NKikimr::NKqp::TEvKqp::TEvContinueProcess [1:3161:4463] [1:8320808721877066593:7169396] Got NKikimr::NKqp::TEvKqp::TEvContinueProcess [1:3161:4463] [1:8320808721877066593:7169396] Got NKikimr::NKqp::TEvKqp::TEvContinueProcess [1:3161:4463] [1:8320808721877066593:7169396] Got NKikimr::NKqp::NPrivateEvents::TEvCompileResponse [1:92:2139] [1:3161:4463] Got NKikimr::NKqp::NPrivateEvents::TEvCompileResponse [1:3127:4429] [1:92:2139] Got NKikimr::TEvTxUserProxy::TEvProposeKqpTransaction [0:6014971197384587348:7762533] [1:3127:4429] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxRequest [1:3168:4429] [1:67:2114] Got NKikimr::NKqp::NScheduler::TEvAddDatabase [1:7235142148544295275:29292] [1:3168:4429] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:3168:4429] Got NKikimr::NKqp::NScheduler::TEvAddQuery [1:7235142148544295275:29292] [1:3168:4429] 2025-12-23T18:26:00.938515Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:3168:4429] TxId: 281474976715673. Ctx: { TraceId: 01kd67c08v1r9k1g60c2mp4nwy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjhlOWEwYi00MmRiYjNiZS01YTBhZTNhZC1mNGEwZjY2NA==, PoolId: default, IsStreamingQuery: 0}. 
STATUS_CODE_UNSPECIFIED: Got NKikimr::TEvPipeCache::TEvUnlink [0:7521962744731429200:16741] [1:3168:4429] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxResponse [1:3127:4429] [1:3168:4429] 2025-12-23T18:26:00.938774Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=MjhlOWEwYi00MmRiYjNiZS01YTBhZTNhZC1mNGEwZjY2NA==, ActorId: [1:3127:4429], ActorState: ExecuteState, TraceId: 01kd67c08v1r9k1g60c2mp4nwy, Create QueryResponse for error on request, msg: , status: STATUS_CODE_UNSPECIFIED, issues: Got NKikimr::NSysView::TEvSysView::TEvCollectQueryStats [1:6014387330472966483:2188150] [1:3127:4429] Got NKikimr::TEvTxUserProxy::TEvProposeKqpTransaction [0:6014971197384587348:7762533] [1:3127:4429] Got NKikimr::NKqp::NScheduler::TEvRemoveQuery [1:7235142148544295275:29292] [1:3168:4429] Got NKikimr::NKqp::NWorkload::TEvCleanupRequest [1:2864:4227] [1:3127:4429] Got NActors::TEvents::TEvPoison [1:3168:4429] [1:3168:4429] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvFinishRequestInPool [1:7742373267896299883:25708] [1:2864:4227] Got NKikimr::NKqp::NScheduler::TEvQueryResponse [1:3168:4429] [1:97:2144] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxRequest [1:3169:4429] [1:67:2114] Got NKikimr::NKqp::NScheduler::TEvAddDatabase [1:7235142148544295275:29292] [1:3169:4429] Got NKikimr::NKqp::NWorkload::TEvCleanupResponse [1:3127:4429] [1:2864:4227] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:3169:4429] Got NKikimr::NKqp::NScheduler::TEvAddQuery [1:7235142148544295275:29292] [1:3169:4429] 2025-12-23T18:26:00.939379Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:3169:4429] TxId: 281474976715674. Ctx: { TraceId: 01kd67c08v1r9k1g60c2mp4nwy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MjhlOWEwYi00MmRiYjNiZS01YTBhZTNhZC1mNGEwZjY2NA==, PoolId: default, IsStreamingQuery: 0}. STATUS_CODE_UNSPECIFIED: Got NKikimr::NKqp::NScheduler::TEvRemoveQuery [1:7235142148544295275:29292] [1:3169:4429] Got NKikimr::TEvPipeCache::TEvUnlink [0:7521962744731429200:16741] [1:3169:4429] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxResponse [1:3127:4429] [1:3169:4429] 2025-12-23T18:26:00.939511Z node 1 :KQP_SESSION ERROR: kqp_session_actor.cpp:2966: SessionId: ydb://session/3?node_id=1&id=MjhlOWEwYi00MmRiYjNiZS01YTBhZTNhZC1mNGEwZjY2NA==, ActorId: [1:3127:4429], ActorState: CleanupState, TraceId: 01kd67c08v1r9k1g60c2mp4nwy, Failed to cleanup: Got NKikimr::NKqp::NPrivateEvents::TEvQueryResponse [1:65:2112] [1:3127:4429] Got NKikimr::NKqp::NPrivateEvents::TEvQueryResponse [1:3154:4456] [1:65:2112] Got NActors::TEvents::TEvPoison [1:3155:4457] [1:65:2112] Got NActors::TEvents::TEvPoison [1:3169:4429] [1:3169:4429] Got NKikimr::NKqp::NScheduler::TEvQueryResponse [1:3169:4429] [1:97:2144] |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/executer_actor/ut/unittest >> Graph::LocalBordersOnGet [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery >> TTxDataShardBuildFulltextIndexScan::BuildWithTextFromKey [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithRelevance ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_vacuum/unittest >> Vacuum::BorrowerDataCleanedAfterCopyTable [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:25:09.099710Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:09.238972Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2025-12-23T18:25:09.239922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:09.259491Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:111:2158], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:09.259688Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:09.259777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:09.753172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:09.753314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:09.913536Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:36:2083] 1766514306090197 != 1766514306090201 2025-12-23T18:25:09.938357Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:10.003975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:10.113040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0000c0/r3tmp/tmp7Ca1Lx/pdisk_1.dat 2025-12-23T18:25:10.554579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:10.571901Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:10.695917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:11.132653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:985:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:11.132752Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:11.132810Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:11.133285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:999:2850], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:11.133466Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:11.145139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:11.310368Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1000:2851], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:25:11.408926Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1057:2889] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2025-12-23T18:25:18.483795Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:18.488212Z node 2 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2025-12-23T18:25:18.489170Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:18.502463Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:298:2342], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:18.502792Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:18.502931Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:18.992846Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:18.992971Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:19.047319Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:36:2083] 1766514314033144 != 1766514314033147 2025-12-23T18:25:19.050457Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:19.119627Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:19.196905Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0000c0/r3tmp/tmpnGdfup/pdisk_1.dat 2025-12-23T18:25:19.636802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:19.659297Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:19.778011Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:20.198051Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:978:2836], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:20.198162Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:989:2841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:20.198551Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:20.207356Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:20.207563Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:20.219952Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:20.375289Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:992:2844], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:25:20.412619Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1049:2882] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finis ... :125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:51.094439Z node 6 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2025-12-23T18:25:51.095566Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:51.097599Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:316:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:51.097814Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:51.097920Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:51.303792Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:51.303910Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:51.324348Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:36:2083] 1766514348306437 != 1766514348306441 2025-12-23T18:25:51.328629Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:51.384238Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:51.465973Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0000c0/r3tmp/tmpTnWNRy/pdisk_1.dat 2025-12-23T18:25:51.731804Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:51.744904Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:51.850430Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:52.413810Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1075:2914], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:52.413966Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1085:2919], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:52.414061Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:52.415245Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1090:2923], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:52.415425Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:52.421562Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:52.572250Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:1089:2922], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:25:52.610368Z node 6 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [6:1148:2962] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2025-12-23T18:25:58.518790Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:58.524910Z node 7 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2025-12-23T18:25:58.525824Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:58.529227Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:58.529443Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:58.529565Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:58.748020Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:58.748154Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:58.770539Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:36:2083] 1766514354829881 != 1766514354829885 2025-12-23T18:25:58.775869Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:58.817293Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:58.884551Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0000c0/r3tmp/tmp89qOLi/pdisk_1.dat 2025-12-23T18:25:59.178929Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:59.193406Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:59.297421Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:59.824864Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:1075:2914], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:59.824994Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:1085:2919], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:59.825416Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:59.826257Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:1091:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:59.826478Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:59.831434Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:59.969069Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:1089:2922], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:26:00.006069Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:1148:2962] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:26:01.258563Z node 7 :TX_DATASHARD WARN: datashard__vacuum.cpp:37: Vacuum of tablet# 72075186224037888: has borrowed parts, requested from [7:831:2726] |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_vacuum/unittest >> TMLPChangerTests::TopicNotExists [GOOD] >> TMLPChangerTests::ConsumerNotExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_init/unittest >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] Test command err: 2025-12-23T18:25:54.288967Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:112:2143]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:25:54.292050Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:112:2143]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:25:54.292347Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:112:2143] 2025-12-23T18:25:54.292556Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:25:54.323510Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:106:2139], Recipient [1:112:2143]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:25:54.327167Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:25:54.327259Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:25:54.328525Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:25:54.328590Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:25:54.328643Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:25:54.328917Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:25:54.328997Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:25:54.329055Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:135:2143] in generation 2 2025-12-23T18:25:54.362135Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:25:54.382017Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:25:54.382179Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:25:54.382266Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:141:2163] 2025-12-23T18:25:54.382296Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:25:54.382323Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 
2025-12-23T18:25:54.382348Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:25:54.382508Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:112:2143], Recipient [1:112:2143]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:25:54.382541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:25:54.382666Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:25:54.382753Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:25:54.382794Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:25:54.382829Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:25:54.382868Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:25:54.382896Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:25:54.382922Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:25:54.382948Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:25:54.382976Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:25:54.383864Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [1:103:2137], Recipient [1:112:2143]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 4294969433 } 2025-12-23T18:25:54.383905Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState ... waiting for SysViewsRoster update finished 2025-12-23T18:25:57.081131Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:57.197817Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:57.198205Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:57.198291Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:57.464584Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:57.464740Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:57.555945Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766514354551005 != 1766514354551008 2025-12-23T18:25:57.570673Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:57.619228Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:57.706147Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0047a7/r3tmp/tmpIIMTXi/pdisk_1.dat 2025-12-23T18:25:57.990794Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:58.004604Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:58.108387Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:58.135898Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:884:2764] 2025-12-23T18:25:58.136203Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:25:58.173303Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:25:58.173411Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:25:58.174652Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:25:58.174725Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:25:58.174790Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:25:58.175066Z node 2 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:25:58.175178Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:25:58.175255Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:900:2764] in generation 1 2025-12-23T18:25:58.185993Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:25:58.186092Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:25:58.186187Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:25:58.186267Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:902:2774] 2025-12-23T18:25:58.186311Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:25:58.186345Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:25:58.186384Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:25:58.186701Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:25:58.186849Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:25:58.186944Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:25:58.186973Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:25:58.187007Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:25:58.187037Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:25:58.187120Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:882:2762], serverId# [2:887:2765], sessionId# [0:0:0] 2025-12-23T18:25:58.187639Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:25:58.187819Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:25:58.187895Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:25:58.189260Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:25:58.199974Z node 2 :TX_DATASHARD DEBUG: dat ... 
planned 0 immediate 0 planned 1 2025-12-23T18:26:01.758979Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:26:01.759199Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:26:01.759306Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:26:01.759940Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:26:01.760019Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:26:01.760381Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:26:01.760755Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:26:01.762651Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:26:01.762691Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:26:01.763337Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:26:01.763391Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:26:01.808140Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:26:01.808205Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:26:01.808246Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:26:01.808312Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:26:01.808363Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:26:01.808451Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:26:01.809602Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:26:01.811370Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:26:01.811866Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:26:01.811915Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 
72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:26:01.816528Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2025-12-23T18:26:01.816688Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-12-23T18:26:01.848992Z node 3 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:955:2811] 2025-12-23T18:26:01.849247Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:26:01.853382Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:26:01.854300Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:26:01.856315Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:26:01.856388Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:26:01.856451Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:26:01.856810Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:26:01.857070Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:26:01.857154Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [3:970:2811] in generation 2 2025-12-23T18:26:01.878490Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:26:01.878603Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037888 2025-12-23T18:26:01.878694Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:26:01.878837Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:26:01.878923Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4210: Resolve path at 72075186224037888: reason# empty path 2025-12-23T18:26:01.879048Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [3:974:2821] 2025-12-23T18:26:01.879089Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:26:01.879133Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:26:01.879174Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:26:01.879461Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2025-12-23T18:26:01.879692Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2025-12-23T18:26:01.880265Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:26:01.880360Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:26:01.880692Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5936: Got 
TEvDataShard::TEvSchemaChanged for unknown txId 281474976715657 message# Source { RawX1: 955 RawX2: 12884904699 } Origin: 72075186224037888 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-12-23T18:26:01.881068Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 2000 2025-12-23T18:26:01.881118Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:26:01.881431Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:26:01.881511Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:26:01.881554Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:26:01.881590Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:26:01.881632Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:26:01.881907Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:26:01.912397Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4271: Got scheme resolve result at 72075186224037888: Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 2000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 2025-12-23T18:26:01.912701Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:26:01.912788Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:26:01.912972Z node 3 :TX_DATASHARD DEBUG: datashard__store_table_path.cpp:20: TTxStoreTablePath::Execute at 72075186224037888 2025-12-23T18:26:01.914021Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:978:2825], serverId# [3:980:2826], sessionId# [0:0:0] 2025-12-23T18:26:01.927362Z node 3 :TX_DATASHARD DEBUG: datashard__store_table_path.cpp:39: TTxStoreTablePath::Complete at 72075186224037888 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_init/unittest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/graph/ut/unittest >> Graph::LocalBordersOnGet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:25:39.448208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:25:39.448343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:25:39.448399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:25:39.448443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:25:39.448494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:25:39.448533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:25:39.448596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:25:39.448674Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:25:39.455221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:25:39.455677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:25:39.564328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:25:39.564424Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:39.577965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:25:39.578276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:25:39.578453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:25:39.584327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:25:39.584561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:25:39.585444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:25:39.585701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:25:39.592009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:25:39.592225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:25:39.593532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:25:39.593610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:25:39.593796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:25:39.593855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:25:39.593906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:25:39.594057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:25:39.796832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 
281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.798002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.798132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.798202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.798273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.798341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.798454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.798521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.798613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.798687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.798782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.798882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.798955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.799076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:25:39.799169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
3T18:26:02.407427Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2025-12-23T18:26:02.407448Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2025-12-23T18:26:02.407487Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 109 } Time: 109 2025-12-23T18:26:02.407503Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2025-12-23T18:26:02.407522Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2025-12-23T18:26:02.407542Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2025-12-23T18:26:02.407603Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 110 } Time: 110 2025-12-23T18:26:02.407618Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2025-12-23T18:26:02.407637Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2025-12-23T18:26:02.407660Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2025-12-23T18:26:02.407711Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 111 } Time: 111 2025-12-23T18:26:02.407727Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2025-12-23T18:26:02.407749Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2025-12-23T18:26:02.407774Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2025-12-23T18:26:02.407827Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 112 } Time: 112 2025-12-23T18:26:02.407841Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2025-12-23T18:26:02.407860Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2025-12-23T18:26:02.407881Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2025-12-23T18:26:02.407920Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 113 } Time: 113 2025-12-23T18:26:02.407937Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2025-12-23T18:26:02.407955Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2025-12-23T18:26:02.407975Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2025-12-23T18:26:02.408028Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 114 } Time: 114 2025-12-23T18:26:02.408046Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2025-12-23T18:26:02.408067Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2025-12-23T18:26:02.408088Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2025-12-23T18:26:02.408140Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 115 } Time: 115 2025-12-23T18:26:02.408155Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2025-12-23T18:26:02.408174Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2025-12-23T18:26:02.408194Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2025-12-23T18:26:02.408241Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 116 } Time: 116 2025-12-23T18:26:02.408257Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2025-12-23T18:26:02.408276Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2025-12-23T18:26:02.408299Z node 6 :GRAPH TRACE: 
backends.cpp:329: DB Stored metrics 2025-12-23T18:26:02.408347Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 117 } Time: 117 2025-12-23T18:26:02.408380Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2025-12-23T18:26:02.408399Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2025-12-23T18:26:02.408420Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2025-12-23T18:26:02.408474Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 118 } Time: 118 2025-12-23T18:26:02.408490Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2025-12-23T18:26:02.408509Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2025-12-23T18:26:02.408528Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2025-12-23T18:26:02.408589Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 119 } Time: 119 2025-12-23T18:26:02.408602Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2025-12-23T18:26:02.408619Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2025-12-23T18:26:02.408643Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2025-12-23T18:26:02.408685Z node 6 :GRAPH TRACE: shard_impl.cpp:226: SHARD Handle TEvGraph::TEvGetMetrics from [6:1013:2945] 2025-12-23T18:26:02.408736Z node 6 :GRAPH DEBUG: tx_get_metrics.cpp:20: SHARD TTxGetMetrics::Execute 2025-12-23T18:26:02.408781Z node 6 :GRAPH DEBUG: backends.cpp:352: DB Querying from 0 to 119 2025-12-23T18:26:02.420925Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.420996Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421017Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421034Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421052Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421071Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421089Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421106Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421123Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421140Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421156Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421173Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421191Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421209Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421225Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421241Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421258Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421274Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 
2025-12-23T18:26:02.421291Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421309Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421324Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421341Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421361Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421376Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421393Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421409Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421427Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421448Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421463Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421478Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421495Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421511Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421527Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421542Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421560Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421577Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421602Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421626Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421650Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421669Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421686Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421704Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421720Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421736Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421753Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421769Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421800Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421816Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421831Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421847Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421863Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421880Z node 6 :GRAPH DEBUG: 
tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421895Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421910Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421926Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421941Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421957Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421972Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.421990Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.422008Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2025-12-23T18:26:02.422034Z node 6 :GRAPH DEBUG: tx_get_metrics.cpp:25: SHARD TTxGetMetric::Complete 2025-12-23T18:26:02.422068Z node 6 :GRAPH TRACE: tx_get_metrics.cpp:26: SHARD TxGetMetrics returned 60 points for request 3 2025-12-23T18:26:02.422184Z node 6 :GRAPH TRACE: service_impl.cpp:201: SVC TEvMetricsResult 3 2025-12-23T18:26:02.422227Z node 6 :GRAPH TRACE: service_impl.cpp:204: SVC TEvMetricsResult found request 3 resending to [6:1014:2946] >> test.py::test[solomon-Basic-default.txt] |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/graph/ut/unittest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery >> KeyValueGRPCService::SimpleRenameUnexistedKeyV1 [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKeyV2 >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable [GOOD] >> DataShardFollowers::FollowerStaleRo >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |90.2%| [TM] {RESULT} ydb/core/kqp/executer_actor/ut/unittest |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs [GOOD] |90.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_vacuum/unittest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob |90.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_init/unittest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection |90.2%| [TM] {RESULT} ydb/core/graph/ut/unittest |90.2%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_replication/unittest >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:25:09.639694Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:09.886232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:09.899955Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:09.900304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:09.900408Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:10.330638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:10.330807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:10.472593Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514304739142 != 1766514304739146 2025-12-23T18:25:10.500178Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:10.559887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:10.667363Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0000c7/r3tmp/tmpioJ4ER/pdisk_1.dat 2025-12-23T18:25:11.078021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:11.095915Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:11.223636Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:25:11.223704Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:25:11.223867Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:25:11.497909Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:25:11.497991Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:25:11.498478Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# 
[1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-12-23T18:25:11.498547Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:25:11.498866Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:25:11.498992Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:25:11.499101Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:25:11.499326Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:25:11.500730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:11.501672Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:25:11.501746Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:25:11.546346Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:25:11.551541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:25:11.551816Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:25:11.552083Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:25:11.656080Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:25:11.656734Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:25:11.656837Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:25:11.658292Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:25:11.658373Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:25:11.658430Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:25:11.662754Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: 
TDataShard::TTxInit::Complete 2025-12-23T18:25:11.662889Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:25:11.662964Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:25:11.675132Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:25:11.717073Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:25:11.717268Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:25:11.717369Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:25:11.717400Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:25:11.717446Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:25:11.717481Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:25:11.717677Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:25:11.717746Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:25:11.717983Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:25:11.718060Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:25:11.718390Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:25:11.718432Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:25:11.718469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:25:11.718501Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:25:11.718531Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:25:11.718561Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:25:11.718611Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:25:11.718686Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:25:11.718720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:25:11.718785Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], 
sessionId# [0:0:0] 2025-12-23T18:25:11.718859Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:25:11.718890Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:25:11.718974Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:25:11.719182Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:25:11.719223Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose sche ... :04.482122Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2025-12-23T18:26:04.482155Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:26:04.482173Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:26:04.482189Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:26:04.482205Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:26:04.482293Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-12-23T18:26:04.482542Z node 8 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T18:26:04.482596Z node 8 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-12-23T18:26:04.482636Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[8:1068:2887], 0} after executionsCount# 1 2025-12-23T18:26:04.482680Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[8:1068:2887], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:26:04.482774Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[8:1068:2887], 0} finished in read 2025-12-23T18:26:04.482858Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:26:04.482878Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:26:04.482897Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:26:04.482917Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 
2025-12-23T18:26:04.482946Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:26:04.482962Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:26:04.482985Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2025-12-23T18:26:04.483022Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:26:04.483112Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:26:04.483277Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [8:69:2116], Recipient [8:885:2764]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 8 Status: STATUS_SUBSCRIBED 2025-12-23T18:26:04.483736Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [8:1068:2887], Recipient [8:885:2764]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:26:04.483800Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } } 2025-12-23T18:26:04.486442Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [8:1073:2892], Recipient [8:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:26:04.486519Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:26:04.486564Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [8:1072:2891], serverId# [8:1073:2892], sessionId# [0:0:0] 2025-12-23T18:26:04.486782Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549570, Sender [8:1071:2890], Recipient [8:885:2764]: NKikimrTxDataShard.TEvApplyReplicationChanges TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Source: "my-source" Changes { SourceOffset: 1 WriteTxId: 0 Key: "\001\000\004\000\000\000\001\000\000\000" Upsert { Tags: 2 Data: "\001\000\004\000\000\000\025\000\000\000" } } 2025-12-23T18:26:04.486970Z node 8 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v2000/18446744073709551615 2025-12-23T18:26:04.487112Z node 8 :TX_DATASHARD TRACE: locks.cpp:194: Lock 281474976715660 marked broken at v{min} 2025-12-23T18:26:04.498141Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:2572: Waiting for PlanStep# 2501 from mediator time cast 2025-12-23T18:26:04.498800Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270977, Sender [8:26:2073], Recipient [8:885:2764]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 2501} 2025-12-23T18:26:04.498848Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2025-12-23T18:26:04.498892Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 2501 at tablet 72075186224037888 
2025-12-23T18:26:04.498950Z node 8 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:26:04.605534Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [8:1096:2909], Recipient [8:885:2764]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-12-23T18:26:04.605677Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:26:04.605741Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-12-23T18:26:04.605838Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T18:26:04.605879Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:26:04.605915Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:26:04.605947Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:26:04.605980Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2025-12-23T18:26:04.606015Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T18:26:04.606034Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:26:04.606050Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:26:04.606066Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:26:04.606153Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-12-23T18:26:04.606365Z node 8 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715660, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T18:26:04.606418Z node 8 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-12-23T18:26:04.606462Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[8:1096:2909], 0} after executionsCount# 1 2025-12-23T18:26:04.606509Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[8:1096:2909], 0} sends rowCount# 1, 
bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:26:04.606577Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[8:1096:2909], 0} finished in read 2025-12-23T18:26:04.606630Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T18:26:04.606649Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:26:04.606667Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:26:04.606686Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:26:04.606718Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T18:26:04.606733Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:26:04.606786Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2025-12-23T18:26:04.606835Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:26:04.606952Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:26:04.607745Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [8:1096:2909], Recipient [8:885:2764]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:26:04.607813Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } } |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_replication/unittest >> TTxDataShardBuildFulltextIndexScan::BuildWithRelevance [GOOD] >> TTxDataShardBuildIndexScan::BadRequest >> TSentinelUnstableTests::BSControllerCantChangeStatus >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections >> TListAllTopicsTests::ListLimitAndPaging [GOOD] >> DescribeSchemaSecretsService::GetDroppedValue [GOOD] >> DescribeSchemaSecretsService::GetInParallel >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-BasicExtractMembers-default.txt] >> KeyValueGRPCService::SimpleRenameUnexistedKeyV2 [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKeyV1 >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath [GOOD] >> QuoterWithKesusTest::HandlesNonExistentResource >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/list_topics/ut/unittest >> TListAllTopicsTests::ListLimitAndPaging 
[GOOD] Test command err: 2025-12-23T18:25:31.567803Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587121282321846793:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:25:31.568249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b42/r3tmp/tmpsdzho0/pdisk_1.dat 2025-12-23T18:25:31.702251Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:25:32.305811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:32.305902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:32.419349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:25:32.425883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:32.566901Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:32.567276Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587121282321846754:2081] 1766514331494874 != 1766514331494877 2025-12-23T18:25:32.571630Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12351, node 1 2025-12-23T18:25:32.810857Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:25:32.951321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005b42/r3tmp/yandex8pWgLV.tmp 2025-12-23T18:25:32.970797Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005b42/r3tmp/yandex8pWgLV.tmp 2025-12-23T18:25:32.970969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005b42/r3tmp/yandex8pWgLV.tmp 2025-12-23T18:25:32.971180Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:25:33.000837Z INFO: TTestServer started on Port 19276 GrpcPort 12351 TClient is connected to server localhost:19276 PQClient connected to localhost:12351 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514332506 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:25:33.682279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:25:33.722914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:25:33.762900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:25:33.776897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:25:36.562889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587121282321846793:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:25:36.562962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:25:38.119665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121312386618846:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:38.119875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:38.124409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121312386618881:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:38.124495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121312386618882:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:38.126990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:38.133839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:38.211168Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587121312386618886:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:25:38.274003Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587121312386618950:2655] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:25:38.611287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:38.622412Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587121312386618958:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:25:38.623195Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=ZDNmZGQ2MTMtMThiZGMzYmUtOTE4OWJjYTAtZGMzZjUyMGY=, ActorId: [1:7587121312386618841:2329], ActorState: ExecuteState, TraceId: 01kd67ba9ecr25gfza9zphg3d3, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:25:38.626072Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:25:38.666849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:38.831593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7587121316681586557:2836] === CheckClustersList. Ok 2025-12-23T18:25:45.659399Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:196: new Create topic request 2025-12-23T18:25:45.664567Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.c ... :06.450589Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72075186224037896][Partition][0][StateInit] SYNC INIT topic dir2/topic3 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:26:06.450606Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037896][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:26:06.450623Z node 3 :PERSQUEUE DEBUG: partition.cpp:4479: [72075186224037896][Partition][0][StateIdle] Process pending events. 
Count 1 2025-12-23T18:26:06.450639Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:06.450652Z node 3 :PERSQUEUE DEBUG: partition.cpp:2416: [72075186224037896][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2025-12-23T18:26:06.450677Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:26:06.450687Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:06.450698Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:26:06.450706Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:26:06.450720Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:06.450730Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:26:06.450760Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:06.450774Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:26:06.450783Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:26:06.450810Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72075186224037896][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:26:06.451030Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037896][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:26:06.451070Z node 3 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037896][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1766514366498, TxId 281474976710677 2025-12-23T18:26:06.451090Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:06.451098Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:26:06.451105Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:06.451134Z node 3 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037896][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2025-12-23T18:26:06.451215Z node 3 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037896][Partition][0][StateIdle] Schedule reply tx done 281474976710677 2025-12-23T18:26:06.451229Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:26:06.451237Z node 3 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037896][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:26:06.451248Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:26:06.451360Z node 3 :PERSQUEUE DEBUG: read.h:275: [72075186224037896][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:26:06.451799Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037896][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:26:06.451963Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037896][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:26:06.452011Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037896][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:26:06.452031Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:06.452041Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.452049Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:06.452059Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.452066Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:26:06.452080Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037896][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:26:06.452320Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72075186224037896] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles 
{ PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic3" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir2/topic3" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MonitoringProjectId: "" 2025-12-23T18:26:06.452382Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:26:06.452466Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:26:06.452806Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:26:06.517081Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:06.517108Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.517118Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:06.517130Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.517141Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:26:06.543766Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:06.543802Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.543814Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:06.543828Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.543842Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:26:06.550869Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:06.550901Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.550913Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:06.550927Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.550937Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:26:06.617438Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:06.617469Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.617481Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:06.617497Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.617509Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:26:06.644102Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:06.644131Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.644144Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:06.644162Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.644177Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:26:06.651264Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:06.651289Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.651303Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:06.651330Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:06.651343Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/public/list_topics/ut/unittest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount >> DataShardFollowers::FollowerStaleRo [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |90.2%| [TM] {RESULT} 
ydb/core/tx/datashard/ut_replication/unittest |90.2%| [TM] {RESULT} ydb/core/persqueue/public/list_topics/ut/unittest |90.2%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut >> TFetchRequestTests::EmptyTopic [GOOD] >> TFetchRequestTests::BadTopicName >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount >> TDataShardRSTest::TestCleanupInRS-UseSink [GOOD] >> TDataShardRSTest::TestDelayedRSAckForUnknownTx >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings >> test.py::test[solomon-BasicExtractMembers-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> QuoterWithKesusTest::HandlesNonExistentResource [GOOD] >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding >> KeyValueGRPCService::SimpleConcatUnexistedKeyV1 [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKeyV2 >> TDqSolomonWriteActorTest::TestWriteFormat [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding >> TTxDataShardBuildIndexScan::BadRequest [GOOD] >> TTxDataShardBuildIndexScan::RunScan >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding >> DescribeSchemaSecretsService::GetInParallel [GOOD] >> DescribeSchemaSecretsService::GetSameValueMultipleTimes >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery >> TMLPChangerTests::ConsumerNotExists [GOOD] >> TMLPChangerTests::PartitionNotExists >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery >> DataShardFollowers::FollowerRebootAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterSysCompaction >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus >> TIndexProcesorTests::TestCreateIndexProcessor >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> TMemoryController::MemTable [GOOD] >> TMemoryController::ResourceBroker >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery >> TDataShardRSTest::TestDelayedRSAckForUnknownTx [GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx >> KeyValueGRPCService::SimpleConcatUnexistedKeyV2 
[GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKeyV1 >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource [GOOD] >> QuoterWithKesusTest::GetsQuota >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery >> TIndexProcesorTests::TestCreateIndexProcessor [GOOD] >> TIndexProcesorTests::TestSingleCreateQueueEvent >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData >> TTxDataShardBuildIndexScan::RunScan [GOOD] >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob >> TIndexProcesorTests::TestSingleCreateQueueEvent [GOOD] >> TIndexProcesorTests::TestReindexSingleQueue >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] >> DescribeSchemaSecretsService::GetSameValueMultipleTimes [GOOD] >> DescribeSchemaSecretsService::FailWithoutGrants >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection >> TFetchRequestTests::BadTopicName [GOOD] >> TFetchRequestTests::CheckAccess >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> TIndexProcesorTests::TestReindexSingleQueue [GOOD] >> TIndexProcesorTests::TestDeletedQueueNotReindexed >> DataShardFollowers::FollowerAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterDataCompaction >> KeyValueGRPCService::SimpleCopyUnexistedKeyV1 [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKeyV2 >> QuoterWithKesusTest::GetsQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuota >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection >> TMemoryController::ResourceBroker [GOOD] >> TMemoryController::ResourceBroker_ConfigLimit >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionCommit >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection >> test.py::test[solomon-InvalidProject-] >> TIndexProcesorTests::TestDeletedQueueNotReindexed [GOOD] >> TIndexProcesorTests::TestManyMessages >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection [GOOD] >> 
TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection >> TMLPChangerTests::PartitionNotExists [GOOD] >> TMLPChangerTests::CommitTest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding >> KeyValueGRPCService::SimpleCopyUnexistedKeyV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadV1 >> QuoterWithKesusTest::GetsBigQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuotaWithDeadline >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings >> TBlobStorageGroupInfoBlobMapTest::BelongsToSubgroupBenchmark [GOOD] >> TBlobStorageGroupInfoBlobMapTest::BasicChecks >> DescribeSchemaSecretsService::FailWithoutGrants [GOOD] >> DescribeSchemaSecretsService::GroupGrants >> TIndexProcesorTests::TestManyMessages [GOOD] >> TIndexProcesorTests::TestOver1000Queues >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding >> DataShardFollowers::FollowerAfterDataCompaction [GOOD] >> DataShardFollowers::FollowerDuringSysPartSwitch >> TMemoryController::ResourceBroker_ConfigLimit [GOOD] >> TMemoryController::ResourceBroker_ConfigCS >> TDataShardRSTest::TestGenericReadSetDecisionCommit [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionAbort >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/groupinfo/ut/unittest >> TBlobStorageGroupInfoBlobMapTest::BasicChecks [GOOD] Test command err: None domains 1 new (ns): 411.1460757 None domains 1 old (ns): 223.0155133 None domains 9 new (ns): 191.7206489 None domains 9 old (ns): 101.2795729 Mirror3 domains 4 new (ns): 274.3331895 Mirror3 domains 4 old (ns): 110.4740559 Mirror3 domains 9 new (ns): 209.2585935 Mirror3 domains 9 old (ns): 122.0458815 4Plus2Block domains 8 new (ns): 202.644175 4Plus2Block domains 8 old (ns): 66.68722501 4Plus2Block domains 9 new (ns): 103.5437553 4Plus2Block domains 9 old (ns): 53.19942309 ErasureMirror3of4 domains 8 new (ns): 73.45220561 ErasureMirror3of4 domains 8 old (ns): 43.98412675 ErasureMirror3of4 domains 9 new (ns): 71.0799927 ErasureMirror3of4 domains 9 old (ns): 44.69985939 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/groupinfo/ut/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding |90.2%| [TA] $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus >> KeyValueGRPCService::SimpleWriteReadV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadV2 >> QuoterWithKesusTest::GetsBigQuotaWithDeadline [GOOD] >> QuoterWithKesusTest::FailsToGetBigQuota >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery >> TMemoryController::ResourceBroker_ConfigCS [GOOD] >> TMemoryController::GroupedMemoryLimiter_ConfigCS >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery >> DataShardFollowers::FollowerDuringSysPartSwitch [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData |90.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |90.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |90.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/groupinfo/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut >> DescribeSchemaSecretsService::GroupGrants [GOOD] >> DescribeSchemaSecretsService::BatchRequest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_rs/unittest >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:25:30.105436Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:30.347687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:30.370175Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:30.370574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:30.370648Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:30.739418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:30.739575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:30.891147Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514325160805 != 1766514325160809 2025-12-23T18:25:30.919407Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:30.979033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:31.137033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003c06/r3tmp/tmph64Wio/pdisk_1.dat 2025-12-23T18:25:31.532237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:31.547819Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:31.665756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:31.737975Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:902:2773] 2025-12-23T18:25:31.738220Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:25:31.838717Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:25:31.838863Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:25:31.840298Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:25:31.840367Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:25:31.840419Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:25:31.840816Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:25:31.841045Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:25:31.841106Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:941:2773] in generation 1 2025-12-23T18:25:31.841418Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:905:2775] 2025-12-23T18:25:31.841571Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:25:31.850436Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:911:2779] 2025-12-23T18:25:31.850608Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:25:31.861281Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:25:31.861398Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:25:31.862717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:25:31.862805Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:25:31.862867Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:25:31.863141Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:25:31.863422Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:25:31.863466Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:958:2775] in generation 1 2025-12-23T18:25:31.864440Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:25:31.864518Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:25:31.865583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-12-23T18:25:31.865638Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2025-12-23T18:25:31.865688Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2025-12-23T18:25:31.865987Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:25:31.866137Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:25:31.866178Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:961:2779] in generation 1 2025-12-23T18:25:31.866445Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:913:2781] 2025-12-23T18:25:31.866597Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:25:31.877432Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:25:31.877528Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:25:31.878832Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-12-23T18:25:31.878903Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: 
LoadLockChangeRecords at tablet: 72075186224037890 2025-12-23T18:25:31.878956Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2025-12-23T18:25:31.879230Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:25:31.879348Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:25:31.879404Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:967:2781] in generation 1 2025-12-23T18:25:31.890665Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:25:31.936346Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:25:31.936538Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:25:31.936651Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:972:2814] 2025-12-23T18:25:31.936688Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:25:31.936719Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:25:31.936753Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:25:31.937205Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:25:31.937245Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:25:31.937296Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:25:31.937363Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:973:2815] 2025-12-23T18:25:31.937394Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:25:31.937416Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:25:31.937436Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:25:31.937498Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:25:31.937533Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2025-12-23T18:25:31.937602Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:25:31.937642Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:974:2816] 2025-12-23T18:25:31.937660Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2025-12-23T18:25:31.937677Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-12-23T18:25:31.937696Z 
node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-12-23T18:25:31.937927Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:25:31.938020Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:25:31.938116Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:25:31.938140Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2025-12-23T18:25:31.938184Z node 1 :TX_DATASHARD ... tabaseId: /Root, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-12-23T18:26:28.428589Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:26:28.428615Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3002:281474976715664] at 72075186224037889 on unit CompleteWrite 2025-12-23T18:26:28.428648Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:26:28.428905Z node 6 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=6&id=N2IxMGQyODAtMzViMGQ5N2ItYzc3YmNhNTItYjMwZGVjNjk=, ActorId: [6:1133:2934], ActorState: ExecuteState, TraceId: 01kd67cvc3ff1tr11d0nmgmhcp, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/table-1`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } 2025-12-23T18:26:28.429433Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [6:1208:2987], Recipient [6:969:2824]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:26:28.429460Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:26:28.429486Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [6:1207:2986], serverId# [6:1208:2987], sessionId# [0:0:0] 2025-12-23T18:26:28.429580Z node 6 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [6:67:2114] Handle TEvExecuteKqpTransaction 2025-12-23T18:26:28.429605Z node 6 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [6:67:2114] TxId# 281474976715665 ProcessProposeKqpTransaction ... 
generic readset: Decision: DECISION_ABORT 2025-12-23T18:26:28.429706Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [6:884:2764], Recipient [6:969:2824]: {TEvReadSet step# 3002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-12-23T18:26:28.429729Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-12-23T18:26:28.429751Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715664 2025-12-23T18:26:28.429790Z node 6 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-12-23T18:26:28.429859Z node 6 :TX_DATASHARD TRACE: volatile_tx.cpp:884: Processed readset with decision 2 from 72075186224037888 to 72075186224037889 at tablet 72075186224037889 2025-12-23T18:26:28.429925Z node 6 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-12-23T18:26:28.430450Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [6:1191:2934], Recipient [6:884:2764]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2025-12-23T18:26:28.430487Z node 6 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2025-12-23T18:26:28.430615Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [6:884:2764], Recipient [6:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:26:28.430665Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:26:28.430761Z node 6 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2025-12-23T18:26:28.430909Z node 6 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2025-12-23T18:26:28.431002Z node 6 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715662, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2025-12-23T18:26:28.431078Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-12-23T18:26:28.431129Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:26:28.431168Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-12-23T18:26:28.431210Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:26:28.431240Z node 6 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:26:28.431281Z node 6 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3002/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3001/0 ImmediateWriteEdgeReplied# v3001/0 2025-12-23T18:26:28.431334Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2025-12-23T18:26:28.431374Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:26:28.431405Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:26:28.431426Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2025-12-23T18:26:28.431443Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2025-12-23T18:26:28.431461Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:26:28.431479Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2025-12-23T18:26:28.431503Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-12-23T18:26:28.431529Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-12-23T18:26:28.431575Z node 6 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2025-12-23T18:26:28.431652Z node 6 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2025-12-23T18:26:28.431697Z node 6 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:6] at 72075186224037888 2025-12-23T18:26:28.431744Z node 6 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2025-12-23T18:26:28.431802Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:26:28.431824Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-12-23T18:26:28.431866Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-12-23T18:26:28.431909Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:26:28.431936Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-12-23T18:26:28.431965Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-12-23T18:26:28.432001Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:26:28.432036Z node 6 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:26:28.432069Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:26:28.432085Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:26:28.432124Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2025-12-23T18:26:28.432168Z node 6 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-12-23T18:26:28.432202Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:26:28.432246Z node 6 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-12-23T18:26:28.432308Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:26:28.432623Z node 6 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715664; 2025-12-23T18:26:28.432786Z node 6 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [3002 : 281474976715664] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-12-23T18:26:28.432845Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:26:28.433024Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [6:969:2824], Recipient [6:884:2764]: {TEvReadSet step# 3002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-12-23T18:26:28.433053Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:26:28.433097Z node 6 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 2025-12-23T18:26:28.434088Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [6:69:2116], Recipient [6:884:2764]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715662 LockNode: 6 Status: STATUS_NOT_FOUND |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_rs/unittest |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |90.3%| [TM] {RESULT} ydb/core/tx/datashard/ut_rs/unittest |90.3%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob >> TTestYqlToMiniKQLCompile::CheckResolve [GOOD] >> TTestYqlToMiniKQLCompile::OnlyResult >> 
TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection >> TTestYqlToMiniKQLCompile::OnlyResult [GOOD] >> TTestYqlToMiniKQLCompile::EraseRow [GOOD] >> TTestYqlToMiniKQLCompile::UpdateRow >> TTestYqlToMiniKQLCompile::UpdateRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRange >> TTestYqlToMiniKQLCompile::SelectRange [GOOD] >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx [GOOD] >> TTestYqlToMiniKQLCompile::AcquireLocks >> TTestYqlToMiniKQLCompile::AcquireLocks [GOOD] >> TTestYqlToMiniKQLCompile::StaticMapTypeOf [GOOD] >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange [GOOD] >> TTestYqlToMiniKQLCompile::Extract [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> SequenceShardTests::Basics >> KeyValueGRPCService::SimpleWriteReadV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV1 >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] >> TTxDataShardFilterKMeansScan::BadRequest |90.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/minikql_compile/ut/unittest >> TTestYqlToMiniKQLCompile::Extract [GOOD] |90.3%| [TS] {BAZEL_UPLOAD} ydb/core/client/minikql_compile/ut/unittest >> QuoterWithKesusTest::FailsToGetBigQuota [GOOD] >> QuoterWithKesusTest::PrefetchCoefficient >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections >> SequenceShardTests::Basics [GOOD] >> SequenceShardTests::MarkedPipeRetries >> SequenceShardTests::MarkedPipeRetries [GOOD] >> SequenceShardTests::FreezeRestoreRedirect >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon [GOOD] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries >> SequenceShardTests::FreezeRestoreRedirect [GOOD] >> SequenceShardTests::NegativeIncrement >> TFetchRequestTests::CheckAccess [GOOD] >> TMemoryController::GroupedMemoryLimiter_ConfigCS [GOOD] >> TMemoryController::ColumnShardCaches_Config >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection >> TMLPChangerTests::CommitTest [GOOD] >> TMLPChangerTests::ReadAndReleaseTest >> SequenceShardTests::NegativeIncrement [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/ut/unittest >> SequenceShardTests::NegativeIncrement [GOOD] Test command err: 2025-12-23T18:26:32.037120Z node 1 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2025-12-23T18:26:32.037205Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-12-23T18:26:32.044624Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2025-12-23T18:26:32.047119Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-12-23T18:26:32.047162Z node 
1 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 2025-12-23T18:26:32.051739Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-12-23T18:26:32.051844Z node 1 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2025-12-23T18:26:32.084125Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-12-23T18:26:32.084434Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-12-23T18:26:32.084495Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:33: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SEQUENCE_ALREADY_EXISTS PathId# [OwnerId: 123, LocalPathId: 42] 2025-12-23T18:26:32.084540Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-12-23T18:26:32.084711Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } StartValue: 100001 Cache: 10 2025-12-23T18:26:32.084782Z node 1 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 100001 Cache# 10 Increment# 1 Cycle# false State# Active 2025-12-23T18:26:32.096107Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-12-23T18:26:32.096319Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-12-23T18:26:32.096392Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 1 AllocationCount# 1 AllocationIncrement# 1 2025-12-23T18:26:32.108093Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:32.108419Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-12-23T18:26:32.108503Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 2 AllocationCount# 10 AllocationIncrement# 1 2025-12-23T18:26:32.120010Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:32.120243Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-12-23T18:26:32.120310Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: 
[sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100001 AllocationCount# 10 AllocationIncrement# 1 2025-12-23T18:26:32.132001Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:32.132259Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 50 2025-12-23T18:26:32.132328Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100011 AllocationCount# 50 AllocationIncrement# 1 2025-12-23T18:26:32.143805Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:32.144089Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 99] Cache# 0 2025-12-23T18:26:32.144137Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:35: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 99] 2025-12-23T18:26:32.144198Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:32.144359Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-12-23T18:26:32.144422Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 12 AllocationCount# 9223372036854775796 AllocationIncrement# 1 2025-12-23T18:26:32.155865Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:32.156091Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-12-23T18:26:32.156122Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:72: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-12-23T18:26:32.156158Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:32.156309Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:20: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-12-23T18:26:32.156367Z node 1 :SEQUENCESHARD NOTICE: tx_drop_sequence.cpp:43: [sequenceshard 72057594037927937] TTxDropSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-12-23T18:26:32.167850Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:48: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-12-23T18:26:32.168111Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:20: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-12-23T18:26:32.168148Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:33: [sequenceshard 72057594037927937] TTxDropSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 
123, LocalPathId: 42] 2025-12-23T18:26:32.168190Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:48: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-12-23T18:26:32.176402Z node 1 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2025-12-23T18:26:32.176477Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-12-23T18:26:32.176766Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-12-23T18:26:32.177106Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2025-12-23T18:26:32.177239Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 2025-12-23T18:26:32.179769Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-12-23T18:26:32.179807Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:35: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2025-12-23T18:26:32.179851Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:32.180022Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-12-23T18:26:32.180075Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100061 AllocationCount# 10 AllocationIncrement# 1 2025-12-23T18:26:32.221911Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:32.222210Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } NextValue: 200000 NextUsed: true 2025-12-23T18:26:32.222286Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-12-23T18:26:32.233769Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-12-23T18:26:32.234069Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-12-23T18:26:32.234132Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200001 AllocationCount# 10 AllocationIncrement# 1 2025-12-23T18:26:32.245561Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:32.245912Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } Cache: 5 2025-12-23T18:26:32.245988Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] 
TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-12-23T18:26:32.267805Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-12-23T18:26:32.268075Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-12-23T18:26:32.268138Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200011 AllocationCount# 5 AllocationIncrement# 1 2025-12-23T18:26:32.279710Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard ... Id: 43] Cache# 0 2025-12-23T18:26:33.004553Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] AllocationStart# 11 AllocationCount# 100 AllocationIncrement# 1 2025-12-23T18:26:33.015928Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:33.016208Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:21: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-12-23T18:26:33.016242Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:66: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SEQUENCE_ALREADY_ACTIVE PathId# [OwnerId: 123, LocalPathId: 43] 2025-12-23T18:26:33.016279Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:103: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-12-23T18:26:33.016426Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-12-23T18:26:33.016480Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-12-23T18:26:33.028100Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-12-23T18:26:33.028391Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-12-23T18:26:33.028444Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-12-23T18:26:33.039798Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-12-23T18:26:33.039965Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-12-23T18:26:33.040009Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-12-23T18:26:33.051303Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: 
[sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-12-23T18:26:33.051524Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-12-23T18:26:33.051568Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:54: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 42] MovedTo# 12345 2025-12-23T18:26:33.051622Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:33.051797Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:20: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-12-23T18:26:33.051853Z node 3 :SEQUENCESHARD NOTICE: tx_freeze_sequence.cpp:68: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-12-23T18:26:33.063230Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:73: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-12-23T18:26:33.063550Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:21: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-12-23T18:26:33.063639Z node 3 :SEQUENCESHARD NOTICE: tx_restore_sequence.cpp:98: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-12-23T18:26:33.075036Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:103: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-12-23T18:26:33.075343Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-12-23T18:26:33.075418Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-12-23T18:26:33.086829Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-12-23T18:26:33.087075Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:20: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-12-23T18:26:33.087106Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:48: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 43] MovedTo# 54321 2025-12-23T18:26:33.087145Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:73: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-12-23T18:26:33.087298Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-12-23T18:26:33.087365Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 111 AllocationCount# 100 AllocationIncrement# 1 
2025-12-23T18:26:33.098657Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:33.501908Z node 4 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2025-12-23T18:26:33.501994Z node 4 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-12-23T18:26:33.509758Z node 4 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2025-12-23T18:26:33.512875Z node 4 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-12-23T18:26:33.512933Z node 4 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 2025-12-23T18:26:33.514013Z node 4 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cache: 10 Increment: -1 2025-12-23T18:26:33.514113Z node 4 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# -9223372036854775808 MaxValue# -1 StartValue# -1 Cache# 10 Increment# -1 Cycle# false State# Active 2025-12-23T18:26:33.547067Z node 4 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-12-23T18:26:33.547354Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-12-23T18:26:33.547443Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-12-23T18:26:33.559318Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:33.559618Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-12-23T18:26:33.559705Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-12-23T18:26:33.571460Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:33.571733Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-12-23T18:26:33.571818Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -21 AllocationCount# 9223372036854775788 AllocationIncrement# -1 2025-12-23T18:26:33.583605Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:33.583910Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 
42] Cache# 1 2025-12-23T18:26:33.583955Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:72: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-12-23T18:26:33.584005Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:33.584231Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cycle: true 2025-12-23T18:26:33.584300Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-12-23T18:26:33.596393Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-12-23T18:26:33.596853Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-12-23T18:26:33.596978Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-12-23T18:26:33.608771Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-12-23T18:26:33.609083Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-12-23T18:26:33.609166Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-12-23T18:26:33.621521Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete |90.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/sequenceshard/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/fetcher/ut/unittest >> TFetchRequestTests::CheckAccess [GOOD] Test command err: 2025-12-23T18:25:02.336490Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587121158968441805:2147];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:25:02.336521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003105/r3tmp/tmpXMVmQ9/pdisk_1.dat 2025-12-23T18:25:02.510052Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:25:03.048109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:03.048225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:03.052765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:25:03.102546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64158, node 1 2025-12-23T18:25:03.384532Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:25:03.385942Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:03.395358Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587121158968441695:2081] 1766514302285376 != 1766514302285379 2025-12-23T18:25:03.467016Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:03.815473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/003105/r3tmp/yandexSRdIG8.tmp 2025-12-23T18:25:03.815496Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/003105/r3tmp/yandexSRdIG8.tmp 2025-12-23T18:25:03.815640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/003105/r3tmp/yandexSRdIG8.tmp 2025-12-23T18:25:03.815741Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:25:03.898351Z INFO: TTestServer started on Port 22944 GrpcPort 64158 TClient is connected to server localhost:22944 PQClient connected to localhost:64158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514303358 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:25:04.585902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:25:04.599483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:25:04.615560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:25:04.625212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:25:04.870904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:25:07.336465Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587121158968441805:2147];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:25:07.336523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:25:08.298528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121184738246506:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:08.299003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:08.302868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121184738246525:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:08.302942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121184738246526:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:08.303293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:08.307986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:08.330370Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587121184738246529:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:25:08.427942Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587121184738246595:2655] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:25:08.809329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:08.847556Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587121184738246603:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:25:08.850177Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=ODY0NmMxYTAtODc0NGViYjEtZTI5MTliYWQtNGU2Njk0YzM=, ActorId: [1:7587121184738246495:2331], ActorState: ExecuteState, TraceId: 01kd67ad64a27esrqk0hkraa1d, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:25:08.852412Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:25:08.873924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:09.028360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7587121189033214188:2833] === CheckCluster ... 
e] Process user action and tx events 2025-12-23T18:26:32.841362Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:26:32.841384Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.841389Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.841397Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:32.841399Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:32.841414Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.841414Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.841426Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:26:32.841426Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2025-12-23T18:26:32.841469Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:26:32.841471Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:26:32.841476Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.841479Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.841482Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:32.841485Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:32.841490Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.841493Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.841495Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][2][StateIdle] Try persist 2025-12-23T18:26:32.841499Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][3][StateIdle] Try persist 2025-12-23T18:26:32.841774Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037897][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:26:32.841777Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037898][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:32.841783Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.841787Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.841788Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037898][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:32.841793Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037897][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:32.841795Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.841800Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037898][Partition][0][StateIdle] Try persist 2025-12-23T18:26:32.841801Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.841809Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037897][Partition][4][StateIdle] Try persist 2025-12-23T18:26:32.869596Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:26:32.933088Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037898][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:26:32.933088Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037897][Partition][4][StateIdle] No data for blobs compaction 2025-12-23T18:26:32.936643Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037895][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:26:32.937075Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037896][Partition][3][StateIdle] No data for blobs compaction 2025-12-23T18:26:32.939698Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][1][StateIdle] No data for blobs compaction 2025-12-23T18:26:32.939764Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:330: [[6:7587121520767660713:3138]] Handle TEvPersQueue::TEvHasDataInfoResponse EndOffset: 0 Cookie: 0 SizeLag: 0 WriteTimestampEstimateMS: 0 ReadingFinished: false 2025-12-23T18:26:32.939776Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:336: [[6:7587121520767660713:3138]] Partition 0 status is 2 2025-12-23T18:26:32.939793Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:274: [[6:7587121520767660713:3138]] Processing 0/1 2025-12-23T18:26:32.939802Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:282: [[6:7587121520767660713:3138]] Skip partition 0 because status is DataReceived 2025-12-23T18:26:32.939806Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:274: [[6:7587121520767660713:3138]] Processing 1/1 2025-12-23T18:26:32.939868Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:540: [[6:7587121520767660713:3138]] Reply to [6:7587121520767660699:3131]: PartResult { Topic: "/Root/topic1" Partition: 1 ReadResult { MaxOffset: 0 ErrorCode: READ_ERROR_TOO_BIG_OFFSET } } 2025-12-23T18:26:32.939992Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037894] server disconnected, pipe [6:7587121520767660724:3138] destroyed 2025-12-23T18:26:32.940135Z node 6 :PQ_FETCH_REQUEST INFO: fetch_request_actor.cpp:156: [[6:7587121542242497379:3212]] Fetch request actor boostrapped. 
Request is valid: 1 2025-12-23T18:26:32.940153Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:171: [[6:7587121542242497379:3212]] DescribeTopics 2025-12-23T18:26:32.940336Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:186: [[6:7587121542242497379:3212]] Handle NDescriber::TEvDescribeTopicsResponse 2025-12-23T18:26:32.940396Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:540: [[6:7587121542242497379:3212]] Reply to [6:7587121520767660699:3131]: 2025-12-23T18:26:32.941659Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:26:32.941671Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:26:32.941683Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.941685Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.941694Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:32.941694Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:32.941707Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.941707Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.941721Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2025-12-23T18:26:32.941721Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][3][StateIdle] Try persist 2025-12-23T18:26:32.941765Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:32.941765Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:26:32.941778Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.941778Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.941789Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:32.941790Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:32.941807Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.941811Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.941818Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][2][StateIdle] Try persist 2025-12-23T18:26:32.941835Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:26:32.942133Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037897][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:26:32.942140Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037898][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:26:32.942146Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.942148Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.942153Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037897][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:32.942154Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037898][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:26:32.942160Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.942161Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:26:32.942166Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037897][Partition][4][StateIdle] Try persist 2025-12-23T18:26:32.942167Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037898][Partition][0][StateIdle] Try persist |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/public/fetcher/ut/unittest >> DataShardFollowers::FollowerDuringDataPartSwitch [GOOD] >> DataShardFollowers::FollowerReadDuringSplit >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumnAliases-default.txt] >> TTxDataShardFilterKMeansScan::BadRequest [GOOD] >> TTxDataShardFilterKMeansScan::BuildToPosting >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> DescribeSchemaSecretsService::BatchRequest [GOOD] >> DescribeSchemaSecretsService::BigBatchRequest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding >> TIndexProcesorTests::TestOver1000Queues [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV2 >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings >> QuoterWithKesusTest::PrefetchCoefficient [GOOD] >> QuoterWithKesusTest::GetsQuotaAfterPause >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding >> test.py::test[solomon-LabelColumnAliases-default.txt] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/yc_search_ut/unittest >> 
TIndexProcesorTests::TestOver1000Queues [GOOD] Test command err: 2025-12-23T18:26:13.959994Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587121464122019535:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:26:13.960049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002f76/r3tmp/tmpF9KzPu/pdisk_1.dat 2025-12-23T18:26:14.105939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:26:14.106014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:26:14.107487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:26:14.143416Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:26:14.173317Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:26:14.175608Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587121464122019507:2081] 1766514373958631 != 1766514373958634 TServer::EnableGrpc on GrpcPort 2071, node 1 2025-12-23T18:26:14.204206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:26:14.204226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:26:14.204231Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:26:14.204313Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:26:14.380797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514374226 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:26:14.430539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... TClient is connected to server localhost:61557 waiting... 2025-12-23T18:26:14.965161Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:26:15.767746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:26:15.769484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) TClient is connected to server localhost:61557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766514374478 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514374226 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "SQS" PathId: 38 ... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:26:15.981586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:26:15.987214Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587121472711955217:2662] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) TClient is connected to server localhost:61557 waiting... 
2025-12-23T18:26:16.125005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:26:16.126055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===Execute query: UPSERT INTO `/Root/SQS/SingleCreateQueueEvent/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue1", 1, "myQueueCustomName", 1766514375999, "myFolder", "{\"k1\": \"v1\"}"); 2025-12-23T18:26:16.198689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121477006922679:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:16.198689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121477006922690:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:16.198793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:16.199032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121477006922694:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:16.199108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:16.201199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710667:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:26:16.208119Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587121477006922693:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710667 completed, doublechecking } 2025-12-23T18:26:16.263668Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587121477006922746:2813] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 52], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-12-23T18:26:15.999000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-12-23T18:26:15.999000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-12-23T18:26:16.822794Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} TClient is connected to server localhost:61557 2025-12-23T18:26:17.200654Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587121481301890238:2883] txid# 281474976710678, issues: { message: "Check failed: path: \'/Root/SQS/Root/SQS/SingleCreateQueueEvent\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS/Root/SQS/SingleCreateQueueEvent', ... der","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} TClient is connected to server localhost:61557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766514374478 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 4 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".metadata" PathId: 49 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710667 CreateStep: 1766514376249 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: ".sys" P... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:26:21.811746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710726:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:26:21.817416Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587121498481760412:3396] txid# 281474976710727, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) TClient is connected to server localhost:61557 waiting... 2025-12-23T18:26:22.013409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710729:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:26:22.015149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710730:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "existing1", "myQueueCustomName", 1766514380000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "existing2", "myQueueCustomName", 1766514380000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "existing3", "myQueueCustomName", 1766514380000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "deleting1", "myQueueCustomName", 1766514380000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "deleting2", "myQueueCustomName", 1766514380000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "deleting1", 0, "myQueueCustomName", 1766514381833, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "deleting2", 0, "myQueueCustomName", 1766514381833, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "creating1", 1, "myQueueCustomName", 1766514381833, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "creating2", 1, "myQueueCustomName", 1766514381833, "myFolder", "{\"k1\": \"v1\"}"); ===CheckEventsLine: 
{"resource_type":"message-queue","timestamp":"2025-12-23T18:26:21.833000Z","resource_id":"deleting2","name":"myQueueCustomName","service":"message-queue","deleted":"2025-12-23T18:26:21.833000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-12-23T18:26:21.833000Z","resource_id":"deleting1","name":"myQueueCustomName","service":"message-queue","deleted":"2025-12-23T18:26:21.833000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-12-23T18:26:21.833000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-12-23T18:26:21.833000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-12-23T18:26:20.000000Z","resource_id":"existing1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-12-23T18:26:23.256189Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-12-23T18:26:20.000000Z","resource_id":"existing2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-12-23T18:26:23.256248Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-12-23T18:26:21.833000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-12-23T18:26:23.256268Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-12-23T18:26:20.000000Z","resource_id":"existing3","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-12-23T18:26:23.256307Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: 
{"resource_type":"message-queue","timestamp":"2025-12-23T18:26:21.833000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-12-23T18:26:23.256326Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} TClient is connected to server localhost:61557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766514374478 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 16 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 16 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 5 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".metadata" PathId: 49 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710667 CreateStep: 1766514376249 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: ".sys" P... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:26:23.657834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710752:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:26:23.662848Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587121507071695561:3659] txid# 281474976710753, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) TClient is connected to server localhost:61557 waiting... 
2025-12-23T18:26:23.874244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710755:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:26:23.876759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710756:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===Started add queue batch 2025-12-23T18:26:29.109115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:26:29.109139Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/ymq/actor/yc_search_ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding >> TMemoryController::ColumnShardCaches_Config [GOOD] >> TMemTableMemoryConsumersCollection::Empty [GOOD] >> TMemTableMemoryConsumersCollection::Destruction [GOOD] >> TMemTableMemoryConsumersCollection::Register [GOOD] >> TMemTableMemoryConsumersCollection::Unregister [GOOD] >> TMemTableMemoryConsumersCollection::SetConsumption [GOOD] >> TMemTableMemoryConsumersCollection::CompactionComplete [GOOD] >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario >> DataShardFollowers::FollowerReadDuringSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/memory_controller/ut/unittest >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] Test command err: ResourceBrokerSelfConfig: LimitBytes: 0B ... 
waiting for SysViewsRoster update finished 2025-12-23T18:24:58.886792Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:235: Periodic memory stats: AnonRss: none CGroupLimit: none MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 60MiB ConsumersConsumption: 0B OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 6MiB Coefficient: 0.9999990463 2025-12-23T18:24:58.895413Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer MemTable state: Consumption: 0B Limit: 6MiB Min: 2MiB Max: 6MiB 2025-12-23T18:24:58.895541Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 12.5MiB Min: 12.5MiB Max: 12.5MiB 2025-12-23T18:24:58.895646Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 0B Limit: 40MiB 2025-12-23T18:24:58.895728Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:458: Apply ResourceBroker config: LimitBytes: 60MiB queue_cs_general: 7.5MiB queue_cs_indexation: 2.5MiB queue_cs_normalizer: 7.5MiB queue_cs_ttl: 2.5MiB queue_kqp_resource_manager: 40MiB 2025-12-23T18:24:58.896438Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:161: Bootstrapped with config HardLimitBytes: 209715200 2025-12-23T18:24:58.921099Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:24:58.927170Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 33554432 2025-12-23T18:25:01.747532Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer SharedCache [1:20:2067] registered 2025-12-23T18:25:01.748406Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1189: New config diff: Queues { Name: "queue_cs_general" Limit { Memory: 7864320 } } Queues { Name: "queue_cs_indexation" Limit { Memory: 2621440 } } Queues { Name: "queue_cs_normalizer" Limit { Memory: 7864320 } } Queues { Name: "queue_cs_ttl" Limit { Memory: 2621440 } } Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 41943040 } } ResourceLimit { Memory: 62914560 } 2025-12-23T18:25:01.749133Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 2621440 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 2621440 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 7864320 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 7864320 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: 
"queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 41943040 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 62914560 } 2025-12-23T18:25:01.750366Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1240: Configure result: Success: true 2025-12-23T18:25:01.750655Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:238: Register memory consumer 2025-12-23T18:25:01.750710Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2025-12-23T18:25:01.751105Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:25:01.771551Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesBlobCache [1:21:2068] registered 2025-12-23T18:25:01.772446Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesDataAccessorCache [1:22:2069] registered 2025-12-23T18:25:02.068513Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesColumnDataCache [1:23:2070] registered 2025-12-23T18:25:02.068706Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2025-12-23T18:25:02.068805Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesScanGroupedMemory [1:50:2097] registered 2025-12-23T18:25:02.070631Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesCompGroupedMemory [1:51:2098] registered 2025-12-23T18:25:02.070999Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesDeduplicationGroupedMemory [1:52:2099] registered 2025-12-23T18:25:02.071664Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:335: ResourceBroker configure result Success: true 2025-12-23T18:25:02.073072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:25:02.112495Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:25:02.112869Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:02.113120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:25:02.228368Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:428:2393] 1 registered 2025-12-23T18:25:02.242096Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:442:2395] 0 registered 2025-12-23T18:25:02.248245Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:442:2395] 2 registered 2025-12-23T18:25:02.248343Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:442:2395] 4 registered 2025-12-23T18:25:02.248688Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:442:2395] 5 registered 2025-12-23T18:25:02.250233Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:451:2397] 1 registered 2025-12-23T18:25:02.250589Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:451:2397] 2 registered 2025-12-23T18:25:02.358797Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 1 registered 2025-12-23T18:25:02.412717Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 2 registered 2025-12-23T18:25:02.413048Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 3 registered 2025-12-23T18:25:02.414084Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 4 registered 2025-12-23T18:25:02.414302Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 5 registered 2025-12-23T18:25:02.423373Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 6 registered 2025-12-23T18:25:02.423738Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 7 registered 2025-12-23T18:25:02.423805Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 8 registered 2025-12-23T18:25:02.433978Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 9 registered 2025-12-23T18:25:02.434294Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 10 registered 2025-12-23T18:25:02.464384Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 11 registered 2025-12-23T18:25:02.464769Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 12 registered 2025-12-23T18:25:02.464902Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:458:2399] 13 registered 2025-12-23T18:25:02.464976Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: Me ... 
in: 50MiB Max: 50MiB 2025-12-23T18:26:37.916656Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesBlobCache state: Consumption: 0B Limit: 6.25MiB Min: 6.25MiB Max: 6.25MiB 2025-12-23T18:26:37.916693Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDataAccessorCache state: Consumption: 0B Limit: 6.25MiB Min: 6.25MiB Max: 6.25MiB 2025-12-23T18:26:37.916726Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesColumnDataCache state: Consumption: 0B Limit: 6.25MiB Min: 6.25MiB Max: 6.25MiB 2025-12-23T18:26:37.916755Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDeduplicationGroupedMemory state: Consumption: 0B Limit: 50MiB Min: 50MiB Max: 50MiB 2025-12-23T18:26:37.916785Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 31.3MiB Min: 31.3MiB Max: 31.3MiB 2025-12-23T18:26:37.916824Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 100MiB 2025-12-23T18:26:37.916932Z node 12 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:248: Limit memory consumer with 236MiB 2025-12-23T18:26:37.916975Z node 12 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2025-12-23T18:26:38.107932Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:235: Periodic memory stats: AnonRss: none CGroupLimit: 1.95GiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 1.95GiB SoftLimit: 1.46GiB TargetUtilization: 1000MiB ActivitiesLimitBytes: 600MiB ConsumersConsumption: 172KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 1000MiB ResultingConsumersConsumption: 1000MiB Coefficient: 0.90625 2025-12-23T18:26:38.108548Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer SharedCache state: Consumption: 0B Limit: 944MiB Min: 400MiB Max: 1000MiB 2025-12-23T18:26:38.108615Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer MemTable state: Consumption: 172KiB Limit: 56.3MiB Min: 20MiB Max: 60MiB 2025-12-23T18:26:38.108646Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesScanGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2025-12-23T18:26:38.108677Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesCompGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2025-12-23T18:26:38.108704Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesBlobCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2025-12-23T18:26:38.108729Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDataAccessorCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2025-12-23T18:26:38.108756Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesColumnDataCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2025-12-23T18:26:38.108790Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDeduplicationGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2025-12-23T18:26:38.108819Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 
125MiB Min: 125MiB Max: 125MiB 2025-12-23T18:26:38.108871Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 400MiB 2025-12-23T18:26:38.108973Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:458: Apply ResourceBroker config: LimitBytes: 600MiB queue_cs_general: 75MiB queue_cs_indexation: 25MiB queue_cs_normalizer: 75MiB queue_cs_ttl: 25MiB queue_kqp_resource_manager: 400MiB 2025-12-23T18:26:38.109260Z node 12 :RESOURCE_BROKER INFO: resource_broker.cpp:1189: New config diff: Queues { Name: "queue_cs_general" Limit { Memory: 78643200 } } Queues { Name: "queue_cs_indexation" Limit { Memory: 26214400 } } Queues { Name: "queue_cs_normalizer" Limit { Memory: 78643200 } } Queues { Name: "queue_cs_ttl" Limit { Memory: 26214400 } } Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 419430400 } } ResourceLimit { Memory: 629145600 } 2025-12-23T18:26:38.110054Z node 12 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 26214400 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 26214400 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 78643200 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 78643200 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 419430400 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: 
"queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 629145600 } 2025-12-23T18:26:38.111710Z node 12 :RESOURCE_BROKER INFO: resource_broker.cpp:1240: Configure result: Success: true 2025-12-23T18:26:38.112081Z node 12 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:248: Limit memory consumer with 944MiB 2025-12-23T18:26:38.112140Z node 12 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2025-12-23T18:26:38.112465Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:335: ResourceBroker configure result Success: true 2025-12-23T18:26:38.248749Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:235: Periodic memory stats: AnonRss: none CGroupLimit: 1.95GiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 1.95GiB SoftLimit: 1.46GiB TargetUtilization: 1000MiB ActivitiesLimitBytes: 600MiB ConsumersConsumption: 172KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 1000MiB ResultingConsumersConsumption: 1000MiB Coefficient: 0.90625 2025-12-23T18:26:38.249211Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer SharedCache state: Consumption: 0B Limit: 944MiB Min: 400MiB Max: 1000MiB 2025-12-23T18:26:38.249257Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer MemTable state: Consumption: 172KiB Limit: 56.3MiB Min: 20MiB Max: 60MiB 2025-12-23T18:26:38.249284Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesScanGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2025-12-23T18:26:38.249316Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesCompGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2025-12-23T18:26:38.249344Z node 12 :MEMORY_CONTROLLER 
INFO: memory_controller.cpp:280: Consumer ColumnTablesBlobCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2025-12-23T18:26:38.249372Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDataAccessorCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2025-12-23T18:26:38.249399Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesColumnDataCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2025-12-23T18:26:38.249429Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDeduplicationGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2025-12-23T18:26:38.249460Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 125MiB Min: 125MiB Max: 125MiB 2025-12-23T18:26:38.249498Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 400MiB 2025-12-23T18:26:38.249607Z node 12 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:248: Limit memory consumer with 944MiB 2025-12-23T18:26:38.249647Z node 12 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/memory_controller/ut/unittest >> TTxDataShardFilterKMeansScan::BuildToPosting [GOOD] >> TTxDataShardFilterKMeansScan::BuildToBuild |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |90.3%| [TS] {RESULT} ydb/core/client/minikql_compile/ut/unittest |90.3%| [TS] {RESULT} ydb/core/tx/sequenceshard/ut/unittest |90.3%| [TM] {RESULT} ydb/core/persqueue/public/fetcher/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_followers/unittest >> DataShardFollowers::FollowerReadDuringSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:26:00.830725Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:26:00.915311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:26:00.923555Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:26:00.923786Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:26:00.923874Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:26:01.126768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:26:01.126883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:26:01.180783Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514358584703 != 1766514358584707 2025-12-23T18:26:01.192608Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:26:01.233938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:26:01.330071Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00310f/r3tmp/tmpuLe1rV/pdisk_1.dat 2025-12-23T18:26:01.582270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:26:01.595531Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:26:01.692685Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:26:01.692743Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:26:01.692852Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:26:01.794235Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 PartitionConfig { FollowerGroups { FollowerCount: 1 AllowLeaderPromotion: false } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:26:01.794334Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:26:01.794781Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] 
txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-12-23T18:26:01.794869Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:26:01.795096Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:26:01.795205Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:26:01.795298Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:26:01.795502Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:26:01.796768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:26:01.797629Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:26:01.797693Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:26:01.834854Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:26:01.835618Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:26:01.835816Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:26:01.836035Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:26:01.871904Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:26:01.872694Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:26:01.872830Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:26:01.874284Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:26:01.874351Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:26:01.874391Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:26:01.874671Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: 
TDataShard::TTxInit::Complete 2025-12-23T18:26:01.874784Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:26:01.874873Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:26:01.885588Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:26:01.910250Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:26:01.910425Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:26:01.910536Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:26:01.910562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:26:01.910602Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:26:01.910632Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:26:01.910843Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:26:01.910878Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:26:01.911084Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:26:01.911148Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:26:01.911497Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:26:01.911546Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:26:01.911597Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:26:01.911626Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:26:01.911652Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:26:01.911684Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:26:01.911720Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:26:01.911791Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:26:01.911818Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:26:01.911852Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], 
sessionId# [0:0:0] 2025-12-23T18:26:01.911909Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:26:01.911942Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:26:01.912033Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:26:01.912215Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:26:01.912262Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction ... }, { items { uint32_value: 2 } items { uint32_value: 22 } } ... reading from the right follower 2025-12-23T18:26:39.517845Z node 8 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [8:67:2114] Handle TEvExecuteKqpTransaction 2025-12-23T18:26:39.517912Z node 8 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [8:67:2114] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-12-23T18:26:39.519003Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269877761, Sender [8:1306:3060], Recipient [8:1279:3042]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:26:39.519078Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3303: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:26:39.519133Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at follower 1 tablet# 72075186224037890, clientId# [8:1304:3059], serverId# [8:1306:3060], sessionId# [0:0:0] 2025-12-23T18:26:39.520618Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553215, Sender [8:1310:3061], Recipient [8:1279:3042]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-12-23T18:26:39.520657Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3305: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2025-12-23T18:26:39.520773Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-12-23T18:26:39.520829Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:26:39.520948Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-12-23T18:26:39.521023Z node 8 :TX_DATASHARD DEBUG: datashard__init.cpp:837: Updating sys metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=6, epoch=1} 2025-12-23T18:26:39.521567Z node 8 :TX_DATASHARD DEBUG: datashard__init.cpp:854: Updating tables metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=4, epoch=1} 2025-12-23T18:26:39.521953Z node 8 :TX_DATASHARD DEBUG: datashard__init.cpp:925: Updating snapshots metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=0, epoch=1} 2025-12-23T18:26:39.522027Z node 8 
:TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037890 changed HEAD read to repeatable v2500/18446744073709551615 2025-12-23T18:26:39.522088Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2025-12-23T18:26:39.522162Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2025-12-23T18:26:39.522198Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2025-12-23T18:26:39.522239Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-12-23T18:26:39.522282Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2025-12-23T18:26:39.522325Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1] at 72075186224037890 2025-12-23T18:26:39.522367Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2025-12-23T18:26:39.522385Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-12-23T18:26:39.522402Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2025-12-23T18:26:39.522419Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-12-23T18:26:39.522515Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-12-23T18:26:39.522709Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Restart 2025-12-23T18:26:39.522734Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037890 2025-12-23T18:26:39.522822Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:26:39.522877Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} pin 0 (0 b) load 1 (65 b) 2025-12-23T18:26:39.522940Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 8388608b of static mem, Memory{8388608 dyn 0} 2025-12-23T18:26:39.523024Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} request page collection [72075186224037888:1:23:1:12288:190:0] pages [ 0 ] 2025-12-23T18:26:39.523086Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} postponed, loading 1 pages, 65 bytes, newly pinned 0 pages, 0 bytes 2025-12-23T18:26:39.523222Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} got result TEvResult{1 pages [72075186224037888:1:23:1:12288:190:0] ok OK}, type 1 
2025-12-23T18:26:39.523287Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} activated 2025-12-23T18:26:39.523391Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-12-23T18:26:39.523420Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-12-23T18:26:39.523487Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-12-23T18:26:39.523649Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037890 Complete read# {[8:1310:3061], 0} after executionsCount# 2 2025-12-23T18:26:39.523704Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037890 read iterator# {[8:1310:3061], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-12-23T18:26:39.523789Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2025-12-23T18:26:39.523814Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2025-12-23T18:26:39.523841Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2025-12-23T18:26:39.523861Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2025-12-23T18:26:39.523891Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2025-12-23T18:26:39.523908Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2025-12-23T18:26:39.523930Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037890 has finished 2025-12-23T18:26:39.523964Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-12-23T18:26:39.524033Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:26:39.524081Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 8388608b of static, Memory{0 dyn 0} 2025-12-23T18:26:39.524122Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-12-23T18:26:39.524215Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553217, Sender [8:1279:3042], Recipient [8:1279:3042]: NKikimr::TEvDataShard::TEvReadContinue 2025-12-23T18:26:39.524244Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3306: StateWorkAsFollower, processing event TEvDataShard::TEvReadContinue 2025-12-23T18:26:39.524320Z node 8 :TABLET_EXECUTOR DEBUG: 
Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} queued, type NKikimr::NDataShard::TDataShard::TTxReadContinue 2025-12-23T18:26:39.524380Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:26:39.524439Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3081: 72075186224037890 ReadContinue for iterator# {[8:1310:3061], 0}, firstUnprocessedQuery# 0 2025-12-23T18:26:39.524513Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3174: 72075186224037890 ReadContinue: iterator# {[8:1310:3061], 0}, FirstUnprocessedQuery# 0 2025-12-23T18:26:39.524612Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3338: 72075186224037890 readContinue iterator# {[8:1310:3061], 0} sends rowCount# 0, bytes# 0, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:26:39.524669Z node 8 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:3362: 72075186224037890 read iterator# {[8:1310:3061], 0} finished in ReadContinue 2025-12-23T18:26:39.524748Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:26:39.524804Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:26:39.525353Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553219, Sender [8:1310:3061], Recipient [8:1279:3042]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:26:39.525400Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3308: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-12-23T18:26:39.525460Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037890 ReadCancel: { ReadId: 0 } { items { uint32_value: 3 } items { uint32_value: 33 } } |90.3%| [TM] {RESULT} ydb/core/ymq/actor/yc_search_ut/unittest >> TStateStorageConfig::TestReplicaSelectionUniqueCombinations [GOOD] >> TStateStorageConfig::UniformityTest >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_followers/unittest |90.3%| [TM] {RESULT} ydb/core/memory_controller/ut/unittest |90.3%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV1 >> TGRpcRateLimiterTest::CreateResource >> QuoterWithKesusTest::GetsQuotaAfterPause [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas |90.3%| [TM] {RESULT} ydb/core/tx/datashard/ut_followers/unittest >> TDqSolomonWriteActorTest::TestWriteWithTimeseries [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints >> DescribeSchemaSecretsService::BigBatchRequest [GOOD] >> DescribeSchemaSecretsService::EmptyBatch >> TDqSolomonWriteActorTest::TestCheckpoints [GOOD] >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint >> TTxDataShardFilterKMeansScan::BuildToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BadRequest >> TGRpcRateLimiterTest::CreateResource [GOOD] >> TGRpcRateLimiterTest::UpdateResource >> test.py::test[solomon-Subquery-default.txt] [GOOD] >> test.py::test[solomon-UnknownSetting-] |90.3%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |90.3%| [LD] {RESULT} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV2 >> TBufferedWriter::Flush_AfterEAGAIN_ShouldRestartFromSavedPositionInBuffer_1 [GOOD] >> TBufferedWriter::Flush_AfterEAGAIN_ShouldRestartFromSavedPositionInBuffer_2 [GOOD] |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |90.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |90.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/raw_socket/ut/unittest >> TBufferedWriter::Flush_AfterEAGAIN_ShouldRestartFromSavedPositionInBuffer_2 [GOOD] |90.3%| [TS] {BAZEL_UPLOAD} ydb/core/raw_socket/ut/unittest >> TGRpcRateLimiterTest::UpdateResource [GOOD] >> TMLPChangerTests::ReadAndReleaseTest [GOOD] >> TGRpcRateLimiterTest::DropResource >> TMLPChangerTests::CapacityTest [GOOD] >> TMLPReaderTests::TopicNotExists >> QuoterWithKesusTest::GetsSeveralQuotas [GOOD] >> QuoterWithKesusTest::KesusRecreation |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |90.3%| [TS] {RESULT} ydb/core/raw_socket/ut/unittest |90.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |90.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |90.3%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> test.py::test[solomon-UnknownSetting-] [GOOD] >> TDescriberTests::TopicExists |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/kqprun |90.3%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun >> TTxDataShardLocalKMeansScan::BadRequest [GOOD] >> TTxDataShardLocalKMeansScan::TooManyClusters |90.3%| [TM] {asan, default-linux-x86_64, pic, release} ydb/library/yql/tests/sql/solomon/pytest >> test.py::test[solomon-UnknownSetting-] [GOOD] |90.3%| [TM] {BAZEL_UPLOAD} ydb/library/yql/tests/sql/solomon/pytest |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |90.3%| [TM] {RESULT} ydb/library/yql/tests/sql/solomon/pytest |90.3%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut >> 
DataShardCompaction::CompactBorrowed >> DataShardSnapshotIsolation::ReadWriteNoLocksNoConflict >> MediatorTest::BasicTimecastUpdates >> test_cte.py::TestCte::test_toplevel >> test_ctas.py::TestYtCtas::test_simple_ctast |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd >> test.py::test[solomon-BadDownsamplingAggregation-] |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |90.3%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd >> TGRpcRateLimiterTest::DropResource [GOOD] >> TGRpcRateLimiterTest::DescribeResource |90.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view >> DescribeSchemaSecretsService::EmptyBatch [GOOD] >> DescribeSchemaSecretsService::MixedGrantsInBatch >> GenericProviderLookupActor::Lookup [GOOD] >> GenericProviderLookupActor::LookupWithErrors >> GenericProviderLookupActor::LookupWithErrors [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/actors/ut/unittest >> GenericProviderLookupActor::LookupWithErrors [GOOD] Test command err: 2025-12-23 18:26:52.472 INFO ydb-library-yql-providers-generic-actors-ut(pid=1067816, tid=0x00007F34024EDFC0) [generic] yql_generic_lookup_actor.cpp:151: New generic proivider lookup source actor(ActorId=[1:4:2051]) for kind=YDB, endpoint=host: "some_host" port: 2135, database=some_db, use_tls=1, protocol=NATIVE, table=lookup_test 2025-12-23 18:26:52.485 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1067816, tid=0x00007F34024EDFC0) [generic] yql_generic_lookup_actor.cpp:299: ActorId=[1:4:2051] Got LookupRequest for 3 keys Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } 
value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } ... 
" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 ListSplits result. 
GRpcStatusCode: 0 2025-12-23 18:26:52.599 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1067816, tid=0x00007B33FD616640) [generic] yql_generic_lookup_actor.cpp:330: ActorId=[2:7587121630435230377:2051] Got TListSplitsStreamIterator 2025-12-23 18:26:52.599 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1067816, tid=0x00007B33FD616640) [generic] yql_generic_lookup_actor.cpp:198: ActorId=[2:7587121630435230377:2051] Got TListSplitsResponse from Connector Call ReadSplits. data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY GENERIC-CONNECTOR-MOCK Expected: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY GENERIC-CONNECTOR-MOCK Actual: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY ReadSplits result. GRpcStatusCode: 0 2025-12-23 18:26:52.599 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1067816, tid=0x00007B33FD616640) [generic] yql_generic_lookup_actor.cpp:231: ActorId=[2:7587121630435230377:2051] Got ReadSplitsStreamIterator from Connector 2025-12-23 18:26:52.600 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1067816, tid=0x00007B33FD616640) [generic] yql_generic_lookup_actor.cpp:352: ActorId=[2:7587121630435230377:2051] Got DataChunk 2025-12-23 18:26:52.600 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1067816, tid=0x00007B33FD616640) [generic] yql_generic_lookup_actor.cpp:363: ActorId=[2:7587121630435230377:2051] Got EOF 2025-12-23 18:26:52.601 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1067816, tid=0x00007B33FD616640) [generic] yql_generic_lookup_actor.cpp:413: Sending lookup results for 3 keys |90.3%| [TS] {BAZEL_UPLOAD} ydb/library/yql/providers/generic/actors/ut/unittest >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V1 |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |90.3%| [TS] {RESULT} ydb/library/yql/providers/generic/actors/ut/unittest |90.3%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut >> MediatorTest::BasicTimecastUpdates [GOOD] |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |90.3%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/config/ut/ydb-services-config-ut |90.3%| [LD] 
{RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut >> MediatorTest::MultipleTablets >> QuoterWithKesusTest::KesusRecreation [GOOD] >> QuoterWithKesusTest::AllocationStatistics ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/providers/solomon/actors/ut/unittest >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] Test command err: 2025-12-23T18:26:01.559173Z node 1 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2025-12-23T18:26:01.559691Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-12-23T18:26:01.559812Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-12-23T18:26:01.559932Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-12-23T18:26:01.559947Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:01.563697Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Tue, 23 Dec 2025 18:26:01 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1} 2025-12-23T18:26:01.563817Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:11.768224Z node 2 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2025-12-23T18:26:11.773717Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-12-23T18:26:11.783836Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-12-23T18:26:11.792543Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-12-23T18:26:11.801621Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-12-23T18:26:11.810098Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-12-23T18:26:11.818682Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-12-23T18:26:11.830178Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-12-23T18:26:11.839153Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-12-23T18:26:11.843914Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 54513 bytes of data to buffer 2025-12-23T18:26:11.844296Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. 
Sent 1000 metrics with size of 107903 bytes to solomon 2025-12-23T18:26:11.844624Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-12-23T18:26:11.844878Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-12-23T18:26:11.844902Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-12-23T18:26:11.916872Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Tue, 23 Dec 2025 18:26:11 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2025-12-23T18:26:11.917222Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-12-23T18:26:11.917244Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-12-23T18:26:11.955126Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Tue, 23 Dec 2025 18:26:11 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2025-12-23T18:26:11.955512Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-12-23T18:26:11.955551Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-12-23T18:26:11.995706Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Tue, 23 Dec 2025 18:26:11 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2025-12-23T18:26:11.996010Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-12-23T18:26:11.996029Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-12-23T18:26:12.072365Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Tue, 23 Dec 2025 18:26:12 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2025-12-23T18:26:12.072664Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-12-23T18:26:12.072688Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-12-23T18:26:12.102300Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. 
Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Tue, 23 Dec 2025 18:26:12 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2025-12-23T18:26:12.102481Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 54513 bytes to solomon 2025-12-23T18:26:12.102494Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2025-12-23T18:26:12.163835Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Tue, 23 Dec 2025 18:26:12 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2025-12-23T18:26:12.163948Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:12.182715Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Tue, 23 Dec 2025 18:26:12 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 500} 2025-12-23T18:26:12.182861Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:12.243509Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Tue, 23 Dec 2025 18:26:12 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2025-12-23T18:26:12.243640Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:22.497467Z node 3 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2025-12-23T18:26:22.501997Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-12-23T18:26:22.513217Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 79903 bytes of data to buffer 2025-12-23T18:26:22.523443Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2025-12-23T18:26:22.533850Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2025-12-23T18:26:22.544536Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2025-12-23T18:26:22.555368Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2025-12-23T18:26:22.565604Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2025-12-23T18:26:22.575772Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2025-12-23T18:26:22.581020Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. 
Push 40513 bytes of data to buffer 2025-12-23T18:26:22.581398Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 79903 bytes to solomon 2025-12-23T18:26:22.581693Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2025-12-23T18:26:22.582041Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2025-12-23T18:26:22.582063Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-12-23T18:26:22.629884Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Tue, 23 Dec 2025 18:26:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2025-12-23T18:26:22.630201Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2025-12-23T18:26:22.630238Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-12-23T18:26:22.683334Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Tue, 23 Dec 2025 18:26:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2025-12-23T18:26:22.683620Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2025-12-23T18:26:22.683637Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-12-23T18:26:22.727555Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Tue, 23 Dec 2025 18:26:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2025-12-23T18:26:22.727822Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2025-12-23T18:26:22.727862Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-12-23T18:26:22.774345Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Tue, 23 Dec 2025 18:26:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2025-12-23T18:26:22.774638Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2025-12-23T18:26:22.774656Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-12-23T18:26:22.821529Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. 
Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Tue, 23 Dec 2025 18:26:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2025-12-23T18:26:22.821718Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 40513 bytes to solomon 2025-12-23T18:26:22.821736Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2025-12-23T18:26:22.869417Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Tue, 23 Dec 2025 18:26:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2025-12-23T18:26:22.869513Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:22.919967Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Tue, 23 Dec 2025 18:26:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2025-12-23T18:26:22.920065Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:22.944090Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 25 Date: Tue, 23 Dec 2025 18:26:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 500} 2025-12-23T18:26:22.944165Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:33.498579Z node 4 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2025-12-23T18:26:33.498895Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 10 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-12-23T18:26:33.499254Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 579 bytes of data to buffer 2025-12-23T18:26:33.499381Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 10 metrics with size of 579 bytes to solomon 2025-12-23T18:26:33.499405Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:33.502266Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 27 Date: Tue, 23 Dec 2025 18:26:33 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 10} 2025-12-23T18:26:33.502358Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:43.666153Z node 5 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2025-12-23T18:26:43.667729Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 2400 items to send. Checkpoint: 1. 
Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-12-23T18:26:43.678725Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-12-23T18:26:43.688585Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-12-23T18:26:43.692125Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 43613 bytes of data to buffer 2025-12-23T18:26:43.692385Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-12-23T18:26:43.692608Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-12-23T18:26:43.692677Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 400 metrics with size of 43613 bytes to solomon 2025-12-23T18:26:43.692690Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-12-23T18:26:43.708218Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Tue, 23 Dec 2025 18:26:43 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 400} 2025-12-23T18:26:43.708334Z node 5 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:373: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 2 2025-12-23T18:26:43.778161Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Tue, 23 Dec 2025 18:26:43 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2025-12-23T18:26:43.778270Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: CheckpointInProgress Empty buffer 2025-12-23T18:26:43.807683Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Tue, 23 Dec 2025 18:26:43 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2025-12-23T18:26:43.807812Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:44.196405Z node 6 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2025-12-23T18:26:44.196571Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-12-23T18:26:44.196664Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-12-23T18:26:44.196835Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-12-23T18:26:44.196853Z node 6 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:373: TxId: TxId-42, Solomon sink. Process checkpoint. 
Inflight before checkpoint: 1 2025-12-23T18:26:44.204659Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Tue, 23 Dec 2025 18:26:44 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1} 2025-12-23T18:26:44.204767Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:44.204901Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-12-23T18:26:44.204985Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-12-23T18:26:44.205042Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-12-23T18:26:44.205054Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-12-23T18:26:44.206713Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Tue, 23 Dec 2025 18:26:44 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1} 2025-12-23T18:26:44.206785Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer |90.3%| [TM] {BAZEL_UPLOAD} ydb/library/yql/providers/solomon/actors/ut/unittest >> TTxDataShardLocalKMeansScan::TooManyClusters [GOOD] >> TTxDataShardLocalKMeansScan::MainToPosting >> DataShardSnapshotIsolation::ReadWriteNoLocksNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnUncommittedWrite >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types1-all_types1-index1] >> test_select.py::TestDML::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> test_select.py::TestDML::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_select.py::TestDML::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] >> test_select.py::TestDML::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_disk.py::TestSafeDiskBreak::test_erase_method |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |90.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> overlapping_portions.py::TestOverlappingPortions::test >> test_cte.py::TestCte::test_toplevel [GOOD] |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |90.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] |90.3%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.3%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> TMLPReaderTests::TopicNotExists [GOOD] >> TMLPReaderTests::TopicWithoutConsumer >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/plan2svg/py3test >> test_cte.py::TestCte::test_toplevel [GOOD] |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |90.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/plan2svg/py3test |90.3%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |90.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> TGRpcRateLimiterTest::DescribeResource [GOOD] >> TGRpcRateLimiterTest::ListResources >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] >> TDescriberTests::TopicExists [GOOD] >> TDescriberTests::TopicNotExists |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.3%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> MediatorTest::MultipleTablets [GOOD] >> test_ctas.py::TestYtCtas::test_simple_ctast [GOOD] >> test_yt_reading.py::TestYtReading::test_partitioned_reading >> MediatorTest::TabletAckBeforePlanComplete |90.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |90.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution |90.3%| [TM] {RESULT} ydb/library/yql/providers/solomon/actors/ut/unittest |90.3%| [TM] {RESULT} ydb/tests/functional/kqp/plan2svg/py3test |90.3%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnUncommittedWrite [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitWithEffects |90.4%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V2 |90.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> DataShardCompaction::CompactBorrowed [GOOD] >> DataShardCompaction::CompactBorrowedTxStatus |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |90.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used 
deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. |90.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/kqprun/tests/py3test |90.4%| [TM] {RESULT} ydb/tests/tools/kqprun/tests/py3test |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> listing_batching.py::TestListingBatching::test_listing_batching_solomon |90.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> QuoterWithKesusTest::AllocationStatistics [GOOD] >> QuoterWithKesusTest::UpdatesCountersForParentResources >> DescribeSchemaSecretsService::MixedGrantsInBatch [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> DescribeSchemaSecretsService::SchemeCacheRetryErrors >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_selectors >> data_paging.py::TestDataPaging::test_data_paging_solomon >> MediatorTest::TabletAckBeforePlanComplete [GOOD] >> basic_reading.py::TestBasicReading::test_basic_reading_solomon >> TTxDataShardLocalKMeansScan::MainToPosting [GOOD] >> TTxDataShardLocalKMeansScan::MainToPostingWithOverlap >> MediatorTest::TabletAckWhenDead |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> TGRpcRateLimiterTest::ListResources [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi >> listing_paging.py::TestListingPaging::test_listing_paging_solomon >> YdbSdkSessions::MultiThreadSync >> test_yt_reading.py::TestYtReading::test_partitioned_reading [GOOD] >> test_yt_reading.py::TestYtReading::test_block_reading >> TDescriberTests::TopicNotExists [GOOD] >> TDescriberTests::TopicNotTopic >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::SessionsServerLimit [SKIPPED] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |90.4%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |90.4%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] >> YdbSdkSessionsPool1Session::FailTest/0 >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitWithEffects [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitAfterAnotherCommit >> YdbSdkSessionsPool::StressTestSync/0 >> YdbSdkSessionsPool1Session::RunSmallPlan/0 |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |90.4%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut >> YdbSdkSessionsPool1Session::GetSession/0 >> YdbSdkSessionsPool::WaitQueue/0 >> YdbSdkSessionsPool::StressTestSync/1 >> YdbSdkSessionsPool::StressTestAsync/0 >> YdbSdkSessionsPool::PeriodicTask/0 >> YdbSdkSessionsPool1Session::CustomPlan/0 >> TMLPReaderTests::TopicWithoutConsumer [GOOD] >> TMLPReaderTests::EmptyTopic >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |90.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V1 |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |90.4%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> TTxDataShardLocalKMeansScan::MainToPostingWithOverlap [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuild |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimit [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:548: Enable after accepting a pull request with merging configs |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] |90.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest |90.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |90.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats >> MediatorTest::TabletAckWhenDead [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> QuoterWithKesusTest::UpdatesCountersForParentResources [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt >> MediatorTest::PlanStepAckToReconnectedMediator |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] |90.4%| [TM] {BAZEL_UPLOAD} 
ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitAfterAnotherCommit [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertTwiceThenCommit |90.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |90.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |90.4%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/py3test >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] |90.4%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/yt/kqp_yt_import/py3test |90.4%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_import/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] >> YdbSdkSessionsPool::StressTestSync/0 [GOOD] >> YdbSdkSessionsPool::StressTestSync/1 [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |90.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery >> basic_reading.py::TestBasicReading::test_basic_reading_solomon [GOOD] >> basic_reading.py::TestBasicReading::test_basic_reading_monitoring >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] >> data_paging.py::TestDataPaging::test_data_paging_solomon [GOOD] >> data_paging.py::TestDataPaging::test_listing_paging_monitoring >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> MediatorTest::PlanStepAckToReconnectedMediator [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> MediatorTest::WatcherReconnect >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V1 [GOOD] >> 
KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V2 >> TDescriberTests::TopicNotTopic [GOOD] >> TDescriberTests::CDC >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] >> data_paging.py::TestDataPaging::test_listing_paging_monitoring [GOOD] >> basic_reading.py::TestBasicReading::test_basic_reading_monitoring [GOOD] >> DescribeSchemaSecretsService::SchemeCacheRetryErrors [GOOD] >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors >> test_select.py::TestDML::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> TMLPReaderTests::EmptyTopic [GOOD] >> TMLPReaderTests::TopicWithData >> DataShardSnapshotIsolation::ReadWriteUpsertTwiceThenCommit [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertAgainOnCommitNoConflict >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] |90.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter >> TTxDataShardLocalKMeansScan::MainToBuild [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuildWithOverlap |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/0 [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/1 [GOOD] |90.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |90.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/ut/ydb-core-control-ut |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |90.4%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt [GOOD] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |90.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_compaction/unittest >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:26:53.042243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:26:53.157984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:26:53.175725Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:26:53.176081Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:26:53.176154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:26:53.484125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:26:53.484275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:26:53.600306Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514410674018 != 1766514410674022 2025-12-23T18:26:53.612936Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:26:53.664111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:26:53.759381Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003107/r3tmp/tmpYJm8cM/pdisk_1.dat 2025-12-23T18:26:54.159134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:26:54.176016Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:26:54.291781Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:26:54.291847Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:26:54.291964Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:26:54.459474Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:26:54.459575Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:26:54.460276Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:26:54.460375Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:26:54.460790Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:26:54.460921Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:26:54.461072Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:26:54.461407Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:26:54.463125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:26:54.464368Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:26:54.464441Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:26:54.497793Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:26:54.498875Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:26:54.499176Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:26:54.499489Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:26:54.548083Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:26:54.548917Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:26:54.549049Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:26:54.550885Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:26:54.550990Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:26:54.551051Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:26:54.551436Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:26:54.551634Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:26:54.551718Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:26:54.562669Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:26:54.618617Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:26:54.618851Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:26:54.618983Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:26:54.619027Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:26:54.619063Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:26:54.619112Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:26:54.619364Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:26:54.619413Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:26:54.619694Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:26:54.619783Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:26:54.620190Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:26:54.620238Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:26:54.620281Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:26:54.620318Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:26:54.620351Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:26:54.620382Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:26:54.620465Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:26:54.620563Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:26:54.620601Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:26:54.620649Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:26:54.620785Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:26:54.620821Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:26:54.620926Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:26:54.621181Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:26:54.621234Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2 ... 1474976715661] at 72075186224037892 is DelayComplete 2025-12-23T18:27:34.201005Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037892 executing on unit CompleteOperation 2025-12-23T18:27:34.201041Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037892 to execution unit CompletedOperations 2025-12-23T18:27:34.201072Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037892 on unit CompletedOperations 2025-12-23T18:27:34.201110Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037892 is Executed 2025-12-23T18:27:34.201137Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037892 executing on unit CompletedOperations 2025-12-23T18:27:34.201167Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [72500:281474976715661] at 72075186224037892 has finished 2025-12-23T18:27:34.201206Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:27:34.201240Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037892 2025-12-23T18:27:34.201276Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-12-23T18:27:34.201309Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037892 2025-12-23T18:27:34.212403Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2025-12-23T18:27:34.212486Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2025-12-23T18:27:34.212528Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [72500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-12-23T18:27:34.212596Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [72500 : 281474976715661] from 72075186224037892 at tablet 72075186224037892 send result to client [2:1646:3439], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:27:34.212650Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-12-23T18:27:34.212860Z node 2 :TX_PROXY DEBUG: datareq.cpp:2286: Actor# [2:1646:3439] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 
2025-12-23T18:27:34.212918Z node 2 :TX_PROXY DEBUG: datareq.cpp:2968: Send stream clearance, shard: 72075186224037890, txid: 281474976715661, cleared: 1 2025-12-23T18:27:34.213072Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [2:1646:3439], Recipient [2:982:2832]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-12-23T18:27:34.213122Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-12-23T18:27:34.213223Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:982:2832], Recipient [2:982:2832]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:27:34.213258Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:27:34.213333Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-12-23T18:27:34.213377Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:27:34.213423Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [72500:281474976715661] at 72075186224037890 for WaitForStreamClearance 2025-12-23T18:27:34.213459Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit WaitForStreamClearance 2025-12-23T18:27:34.213501Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [72500:281474976715661] at 72075186224037890 2025-12-23T18:27:34.213548Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Executed 2025-12-23T18:27:34.213613Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit WaitForStreamClearance 2025-12-23T18:27:34.213662Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037890 to execution unit ReadTableScan 2025-12-23T18:27:34.213697Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-12-23T18:27:34.213988Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Continue 2025-12-23T18:27:34.214028Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:27:34.214063Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037890 2025-12-23T18:27:34.214100Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-12-23T18:27:34.214135Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037890 2025-12-23T18:27:34.214706Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1678:3468], Recipient [2:982:2832]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-12-23T18:27:34.218871Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event 
TEvPrivate::TEvRegisterScanActor 2025-12-23T18:27:34.219280Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-12-23T18:27:34.219442Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-12-23T18:27:34.280309Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2025-12-23T18:27:34.280371Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037890 2025-12-23T18:27:34.280623Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:982:2832], Recipient [2:982:2832]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:27:34.280658Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:27:34.280709Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-12-23T18:27:34.280741Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:27:34.280776Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [72500:281474976715661] at 72075186224037890 for ReadTableScan 2025-12-23T18:27:34.280808Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-12-23T18:27:34.280850Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [72500:281474976715661] at 72075186224037890 error: , IsFatalError: 0 2025-12-23T18:27:34.280897Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Executed 2025-12-23T18:27:34.280932Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit ReadTableScan 2025-12-23T18:27:34.280964Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037890 to execution unit CompleteOperation 2025-12-23T18:27:34.280986Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-12-23T18:27:34.281166Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is DelayComplete 2025-12-23T18:27:34.281202Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit CompleteOperation 2025-12-23T18:27:34.281240Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037890 to execution unit CompletedOperations 2025-12-23T18:27:34.281272Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit CompletedOperations 2025-12-23T18:27:34.281313Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Executed 2025-12-23T18:27:34.281339Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 
72075186224037890 executing on unit CompletedOperations 2025-12-23T18:27:34.281367Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [72500:281474976715661] at 72075186224037890 has finished 2025-12-23T18:27:34.281402Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:27:34.281433Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2025-12-23T18:27:34.281465Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-12-23T18:27:34.281496Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2025-12-23T18:27:34.292496Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:27:34.292595Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:27:34.292639Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [72500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-12-23T18:27:34.292707Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [72500 : 281474976715661] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1646:3439], exec latency: 1 ms, propose latency: 1 ms 2025-12-23T18:27:34.292761Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:27:34.292965Z node 2 :TX_PROXY DEBUG: datareq.cpp:2286: Actor# [2:1646:3439] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037890 marker# P12 2025-12-23T18:27:34.293067Z node 2 :TX_PROXY INFO: datareq.cpp:834: Actor# [2:1646:3439] txid# 281474976715661 RESPONSE Status# ExecComplete prepare time: 0.000500s execute time: 0.001500s total time: 0.002000s marker# P13 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_compaction/unittest >> MediatorTest::WatcherReconnect [GOOD] >> MediatorTest::MultipleSteps >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-BasicExtractMembers-default.txt] >> listing_batching.py::TestListingBatching::test_listing_batching_solomon [GOOD] >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring >> YdbSdkSessionsPool::PeriodicTask/0 [GOOD] >> YdbSdkSessionsPool::PeriodicTask/1 >> DataShardSnapshotIsolation::ReadWriteUpsertAgainOnCommitNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertAgainTwiceOnCommitNoConflict >> 
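
The stderr block above walks a read-table operation through the datashard's execution units (WaitForStreamClearance, ReadTableScan, CompleteOperation, CompletedOperations); each unit reports a status such as Executed, Continue, or DelayComplete, which decides whether the plan advances, parks until an event arrives, or defers the client reply. The following is only an illustrative sketch of that unit-pipeline pattern, with hypothetical names and simplified dispatch; it is not the actual datashard_pipeline.cpp code.

// Toy execution-unit pipeline in the spirit of the trace above.
// Unit names are taken from the log; the dispatch logic is an assumption.
#include <iostream>
#include <string>
#include <vector>

enum class EExecutionStatus { Executed, Continue, DelayComplete };

struct TExecutionUnit {
    std::string Name;
    EExecutionStatus Status;  // status this unit reports for the operation
};

int main() {
    // Order mirrors the trace: wait for stream clearance, scan, then complete.
    std::vector<TExecutionUnit> plan = {
        {"WaitForStreamClearance", EExecutionStatus::Executed},
        {"ReadTableScan",          EExecutionStatus::Executed},
        {"CompleteOperation",      EExecutionStatus::DelayComplete},
        {"CompletedOperations",    EExecutionStatus::Executed},
    };

    bool delayedReply = false;
    for (const auto& unit : plan) {
        std::cout << "Trying to execute on unit " << unit.Name << "\n";
        switch (unit.Status) {
            case EExecutionStatus::Executed:
                std::cout << "  Executed -> advance execution plan\n";
                break;
            case EExecutionStatus::Continue:
                // The real pipeline re-queues the operation and waits for an
                // event (e.g. stream clearance or scan completion).
                std::cout << "  Continue -> operation parked, waiting for event\n";
                return 0;
            case EExecutionStatus::DelayComplete:
                // The reply is sent only after the local transaction commits.
                std::cout << "  DelayComplete -> reply deferred until commit\n";
                delayedReply = true;
                break;
        }
    }
    std::cout << "Execution plan has finished"
              << (delayedReply ? " (reply sent on commit)" : "") << "\n";
    return 0;
}

In the trace, the DelayComplete path corresponds to the "Complete execution ... on unit CompleteOperation" lines that appear only after TTxProgressTransaction::Complete, when the shard reports "send result to client" with the measured latencies.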
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> MediatorTest::MultipleSteps [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V2 [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuildWithOverlap [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV1 >> TTxDataShardLocalKMeansScan::BuildToPosting |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> MediatorTest::WatchesBeforeFirstStep >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.4%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test.py::test[solomon-BasicExtractMembers-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> TDescriberTests::CDC [GOOD] >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertAgainTwiceOnCommitNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileConflict >> MediatorTest::WatchesBeforeFirstStep [GOOD] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/federated_query/ut_service/fast/unittest >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors [GOOD] Test command err: Trying to start YDB, gRPC: 6032, MsgBus: 10113 2025-12-23T18:25:44.690826Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587121335512517187:2148];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:25:44.691284Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033f4/r3tmp/tmpcOCuwT/pdisk_1.dat 2025-12-23T18:25:45.253779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-12-23T18:25:45.253883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:45.286602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:45.428898Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:25:45.563280Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6032, node 1 2025-12-23T18:25:45.641399Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:25:45.818817Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:45.880912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:25:45.880943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:25:45.880961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:25:45.881067Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10113 TClient is connected to server localhost:10113 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514345540 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:25:46.492210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:25:46.509425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:25:46.619356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:25:46.752647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:25:46.807172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:25:48.295769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121352692388123:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:48.295852Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:48.296042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121352692388133:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:48.296082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:48.534998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:48.558393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:48.581291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:48.604484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:48.628566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:48.653859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:48.680060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:48.709263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:48.764759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121352692388998:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:48.764825Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:48.765173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121352692389004:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:48.765189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121352692389003:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:48.765241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:48.768664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:48.780559Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587121352692389007:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Trans ... 23T18:27:34.211597Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:27:34.211630Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:27:34.211646Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:27:34.211997Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:27:34.314091Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:27:34.538909Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3271 TClient is connected to server localhost:3271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514454019 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:27:35.674998Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:27:35.682761Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:27:35.692308Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:27:35.833938Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:27:36.159323Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:27:36.297386Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:27:38.543104Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7587121805615373027:2261];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:27:38.543210Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:27:41.788965Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7587121839975113072:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:27:41.789135Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:27:41.789960Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7587121839975113082:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:27:41.790056Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:27:41.949580Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:27:42.029316Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:27:42.109960Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:27:42.195377Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:27:42.265153Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:27:42.352437Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:27:42.441278Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:27:42.553674Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:27:42.771296Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7587121844270081253:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:27:42.771525Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:27:42.772455Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7587121844270081260:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:27:42.772545Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7587121844270081261:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:27:42.772618Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:27:42.782173Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:27:42.815529Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7587121844270081264:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:27:42.883308Z node 14 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [14:7587121844270081315:3794] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:27:46.599943Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/federated_query/ut_service/fast/unittest |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> MediatorTest::RebootTargetTablets >> TMLPReaderTests::TopicWithData [GOOD] |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |90.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |90.4%| [TM] {RESULT} ydb/core/tx/datashard/ut_compaction/unittest >> TMLPReaderTests::TopicWithManyIterationsData |90.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |90.4%| [TM] {RESULT} ydb/core/kqp/federated_query/ut_service/fast/unittest |90.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring [GOOD] >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> data_paging.py::TestDataPaging::test_listing_paging_monitoring [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> basic_reading.py::TestBasicReading::test_basic_reading_monitoring [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi |90.5%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] |90.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete |90.4%| [LD] {RESULT} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> 
test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] |90.5%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] Test command err: 2025-12-23T18:25:49.884438Z node 1 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-12-23T18:25:49.884565Z node 1 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2025-12-23T18:25:49.885743Z node 1 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2025-12-23T18:25:49.885806Z node 1 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2025-12-23T18:25:49.902886Z node 2 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-12-23T18:25:49.902960Z node 2 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2025-12-23T18:25:49.903173Z node 2 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2025-12-23T18:25:49.903226Z node 2 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:824: [/Path/KesusName]: Disconnected from tablet 2025-12-23T18:25:49.903277Z node 2 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2025-12-23T18:25:49.903445Z node 2 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2025-12-23T18:25:49.919635Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-12-23T18:25:49.919739Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2025-12-23T18:25:49.919894Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2025-12-23T18:25:49.919920Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2025-12-23T18:25:49.920095Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2025-12-23T18:25:49.920149Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2025-12-23T18:25:49.920162Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2025-12-23T18:25:49.920282Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2025-12-23T18:25:49.920307Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2025-12-23T18:25:49.920318Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2025-12-23T18:25:49.920390Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2025-12-23T18:25:49.920411Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. 
Status: ERROR 2025-12-23T18:25:49.920440Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2025-12-23T18:25:49.920517Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2025-12-23T18:25:49.920538Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2025-12-23T18:25:49.920548Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2025-12-23T18:25:49.920629Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2025-12-23T18:25:49.920678Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:824: [/Path/KesusName]: Disconnected from tablet 2025-12-23T18:25:49.920698Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2025-12-23T18:25:49.920856Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2025-12-23T18:25:49.937043Z node 4 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-12-23T18:25:49.937119Z node 4 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2025-12-23T18:25:49.937278Z node 4 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "/resource" 2025-12-23T18:25:49.937307Z node 4 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:493: [/Path/KesusName]: Resource "/resource" has incorrect name. Maybe this was some error on client side. 2025-12-23T18:25:49.937331Z node 4 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:356: [/Path/KesusName]: ProxySession("/resource", Error: GenericError) 2025-12-23T18:25:49.937387Z node 4 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2025-12-23T18:25:49.937457Z node 4 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "resource//resource" 2025-12-23T18:25:49.937483Z node 4 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:493: [/Path/KesusName]: Resource "resource//resource" has incorrect name. Maybe this was some error on client side. 2025-12-23T18:25:49.937514Z node 4 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:356: [/Path/KesusName]: ProxySession("resource//resource", Error: GenericError) 2025-12-23T18:25:49.943059Z node 5 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-12-23T18:25:49.943126Z node 5 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2025-12-23T18:25:49.943371Z node 5 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res" 2025-12-23T18:25:49.943534Z node 5 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2025-12-23T18:25:49.948535Z node 5 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-12-23T18:25:49.948596Z node 5 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res" 2025-12-23T18:25:49.948628Z node 5 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res", 42) 2025-12-23T18:25:49.948680Z node 5 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-12-23T18:25:49.954375Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-12-23T18:25:49.954488Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2025-12-23T18:25:49.954582Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res0" 2025-12-23T18:25:49.954680Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2025-12-23T18:25:49.954908Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-12-23T18:25:49.954949Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res0" 2025-12-23T18:25:49.954991Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res0", 42) 2025-12-23T18:25:49.955041Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }]) 2025-12-23T18:25:49.955165Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res1" 2025-12-23T18:25:49.955256Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:517: [/Path/KesusName]: Subscribe on resource "res1" 2025-12-23T18:25:49.955403Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-12-23T18:25:49.955427Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res1" 2025-12-23T18:25:49.955452Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res1", 43) 2025-12-23T18:25:49.955484Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-12-23T18:25:49.955555Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res2" 2025-12-23T18:25:49.955608Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:517: [/Path/KesusName]: 
Subscribe on resource "res2" 2025-12-23T18:25:49.955746Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-12-23T18:25:49.955769Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res2" 2025-12-23T18:25:49.955790Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res2", 44) 2025-12-23T18:25:49.955834Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res2", Normal, {0: Front(1, 2)} }]) 2025-12-23T18:25:49.955953Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/Path/KesusName]: ProxyStats([{"res1", Consumed: 0, Queue: 5}]) 2025-12-23T18:25:49.955979Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/Path/KesusName]: Set info for resource "res1": { Available: 1, QueueWeight: 5 } 2025-12-23T18:25:49.955999Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:662: [/Path/KesusName]: Activate session to "res1". Connected: 1 2025-12-23T18:25:49.956475Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:585: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 43 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 25769805828 } }) 2025-12-23T18:25:49.956514Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-12-23T18:25:49.956697Z node 6 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:824: [/Path/KesusName]: Disconnected from tablet 2025-12-23T18:25:49.956717Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2025-12-23T18:25:49.956794Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:640: [/Path/KesusName]: Mark "res1" for offline allocation. Connected: 0, SessionIsActive: 1, AverageDuration: 0.100000s, AverageAmount: 0.5 2025-12-23T18:25:49.956829Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:614: [/Path/KesusName]: Schedule offline allocation in 0.000000s: [{ "res1", 0.5 }] 2025-12-23T18:25:49.956947Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2025-12-23T18:25:49.957128Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: ... 224037888] Send TEvResourcesAllocated to [50:7587121824708547267:2290]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-12-23T18:27:38.704290Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-12-23T18:27:38.704343Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-12-23T18:27:38.704390Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-12-23T18:27:38.704552Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-12-23T18:27:38.773735Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-12-23T18:27:38.773787Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-12-23T18:27:38.862000Z 2025-12-23T18:27:38.773813Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2025-12-23T18:27:38.774209Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-12-23T18:27:38.774248Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -1.000100898, QueueWeight: 5 } 2025-12-23T18:27:38.774300Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-12-23T18:27:38.775388Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-12-23T18:27:38.801434Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7587121824708547267:2290]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-12-23T18:27:38.801988Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-12-23T18:27:38.802060Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-12-23T18:27:38.802118Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-12-23T18:27:38.802667Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-12-23T18:27:38.870883Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-12-23T18:27:38.870938Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2025-12-23T18:27:38.962000Z 2025-12-23T18:27:38.870964Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2025-12-23T18:27:38.871158Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-12-23T18:27:38.871192Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -0.0001008983696, QueueWeight: 5 } 2025-12-23T18:27:38.871234Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-12-23T18:27:38.871304Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-12-23T18:27:38.900587Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7587121824708547267:2290]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-12-23T18:27:38.907007Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-12-23T18:27:38.907063Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-12-23T18:27:38.907128Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(0.9998991016, 2)} }]) 2025-12-23T18:27:38.909981Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-12-23T18:27:38.961980Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7587121824708546749:2387] 2025-12-23T18:27:38.962027Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7587121824708546749:2387] 2025-12-23T18:27:38.966869Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0.9998991016. FreeBalance: 0.9998991016 2025-12-23T18:27:38.966909Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-12-23T18:27:39.062000Z 2025-12-23T18:27:38.966934Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2025-12-23T18:27:38.966999Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:275: Charge "Resource" for 5. Balance: 0.9998991016. FreeBalance: 0.9998991016. TicksToFullfill: 5.000504543. DurationToFullfillInUs: 500050.4543. TimeToFullfill: 2025-12-23T18:27:38.459241Z. Now: 2025-12-23T18:27:38.966835Z. 
LastAllocated: 2025-12-23T18:27:37.959191Z 2025-12-23T18:27:38.970913Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 5, Queue: 0}]) 2025-12-23T18:27:38.970962Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -4.000100898, QueueWeight: 0 } 2025-12-23T18:27:38.971012Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-12-23T18:27:38.971272Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-12-23T18:27:39.007953Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7587121824708547267:2290]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-12-23T18:27:39.011319Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-12-23T18:27:39.011381Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-12-23T18:27:39.011436Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-12-23T18:27:39.011687Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-12-23T18:27:39.062504Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-12-23T18:27:39.963574Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7587121824708546749:2387] 2025-12-23T18:27:39.963618Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7587121824708546749:2387] 2025-12-23T18:27:40.964331Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7587121824708546749:2387] 2025-12-23T18:27:40.964376Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7587121824708546749:2387] 2025-12-23T18:27:41.750891Z node 50 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[50:7587121820413578829:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:27:41.751006Z node 50 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:27:41.761669Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037968897] send [50:7587121820413578833:2111] 2025-12-23T18:27:41.761706Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037968897] push event to server [50:7587121820413578833:2111] 2025-12-23T18:27:41.960332Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7587121824708546749:2387] 2025-12-23T18:27:41.960380Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7587121824708546749:2387] 2025-12-23T18:27:42.206457Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037968897] send [50:7587121820413578833:2111] 
2025-12-23T18:27:42.206496Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037968897] push event to server [50:7587121820413578833:2111] 2025-12-23T18:27:42.959819Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7587121824708546749:2387] 2025-12-23T18:27:42.959861Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7587121824708546749:2387] 2025-12-23T18:27:43.969376Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7587121824708546749:2387] 2025-12-23T18:27:43.969423Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7587121824708546749:2387] 2025-12-23T18:27:44.972941Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7587121824708546749:2387] 2025-12-23T18:27:44.972982Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7587121824708546749:2387] 2025-12-23T18:27:45.959866Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7587121824708546749:2387] 2025-12-23T18:27:45.959904Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7587121824708546749:2387] 2025-12-23T18:27:46.666461Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [50:7587121820413578792:2094] 2025-12-23T18:27:46.666490Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [50:7587121820413578792:2094] 2025-12-23T18:27:46.973024Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7587121824708546749:2387] 2025-12-23T18:27:46.973065Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7587121824708546749:2387] 2025-12-23T18:27:47.789536Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037968897] send [50:7587121820413578833:2111] 2025-12-23T18:27:47.789583Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037968897] push event to server [50:7587121820413578833:2111] 2025-12-23T18:27:47.961538Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7587121824708546749:2387] 2025-12-23T18:27:47.961585Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7587121824708546749:2387] |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/quoter/ut/unittest |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |90.5%| [TM] {RESULT} ydb/core/quoter/ut/unittest |90.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> TTxDataShardLocalKMeansScan::BuildToPosting [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild |90.5%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV2 >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] |90.5%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/oom/py3test >> Donor::CheckOnlineReadRequestToDonor >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> MediatorTest::RebootTargetTablets [GOOD] >> Donor::MultipleEvicts >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileNoConflict >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> MediatorTest::ResendSubset >> Donor::CheckOnlineReadRequestToDonor [GOOD] |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.5%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::CheckOnlineReadRequestToDonor [GOOD] Test command err: RandomSeed# 17281306771620565583 2025-12-23T18:27:57.109324Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:27:57.111746Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1953143580279760253] 2025-12-23T18:27:57.136697Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:0:0:0:2097152:1] 2025-12-23T18:27:57.136950Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 1 PartsResurrected# 1 |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_all_types-pk_types3-all_types3-index3---] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |90.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> Donor::SlayAfterWiping |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |90.5%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut >> Donor::MultipleEvicts [GOOD] >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts [GOOD] Test command err: RandomSeed# 11875092265761548978 0 donors: 2025-12-23T18:27:59.045247Z 26 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:27:59.046646Z 26 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11059674971574333810] 2025-12-23T18:27:59.102252Z 26 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 2025-12-23T18:27:59.257595Z 24 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:27:59.266344Z 24 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11059674971574333810] 2025-12-23T18:27:59.294656Z 24 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2025-12-23T18:27:59.407942Z 26 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:27:59.409245Z 26 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 
11059674971574333810] 2025-12-23T18:27:59.428735Z 26 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 2025-12-23T18:27:59.529501Z 24 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:27:59.530864Z 24 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11059674971574333810] 2025-12-23T18:27:59.545601Z 24 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2025-12-23T18:27:59.657804Z 26 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:27:59.663248Z 26 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11059674971574333810] 2025-12-23T18:27:59.681889Z 26 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 2025-12-23T18:27:59.825730Z 24 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:27:59.828949Z 24 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11059674971574333810] 2025-12-23T18:27:59.874303Z 24 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2025-12-23T18:27:59.995553Z 26 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:27:59.996826Z 26 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11059674971574333810] 2025-12-23T18:28:00.013459Z 26 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 2025-12-23T18:28:00.115058Z 24 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:28:00.116439Z 24 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11059674971574333810] 2025-12-23T18:28:00.139280Z 24 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2025-12-23T18:28:00.274395Z 26 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:28:00.279782Z 26 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: 
[Decision# LostData Guid# 11059674971574333810] 2025-12-23T18:28:00.298659Z 26 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> MediatorTest::ResendSubset [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_selectors [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_program ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpointing/ut/unittest >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks [GOOD] Test command err: 2025-12-23T18:25:50.707315Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2025-12-23T18:25:50.707523Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2025-12-23T18:25:50.707572Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-12-23T18:25:50.707910Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-12-23T18:25:50.707946Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2025-12-23T18:25:50.707979Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-12-23T18:25:50.708060Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-12-23T18:25:50.713682Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-12-23T18:25:50.713742Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-12-23T18:25:50.713791Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-12-23T18:25:50.724416Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-12-23T18:25:50.724479Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-12-23T18:25:50.724531Z node 1 
:STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-12-23T18:25:50.724678Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-12-23T18:25:50.724710Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-12-23T18:25:50.724753Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-12-23T18:25:50.724803Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-12-23T18:25:50.724839Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-12-23T18:25:50.724874Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-12-23T18:25:50.724900Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-12-23T18:25:50.724951Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-12-23T18:25:50.724980Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-12-23T18:25:50.725078Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2025-12-23T18:25:50.725103Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [1:6:2053], need 1 more acks 2025-12-23T18:25:50.725205Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2025-12-23T18:25:50.725253Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [1:8:2055], need 0 more acks 2025-12-23T18:25:50.725281Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-12-23T18:25:50.725323Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2025-12-23T18:25:50.725348Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:1] Checkpoint completed 2025-12-23T18:25:50.725373Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:649: [my-graph-id.42] Got TEvRunGraph 2025-12-23T18:25:50.807971Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 
2025-12-23T18:25:50.808080Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2025-12-23T18:25:50.808106Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-12-23T18:25:50.808328Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-12-23T18:25:50.808354Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2025-12-23T18:25:50.808382Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-12-23T18:25:50.808424Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-12-23T18:25:50.808549Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-12-23T18:25:50.808574Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-12-23T18:25:50.808610Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-12-23T18:25:50.808713Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-12-23T18:25:50.808745Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-12-23T18:25:50.808785Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-12-23T18:25:50.808886Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-12-23T18:25:50.808914Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-12-23T18:25:50.808952Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-12-23T18:25:50.808976Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-12-23T18:25:50.809003Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-12-23T18:25:50.809028Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-12-23T18:25:50.809048Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 
'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-12-23T18:25:50.809094Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-12-23T18:25:50.809118Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-12-23T18:25:50.809215Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2025-12-23T18:25:50.809241Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [2:6:2053], need 1 more acks 2025-12-23T18:25:50.809281Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2025-12-23T18:25:50.809307Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [2:8:2055], need 0 more acks 2025-12-23T18:25:50.809325Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-12-23T18:25:50.809369Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2025-12-23T18:25:50.809393Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:1] Checkpoint completed 2025-12-23T18:25:50.809420Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:649: [my-graph-id.42] Got TEvRunGraph 2025-12-23T18:25:50.887507Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2025-12-23T18:25:50.887634Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2025-12-23T18:25:50.887675Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Wa ... 
-12-23T18:25:50.972377Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:3] Task state saved, need 1 more acks 2025-12-23T18:25:50.972403Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-12-23T18:25:50.972437Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:3] Task state saved, need 0 more acks 2025-12-23T18:25:50.972458Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-12-23T18:25:50.972495Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:3] Got TEvSetCheckpointPendingCommitStatusResponse 2025-12-23T18:25:50.972516Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:3] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-12-23T18:25:50.972593Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 1 2025-12-23T18:25:50.972630Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:3] State committed [4:6:2053], need 1 more acks 2025-12-23T18:25:50.972660Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 3 2025-12-23T18:25:50.972685Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:3] State committed [4:8:2055], need 0 more acks 2025-12-23T18:25:50.972704Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-12-23T18:25:50.972737Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:3] Got TEvCompleteCheckpointResponse 2025-12-23T18:25:50.972770Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:3] Checkpoint completed 2025-12-23T18:25:50.972809Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:373: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-12-23T18:25:50.972834Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:4] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-12-23T18:25:50.972871Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:4] Got TEvCreateCheckpointResponse 2025-12-23T18:25:50.972894Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:4] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2025-12-23T18:25:50.972953Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-12-23T18:25:50.972976Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:4] Task 
state saved, need 2 more acks 2025-12-23T18:25:50.973008Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-12-23T18:25:50.973033Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:4] Task state saved, need 1 more acks 2025-12-23T18:25:50.973058Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-12-23T18:25:50.973081Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:4] Task state saved, need 0 more acks 2025-12-23T18:25:50.973120Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-12-23T18:25:50.973158Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:4] Got TEvSetCheckpointPendingCommitStatusResponse 2025-12-23T18:25:50.973194Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:4] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-12-23T18:25:50.973273Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 1 2025-12-23T18:25:50.973302Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:4] State committed [4:6:2053], need 1 more acks 2025-12-23T18:25:50.973327Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 3 2025-12-23T18:25:50.973350Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:4] State committed [4:8:2055], need 0 more acks 2025-12-23T18:25:50.973370Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-12-23T18:25:50.973403Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:4] Got TEvCompleteCheckpointResponse 2025-12-23T18:25:50.973424Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:4] Checkpoint completed 2025-12-23T18:25:51.037952Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2025-12-23T18:25:51.038078Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2025-12-23T18:25:51.038113Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-12-23T18:25:51.038397Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-12-23T18:25:51.038429Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2025-12-23T18:25:51.038457Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-12-23T18:25:51.038523Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-12-23T18:25:51.038680Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-12-23T18:25:51.038712Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-12-23T18:25:51.038758Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-12-23T18:25:51.038884Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-12-23T18:25:51.038915Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-12-23T18:25:51.038949Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-12-23T18:25:51.039118Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-12-23T18:25:51.039159Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-12-23T18:25:51.039205Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-12-23T18:25:51.039253Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:479: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-12-23T18:25:51.039297Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-12-23T18:25:51.039329Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:479: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-12-23T18:25:51.039358Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:484: [my-graph-id.42] [42:1] Got all acks for aborted checkpoint, aborting in storage Waiting for TEvAbortCheckpointRequest (storage) 2025-12-23T18:25:51.039416Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:590: [my-graph-id.42] [42:1] Got TEvAbortCheckpointResponse 2025-12-23T18:25:51.039446Z node 5 :STREAMS_CHECKPOINT_COORDINATOR WARN: checkpoint_coordinator.cpp:596: [my-graph-id.42] [42:1] Checkpoint aborted 2025-12-23T18:25:51.039481Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:373: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-12-23T18:25:51.039529Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:2] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-12-23T18:25:51.039596Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:2] Got TEvCreateCheckpointResponse 2025-12-23T18:25:51.039624Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:2] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2025-12-23T18:25:51.107423Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2025-12-23T18:25:51.107519Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 0, ActorsToNotify count: 1, ActorsToWaitFor count: 2 2025-12-23T18:25:51.107557Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:107: [my-graph-id.42] No ingress tasks, coordinator was disabled 2025-12-23T18:25:51.107583Z node 6 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors |90.5%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/checkpointing/ut/unittest >> MediatorTest::ResendNotSubset >> Donor::ConsistentWritesWhenSwitchingToDonorMode |90.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/describer/ut/unittest >> TDescriberTests::CDC [GOOD] 2025-12-23 18:27:47,728 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-12-23 18:27:48,014 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 60 secs timeout. 
Process tree before termination: pid rss ref pdirt 1066980 59.0M 59.2M 33.1M test_tool run_ut @/home/runner/.ya/build/build_root/8ptz/0033e5/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/test_tool.args 1067387 1.4G 1.4G 896M └─ ydb-core-persqueue-public-describer-ut --trace-path-append /home/runner/.ya/build/build_root/8ptz/0033e5/ydb/core/persqueue/public/describer/ut/test-results/unittest/yt 1088454 1.4G 1.4G 896M └─ ydb-core-persqueue-public-describer-ut --trace-path-append /home/runner/.ya/build/build_root/8ptz/0033e5/ydb/core/persqueue/public/describer/ut/test-results/unittest Test command err: 2025-12-23T18:26:49.462598Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587121618076903716:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:26:49.462919Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033e5/r3tmp/tmpyo4IKd/pdisk_1.dat 2025-12-23T18:26:49.513573Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:26:49.688164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:26:49.688322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:26:49.691748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:26:49.744643Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:26:49.796491Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587121618076903686:2081] 1766514409460805 != 1766514409460808 2025-12-23T18:26:49.798353Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5082, node 1 2025-12-23T18:26:49.858181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0033e5/r3tmp/yandexma1Rwa.tmp 2025-12-23T18:26:49.858217Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0033e5/r3tmp/yandexma1Rwa.tmp 2025-12-23T18:26:49.858394Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0033e5/r3tmp/yandexma1Rwa.tmp 2025-12-23T18:26:49.858564Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:26:49.899445Z INFO: TTestServer started on Port 1524 GrpcPort 5082 2025-12-23T18:26:50.029940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1524 PQClient connected to localhost:5082 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514409856 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:26:50.131640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:26:50.150203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:26:50.471957Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:26:51.724425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121626666839315:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:51.724484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121626666839323:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:51.724531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:51.724968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121626666839330:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:51.725014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:51.728862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:26:51.740109Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587121626666839329:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:26:51.964495Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587121626666839396:2644] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:26:52.013936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:26:52.088954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:26:52.142204Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587121626666839405:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:26:52.142634Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YjFhMjgxODktOGZjNGM2YzItZGI3NjU5OWQtYzQ4NjE4Mjg=, ActorId: [1:7587121626666839311:2328], ActorState: ExecuteState, TraceId: 01kd67dj6a9mhjgmtze5s5w02h, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:26:52.145023Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:26:52.192556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7587121630961806992:2824] 2025-12-23T18:26:54.466863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587121618076903716:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:26:54.466948Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-12-23T18:26:58.871918Z :TopicExists INFO: TTopicSdkTestSetup started 2025-12-23T18:26:58.905331Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-12-23T18:26:58.956231Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7587121656731611008:2932] connected; active server actors: 1 2025-12-23T18:26:58.956447Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added ... 
h Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 16 IsSync: true Partial: 0 } 2025-12-23T18:27:45.743927Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [4:7587121805003915778:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Versions PathId: Partial: 0 } 2025-12-23T18:27:45.743978Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [4:7587121805003915778:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Versions PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [4:7587121826478753280:2744] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1766514458429 PathId: [OwnerId: 72057594046644480, LocalPathId: 48] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2025-12-23T18:27:45.744037Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587121805003915778:2119], cacheItem# { Subscriber: { Subscriber: [4:7587121826478753280:2744] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1766514458429 PathId: [OwnerId: 72057594046644480, LocalPathId: 48] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/PQ/Config/V2/Versions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 16 IsSync: true Partial: 0 } 2025-12-23T18:27:45.744261Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587121856543525051:3105], recipient# [4:7587121856543525049:2497], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:46:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:46:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:27:45.744396Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587121856543525052:3106], recipient# [4:7587121856543525050:2498], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:48:1] 
RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:48:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:27:45.750184Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7587121805003915778:2119], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:46:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:27:45.750307Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587121805003915778:2119], cacheItem# { Subscriber: { Subscriber: [4:7587121826478753118:2645] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1766514458254 PathId: [OwnerId: 72057594046644480, LocalPathId: 46] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:46:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:27:45.750429Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7587121805003915778:2119], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:48:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:27:45.750490Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587121805003915778:2119], cacheItem# { Subscriber: { Subscriber: [4:7587121826478753280:2744] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1766514458429 PathId: [OwnerId: 72057594046644480, LocalPathId: 48] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:48:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:27:45.750687Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# 
[4:7587121856543525055:3107], recipient# [4:7587121805003915764:2252], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:46:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:27:45.750832Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587121856543525056:3108], recipient# [4:7587121805003915764:2252], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:48:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:27:45.751302Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7587121805003915778:2119], request# { ErrorCount: 0 DatabaseName: Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:27:45.751402Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587121805003915778:2119], cacheItem# { Subscriber: { Subscriber: [4:7587121805003915827:2127] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 17 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766514453879 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:27:45.751528Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587121856543525057:3109], recipient# [4:7587121805003915764:2252], result# { ErrorCount: 0 DatabaseName: Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/0033e5/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("60 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/0033e5/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 60 seconds timeout",), {}) |90.5%| [TS] {BAZEL_UPLOAD} ydb/core/persqueue/public/describer/ut/unittest >> Donor::SlayAfterWiping [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuildWithOverlap >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SlayAfterWiping [GOOD] Test command err: RandomSeed# 14639424295809337534 2025-12-23T18:28:02.620422Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:28:02.622689Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 14947915907585831385] 2025-12-23T18:28:02.642973Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_program [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_selectors >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrunV1 |90.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter >> TDataShardTrace::TestTraceDistributedUpsert+UseSink >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_selectors [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program >> TDataShardTrace::TestTraceDistributedUpsert-UseSink >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium |90.5%| [TM] {RESULT} ydb/core/fq/libs/checkpointing/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> TDataShardTrace::TestTraceDistributedSelect >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] >> MediatorTest::ResendNotSubset [GOOD] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] >> MediatorTest::OneCoordinatorResendTxNotLost |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring [GOOD] >> test.py::test[solomon-InvalidProject-] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileConflict [GOOD] >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuildWithOverlap [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileNoConflict >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter >> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 
2295626738543571436 SEND TEvPut with key [1:1:1:0:0:3201024:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:3201024:0] 2025-12-23T18:28:11.324612Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:296:63] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2025-12-23T18:28:11.324868Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:229:17] ServerId# [1:301:68] TabletId# 72057594037932033 PipeClientId# [8:229:17] 2025-12-23T18:28:11.325016Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:299:66] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2025-12-23T18:28:11.325189Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:208:17] ServerId# [1:298:65] TabletId# 72057594037932033 PipeClientId# [5:208:17] 2025-12-23T18:28:11.325317Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:297:64] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2025-12-23T18:28:11.325469Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:295:62] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2025-12-23T18:28:11.325611Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:300:67] TabletId# 72057594037932033 PipeClientId# [7:222:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] Test command err: RandomSeed# 13213990273743888588 SEND TEvPut with key [1:1:1:0:0:100:0] 2025-12-23T18:28:11.474298Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-12-23T18:28:11.474784Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2025-12-23T18:28:11.561265Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 |90.5%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... 
results_accumulator.log} |90.5%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |90.5%| [TS] {RESULT} ydb/core/persqueue/public/describer/ut/unittest |90.5%| [LD] {RESULT} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:28:10.102845Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:28:10.280871Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:28:10.299446Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:28:10.300232Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:28:10.300321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:28:10.803460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:10.803646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:10.925215Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514485369052 != 1766514485369056 2025-12-23T18:28:10.943122Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:11.006510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:11.136768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00313a/r3tmp/tmpCbIhEz/pdisk_1.dat 2025-12-23T18:28:11.496604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:28:11.511511Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:28:11.632635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |90.5%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut >> TMLPReaderTests::TopicWithManyIterationsData [GOOD] >> TMLPReaderTests::TopicWithBigMessage >> test.py::test[solomon-InvalidProject-] [GOOD] >> 
test.py::test[solomon-LabelColumnAliases-default.txt] |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |90.5%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> VDiskBalancing::TestStopOneNode_Block42 >> TDataShardTrace::TestTraceDistributedSelect [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrunV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrunV2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:28:11.249147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:28:11.404265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:28:11.416840Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:28:11.417274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:28:11.417370Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:28:11.848178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:11.848344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:11.959431Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514487334745 != 1766514487334749 2025-12-23T18:28:11.980549Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:12.029949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:12.132328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003139/r3tmp/tmpnznfvZ/pdisk_1.dat 2025-12-23T18:28:12.470196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:28:12.488530Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:28:12.599690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:14.869123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1130:2962], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:14.869283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1141:2967], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:14.869363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:14.870481Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1146:2972], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:14.870717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:14.875604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:28:14.902346Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-12-23T18:28:15.082484Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:28:15.180804Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1208:3014] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (DataExecuter -> [(WaitForTableResolve) , (ComputeActor -> [(ForwardWriteActor)]) , (RunTasks) , (WaitTasks) , (Commit -> [(Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)]) , (Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)])])])]) |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest >> TReplicationTests::Create >> TReplicationTests::Disabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:28:11.770081Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:28:11.902784Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:28:11.915349Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:28:11.915800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:28:11.915898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:28:12.272114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:12.272308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:12.368932Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514488314941 != 1766514488314945 2025-12-23T18:28:12.385602Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:12.436903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:12.551402Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003122/r3tmp/tmpt62KeY/pdisk_1.dat 2025-12-23T18:28:12.891759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:28:12.906869Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:28:13.027261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:15.374903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1130:2962], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:15.375071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1141:2967], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:15.375190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:15.376301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1146:2972], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:15.376538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:15.399126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:28:15.428167Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-12-23T18:28:15.578907Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:28:15.680037Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1208:3014] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)]) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)])])]) |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |90.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:28:11.894035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:28:12.081279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:28:12.093068Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:28:12.093450Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:28:12.093528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:28:12.475633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:12.475775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:12.567129Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514488609528 != 1766514488609532 2025-12-23T18:28:12.580528Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:12.627582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:12.725860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003120/r3tmp/tmpBiDRGi/pdisk_1.dat 2025-12-23T18:28:13.025697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:28:13.044848Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:28:13.163922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:15.394350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1130:2962], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:15.394560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1141:2967], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:15.394773Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:15.396370Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1146:2972], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:15.396735Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:15.408876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:28:15.439850Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-12-23T18:28:15.596737Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:28:15.702082Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1208:3014] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/mediator/ut/unittest >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:26:54.073834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:26:54.201838Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:26:54.217512Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:26:54.217965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:26:54.218055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:26:54.577562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:26:54.577704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:26:54.677233Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514411168196 != 1766514411168200 2025-12-23T18:26:54.691964Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:26:54.743351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:26:54.864671Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004720/r3tmp/tmpEunxh8/pdisk_1.dat 2025-12-23T18:26:55.199420Z node 1 :TX_MEDIATOR INFO: mediator__schema.cpp:23: tablet# 72057594047365120 TTxSchema Complete 2025-12-23T18:26:55.200239Z node 1 :TX_MEDIATOR INFO: mediator__init.cpp:88: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-12-23T18:26:55.200973Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [1:852:2737] connected 2025-12-23T18:26:55.201110Z node 1 :TX_MEDIATOR NOTICE: mediator_impl.cpp:133: tablet# 72057594047365120 actor# [1:835:2727] HANDLE TEvMediatorConfiguration Version# 1 2025-12-23T18:26:55.201661Z node 1 :TX_MEDIATOR DEBUG: mediator__configure.cpp:77: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-12-23T18:26:55.201873Z node 1 :TX_MEDIATOR INFO: mediator__init.cpp:64: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-12-23T18:26:55.202444Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [1:858:2742] connected 2025-12-23T18:26:55.202539Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:308: tablet# 72057594047365120 FORWARD Watch from# [1:856:2741] to# [1:854:2739] ExecQueue 2025-12-23T18:26:55.202708Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:175: Actor# [1:854:2739] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [1:856:2741] bucket# 0 ... 
waiting for watcher to connect (done) 2025-12-23T18:26:55.203015Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:308: tablet# 72057594047365120 FORWARD Watch from# [1:856:2741] to# [1:854:2739] ExecQueue 2025-12-23T18:26:55.203081Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:159: Actor# [1:854:2739] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-12-23T18:26:55.203131Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:164: Actor# [1:854:2739] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [1:855:2740] bucket.ActiveActor 2025-12-23T18:26:55.203198Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:380: Actor# [1:855:2740] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [1:856:2741]} 2025-12-23T18:26:55.203304Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:391: Actor# [1:855:2740] Mediator# 72057594047365120 SEND to# [1:856:2741] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} 2025-12-23T18:26:55.214566Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [1:862:2746] connected 2025-12-23T18:26:55.214717Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:139: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-12-23T18:26:55.214796Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [1:860:2744] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 951 Coordinator# 72057594046316545 2025-12-23T18:26:55.215102Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [1:854:2739] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 0}}} marker# M1 2025-12-23T18:26:55.215158Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [1:854:2739] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:855:2740] bucket.ActiveActor step# 1000 2025-12-23T18:26:55.215222Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [1:855:2740] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1000} 2025-12-23T18:26:55.215456Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:171: Actor# [1:855:2740] Mediator# 72057594047365120 SEND to# [1:856:2741] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1000} ... 
waiting for blocked plan step 2025-12-23T18:26:55.236233Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-12-23T18:26:55.236310Z node 1 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-12-23T18:26:55.236489Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594047365120 SEND EvCommitStep to# [1:854:2739] ExecQueue {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:860:2744]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M0 2025-12-23T18:26:55.236615Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [1:854:2739] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:860:2744]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M1 2025-12-23T18:26:55.236684Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 1 marker# M2 2025-12-23T18:26:55.236734Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [1:854:2739] MediatorId# 72057594047365120 SEND Ev to# [1:855:2740] step# 1010 forTablet# 72057594047365121 txid# 1 marker# M3 2025-12-23T18:26:55.236797Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [1:854:2739] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:855:2740] bucket.ActiveActor step# 1010 2025-12-23T18:26:55.236877Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [1:855:2740] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [1:860:2744]}}} marker# M4 2025-12-23T18:26:55.237018Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [1:855:2740] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-12-23T18:26:55.238783Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [1:855:2740] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [1:884:2758] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-12-23T18:26:55.238885Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-12-23T18:26:55.238973Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [1:855:2740] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 ... waiting for blocked plan step (done) ... waiting for no pending commands 2025-12-23T18:26:55.239395Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:308: tablet# 72057594047365120 FORWARD Watch from# [1:856:2741] to# [1:854:2739] ExecQueue 2025-12-23T18:26:55.239465Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:189: Actor# [1:854:2739] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [1:856:2741] bucket# 0 ... 
waiting for no pending commands (done) ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for watch updates 2025-12-23T18:26:55.239766Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:342: Actor# [1:855:2740] Mediator# 72057594047365120 HANDLE {TEvPlanStepAccepted TabletId# 72057594047365121 step# 1010} 2025-12-23T18:26:55.239844Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:415: Actor# [1:855:2740] Mediator# 72057594047365120 SEND to# [1:860:2744] {TEvPlanStepAck TabletId# 72057594047365121 step# 1010 txid# 1} 2025-12-23T18:26:55.239980Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:171: Actor# [1:855:2740] Mediator# 72057594047365120 SEND to# [1:856:2741] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1010} ... waiting for watch updates (done) ... waiting for SysViewsRoster update finished 2025-12-23T18:26:59.532731Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:26:59.565638Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:26:59.566166Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:26:59.566240Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:26:59.939402Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:26:59.939521Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:26:59.977778Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie misma ... EDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:900:2766] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 950 Coordinator# 72057594046316546 2025-12-23T18:28:16.234234Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-12-23T18:28:16.234299Z node 12 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-12-23T18:28:16.234390Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2025-12-23T18:28:16.234419Z node 12 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2025-12-23T18:28:16.234533Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594047365120 SEND EvCommitStep to# [12:854:2739] ExecQueue {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:897:2763]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:900:2766]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M0 2025-12-23T18:28:16.234664Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [12:854:2739] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:897:2763]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:900:2766]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M1 2025-12-23T18:28:16.234715Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. 
TxIds: txid# 1 txid# 2 marker# M2 2025-12-23T18:28:16.234801Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:854:2739] MediatorId# 72057594047365120 SEND Ev to# [12:855:2740] step# 1010 forTablet# 72057594047365121 txid# 1 txid# 2 marker# M3 2025-12-23T18:28:16.234888Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. TxIds: txid# 1 txid# 2 marker# M2 2025-12-23T18:28:16.234924Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:854:2739] MediatorId# 72057594047365120 SEND Ev to# [12:855:2740] step# 1010 forTablet# 72057594047365122 txid# 1 txid# 2 marker# M3 2025-12-23T18:28:16.234959Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [12:854:2739] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [12:855:2740] bucket.ActiveActor step# 1010 2025-12-23T18:28:16.235065Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [12:855:2740] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:897:2763]}{TTx Moderator# 0 txid# 2 AckTo# [12:900:2766]}}} marker# M4 2025-12-23T18:28:16.235272Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [12:855:2740] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:897:2763]}{TTx Moderator# 0 txid# 2 AckTo# [12:900:2766]}}} marker# M4 2025-12-23T18:28:16.235402Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [12:855:2740] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-12-23T18:28:16.236034Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:855:2740] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:908:2772] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-12-23T18:28:16.236089Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-12-23T18:28:16.236121Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2025-12-23T18:28:16.236156Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:855:2740] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 2025-12-23T18:28:16.236436Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:855:2740] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:909:2773] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-12-23T18:28:16.236461Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-12-23T18:28:16.236481Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2025-12-23T18:28:16.236502Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:855:2740] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 2025-12-23T18:28:16.247590Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [12:912:2776] connected 2025-12-23T18:28:16.247728Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:139: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-12-23T18:28:16.247808Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:910:2774] Cookie# 2 CompleteStep# 1010 LatestKnownStep# 1010 SubjectiveTime# 951 Coordinator# 72057594046316546 2025-12-23T18:28:16.248149Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2025-12-23T18:28:16.248222Z node 12 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2025-12-23T18:28:16.248313Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:223: tablet# 72057594047365120 SEND EvRequestLostAcks to# [12:854:2739] ExecQueue step {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} 2025-12-23T18:28:16.248440Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:130: Actor# [12:854:2739] MediatorId# 72057594047365120 HANDLE TEvRequestLostAcks {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} AckTo# [12:910:2774] 2025-12-23T18:28:16.248504Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 2 marker# M2 2025-12-23T18:28:16.248590Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:854:2739] MediatorId# 72057594047365120 SEND Ev to# [12:855:2740] step# 1010 forTablet# 72057594047365121 txid# 2 marker# M3 2025-12-23T18:28:16.248652Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. 
TxIds: txid# 2 marker# M2 2025-12-23T18:28:16.248707Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:854:2739] MediatorId# 72057594047365120 SEND Ev to# [12:855:2740] step# 1010 forTablet# 72057594047365122 txid# 2 marker# M3 2025-12-23T18:28:16.248807Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:222: Actor# [12:855:2740] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:910:2774]}}} 2025-12-23T18:28:16.248889Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:222: Actor# [12:855:2740] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:910:2774]}}} 2025-12-23T18:28:16.262078Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:294: Actor# [12:855:2740] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365121 ClientId: [12:904:2770] ServerId: [12:908:2772] } 2025-12-23T18:28:16.279817Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:855:2740] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:933:2786] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-12-23T18:28:16.279899Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-12-23T18:28:16.279931Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2025-12-23T18:28:16.279966Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:855:2740] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 2025-12-23T18:28:16.292386Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:28:16.328724Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:294: Actor# [12:855:2740] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365122 ClientId: [12:905:2771] ServerId: [12:909:2773] } 2025-12-23T18:28:16.348147Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:855:2740] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:972:2802] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-12-23T18:28:16.348245Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-12-23T18:28:16.348293Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2025-12-23T18:28:16.348337Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:855:2740] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 |90.5%| [TM] {RESULT} ydb/core/tx/mediator/ut/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/mediator/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> TReplicationTests::Create [GOOD] >> TReplicationTests::CreateDropRecreate |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |90.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects >> TReplicationTests::Disabled [GOOD] >> TReplicationTests::CreateSequential |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertNotBlockedByOlderVolatile >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] >> VDiskBalancing::TestStopOneNode_Mirror3dc >> TStateStorageConfig::UniformityTest [GOOD] >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame1 >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob >> VDiskBalancing::TestRandom_Mirror3dc >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::ConsistencyLevel >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] Test command err: RandomSeed# 9736087201054813664 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2025-12-23T18:28:18.671450Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:296:63] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2025-12-23T18:28:18.671782Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:229:17] ServerId# [1:301:68] TabletId# 72057594037932033 PipeClientId# [8:229:17] 2025-12-23T18:28:18.671964Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:299:66] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2025-12-23T18:28:18.672154Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:208:17] ServerId# [1:298:65] TabletId# 72057594037932033 PipeClientId# [5:208:17] 2025-12-23T18:28:18.672320Z 4 00h01m00.010512s :BS_NODE ERROR: 
{NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:297:64] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2025-12-23T18:28:18.672583Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:295:62] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2025-12-23T18:28:18.672787Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:300:67] TabletId# 72057594037932033 PipeClientId# [7:222:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.5%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/rate_limiter/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] Test command err: 2025-12-23T18:26:42.567579Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587121584909340441:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:26:42.567674Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046de/r3tmp/tmpYRAr1O/pdisk_1.dat 2025-12-23T18:26:42.730439Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:26:42.753567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:26:42.753653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:26:42.796239Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:26:42.797134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63076, node 1 2025-12-23T18:26:42.852357Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:26:42.852374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:26:42.852379Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:26:42.852447Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:26:42.897662Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16961 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514402849 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:26:43.030102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:26:43.084946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2025-12-23T18:26:45.163108Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587121599124148086:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:26:45.163792Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046de/r3tmp/tmpAAps29/pdisk_1.dat 2025-12-23T18:26:45.171792Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:26:45.227104Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:26:45.267255Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:26:45.267313Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 23699, node 4 2025-12-23T18:26:45.284918Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:26:45.298513Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:26:45.298531Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:26:45.298535Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:26:45.298593Z node 4 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:26:45.364457Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30317 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514405306 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:26:45.416138Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:26:45.449758Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2025-12-23T18:26:48.059799Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7587121610621357111:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:26:48.059855Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046de/r3tmp/tmpW1icKK/pdisk_1.dat 2025-12-23T18:26:48.074389Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:26:48.139557Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:26:48.180512Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:26:48.180582Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:26:48.188488Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9234, node 7 2025-12-23T18:26:48.225686Z node 7 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:26:48.225712Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:26:48.225718Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:26:48.225790Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:26:48.360645Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514408232 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:26:48.390583Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alt ... 18:28:00.110843Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 6099, node 31 2025-12-23T18:28:00.257303Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:00.257332Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:00.257343Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:00.257464Z node 31 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8074 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514480185 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:00.639433Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:28:00.646872Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-12-23T18:28:00.776892Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2025-12-23T18:28:00.854901Z node 31 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046de/r3tmp/tmpsp2Xgx/pdisk_1.dat 2025-12-23T18:28:07.354443Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:07.354606Z node 34 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:28:07.554851Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:07.631278Z node 34 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:07.648125Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:07.648256Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:07.657289Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 17513, node 34 2025-12-23T18:28:07.802144Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:07.802173Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:07.802186Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:07.802307Z node 34 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514487752 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:08.028628Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:28:08.074151Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:28:08.139743Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2025-12-23T18:28:08.338654Z node 34 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:28:14.136342Z node 37 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[37:7587121982887009748:2203];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:14.136542Z node 37 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046de/r3tmp/tmppQdzwq/pdisk_1.dat 2025-12-23T18:28:14.177934Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:14.312357Z node 37 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:14.338347Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:14.338474Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:14.368693Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5313, node 37 2025-12-23T18:28:14.432991Z node 38 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.006615s 2025-12-23T18:28:14.438639Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:28:14.503593Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:14.503631Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:14.503642Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:14.503750Z node 37 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64672 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514494444 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:14.690838Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:28:14.783663Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2025-12-23T18:28:15.137770Z node 37 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |90.5%| [TM] {BAZEL_UPLOAD} ydb/services/rate_limiter/ut/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |90.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |90.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |90.5%| [TM] {RESULT} ydb/services/rate_limiter/ut/unittest |90.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |90.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id >> TReplicationTests::ConsistencyLevel [GOOD] >> TReplicationTests::CommitInterval |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id >> TReplicationTests::CreateInParallel [GOOD] >> 
TReplicationTests::CreateWithoutCredentials >> VDiskBalancing::TestRandom_Block42 >> YdbSdkSessionsPool::StressTestAsync/0 [GOOD] >> YdbSdkSessionsPool::StressTestAsync/1 >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] >> test.py::test[solomon-LabelColumnAliases-default.txt] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] >> TReplicationTests::CommitInterval [GOOD] >> TReplicationTests::Alter >> TDataShardTrace::TestTraceDistributedSelectViaReadActors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 14218187538966428954 SEND TEvPut with key [1:1:1:0:0:3201024:0] 2025-12-23T18:28:22.585988Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-12-23T18:28:22.586444Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2025-12-23T18:28:22.687022Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::SecureMode |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] |90.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> TGRpcStreamingTest::ClientDisconnects |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] Test command err: RandomSeed# 2755005936850908584 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2025-12-23T18:28:22.659567Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> 
VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] Test command err: RandomSeed# 5596757212168208680 SEND TEvPut with key [1:1:1:0:0:533504:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:533504:0] 2025-12-23T18:28:23.140803Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |90.6%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig >> TReplicationTests::SecureMode [GOOD] >> TReplicationTests::Describe >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] Test command err: RandomSeed# 2784295947883195571 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:100:0] 2025-12-23T18:28:24.687516Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:6347:836] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Start compaction Finish compaction |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> TGRpcStreamingTest::ClientNeverWrites >> TGRpcStreamingTest::WritesDoneFromClient >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink >> KeyValueGRPCService::SimpleWriteReadOverrunV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadRangeV1 >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertNotBlockedByOlderVolatile [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitNotBlockedByOlderVolatile >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute >> KqpEffects::DeletePkPrefixWithIndex >> KqpEffects::InsertAbort_Literal_Success >> KqpImmediateEffects::ConflictingKeyW1RR2 >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] >> TGRpcStreamingTest::ClientDisconnects [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] |90.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable >> KqpImmediateEffects::ConflictingKeyR1WR2 ------- 
[TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2025-12-23T18:28:26.579622Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122035462265120:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:26.579700Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:28:26.636827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004675/r3tmp/tmpL1EHA3/pdisk_1.dat 2025-12-23T18:28:27.062901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:27.102028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:27.102127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:27.206760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:27.294132Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122035462264908:2081] 1766514506537197 != 1766514506537200 2025-12-23T18:28:27.313073Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:27.322232Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:28:27.452329Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d381bd11080] stream accepted Name# Session ok# true peer# ipv6:[::1]:58724 2025-12-23T18:28:27.455030Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d381bd11080] facade attach Name# Session actor# [1:7587122039757232971:2465] peer# ipv6:[::1]:58724 2025-12-23T18:28:27.455074Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7d381bd11080] facade write Name# Session data# peer# ipv6:[::1]:58724 2025-12-23T18:28:27.455394Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:399: [0x7d381bd11080] facade write Name# Session data# peer# ipv6:[::1]:58724 grpc status# (0) message# 2025-12-23T18:28:27.460932Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d381bd11080] write finished Name# Session ok# true peer# ipv6:[::1]:58724 2025-12-23T18:28:27.461049Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 2025-12-23T18:28:27.461425Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d381bd11080] stream done notification Name# Session ok# true peer# ipv6:[::1]:58724 2025-12-23T18:28:27.461472Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d381bd11080] write finished Name# Session ok# true peer# ipv6:[::1]:58724 2025-12-23T18:28:27.461503Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: 
[0x7d381bd11080] stream finished Name# Session ok# true peer# ipv6:[::1]:58724 grpc status# (0) message# 2025-12-23T18:28:27.461563Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d381bd11080] deregistering request Name# Session peer# ipv6:[::1]:58724 (finish done) 2025-12-23T18:28:27.463215Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 2025-12-23T18:28:27.577930Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2025-12-23T18:28:26.090897Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122032323057117:2143];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:26.091602Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046a1/r3tmp/tmpvc9IKx/pdisk_1.dat 2025-12-23T18:28:26.606349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:26.612848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:26.612964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:26.706678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:26.722359Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:26.725629Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122032323057004:2081] 1766514506072780 != 1766514506072783 2025-12-23T18:28:26.911069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:28:27.031568Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7de6e0a6f880] stream accepted Name# Session ok# true peer# ipv6:[::1]:56370 2025-12-23T18:28:27.034966Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7de6e0a6f880] stream done notification Name# Session ok# true peer# ipv6:[::1]:56370 2025-12-23T18:28:27.035491Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7de6e0a6f880] facade attach Name# Session actor# [1:7587122036618025010:2405] peer# ipv6:[::1]:56370 2025-12-23T18:28:27.035513Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:230: Received TEvNotifiedWhenDone 2025-12-23T18:28:27.042969Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7de6e0a6f880] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2025-12-23T18:28:27.043019Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7de6e0a6f880] deregistering request Name# Session peer# unknown (finish done) 2025-12-23T18:28:27.122897Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> TMLPReaderTests::TopicWithBigMessage [GOOD] >> TMLPReaderTests::TopicWithKeepMessageOrder >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] [SKIPPED] >> test.py::test[solomon-UnknownSetting-] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink >> TGRpcStreamingTest::ClientNeverWrites [GOOD] >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateQuery >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2025-12-23T18:28:29.208533Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122044672157409:2195];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:29.219882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004671/r3tmp/tmpzkG9Rg/pdisk_1.dat 2025-12-23T18:28:29.708920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:29.709129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:29.718164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:29.751497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:29.913115Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:29.916733Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122044672157229:2081] 1766514509156033 != 1766514509156036 2025-12-23T18:28:29.948731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2025-12-23T18:28:30.168089Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d528540e680] stream accepted Name# Session ok# true peer# ipv6:[::1]:54282 2025-12-23T18:28:30.175144Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d528540e680] facade attach Name# Session actor# [1:7587122048967125268:2441] peer# ipv6:[::1]:54282 2025-12-23T18:28:30.175179Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d528540e680] facade read Name# Session peer# ipv6:[::1]:54282 2025-12-23T18:28:30.175273Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7d528540e680] facade write Name# Session data# peer# ipv6:[::1]:54282 2025-12-23T18:28:30.175565Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d528540e680] facade finish Name# Session peer# ipv6:[::1]:54282 grpc status# (0) message# 2025-12-23T18:28:30.180367Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d528540e680] write finished Name# Session ok# true peer# ipv6:[::1]:54282 2025-12-23T18:28:30.180747Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d528540e680] read finished Name# Session ok# false data# peer# ipv6:[::1]:54282 2025-12-23T18:28:30.180772Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d528540e680] stream done notification Name# Session ok# true peer# ipv6:[::1]:54282 2025-12-23T18:28:30.180802Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d528540e680] stream finished Name# Session ok# true peer# ipv6:[::1]:54282 grpc status# (0) message# 2025-12-23T18:28:30.180850Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d528540e680] deregistering request Name# Session peer# ipv6:[::1]:54282 (finish done) 2025-12-23T18:28:30.181101Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:187: Received TEvWriteFinished, success = 1 2025-12-23T18:28:30.181119Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:181: Received TEvReadFinished, success = 0 2025-12-23T18:28:30.181127Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:194: Received TEvNotifiedWhenDone 2025-12-23T18:28:30.224106Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2025-12-23T18:28:29.291675Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122048345000559:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:29.293921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:28:29.322363Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:28:29.339452Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.028105s test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004672/r3tmp/tmpsz5dyT/pdisk_1.dat 2025-12-23T18:28:29.750946Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:29.764392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:29.764482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:29.799770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:29.924356Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:29.930870Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122048345000519:2081] 1766514509259081 != 1766514509259084 2025-12-23T18:28:30.026870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:28:30.200243Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d9ddb70e680] stream accepted Name# Session ok# true peer# ipv6:[::1]:46556 2025-12-23T18:28:30.200518Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d9ddb70e680] facade attach Name# Session actor# [1:7587122052639968579:2466] peer# ipv6:[::1]:46556 2025-12-23T18:28:30.200544Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d9ddb70e680] facade read Name# Session peer# ipv6:[::1]:46556 2025-12-23T18:28:30.200784Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d9ddb70e680] read finished Name# Session ok# false data# peer# ipv6:[::1]:46556 2025-12-23T18:28:30.200921Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:302: Received TEvReadFinished, success = 0 2025-12-23T18:28:30.200946Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d9ddb70e680] facade finish Name# Session peer# ipv6:[::1]:46556 grpc status# (9) message# Everything is A-OK 2025-12-23T18:28:30.201297Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d9ddb70e680] stream done notification Name# Session ok# true peer# ipv6:[::1]:46556 2025-12-23T18:28:30.201343Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d9ddb70e680] stream finished Name# Session ok# true peer# ipv6:[::1]:46556 grpc status# (9) message# Everything is A-OK 2025-12-23T18:28:30.201360Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d9ddb70e680] deregistering request Name# Session peer# ipv6:[::1]:46556 (finish done) 2025-12-23T18:28:30.201461Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:312: Received TEvNotifiedWhenDone 2025-12-23T18:28:30.299429Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] |90.6%| [TM] 
{BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |90.6%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut >> TControlPlaneProxyTest::ShouldSendCreateQuery [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated >> TReplicationTests::DropReplicationWithUnknownSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:28:29.073234Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:28:29.213839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:28:29.226431Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:28:29.226912Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:28:29.227007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:28:29.575740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:29.575938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:29.670273Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514505561310 != 1766514505561314 2025-12-23T18:28:29.685075Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:29.738988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:29.846453Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00311c/r3tmp/tmpypVpjC/pdisk_1.dat 2025-12-23T18:28:30.193331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:28:30.212357Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:28:30.361394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:32.836270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1130:2962], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:32.836444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1141:2967], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:32.836543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:32.837604Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1146:2972], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:32.837863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:32.850174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:28:32.896665Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-12-23T18:28:33.062658Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:28:33.252053Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1208:3014] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |90.6%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |90.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |90.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated [GOOD] >> TControlPlaneProxyTest::ShouldSendListQueries |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink |90.6%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] >> KqpEffects::DeletePkPrefixWithIndex [GOOD] >> KqpEffects::AlterDuringUpsertTransaction+UseSink >> AssignTxId::Basic |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitNotBlockedByOlderVolatile [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertBlockedByOlderVolatile |90.6%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |90.6%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut >> TControlPlaneProxyTest::ShouldSendListQueries [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:28:18.548727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:28:18.548838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:28:18.548881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:28:18.548969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:28:18.549016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:28:18.549048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:28:18.549131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:28:18.549242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:28:18.550245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:28:18.550646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:28:18.650676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:28:18.650764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:18.663845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:28:18.664069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:28:18.664300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:28:18.680871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-12-23T18:28:18.681239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:28:18.682117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:28:18.682432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:28:18.687791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:28:18.688037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:28:18.689906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:28:18.689985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:28:18.690211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:28:18.690277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:28:18.690331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:28:18.690466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:28:18.877460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.884177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.884424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.884527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.884608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.884682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.884759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.884879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.884978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.885456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.885564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:28:18.885666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.885756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.885853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.885953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
SHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:28:38.729118Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:28:38.729179Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:28:38.729240Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:28:38.729296Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:28:38.729344Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:28:38.729521Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:28:38.729585Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:28:38.729641Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-12-23T18:28:38.729689Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2025-12-23T18:28:38.730924Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 274137603, Sender [10:210:2211], Recipient [10:127:2152]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 10 } 2025-12-23T18:28:38.730978Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5331: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-12-23T18:28:38.731048Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:28:38.731132Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:28:38.731178Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:28:38.731234Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-12-23T18:28:38.731295Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:28:38.731410Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:28:38.732262Z node 10 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5247: StateWork, received event# 274137603, Sender [10:210:2211], Recipient [10:127:2152]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 } 2025-12-23T18:28:38.732305Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5331: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-12-23T18:28:38.732366Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:28:38.732455Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:28:38.732490Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:28:38.732520Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2025-12-23T18:28:38.732553Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:28:38.732648Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:28:38.732698Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:28:38.733708Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435084, Sender [10:127:2152], Recipient [10:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-12-23T18:28:38.733776Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5438: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-12-23T18:28:38.733847Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:28:38.733910Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:28:38.734011Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:28:38.735839Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:28:38.736489Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:28:38.736534Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at 
tablet# 72057594046678944 2025-12-23T18:28:38.738201Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:28:38.738241Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:28:38.738354Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:28:38.738628Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:28:38.738689Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:28:38.739143Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [10:847:2812], Recipient [10:127:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:28:38.739227Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:28:38.739277Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:28:38.739461Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124996, Sender [10:794:2759], Recipient [10:127:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-12-23T18:28:38.739506Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-12-23T18:28:38.739600Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:28:38.739722Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:28:38.739780Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [10:845:2810] 2025-12-23T18:28:38.740052Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [10:847:2812], Recipient [10:127:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:28:38.740097Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:28:38.740148Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-12-23T18:28:38.740595Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [10:848:2813], Recipient [10:127:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-12-23T18:28:38.740665Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:28:38.740805Z node 10 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:28:38.741040Z node 10 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 248us result status StatusPathDoesNotExist 2025-12-23T18:28:38.741233Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:28:18.274714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:28:18.274815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:28:18.274851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:28:18.274888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:28:18.274950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:28:18.274982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:28:18.275044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:28:18.275123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:28:18.275956Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:28:18.276280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:28:18.381769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:28:18.381838Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:18.388320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:28:18.388581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:28:18.388775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:28:18.391770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:28:18.392001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:28:18.392766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:28:18.393110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:28:18.395280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:28:18.395473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:28:18.396724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:28:18.396782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:28:18.396905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:28:18.396970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:28:18.397015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:28:18.397164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:28:18.586009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.587193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.587324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.587398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.587467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.587561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.587652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.587739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.587822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.587888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.587977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.588063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.588150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.588238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:28:18.588321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 813 RawX2: 42949675740 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:28:38.729624Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-12-23T18:28:38.729804Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 813 RawX2: 42949675740 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:28:38.729911Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:28:38.730076Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 813 RawX2: 42949675740 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:28:38.730184Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:28:38.730256Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1090: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2025-12-23T18:28:38.739069Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:28:38.740033Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:28:38.761732Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 42949675655 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:28:38.761831Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:28:38.761988Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 709 RawX2: 42949675655 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:28:38.762071Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:28:38.762169Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 709 RawX2: 42949675655 } Origin: 72075186233409546 State: 
2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:28:38.762242Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:28:38.762311Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:28:38.762369Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:28:38.762438Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:28:38.762490Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:28:38.773023Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:28:38.773615Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:28:38.773708Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-12-23T18:28:38.773799Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-12-23T18:28:38.773871Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-12-23T18:28:38.774010Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-12-23T18:28:38.774097Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2025-12-23T18:28:38.777009Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:28:38.777120Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:28:38.777342Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:28:38.777405Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:28:38.777466Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:28:38.777517Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:28:38.777577Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:28:38.777670Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [10:737:2715] message: TxId: 102 2025-12-23T18:28:38.777750Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:28:38.777816Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:28:38.777862Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:28:38.778097Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:28:38.778163Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:28:38.780707Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:28:38.780792Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [10:843:2802] TestWaitNotification: OK eventTxId 102 2025-12-23T18:28:38.781543Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:28:38.781844Z node 10 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 338us result status StatusSuccess 2025-12-23T18:28:38.782445Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 
ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_replication/unittest >> KqpEffects::InsertAbort_Literal_Success [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_replication/unittest >> test.py::test[solomon-UnknownSetting-] [GOOD] |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |90.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WR2 |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |90.6%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage >> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges [GOOD] >> TTxDataShardPrefixKMeansScan::BadRequest >> TControlPlaneProxyTest::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetQueryStatus |90.6%| [TM] {asan, default-linux-x86_64, pic, release} ydb/tests/fq/solomon/py3test >> test.py::test[solomon-UnknownSetting-] [GOOD] |90.6%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/solomon/py3test |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |90.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure >> KqpImmediateEffects::ConflictingKeyR1WRR2 >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program [GOOD] |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/compatibility/ydb-tests-compatibility |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/compatibility/ydb-tests-compatibility |90.6%| [TM] {RESULT} ydb/tests/fq/solomon/py3test |90.6%| [LD] {RESULT} $(B)/ydb/tests/compatibility/ydb-tests-compatibility >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options |90.6%| 
[LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut >> TControlPlaneProxyTest::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyQuery >> KeyValueGRPCService::SimpleWriteReadRangeV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadRangeV2 |90.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types1-all_types1-index1] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success >> TControlPlaneProxyTest::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame1 [GOOD] >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame2 >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] [GOOD] >> AssignTxId::Basic [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range >> TControlPlaneProxyTest::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendControlQuery >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertBlockedByOlderVolatile [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertDuplicateKeyAtSnapshot >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] >> KqpEffects::AlterDuringUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterDuringUpsertTransaction-UseSink >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok >> TMLPReaderTests::TopicWithKeepMessageOrder [GOOD] >> TMLPWriterTests::TopicNotExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic [GOOD] Test command err: 2025-12-23T18:28:39.687482Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122091213404616:2258];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:39.687575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003dd6/r3tmp/tmpQldTZD/pdisk_1.dat 2025-12-23T18:28:40.287259Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2025-12-23T18:28:40.324276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:40.324383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:40.474936Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122091213404386:2081] 1766514519661213 != 1766514519661216 2025-12-23T18:28:40.487819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:40.492000Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:40.502897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:28:40.677775Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7618 TServer::EnableGrpc on GrpcPort 28246, node 1 2025-12-23T18:28:41.179576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:41.179610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:41.179618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:41.179720Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514520547 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:41.611454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:28:41.624168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:28:44.686322Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122091213404616:2258];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:44.686423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:28:44.850139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122112688241773:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:44.850323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:44.850731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122112688241783:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:44.850810Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:45.264026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:491) 2025-12-23T18:28:45.275745Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:41: [controller 72075186224037888] OnActivateExecutor 2025-12-23T18:28:45.275821Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init_schema.cpp:17: [controller 72075186224037888][TxInitSchema] Execute 2025-12-23T18:28:45.283878Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init_schema.cpp:26: [controller 72075186224037888][TxInitSchema] Complete 2025-12-23T18:28:45.283965Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init.cpp:245: [controller 72075186224037888][TxInit] Execute 2025-12-23T18:28:45.284224Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init.cpp:250: [controller 72075186224037888][TxInit] Complete 2025-12-23T18:28:45.284233Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:128: [controller 72075186224037888] SwitchToWork 2025-12-23T18:28:45.296153Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:171: [controller 72075186224037888] Handle NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 38 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:28246" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } Database: "/Root" 2025-12-23T18:28:45.296476Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_create_replication.cpp:22: [controller 72075186224037888][TxCreateReplication] Execute: NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 38 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:28246" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } Database: "/Root" 2025-12-23T18:28:45.296547Z node 1 :REPLICATION_CONTROLLER NOTICE: tx_create_replication.cpp:43: [controller 72075186224037888][TxCreateReplication] Add replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T18:28:45.297411Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_create_replication.cpp:58: [controller 72075186224037888][TxCreateReplication] Complete 2025-12-23T18:28:45.297445Z node 1 :REPLICATION_CONTROLLER INFO: tx_create_replication.cpp:68: [controller 72075186224037888][TxCreateReplication] Discover tenant nodes: tenant# /Root 2025-12-23T18:28:45.306580Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:335: [controller 72075186224037888] Handle NKikimr::TEvDiscovery::TEvDiscoveryData 2025-12-23T18:28:45.306633Z node 1 :REPLICATION_CONTROLLER DEBUG: controller.cpp:359: [controller 72075186224037888] Create session: nodeId# 1 TClient::Ls request: /Root/replication 2025-12-23T18:28:45.326248Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-12-23T18:28:45.326292Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/table, status# SCHEME_ERROR, issues# {
: Error: Path not found }, iteration# 0 2025-12-23T18:28:45.326453Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-12-23T18:28:45.326601Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186224037888][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-12-23T18:28:45.326648Z node 1 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186224037888][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/table: SCHEME_ERROR ({
: Error: Path not found }) 2025-12-23T18:28:45.328013Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186224037888][TxDiscoveryTargetsResult] Complete TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "replication" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766514525349 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsIn... (TRUNCATED) 2025-12-23T18:28:45.348135Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 1 TxId: 0 } 2025-12-23T18:28:45.348229Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 0 2025-12-23T18:28:45.348293Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 1, assigned# 0, allocated# 0, exhausted# 1 2025-12-23T18:28:45.348395Z node 1 :REPLICATION_CONTROLLER TRACE: tx_assign_tx_id.cpp:174: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-12-23T18:28:45.348435Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 5 2025-12-23T18:28:45.349592Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-12-23T18:28:45.355048Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 0 } 2025-12-23T18:28:45.355113Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-12-23T18:28:45.355184Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-12-23T18:28:45.355856Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 18446744073709551615 } 2025-12-23T18:28:45.355890Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-12-23T18:28:45.355941Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-12-23T18:28:45.356581Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 10000 TxId: 0 } 2025-12-23T18:28:45.356612Z node 1 
:REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-12-23T18:28:45.357226Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-12-23T18:28:45.357683Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 5000 TxId: 0 } 2025-12-23T18:28:45.357713Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 2, allocated# 3 2025-12-23T18:28:45.357747Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-12-23T18:28:45.358276Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 20000 TxId: 0 } Versions { Step: 30000 TxId: 0 } Versions { Step: 40000 TxId: 0 } 2025-12-23T18:28:45.358310Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 3, assigned# 2, allocated# 3 2025-12-23T18:28:45.358806Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 0, exhausted# 0 2025-12-23T18:28:45.358902Z node 1 :REPLICATION_CONTROLLER TRACE: tx_assign_tx_id.cpp:174: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-12-23T18:28:45.358946Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 0, assigned# 5, allocated# 5 2025-12-23T18:28:45.358984Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2025-12-23T18:28:45.359764Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 50000 TxId: 0 } 2025-12-23T18:28:45.359805Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 5, allocated# 5 2025-12-23T18:28:45.360345Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots >> TControlPlaneProxyTest::ShouldSendControlQuery [GOOD] |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots >> TControlPlaneProxyTest::ShouldSendGetResultData >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame2 [GOOD] |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> HullReplWriteSst::Basic [GOOD] |90.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |90.6%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/scheme_shard/py3test |90.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |90.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |90.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5661, MsgBus: 13796 2025-12-23T18:28:29.402337Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122044404468218:2190];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:29.402464Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b14/r3tmp/tmpAu4j4O/pdisk_1.dat 2025-12-23T18:28:29.751049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:29.751715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:29.782347Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:29.803071Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122044404468053:2081] 1766514509366463 != 1766514509366466 2025-12-23T18:28:29.824573Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:29.826260Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5661, node 1 2025-12-23T18:28:29.937913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:29.937939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:29.937945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:29.938049Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:28:30.017361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13796 2025-12-23T18:28:30.410403Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13796 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514509879 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:30.717961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:28:30.755339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:28:31.027586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:31.224541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:31.311555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:33.845200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122061584339104:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:33.845320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:33.848821Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122061584339114:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:33.848946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:34.219120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:34.271827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:34.347440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:34.400095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122044404468218:2190];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:34.400147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:28:34.435413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:34.485787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:34.528759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:34.574798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:34.625406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:34.710889Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122065879307281:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:34.710993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:34.711361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122065879307287:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:34.711376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122065879307286:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:34.711413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2 ... ableGrpc on GrpcPort 22737, node 2 2025-12-23T18:28:39.521702Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:39.521724Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:39.521732Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:39.521802Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:28:39.531549Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10978 TClient is connected to server localhost:10978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514519420 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:40.187801Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:28:40.195793Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:28:40.203300Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:40.214943Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:28:40.279635Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:28:40.507609Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:28:40.594792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:43.188335Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122106651197630:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:43.188434Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:43.188733Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122106651197639:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:43.188783Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:43.341739Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:43.392551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:43.474926Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:43.542593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:43.591098Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:43.673600Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:43.768917Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:43.857959Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:44.020312Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122110946165810:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:44.020410Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:44.020464Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122110946165815:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:44.023204Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122110946165818:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:44.023288Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:44.024090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:28:44.035859Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122110946165817:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:28:44.126194Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122110946165870:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:28:44.234196Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122089471326636:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:44.241493Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:28:46.287072Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Conflict+UseSink >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok >> TTxDataShardPrefixKMeansScan::BadRequest [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPosting >> TControlPlaneProxyTest::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyTest::ShouldSendListJobs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/repl/ut/unittest >> HullReplWriteSst::Basic [GOOD] Test command err: commit chunk# 1 {ChunkIdx: 1 Offset: 101232640 Size: 32981792} 749584 commit chunk# 2 {ChunkIdx: 2 Offset: 101232640 Size: 32983112} 749614 commit chunk# 3 {ChunkIdx: 3 Offset: 101253120 Size: 32960936} 749110 commit chunk# 4 {ChunkIdx: 4 Offset: 101236736 Size: 32979548} 749533 commit chunk# 5 {ChunkIdx: 5 Offset: 101220352 Size: 32996224} 749912 commit chunk# 6 {ChunkIdx: 6 Offset: 101253120 Size: 32961244} 749117 commit chunk# 7 {ChunkIdx: 7 Offset: 101212160 Size: 33004936} 750110 commit chunk# 8 {ChunkIdx: 8 Offset: 101216256 Size: 32999084} 749977 commit chunk# 9 {ChunkIdx: 9 Offset: 101220352 Size: 32997236} 749935 commit chunk# 10 {ChunkIdx: 10 Offset: 101187584 Size: 33030104} 750682 commit chunk# 11 {ChunkIdx: 11 Offset: 101224448 Size: 32990328} 749778 commit chunk# 12 {ChunkIdx: 12 Offset: 101216256 Size: 33001460} 750031 commit chunk# 13 {ChunkIdx: 13 Offset: 101240832 Size: 32976864} 749472 commit chunk# 14 {ChunkIdx: 14 Offset: 101212160 Size: 33001900} 750041 commit chunk# 15 {ChunkIdx: 15 Offset: 101244928 Size: 32970924} 749337 commit chunk# 16 {ChunkIdx: 16 Offset: 101240832 Size: 32976248} 749458 commit chunk# 17 {ChunkIdx: 17 Offset: 101224448 Size: 32992880} 749836 commit chunk# 18 {ChunkIdx: 18 Offset: 101244928 Size: 32972772} 749379 commit chunk# 19 {ChunkIdx: 19 Offset: 101249024 Size: 32968680} 749286 commit chunk# 20 {ChunkIdx: 20 Offset: 101208064 Size: 33007576} 750170 commit chunk# 21 {ChunkIdx: 21 Offset: 101208064 Size: 33007664} 750172 commit chunk# 22 {ChunkIdx: 22 Offset: 101253120 Size: 32961772} 749129 commit chunk# 23 {ChunkIdx: 23 Offset: 101228544 Size: 32987072} 749704 commit chunk# 
24 {ChunkIdx: 24 Offset: 101228544 Size: 32986456} 749690 commit chunk# 25 {ChunkIdx: 25 Offset: 101203968 Size: 33010304} 750232 commit chunk# 26 {ChunkIdx: 26 Offset: 101220352 Size: 32994376} 749870 commit chunk# 27 {ChunkIdx: 27 Offset: 101208064 Size: 33008192} 750184 commit chunk# 28 {ChunkIdx: 28 Offset: 101253120 Size: 32962828} 749153 commit chunk# 29 {ChunkIdx: 29 Offset: 101224448 Size: 32993276} 749845 commit chunk# 30 {ChunkIdx: 30 Offset: 101224448 Size: 32993100} 749841 commit chunk# 31 {ChunkIdx: 31 Offset: 101208064 Size: 33007532} 750169 commit chunk# 32 {ChunkIdx: 32 Offset: 101236736 Size: 32980428} 749553 commit chunk# 33 {ChunkIdx: 33 Offset: 101253120 Size: 32964280} 749186 commit chunk# 34 {ChunkIdx: 34 Offset: 101216256 Size: 33001460} 750031 commit chunk# 35 {ChunkIdx: 35 Offset: 101249024 Size: 32964852} 749199 commit chunk# 36 {ChunkIdx: 36 Offset: 101236736 Size: 32977392} 749484 commit chunk# 37 {ChunkIdx: 37 Offset: 101257216 Size: 32957548} 749033 commit chunk# 38 {ChunkIdx: 38 Offset: 101228544 Size: 32988348} 749733 commit chunk# 39 {ChunkIdx: 39 Offset: 101224448 Size: 32993276} 749845 commit chunk# 40 {ChunkIdx: 40 Offset: 101220352 Size: 32994508} 749873 |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TControlPlaneProxyTest::ShouldSendListJobs [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeJob |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut/unittest >> TStateStorageConfigCompareWithOld::TestReplicaActorIdAndSelectionIsSame2 [GOOD] |90.7%| [TA] $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut/unittest >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RR2 |90.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartNo_Table >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll >> TControlPlaneProxyTest::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateConnection |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |90.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_locks/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |90.7%| [TA] $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendListConnections >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertDuplicateKeyAtSnapshot [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertMissingKeyAtSnapshot >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> KqpEffects::AlterDuringUpsertTransaction-UseSink [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] >> TControlPlaneProxyTest::ShouldSendListConnections [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeConnection >> DataShardWrite::AsyncIndexKeySizeConstraint |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_locks/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24436, MsgBus: 30230 2025-12-23T18:28:34.430322Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122067503190713:2244];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:34.432298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:28:34.448824Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 
test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005aeb/r3tmp/tmp5THL1e/pdisk_1.dat 2025-12-23T18:28:34.865262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:34.867342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:34.867489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:34.894951Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:34.894983Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122067503190508:2081] 1766514514398775 != 1766514514398778 2025-12-23T18:28:34.908401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24436, node 1 2025-12-23T18:28:35.203127Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:28:35.231490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:35.231522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:35.231528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:35.231613Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:28:35.414950Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:30230 TClient is connected to server localhost:30230 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514514954 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:36.473789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:28:36.510480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:36.737997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:36.998728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:37.164490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:39.417613Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122067503190713:2244];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:39.417732Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:28:39.754495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122088978028866:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:39.754671Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:39.755054Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122088978028876:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:39.755134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:40.599550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:40.668854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:40.732018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:40.782344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:40.851302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:40.937146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:40.987560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:41.073513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:41.195262Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122097567964353:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:41.195330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:41.195407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122097567964358:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:41.202680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp ... EnableGrpc on GrpcPort 31082, node 2 2025-12-23T18:28:45.626091Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:28:45.684696Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:45.684718Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:45.684726Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:45.684823Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3618 2025-12-23T18:28:46.291261Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514525545 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:46.381294Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:28:46.386938Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:28:46.397123Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:28:46.471956Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:46.683535Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:28:46.783722Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:49.862897Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122132210746920:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:49.863009Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:49.866872Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122132210746930:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:49.866971Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:49.962643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:50.011233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:50.080216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:50.171313Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:50.246137Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:50.336307Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122115030875918:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:50.341038Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:28:50.376645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:50.459214Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:50.569739Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:50.758145Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122136505715103:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:50.758275Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:50.759535Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122136505715108:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:50.759614Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122136505715109:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:50.759928Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:50.764638Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:28:50.784755Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122136505715112:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:28:50.870184Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122136505715165:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:28:54.656737Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KeyValueGRPCService::SimpleWriteReadRangeV2 [GOOD] >> KeyValueGRPCService::SimpleWriteListRangeV1 |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |90.7%| [TA] {RESULT} $(B)/ydb/core/base/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.7%| [LD] {RESULT} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut >> TControlPlaneProxyTest::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyConnection >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success >> DataShardWrite::ReplaceImmediate >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterDuringUpsertTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13682, MsgBus: 64353 2025-12-23T18:28:29.982147Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122044970979018:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:29.982635Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005af0/r3tmp/tmpvtkHIl/pdisk_1.dat 2025-12-23T18:28:30.516950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:30.517096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:30.644248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:30.692066Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122044970978897:2081] 1766514509972493 != 1766514509972496 2025-12-23T18:28:30.701704Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:30.715331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13682, node 1 2025-12-23T18:28:31.004960Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:28:31.012984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:28:31.051800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:31.051822Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:31.051829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:31.051919Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64353 TClient is connected to server localhost:64353 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514510817 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:28:31.780219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:28:31.788113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:28:31.804925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:31.970243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:28:32.246025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:32.366181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:34.719216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122066445817256:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:34.719351Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:34.721982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122066445817266:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:34.722074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:34.979074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122044970979018:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:34.983361Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:28:35.089559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:35.137280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:35.174639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:35.222830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:35.274786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:35.322309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:35.378345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:35.467125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:35.569906Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122070740785435:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:35.570045Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:35.570232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122070740785440:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:35.570514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122070740785441:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:35.570569Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [W ... rsion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514528422 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:49.477695Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:28:49.491903Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:28:49.510794Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:49.646923Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:49.880811Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:49.987155Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:52.516382Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122146511767356:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:52.516484Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:52.516888Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122146511767365:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:52.516941Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:52.596991Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:52.676401Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:52.727974Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:52.778950Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:52.836268Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:52.943104Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:53.028809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:53.139801Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587122129331896340:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:53.140286Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:28:53.153888Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:53.288545Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122150806735540:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:53.288644Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:53.289141Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122150806735546:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:53.289198Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122150806735545:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:53.289245Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:53.294018Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:28:53.311992Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-12-23T18:28:53.312229Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587122150806735549:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:28:53.378354Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587122150806735601:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:28:55.858188Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:56.106392Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:28:56.142599Z node 3 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037927 cannot parse tx 281474976715675: Table '/Root/TestTable' scheme changed. 2025-12-23T18:28:56.142910Z node 3 :KQP_EXECUTER ERROR: kqp_data_executer.cpp:843: ActorId: [3:7587122163691637909:2524] TxId: 281474976715675. Ctx: { TraceId: 01kd67hbpb7v23qdxt23kxrjty, Database: /Root, SessionId: ydb://session/3?node_id=3&id=YzIzOTFhMDUtNDQyOTU1MWQtZTdmZWEyNjktOWY2M2QxNzk=, PoolId: default, IsStreamingQuery: 0}. ERROR: [SCHEME_CHANGED] Table '/Root/TestTable' scheme changed.; 2025-12-23T18:28:56.143297Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=YzIzOTFhMDUtNDQyOTU1MWQtZTdmZWEyNjktOWY2M2QxNzk=, ActorId: [3:7587122159396670481:2524], ActorState: ExecuteState, TraceId: 01kd67hbpb7v23qdxt23kxrjty, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Table \'/Root/TestTable\' scheme changed." 
issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Data execution" issue_code: 2019 severity: 1 } >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> TControlPlaneProxyTest::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteConnection >> TCacheTest::Recreate >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |90.7%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark >> TCacheTest::Recreate [GOOD] >> TCacheTest::SysLocks >> TCacheTest::Navigate >> TControlPlaneProxyTest::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendTestConnection >> TCacheTest::SysLocks [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types1-all_types1-index1] [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] >> TCacheTest::Navigate [GOOD] >> TCacheTest::PathBelongsToDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31041, MsgBus: 7120 2025-12-23T18:28:30.053771Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122048729447744:2145];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:30.054223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005aef/r3tmp/tmpMZkZjB/pdisk_1.dat 2025-12-23T18:28:30.723003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:30.723129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:30.780731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:30.814823Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for 
subscription [1:7587122048729447635:2081] 1766514510010456 != 1766514510010459 2025-12-23T18:28:30.857839Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:30.881969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31041, node 1 2025-12-23T18:28:31.041316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:28:31.059350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:31.059373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:31.059378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:31.059464Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:28:31.064610Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7120 TClient is connected to server localhost:7120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514510929 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:31.794270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:28:31.800683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:28:31.826840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:28:32.059808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:32.336714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:32.428445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:35.048517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122048729447744:2145];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:35.048639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:28:35.444687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122070204285997:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:35.444820Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:35.445282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122070204286007:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:35.445320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:35.990421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.059647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.114219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.168691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.226624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.295634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.434329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.553640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.728700Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122074499254182:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:36.728798Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:36.729324Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122074499254187:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:36.729368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122074499254188:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:36.729492Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [Work ... p:311) waiting... 2025-12-23T18:28:51.942953Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:28:51.950801Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:28:52.093621Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:52.311844Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:52.393061Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:55.630894Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122159821971586:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:55.631008Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:55.631670Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122159821971596:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:55.631733Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:55.746638Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587122138347133286:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:55.746722Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:28:55.982624Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:56.069781Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:56.154902Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:56.278501Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:56.394113Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:56.494628Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:56.609452Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:56.753096Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:56.968958Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122164116939778:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:56.969071Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:56.969658Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122164116939784:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:56.969748Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:56.969810Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122164116939783:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:56.987290Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:28:57.020595Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587122164116939787:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:28:57.107439Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587122168411907136:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:00.265585Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-12-23T18:29:00.265889Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:29:00.266097Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:29:00.266402Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [3:7587122181296809377:2537], Table: `/Root/TwoShard` ([72057594046644480:38:1]), SessionActorId: [3:7587122181296809355:2537]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[3:7587122181296809377:2537].{
: Error: Conflict with existing key., code: 2012 } 2025-12-23T18:29:00.266525Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [3:7587122181296809370:2537], SessionActorId: [3:7587122181296809355:2537], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7587122181296809355:2537]. 2025-12-23T18:29:00.267100Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=3&id=M2M0YmVhOTYtMWMyZDE5NzUtZTRhOGU3ZDItZTBiMGQyOTU=, ActorId: [3:7587122181296809355:2537], ActorState: ExecuteState, TraceId: 01kd67hfhc2y4hzmbpqzjynn94, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7587122181296809371:2537] from: [3:7587122181296809370:2537] 2025-12-23T18:29:00.267258Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [3:7587122181296809371:2537] TxId: 281474976710673. Ctx: { TraceId: 01kd67hfhc2y4hzmbpqzjynn94, Database: /Root, SessionId: ydb://session/3?node_id=3&id=M2M0YmVhOTYtMWMyZDE5NzUtZTRhOGU3ZDItZTBiMGQyOTU=, PoolId: default, IsStreamingQuery: 0}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-12-23T18:29:00.267645Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=M2M0YmVhOTYtMWMyZDE5NzUtZTRhOGU3ZDItZTBiMGQyOTU=, ActorId: [3:7587122181296809355:2537], ActorState: ExecuteState, TraceId: 01kd67hfhc2y4hzmbpqzjynn94, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `/Root/TwoShard`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2025-12-23T18:29:02.259928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:29:02.260019Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 
FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 
tep: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 
FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2025-12-23T18:29:03.677311Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::List >> TCacheTest::MigrationCommon |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> TCacheTest::PathBelongsToDomain [GOOD] >> TControlPlaneProxyTest::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateBinding |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |90.7%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] >> TMLPWriterTests::TopicNotExists [GOOD] >> TMLPWriterTests::EmptyWrite >> TCacheTest::List [GOOD] >> TCacheTest::MigrationCommit >> DataShardWrite::AsyncIndexKeySizeConstraint [GOOD] >> DataShardWrite::DeleteImmediate >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertMissingKeyAtSnapshot [GOOD] >> DataShardSnapshotIsolation::ReadWriteObserveOwnChanges >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2025-12-23T18:29:03.814424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:29:03.814503Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... Step5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2025-12-23T18:29:05.030115Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:29:05.041923Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-12-23T18:29:05.043401Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2025-12-23T18:29:05.058127Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:29:05.058360Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestModificationResults wait txId: 104 2025-12-23T18:29:05.059765Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 104:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 FAKE_COORDINATOR: Erasing txId 
104 TestModificationResult got TxId: 104, wait until txId: 104 TestModificationResults wait txId: 105 FAKE_COORDINATOR: Add transaction: 105 at step: 5000042 FAKE_COORDINATOR: advance: minStep5000042 State->FrontStep: 5000041 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000042 FAKE_COORDINATOR: Erasing txId 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 104 TestWaitNotification wait txId: 105 2025-12-23T18:29:05.074379Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:29:05.074609Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 2025-12-23T18:29:05.081894Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:505:2473], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:29:05.082556Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:507:2475], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:29:05.082963Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:509:2477], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:29:05.084937Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:522:2484], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] 2025-12-23T18:29:05.086338Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:531:2487], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] 2025-12-23T18:29:05.087612Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:539:2495], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:29:05.087925Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:541:2497], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:29:05.088266Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:543:2499], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:29:05.089140Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:549:2505], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] 2025-12-23T18:29:05.089460Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:551:2507], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] |90.7%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/scheme_board/ut_cache/unittest |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue >> TControlPlaneProxyTest::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationDeletedPathNavigate >> TCacheTest::WatchRoot >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] >> TCacheTest::MigrationCommit [GOOD] >> TCacheTest::CookiesArePreserved >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] [GOOD] |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |90.7%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] Test command err: Trying to start YDB, gRPC: 16480, MsgBus: 28951 2025-12-23T18:28:32.421992Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122060429007387:2237];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:32.422115Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:28:32.457763Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005aec/r3tmp/tmptfMame/pdisk_1.dat 2025-12-23T18:28:32.950220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:32.967977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:33.022709Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:33.038451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:33.086862Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122060429007173:2081] 1766514512381596 != 1766514512381599 2025-12-23T18:28:33.091428Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16480, node 1 2025-12-23T18:28:33.304512Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2025-12-23T18:28:33.347265Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:33.347286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:33.347292Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:33.347358Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:28:33.403025Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28951 TClient is connected to server localhost:28951 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514513099 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:34.466145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:28:34.479141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:34.712402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:34.977065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:28:35.133571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:37.414894Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122060429007387:2237];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:37.414976Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:28:37.796146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122081903845525:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:37.796292Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:37.796872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122081903845535:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:37.796930Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:38.408551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:38.464147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:38.508724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:38.570425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:38.626565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:38.716364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:38.805300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:38.918185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:39.070894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122090493781002:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:39.071018Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:39.075455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122090493781007:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:39.075532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122090493781008:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:39.075720Z node 1 :KQP_WORKLOAD_SE ... ableGrpc on GrpcPort 21962, node 3 2025-12-23T18:28:54.422898Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:28:54.462412Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:54.462444Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:54.462456Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:54.462558Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:28:54.857498Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28897 TClient is connected to server localhost:28897 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514534330 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:55.553439Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:28:55.574986Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:28:55.597910Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:55.712500Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:28:55.995576Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:56.098278Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:58.818228Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587122151307735248:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:58.818308Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:28:59.642902Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122177077540887:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:59.643010Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:59.643670Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122177077540897:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:59.643739Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:59.737553Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:59.800351Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:59.879921Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:59.940387Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:00.009808Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:00.122416Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:00.201926Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:00.325156Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:00.565714Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122181372509070:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:00.565811Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:00.566125Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122181372509075:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:00.566162Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122181372509076:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:00.566260Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:00.570322Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:00.613070Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587122181372509079:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:29:00.719662Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587122181372509134:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:03.645354Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> TCacheTest::WatchRoot [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction >> TStorageTenantTest::CreateSolomonInsideSubDomain >> TCacheTest::CookiesArePreserved [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeBinding |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |90.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut >> TStorageTenantTest::CreateTableInsideSubDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 19794, MsgBus: 63815 2025-12-23T18:28:30.470326Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122049995980752:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:30.471555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:28:30.532239Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005aee/r3tmp/tmpxP13eQ/pdisk_1.dat 2025-12-23T18:28:31.142904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:31.152530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-12-23T18:28:31.152609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:31.279256Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122049995980719:2081] 1766514510440541 != 1766514510440544 2025-12-23T18:28:31.283583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:28:31.285704Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:31.328541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 19794, node 1 2025-12-23T18:28:31.431172Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:28:31.503414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:28:31.503438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:28:31.503449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:28:31.503526Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63815 TClient is connected to server localhost:63815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514511349 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:32.316357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:28:32.323390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:28:32.336803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:32.630697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:32.978512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:33.125707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:35.457301Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122049995980752:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:35.457456Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:28:35.534301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122071470819068:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:35.534449Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:35.535581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122071470819078:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:35.535738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:36.322353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.387479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.433229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.491186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.553541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.645939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.760863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.821734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:28:36.936678Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122075765787255:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:36.936790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:36.937134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122075765787260:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:28:36.937189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122075765787261:2490], DatabaseId: /Root, PoolId: defau ... ration type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:55.586950Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:55.730269Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:28:58.736418Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587122147439701328:2206];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:58.736494Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:29:01.416435Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122181799441420:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:01.416508Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:01.416743Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122181799441430:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:01.416773Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:01.661875Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:01.723226Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:01.821204Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:01.911456Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:01.987889Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:02.087021Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:02.134699Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:02.210968Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:02.413087Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122186094409607:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:02.413197Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:02.413582Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122186094409612:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:02.413638Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587122186094409613:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:02.413735Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:02.419736Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:02.442120Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-12-23T18:29:02.442409Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587122186094409616:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:29:02.540691Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587122186094409667:3788] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:05.388693Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:06.250652Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=6; 2025-12-23T18:29:06.255108Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037927 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-12-23T18:29:06.255322Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037927 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-12-23T18:29:06.255595Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [3:7587122203274279456:2545], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7587122198979311876:2545]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7587122203274279456:2545].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2025-12-23T18:29:06.256117Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [3:7587122203274279421:2545], SessionActorId: [3:7587122198979311876:2545], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[3:7587122198979311876:2545]. 2025-12-23T18:29:06.256393Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=3&id=MzBjY2FiMTMtMWIwOWRhZGQtNDFhODdhM2ItY2FiYjc5N2E=, ActorId: [3:7587122198979311876:2545], ActorState: ExecuteState, TraceId: 01kd67hnc95ndwxy2cyhd6t30g, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7587122203274279450:2545] from: [3:7587122203274279421:2545] 2025-12-23T18:29:06.256503Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [3:7587122203274279450:2545] TxId: 281474976715677. Ctx: { TraceId: 01kd67hnc95ndwxy2cyhd6t30g, Database: /Root, SessionId: ydb://session/3?node_id=3&id=MzBjY2FiMTMtMWIwOWRhZGQtNDFhODdhM2ItY2FiYjc5N2E=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } 2025-12-23T18:29:06.256925Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=MzBjY2FiMTMtMWIwOWRhZGQtNDFhODdhM2ItY2FiYjc5N2E=, ActorId: [3:7587122198979311876:2545], ActorState: ExecuteState, TraceId: 01kd67hnc95ndwxy2cyhd6t30g, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/TestImmediateEffects`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } } |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CookiesArePreserved [GOOD] Test command err: 2025-12-23T18:29:05.225490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:29:05.225582Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 
State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 
RDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 
281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2025-12-23T18:29:08.595160Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-12-23T18:29:08.606653Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:29:08.624829Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TStorageTenantTest::DeclareAndDefine |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite >> TControlPlaneProxyTest::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyBinding |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |90.7%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut >> TAuditTest::AuditDisabledWithoutAppData [GOOD] >> TAuditTest::OptionsRequestsAreNotAudited [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteBinding |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartNo_Table [GOOD] >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::OptionsRequestsAreNotAudited [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> 
TAuditTest::AuditDisabledWithoutAppData [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] [GOOD] |90.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> TxUsage::WriteToTopic_Demo_19_RestartNo_Query |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |90.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |90.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> KeyValueGRPCService::SimpleWriteListRangeV1 [GOOD] >> KeyValueGRPCService::SimpleWriteListRangeV2 >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] |90.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |90.8%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> DataShardSnapshotIsolation::ReadWriteObserveOwnChanges [GOOD] >> DataShardSnapshotIsolation::ReadWriteCommitConflictThenRead |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] Test command err: 2025-12-23T18:25:36.595048Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:682: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-12-23T18:25:37.504074Z node 2 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:752: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:38.280125Z node 3 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:817: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:39.158500Z node 4 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:884: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:39.697636Z node 5 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:40.151055Z node 6 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:40.534681Z node 7 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:41.007249Z node 8 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1158: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:41.442776Z node 9 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1223: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-12-23T18:25:41.911862Z node 10 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1288: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:42.409324Z node 11 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:42.841484Z node 12 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:43.364205Z node 13 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1507: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:43.893857Z node 14 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1572: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:44.329100Z node 15 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:45.083886Z node 16 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:45.916640Z node 17 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1793: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:46.410506Z node 18 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:46.817974Z node 19 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:47.238972Z node 20 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:47.717468Z node 21 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2126: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:48.199728Z node 22 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2191: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:48.637739Z node 23 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:25:49.143019Z node 24 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:13.022899Z node 72 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:682: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-12-23T18:26:13.557059Z node 73 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:752: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:14.070004Z node 74 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:817: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:14.597383Z node 75 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:884: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:15.111251Z node 76 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:15.636034Z node 77 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:16.215571Z node 78 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:16.734886Z node 79 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1158: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:17.231129Z node 80 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1223: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-12-23T18:26:17.799431Z node 81 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1288: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:18.303681Z node 82 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:18.850008Z node 83 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:19.401007Z node 84 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1507: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:19.915729Z node 85 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1572: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:20.460426Z node 86 :YQ_CONTROL_PLANE_STORAG ... L_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:22.556651Z node 90 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:23.083960Z node 91 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:23.628321Z node 92 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2126: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:24.171396Z node 93 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2191: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:24.730363Z node 94 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:26:25.269064Z node 95 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:27:36.594203Z node 163 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:682: CreateQueryRequest, validation failed: test_user_3@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-12-23T18:27:41.648025Z node 166 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:884: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:27:42.774790Z node 167 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:27:43.932243Z node 168 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:27:45.040156Z node 169 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:27:46.577617Z node 170 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1158: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:27:50.762128Z node 173 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:27:54.633188Z node 176 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:27:56.195617Z node 177 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1793: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:27:57.721719Z node 178 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:27:59.252855Z node 179 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:03.194012Z node 182 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:04.514237Z node 183 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:07.412825Z node 185 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:752: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:08.580653Z node 186 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:817: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:11.353469Z node 188 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:12.651995Z node 189 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:13.770761Z node 190 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:16.318174Z node 192 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1223: ListJobsRequest, validation failed: test_user_4@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-12-23T18:28:17.579174Z node 193 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1288: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:18.917409Z node 194 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:20.103506Z node 195 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1507: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:21.708809Z node 196 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1572: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:23.323918Z node 197 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:24.910406Z node 198 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1793: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:26.850265Z node 199 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:28.358541Z node 200 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:29.698468Z node 201 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2126: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:31.138558Z node 202 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2191: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:32.697605Z node 203 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-12-23T18:28:34.301096Z node 204 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/control_plane_proxy/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::IncrementImmediate |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> TSyncBrokerTests::ShouldReturnToken >> TSyncBrokerTests::ShouldReturnToken [GOOD] >> TSyncBrokerTests::ShouldReleaseToken >> TSyncBrokerTests::ShouldReleaseToken [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] Test command err: 2025-12-23T18:29:09.532734Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122219129416155:2241];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:09.532774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:29:09.621799Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.009217s 2025-12-23T18:29:09.633225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056eb/r3tmp/tmpOPxZwS/pdisk_1.dat 2025-12-23T18:29:10.006397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:29:10.101819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:10.117615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:10.164433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:10.482404Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:10.520074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:29:10.562903Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is 
connected to server localhost:61979 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:29:10.948390Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587122219129416197:2119] Handle TEvNavigate describe path dc-1 2025-12-23T18:29:10.948528Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587122223424384298:2745] HANDLE EvNavigateScheme dc-1 2025-12-23T18:29:10.948748Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587122219129416225:2133], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:10.948951Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587122219129416475:2301][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587122219129416225:2133], cookie# 1 2025-12-23T18:29:10.951054Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122219129416528:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122219129416525:2301], cookie# 1 2025-12-23T18:29:10.951103Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122219129416529:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122219129416526:2301], cookie# 1 2025-12-23T18:29:10.951134Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122219129416530:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122219129416527:2301], cookie# 1 2025-12-23T18:29:10.951781Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122219129415879:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122219129416528:2301], cookie# 1 2025-12-23T18:29:10.951856Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122219129415882:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122219129416529:2301], cookie# 1 2025-12-23T18:29:10.951878Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122219129415885:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122219129416530:2301], cookie# 1 2025-12-23T18:29:10.952747Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122219129416528:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122219129415879:2051], cookie# 1 2025-12-23T18:29:10.952772Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122219129416529:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122219129415882:2054], cookie# 1 2025-12-23T18:29:10.952790Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122219129416530:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122219129415885:2057], cookie# 1 2025-12-23T18:29:10.952854Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122219129416475:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# 
[1:7587122219129416525:2301], cookie# 1 2025-12-23T18:29:10.952908Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587122219129416475:2301][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:29:10.952939Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122219129416475:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122219129416526:2301], cookie# 1 2025-12-23T18:29:10.952982Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587122219129416475:2301][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:29:10.953032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122219129416475:2301][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122219129416527:2301], cookie# 1 2025-12-23T18:29:10.953061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587122219129416475:2301][/dc-1] Sync cookie mismatch: sender# [1:7587122219129416527:2301], cookie# 1, current cookie# 0 2025-12-23T18:29:10.953300Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587122219129416225:2133], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:29:10.953413Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587122219129416225:2133], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587122219129416475:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:29:10.953601Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587122219129416225:2133], cacheItem# { Subscriber: { Subscriber: [1:7587122219129416475:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:29:10.970177Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587122223424384301:2748], recipient# [1:7587122223424384298:2745], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:29:10.970278Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587122223424384298:2745] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:29:11.024212Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587122223424384298:2745] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:29:11.028141Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587122223424384298:2745] Handle TEvDescribeSchemeResult Forward to# [1:7587122223424384297:2744] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPath ... 
precated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-12-23T18:29:12.621812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 6 2025-12-23T18:29:12.622033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-12-23T18:29:12.622194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2025-12-23T18:29:12.622277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-12-23T18:29:12.622344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 5 2025-12-23T18:29:12.622414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-12-23T18:29:12.622486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 4 2025-12-23T18:29:12.622559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-12-23T18:29:12.622653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:29:12.622718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:29:12.622849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2025-12-23T18:29:12.622976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2025-12-23T18:29:12.623233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2025-12-23T18:29:12.623397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-12-23T18:29:12.623539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2025-12-23T18:29:12.623646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:29:12.623679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2025-12-23T18:29:12.623743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2025-12-23T18:29:12.623863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:29:12.623885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2025-12-23T18:29:12.624010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:29:12.648487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2025-12-23T18:29:12.648531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-12-23T18:29:12.648574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2025-12-23T18:29:12.648581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-12-23T18:29:12.659833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:29:12.659875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-12-23T18:29:12.659932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2025-12-23T18:29:12.659940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-12-23T18:29:12.659964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2025-12-23T18:29:12.659972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-12-23T18:29:12.660080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:29:12.660088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-12-23T18:29:12.666994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:8 2025-12-23T18:29:12.667027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-12-23T18:29:12.667066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted 
shardIdx 72057594046644480:5 2025-12-23T18:29:12.667098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-12-23T18:29:12.667135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2025-12-23T18:29:12.667200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:29:12.667230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:29:12.667257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2025-12-23T18:29:12.667358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-12-23T18:29:12.675915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:29:12.900052Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587122225792649841:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:12.901316Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587122230087617476:2308], recipient# [3:7587122230087617475:2305], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:12.903103Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:13.899601Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587122225792649841:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:13.899807Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587122234382584774:2309], recipient# [3:7587122234382584773:2306], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:14.903086Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587122225792649841:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:14.903234Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587122238677552072:2310], recipient# [3:7587122238677552071:2307], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId >> TSyncNeighborsTests::SerDes3 [GOOD] >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId [GOOD] >> TSyncNeighborsTests::SerDes1 [GOOD] |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |90.8%| [TM] {RESULT} ydb/core/fq/libs/control_plane_proxy/ut/unittest |90.8%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseToken [GOOD] Test command err: 2025-12-23T18:29:17.968748Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2025-12-23T18:29:18.076086Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2025-12-23T18:29:18.076201Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:142: TEvReleaseSyncToken, token released: VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 0 waiting# 0 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] Test command err: 
2025-12-23T18:29:10.102919Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122223281922167:2153];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:10.162127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056e5/r3tmp/tmpsShnYr/pdisk_1.dat 2025-12-23T18:29:10.718930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:29:10.932272Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:10.964429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:10.964518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:11.012415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:11.016104Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:29:11.166918Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:20830 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:29:11.387222Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587122223281922314:2125] Handle TEvNavigate describe path dc-1 2025-12-23T18:29:11.387282Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587122227576890400:2754] HANDLE EvNavigateScheme dc-1 2025-12-23T18:29:11.387408Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587122223281922334:2137], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:11.387521Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587122223281922568:2290][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587122223281922334:2137], cookie# 1 2025-12-23T18:29:11.389215Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122223281922603:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122223281922600:2290], cookie# 1 2025-12-23T18:29:11.389247Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122223281922604:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122223281922601:2290], cookie# 1 2025-12-23T18:29:11.389270Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122223281922605:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122223281922602:2290], cookie# 1 2025-12-23T18:29:11.389308Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122218986954693:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122223281922603:2290], cookie# 1 2025-12-23T18:29:11.389351Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122218986954696:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122223281922604:2290], cookie# 1 2025-12-23T18:29:11.389376Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122218986954699:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122223281922605:2290], cookie# 1 2025-12-23T18:29:11.389419Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122223281922603:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122218986954693:2051], cookie# 1 2025-12-23T18:29:11.389457Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122223281922604:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122218986954696:2054], cookie# 1 2025-12-23T18:29:11.389475Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122223281922605:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122218986954699:2057], cookie# 1 2025-12-23T18:29:11.389530Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122223281922568:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122223281922600:2290], cookie# 1 2025-12-23T18:29:11.389566Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587122223281922568:2290][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:29:11.389588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122223281922568:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122223281922601:2290], cookie# 1 2025-12-23T18:29:11.389609Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587122223281922568:2290][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:29:11.389634Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122223281922568:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122223281922602:2290], cookie# 1 2025-12-23T18:29:11.389648Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587122223281922568:2290][/dc-1] Sync cookie mismatch: sender# [1:7587122223281922602:2290], cookie# 1, current cookie# 0 2025-12-23T18:29:11.389724Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587122223281922334:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:29:11.389798Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587122223281922334:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587122223281922568:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:29:11.389889Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587122223281922334:2137], cacheItem# { Subscriber: { Subscriber: [1:7587122223281922568:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:29:11.394103Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587122227576890401:2755], recipient# [1:7587122227576890400:2754], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:29:11.394202Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587122227576890400:2754] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:29:11.483577Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587122227576890400:2754] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:29:11.486485Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587122227576890400:2754] Handle TEvDescribeSchemeResult Forward to# [1:7587122227576890399:2753] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { ... 
CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1766514553300 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: ... (TRUNCATED) 2025-12-23T18:29:13.481826Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-12-23T18:29:13.484689Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7587122218986954693:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7587122231441174907:2106] 2025-12-23T18:29:13.484734Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7587122218986954693:2051] Unsubscribe: subscriber# [3:7587122231441174907:2106], path# /dc-1/USER_0 2025-12-23T18:29:13.484788Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7587122218986954696:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7587122231441174908:2106] 2025-12-23T18:29:13.484802Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7587122218986954696:2054] Unsubscribe: subscriber# [3:7587122231441174908:2106], path# /dc-1/USER_0 2025-12-23T18:29:13.484825Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7587122218986954699:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7587122231441174909:2106] 2025-12-23T18:29:13.484835Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7587122218986954699:2057] Unsubscribe: subscriber# [3:7587122231441174909:2106], path# /dc-1/USER_0 2025-12-23T18:29:13.487848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:29:13.506332Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587122231441175086:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:13.506450Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587122231441175086:2107], cacheItem# { Subscriber: { Subscriber: [3:7587122231441175141:2121] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: 
StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:13.506860Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587122235736142813:2359], recipient# [3:7587122235736142812:2311], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:14.518950Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587122231441175086:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:14.519091Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587122231441175086:2107], cacheItem# { Subscriber: { Subscriber: [3:7587122231441175141:2121] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:14.519218Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587122240031110120:2361], recipient# [3:7587122240031110119:2312], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:15.526870Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587122231441175086:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:15.526988Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587122231441175086:2107], cacheItem# { Subscriber: { Subscriber: [3:7587122231441175141:2121] DomainOwnerId: 72057594046644480 Type: 2 
SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:15.527054Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587122244326077418:2362], recipient# [3:7587122244326077417:2313], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] Test command err: 2025-12-23T18:29:08.779972Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122212002837819:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:08.780023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00570f/r3tmp/tmpnOpkn2/pdisk_1.dat 2025-12-23T18:29:09.314486Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:29:09.692473Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:09.694054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:09.694166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:09.698684Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:29:09.723515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:09.847330Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:10.225372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:9977 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:29:10.442611Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587122212002838045:2144] Handle TEvNavigate describe path dc-1 2025-12-23T18:29:10.442679Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587122220592773433:2771] HANDLE EvNavigateScheme dc-1 2025-12-23T18:29:10.442824Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587122212002838068:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:10.442920Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587122216297805558:2284][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587122212002838068:2157], cookie# 1 2025-12-23T18:29:10.444528Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122216297805562:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122216297805559:2284], cookie# 1 2025-12-23T18:29:10.444573Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122216297805563:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122216297805560:2284], cookie# 1 2025-12-23T18:29:10.444589Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122216297805564:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122216297805561:2284], cookie# 1 2025-12-23T18:29:10.444654Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122212002837690:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122216297805562:2284], cookie# 1 2025-12-23T18:29:10.444691Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122212002837693:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122216297805563:2284], cookie# 1 2025-12-23T18:29:10.444708Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122212002837696:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122216297805564:2284], cookie# 1 2025-12-23T18:29:10.444750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122216297805562:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122212002837690:2051], cookie# 1 2025-12-23T18:29:10.444770Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122216297805563:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122212002837693:2054], cookie# 1 2025-12-23T18:29:10.444809Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122216297805564:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122212002837696:2057], cookie# 1 2025-12-23T18:29:10.444852Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122216297805558:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122216297805559:2284], cookie# 1 2025-12-23T18:29:10.444880Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587122216297805558:2284][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:29:10.444901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122216297805558:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122216297805560:2284], cookie# 1 2025-12-23T18:29:10.444924Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587122216297805558:2284][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:29:10.444949Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122216297805558:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122216297805561:2284], cookie# 1 2025-12-23T18:29:10.444962Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587122216297805558:2284][/dc-1] Sync cookie mismatch: sender# [1:7587122216297805561:2284], cookie# 1, current cookie# 0 2025-12-23T18:29:10.445018Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587122212002838068:2157], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:29:10.445097Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587122212002838068:2157], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587122216297805558:2284] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:29:10.445227Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587122212002838068:2157], cacheItem# { Subscriber: { Subscriber: [1:7587122216297805558:2284] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:29:10.451624Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587122220592773434:2772], recipient# [1:7587122220592773433:2771], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:29:10.451721Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587122220592773433:2771] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:29:10.501736Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587122220592773433:2771] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:29:10.504908Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587122220592773433:2771] Handle TEvDescribeSchemeResult Forward to# [1:7587122220592773432:2770] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: E ... 
IBER INFO: subscriber.cpp:867: [main][3:7587122224999439315:2112][/dc-1/USER_0] Path was already updated: owner# [3:7587122224999439240:2109], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 38], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 38], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:29:11.527453Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7587122224999439240:2109], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Strong: 1 } 2025-12-23T18:29:11.527557Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7587122224999439240:2109], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7587122224999439315:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1766514550990 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] DomainId: [OwnerId: 72057594046644480, LocalPathId: 38] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [3:7587122224999439315:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1766514550990 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] DomainId: [OwnerId: 72057594046644480, LocalPathId: 38] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-12-23T18:29:11.537103Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587122212002837693:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7587122224999439320:2112] 2025-12-23T18:29:11.538042Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587122212002837696:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7587122224999439321:2112] 2025-12-23T18:29:11.539431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:29:11.539504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:29:11.539521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:29:11.539531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:29:11.539542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:29:11.551614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-12-23T18:29:11.551982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId 
[OwnerId: 72057594046644480, LocalPathId: 38] was 5 2025-12-23T18:29:11.552272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-12-23T18:29:11.552493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 4 2025-12-23T18:29:11.552617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-12-23T18:29:11.552792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2025-12-23T18:29:11.552898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:29:11.553026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2025-12-23T18:29:11.553192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715660 2025-12-23T18:29:11.553264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715660 2025-12-23T18:29:11.553358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:29:11.553379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2025-12-23T18:29:11.553496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:29:11.553662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:29:11.553682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2025-12-23T18:29:11.553739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-12-23T18:29:11.554449Z node 1 :HIVE WARN: tx__block_storage_result.cpp:34: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037889 2025-12-23T18:29:11.556783Z node 1 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-12-23T18:29:11.564841Z node 1 :HIVE WARN: tx__block_storage_result.cpp:34: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete 
status was NO_GROUP for TabletId 72075186224037888 2025-12-23T18:29:11.573610Z node 1 :HIVE WARN: tx__block_storage_result.cpp:34: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037890 2025-12-23T18:29:11.574949Z node 1 :HIVE WARN: tx__block_storage_result.cpp:34: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037891 TabletID: 72075186224037888 Status: OK Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } TabletType: Coordinator Version: 1 TenantIdOwner: 72057594046644480 TenantIdLocalId: 38 } 2025-12-23T18:29:11.581605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2025-12-23T18:29:11.581661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-12-23T18:29:11.581732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:29:11.581742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-12-23T18:29:11.581766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2025-12-23T18:29:11.581774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-12-23T18:29:11.581804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:29:11.581829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-12-23T18:29:11.581868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:29:11.581909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:29:11.583660Z node 1 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-12-23T18:29:11.836091Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587122212002838068:2157], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:11.836229Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: 
FillEntry for TNavigate: self# [1:7587122212002838068:2157], cacheItem# { Subscriber: { Subscriber: [1:7587122216297805805:2478] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:11.836378Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587122224887741293:3194], recipient# [1:7587122224887741292:2322], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD] Test command err: 2025-12-23T18:29:18.413648Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2025-12-23T18:29:18.413877Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:51: TEvQuerySyncToken, token sent (1): VDisk actor id# [0:1:1] actor id# [1:6:2053] active# 1 waiting# 0 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |90.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> TSyncBrokerTests::ShouldEnqueue |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueue [GOOD] >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] Test command err: 2025-12-23T18:29:10.843883Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122224120109639:2087];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:10.843963Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056e2/r3tmp/tmpIpDVXC/pdisk_1.dat 2025-12-23T18:29:11.368783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:29:11.424522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:11.424619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:11.447558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:11.617896Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:11.658368Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:29:11.906168Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:27866 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:29:12.002096Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587122224120109850:2119] Handle TEvNavigate describe path dc-1 2025-12-23T18:29:12.002166Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587122232710045247:2744] HANDLE EvNavigateScheme dc-1 2025-12-23T18:29:12.002280Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587122224120109856:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:12.002373Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587122228415077410:2291][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587122224120109856:2121], cookie# 1 2025-12-23T18:29:12.004224Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122228415077445:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122228415077442:2291], cookie# 1 2025-12-23T18:29:12.004274Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122228415077446:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122228415077443:2291], cookie# 1 2025-12-23T18:29:12.004291Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122228415077447:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122228415077444:2291], cookie# 1 2025-12-23T18:29:12.004325Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122224120109530:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122228415077445:2291], cookie# 1 2025-12-23T18:29:12.004371Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122224120109533:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122228415077446:2291], cookie# 1 2025-12-23T18:29:12.004431Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122224120109536:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122228415077447:2291], cookie# 1 2025-12-23T18:29:12.004491Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122228415077445:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122224120109530:2050], cookie# 1 2025-12-23T18:29:12.004510Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122228415077446:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122224120109533:2053], cookie# 1 2025-12-23T18:29:12.004526Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122228415077447:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122224120109536:2056], cookie# 1 2025-12-23T18:29:12.004569Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122228415077410:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122228415077442:2291], cookie# 1 2025-12-23T18:29:12.004596Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587122228415077410:2291][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:29:12.004623Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122228415077410:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122228415077443:2291], cookie# 1 2025-12-23T18:29:12.004654Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587122228415077410:2291][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:29:12.004684Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122228415077410:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122228415077444:2291], cookie# 1 2025-12-23T18:29:12.004696Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587122228415077410:2291][/dc-1] Sync cookie mismatch: sender# [1:7587122228415077444:2291], cookie# 1, current cookie# 0 2025-12-23T18:29:12.004751Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587122224120109856:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:29:12.004862Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587122224120109856:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587122228415077410:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:29:12.004953Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587122224120109856:2121], cacheItem# { Subscriber: { Subscriber: [1:7587122228415077410:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:29:12.009431Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587122232710045248:2745], recipient# [1:7587122232710045247:2744], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:29:12.009516Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587122232710045247:2744] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:29:12.060898Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587122232710045247:2744] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:29:12.063854Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587122232710045247:2744] Handle TEvDescribeSchemeResult Forward to# [1:7587122232710045246:2743] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Childr ... 
DoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:16.081949Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7587122249889915224:3330][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7587122224120109530:2050] 2025-12-23T18:29:16.081977Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7587122249889915225:3330][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7587122224120109533:2053] 2025-12-23T18:29:16.082013Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7587122249889915212:3330][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7587122249889915221:3330] 2025-12-23T18:29:16.082108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7587122249889915212:3330][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7587122249889915222:3330] 2025-12-23T18:29:16.082145Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122249889915212:3330][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7587122224120109856:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:29:16.082178Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587122224120109530:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587122249889915224:3330] 2025-12-23T18:29:16.082196Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587122224120109533:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587122249889915225:3330] 2025-12-23T18:29:16.082264Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587122224120109856:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-12-23T18:29:16.082331Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587122224120109856:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7587122249889915212:3330] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:29:16.082422Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587122224120109856:2121], cacheItem# { Subscriber: { Subscriber: [1:7587122249889915212:3330] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: 
StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:16.082525Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587122249889915227:3331], recipient# [1:7587122249889915207:2324], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:16.082761Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587122224120109536:2056] Subscribe: subscriber# [1:7587122249889915226:3330], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:29:16.082852Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7587122249889915226:3330][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7587122224120109536:2056] 2025-12-23T18:29:16.082879Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7587122249889915212:3330][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7587122249889915223:3330] 2025-12-23T18:29:16.082908Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7587122249889915212:3330][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7587122224120109856:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:29:16.082928Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587122224120109536:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587122249889915226:3330] 2025-12-23T18:29:16.853547Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587122224120109856:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:16.853674Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587122224120109856:2121], cacheItem# { Subscriber: { Subscriber: [1:7587122228415077938:2737] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:16.853763Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587122249889915246:3334], recipient# [1:7587122249889915245:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:16.859151Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587122224120109856:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:16.859338Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587122224120109856:2121], cacheItem# { Subscriber: { Subscriber: [1:7587122228415077938:2737] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:16.859434Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587122249889915248:3335], recipient# [1:7587122249889915247:2331], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:17.073651Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587122224120109856:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:17.073784Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587122224120109856:2121], cacheItem# { Subscriber: { Subscriber: [1:7587122249889915190:3327] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:17.073868Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587122254184882549:3339], recipient# [1:7587122254184882548:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] Test command err: 2025-12-23T18:29:19.313439Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2025-12-23T18:29:19.313597Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 1 waiting# 1 2025-12-23T18:29:19.418867Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2025-12-23T18:29:19.419019Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 1 2025-12-23T18:29:19.419119Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:82: TEvQuerySyncToken, enqueued (1): VDisk actor id# [0:1:2] actor id# [2:7:2054] active# 1 waiting# 1 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] 
>> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> TTxDataShardPrefixKMeansScan::BuildToPosting [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds >> TTxDataShardPrefixKMeansScan::BuildToPostingWithOverlap >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds >> EraseRowsTests::EraseRowsShouldSuccess >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> DistributedEraseTests::ConditionalEraseRowsShouldErase >> TMLPWriterTests::EmptyWrite [GOOD] >> TMLPWriterTests::WriteOneMessage >> DataShardWrite::IncrementImmediate [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2025-12-23T18:29:08.883035Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.012788s 2025-12-23T18:29:09.035227Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122216503743123:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:09.035310Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:29:09.319947Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587122219397177353:2175];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:09.319991Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056ee/r3tmp/tmp40Kwpj/pdisk_1.dat 2025-12-23T18:29:09.718852Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:29:09.895274Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:29:10.004226Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:29:10.060019Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:10.060132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:10.068198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:10.068284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:10.087929Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:29:10.088230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:10.088489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:10.191944Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:10.217437Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:29:10.278931Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:10.280403Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:10.323590Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:10.379984Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:29:10.733704Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:22832 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:29:10.935414Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587122216503743344:2145] Handle TEvNavigate describe path dc-1 2025-12-23T18:29:10.935480Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587122220798711392:2728] HANDLE EvNavigateScheme dc-1 2025-12-23T18:29:10.935593Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587122216503743350:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:10.935691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587122216503743567:2287][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587122216503743350:2147], cookie# 1 2025-12-23T18:29:10.937478Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122216503743604:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122216503743601:2287], cookie# 1 2025-12-23T18:29:10.937536Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122216503743605:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122216503743602:2287], cookie# 1 2025-12-23T18:29:10.937575Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587122216503743606:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122216503743603:2287], cookie# 1 2025-12-23T18:29:10.937628Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122212208775692:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122216503743604:2287], cookie# 1 2025-12-23T18:29:10.937664Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122212208775695:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122216503743605:2287], cookie# 1 2025-12-23T18:29:10.937684Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587122212208775698:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587122216503743606:2287], cookie# 1 2025-12-23T18:29:10.937736Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122216503743604:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122212208775692:2052], cookie# 1 2025-12-23T18:29:10.937756Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122216503743605:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122212208775695:2055], cookie# 1 2025-12-23T18:29:10.937773Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587122216503743606:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122212208775698:2058], cookie# 1 2025-12-23T18:29:10.937820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122216503743567:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122216503743601:2287], cookie# 1 2025-12-23T18:29:10.937858Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587122216503743567:2287][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:29:10.937878Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122216503743567:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122216503743602:2287], cookie# 1 2025-12-23T18:29:10.937902Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587122216503743567:2287][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:29:10.937931Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587122216503743567:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587122216503743603:2287], cookie# 1 2025-12-23T18:29:10.937945Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587122216503743567:2287][/dc-1] Sync cookie mismatch: sender# [1:7587122216503743603:2287], cookie# 1, current cookie# 0 2025-12-23T18:29:10.938007Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587122216503743350:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:29:10.938106Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587122216503743350:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587122216503743567:2287] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:29:10.938249Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587122216503743350:2147], cacheItem# { Subscriber: { Subscriber: [1:7587122216503743567:2287] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:29:10.942523Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587122220798711393:2729], recipient# [1:7587122220798711392:2728], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:29:10.942618Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587122220798711392:2728] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:29:11.071753Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587122220798711392:2728] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPriv ... valid> DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:19.523729Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587122261600065393:2235], recipient# [3:7587122261600065392:2537], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:19.534945Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587122240125228562:2179], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:19.535086Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587122240125228562:2179], cacheItem# { Subscriber: { Subscriber: [3:7587122253010130713:2213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:19.535166Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587122240125228562:2179], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:19.535234Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587122240125228562:2179], cacheItem# { Subscriber: { Subscriber: [3:7587122253010130712:2212] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { 
Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:19.535288Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587122240125228562:2179], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:19.535365Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587122240125228562:2179], cacheItem# { Subscriber: { Subscriber: [3:7587122253010130713:2213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:19.535454Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587122261600065397:2236], recipient# [3:7587122261600065394:2538], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:19.535540Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587122261600065398:2237], recipient# [3:7587122261600065395:2539], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:19.535607Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587122261600065399:2238], recipient# [3:7587122261600065396:2540], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:20.171236Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7587122219397177462:2109], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 
TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:20.171410Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7587122219397177462:2109], cacheItem# { Subscriber: { Subscriber: [2:7587122249461948621:2128] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:20.171536Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7587122266641817852:2144], recipient# [2:7587122266641817851:2315], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:20.338520Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7587122219397177462:2109], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:20.338687Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7587122219397177462:2109], cacheItem# { Subscriber: { Subscriber: [2:7587122223692144786:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:20.338949Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7587122266641817854:2145], recipient# [2:7587122266641817853:2316], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:20.383800Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7587122219397177462:2109], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:29:20.383957Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7587122219397177462:2109], cacheItem# { Subscriber: { Subscriber: [2:7587122223692144786:2116] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:29:20.384076Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7587122266641817859:2146], recipient# [2:7587122266641817858:2317], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> EraseRowsTests::ConditionalEraseRowsShouldNotErase |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TSyncBrokerTests::ShouldProcessAfterRelease |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TSyncBrokerTests::ShouldProcessAfterRelease [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/backup_collection/ydb-tests-functional-backup_collection |90.8%| [LD] {RESULT} $(B)/ydb/tests/functional/backup_collection/ydb-tests-functional-backup_collection |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/backup_collection/ydb-tests-functional-backup_collection >> DataShardSnapshotIsolation::ReadWriteCommitConflictThenRead [GOOD] >> DataShardSnapshotIsolation::ReadWriteCommitConflictWhileReading ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] Test command err: 2025-12-23T18:29:24.021350Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2025-12-23T18:29:24.021521Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 1 waiting# 1 2025-12-23T18:29:24.021641Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:111: ProcessQueue(), token sent: VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 0 waiting# 1 2025-12-23T18:29:24.021692Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:122: ProcessQueue() done: active# 1 waiting# 0 
2025-12-23T18:29:24.021733Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:142: TEvReleaseSyncToken, token released: VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2025-12-23T18:29:24.125995Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2025-12-23T18:29:24.126141Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 1 2025-12-23T18:29:24.126211Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:161: TEvReleaseSyncToken, removed from queue: VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 0 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> test_s3.py::TestPgYdbS3TTL::test_s3[table_all_types-pk_types3-all_types3-index3---] [GOOD] |90.8%| [TA] $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile |90.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> KeyValueGRPCService::SimpleWriteListRangeV2 [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatusV1 |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartNo_Query [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Table |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> EraseRowsTests::EraseRowsShouldSuccess [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 >> TAuditTest::OtherGetRequestsAreAudited [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] |90.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |90.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds |90.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::OtherGetRequestsAreAudited [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsCheckLimits |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |90.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows |90.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |90.8%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> 
test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |90.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |90.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> DataShardWrite::UncommittedUpdateLockMissingRow |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |90.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> DataShardWrite::DelayedVolatileTxAndEvWrite |90.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |90.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TAuditTest::DeniedPathsAreNotAudited [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::DeniedPathsAreNotAudited [GOOD] |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TTxDataShardPrefixKMeansScan::BuildToPostingWithOverlap [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuild >> DataShardSnapshotIsolation::ReadWriteCommitConflictWhileReading [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |90.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk >> TNebiusAccessServiceTest::Authenticate >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |90.9%| 
[TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] >> TNebiusAccessServiceTest::PassRequestId >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate >> BSCReadOnlyPDisk::ReadOnlySlay >> TNebiusAccessServiceTest::PassRequestId [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] Test command err: 2025-12-23T18:29:33.913594Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cf9ef9e2ed0] Connect to grpc://localhost:9894 2025-12-23T18:29:33.931451Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf9ef9e2ed0] Request AuthenticateRequest { iam_token: "**** (3C4833B6)" } 2025-12-23T18:29:33.951416Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf9ef9e2ed0] Status 7 Permission Denied 2025-12-23T18:29:33.953457Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf9ef9e2ed0] Request AuthenticateRequest { iam_token: "**** (86DDB286)" } 2025-12-23T18:29:33.956473Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf9ef9e2ed0] Response AuthenticateResponse { account { user_account { id: "1234" } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-12-23T18:29:34.093572Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d28478e3cd0]{reqId} Connect to grpc://localhost:32065 2025-12-23T18:29:34.097612Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d28478e3cd0]{reqId} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-12-23T18:29:34.117632Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d28478e3cd0]{reqId} Response AuthenticateResponse { account { user_account { id: "1234" } } } |90.9%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:25.381200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:25.533044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:25.546926Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:25.547372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:25.547457Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:25.883709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:25.883864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:25.966073Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514562167042 != 1766514562167046 2025-12-23T18:29:25.979401Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:26.031586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:26.136361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00610c/r3tmp/tmpm5Lv1V/pdisk_1.dat 2025-12-23T18:29:26.444678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:26.459987Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:26.566969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:26.603417Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:29:26.603730Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.653931Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.654253Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.656220Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:26.656313Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:26.656403Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:26.656810Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.656944Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.657041Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:29:26.667903Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.697485Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:26.697727Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.697855Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:29:26.697909Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:26.697951Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:26.697993Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:26.698505Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:26.698610Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:26.699065Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:26.699119Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:26.699167Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:26.699245Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:26.699329Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:29:26.699469Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:26.699751Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:29:26.699862Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:29:26.701839Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:26.713006Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:26.713146Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:29:26.846293Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:29:26.857197Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:29:26.857342Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:26.858315Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:26.858378Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:26.858430Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:26.858776Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:26.858944Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:26.859284Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:26.859385Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:26.861825Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:26.862382Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:26.864299Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:26.864357Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:26.864879Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:26.864975Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:26.866350Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:26.866444Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:26.866501Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:26.866587Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:26.866645Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:26.866726Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:26.871080Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:26.872828Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:26.872902Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... hard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:32.639716Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:32.639761Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.640199Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:32.640327Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:32.640408Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:32.640450Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:32.640497Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:32.640556Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:32.640651Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:882:2762], serverId# [2:887:2765], sessionId# [0:0:0] 2025-12-23T18:29:32.641153Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:32.641416Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:29:32.641498Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:29:32.643723Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:32.656714Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:32.656832Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:29:32.797224Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:916:2782], serverId# [2:918:2784], sessionId# [0:0:0] 2025-12-23T18:29:32.798068Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at 
step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:29:32.798161Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.803363Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:32.803457Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:32.803525Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:32.803962Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:32.804162Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:32.804860Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:32.804966Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:32.805558Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:32.806108Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:32.812446Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:32.812526Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.813771Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:32.813877Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:32.814727Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:32.819084Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:32.819167Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:32.819256Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:32.819319Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:32.819431Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.821697Z node 2 :TX_DATASHARD DEBUG: 
datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:32.824962Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:29:32.825230Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:32.825310Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:29:32.841428Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:952:2810], serverId# [2:953:2811], sessionId# [0:0:0] 2025-12-23T18:29:32.841618Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:32.855486Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:32.855577Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.855975Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:952:2810], serverId# [2:953:2811], sessionId# [0:0:0] 2025-12-23T18:29:32.858508Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:958:2816], serverId# [2:959:2817], sessionId# [0:0:0] 2025-12-23T18:29:32.858733Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:32.859092Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:32.859160Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.859409Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:958:2816], serverId# [2:959:2817], sessionId# [0:0:0] 2025-12-23T18:29:32.861782Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:963:2821], serverId# [2:964:2822], sessionId# [0:0:0] 2025-12-23T18:29:32.861965Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:32.862207Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:32.862260Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.862520Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:963:2821], serverId# [2:964:2822], sessionId# [0:0:0] 2025-12-23T18:29:32.869232Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:968:2826], serverId# [2:969:2827], sessionId# [0:0:0] 2025-12-23T18:29:32.869450Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:32.869684Z node 2 :TX_DATASHARD 
INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:32.869739Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.869959Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:968:2826], serverId# [2:969:2827], sessionId# [0:0:0] 2025-12-23T18:29:32.872133Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:973:2831], serverId# [2:974:2832], sessionId# [0:0:0] 2025-12-23T18:29:32.872299Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:32.872702Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:32.872767Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.873020Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:973:2831], serverId# [2:974:2832], sessionId# [0:0:0] 2025-12-23T18:29:32.875023Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:978:2836], serverId# [2:979:2837], sessionId# [0:0:0] 2025-12-23T18:29:32.875172Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:32.875445Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:32.875501Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.875765Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:978:2836], serverId# [2:979:2837], sessionId# [0:0:0] |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:25.481621Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:25.626537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:25.645166Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:25.645626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:25.645698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:25.992205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:25.992391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:26.082950Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514562178094 != 1766514562178098 2025-12-23T18:29:26.096686Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:26.146949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:26.238939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00610d/r3tmp/tmpyvkrMF/pdisk_1.dat 2025-12-23T18:29:26.550624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:26.565494Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:26.671256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:26.713928Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:29:26.714163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.770404Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.770577Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.772565Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:26.772666Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:26.772755Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:26.773133Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.773277Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.773397Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:29:26.784266Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.835124Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:26.835364Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.835481Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:29:26.835535Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:26.835601Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:26.835641Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:26.836136Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:26.836370Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:26.836820Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:26.836865Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:26.836910Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:26.836952Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:26.837047Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:29:26.837183Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:26.837450Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:29:26.837544Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:29:26.839626Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:26.850694Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:26.850849Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:29:26.981815Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:29:26.987748Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:29:26.987865Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:26.989125Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:26.989196Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:26.989268Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:26.989677Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:26.989888Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:26.990311Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:26.990407Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:26.992730Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:26.993287Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:26.995583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:26.995639Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:26.996123Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:26.996211Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:26.997699Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:26.997746Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:26.997804Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:26.997887Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:26.997944Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:26.998038Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.004371Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:27.006129Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:27.006209Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:32.815430Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:32.815752Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:32.815929Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:32.816493Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:32.816581Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:32.817270Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:32.817836Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:32.829989Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:32.830079Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.831427Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:32.831528Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:32.832363Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:32.832424Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:32.832490Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:32.832570Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:32.832636Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:32.832740Z 
node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.841564Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:32.843678Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:29:32.843949Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:32.844029Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:29:32.871191Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:32.871271Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:32.871599Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:32.872064Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:32.872242Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:32.875715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:32.881425Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:33.005405Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:33.009521Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:29:33.046267Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:33.177328Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1053:2872], serverId# [2:1054:2873], sessionId# [0:0:0] 2025-12-23T18:29:33.177882Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2025-12-23T18:29:33.178088Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-12-23T18:29:33.191648Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:33.197394Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1061:2879], serverId# [2:1062:2880], sessionId# [0:0:0] 2025-12-23T18:29:33.198451Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:33.209942Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:33.210035Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:33.210345Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:33.210404Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-12-23T18:29:33.210750Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:33.210813Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:33.210873Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:33.210950Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:33.211053Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1061:2879], serverId# [2:1062:2880], sessionId# [0:0:0] 2025-12-23T18:29:33.212053Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:33.212434Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:33.212646Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:33.212705Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:33.212762Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2025-12-23T18:29:33.213012Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:33.213077Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:33.213705Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-12-23T18:29:33.214023Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:29:33.214260Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-12-23T18:29:33.214333Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-12-23T18:29:33.216463Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:33.216517Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2025-12-23T18:29:33.216620Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:33.216657Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:33.216695Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-12-23T18:29:33.216810Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:33.216860Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:33.216911Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot_isolation/unittest >> DataShardSnapshotIsolation::ReadWriteCommitConflictWhileReading [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:26:53.423673Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:26:53.587336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:26:53.599170Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:26:53.599658Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:26:53.599748Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:26:53.936439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:26:53.936597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:26:54.020908Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514410867716 != 1766514410867720 2025-12-23T18:26:54.038839Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:26:54.092264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:26:54.187927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0061de/r3tmp/tmpuIatEF/pdisk_1.dat 2025-12-23T18:26:54.530858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:26:54.547427Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:26:55.178499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:864:2747], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:55.178786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:55.179179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:882:2752], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:55.179274Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:26:55.193519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:26:55.249768Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:903:2772], Recipient [1:911:2778]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:26:55.251072Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:903:2772], Recipient [1:911:2778]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:26:55.251417Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:911:2778] 2025-12-23T18:26:55.251724Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:26:55.320745Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:903:2772], Recipient [1:911:2778]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:26:55.321713Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:26:55.321845Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:26:55.323802Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:26:55.323897Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:26:55.323965Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:26:55.324394Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:26:55.324562Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:26:55.324684Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:927:2778] in generation 1 2025-12-23T18:26:55.325148Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:26:55.374264Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:26:55.374562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:26:55.374705Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:929:2788] 2025-12-23T18:26:55.374779Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:26:55.374824Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:26:55.374878Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:26:55.375145Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:911:2778], Recipient [1:911:2778]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:26:55.375202Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:26:55.375618Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:26:55.375723Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:26:55.375801Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:26:55.375843Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:26:55.375913Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:26:55.375971Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:26:55.376020Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:26:55.376068Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:26:55.376124Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:26:55.376268Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:918:2782], Recipient [1:911:2778]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:26:55.376314Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:26:55.376363Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:910:2777], serverId# [1:918:2782], sessionId# [0:0:0] 2025-12-23T18:26:55.376795Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:918:2782] 2025-12-23T18:26:55.376851Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:26:55.376948Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:26:55.377229Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:26:55.377311Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:26:55.377417Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:26:55.377478Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:26:55.377520Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:26:55.377564Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-12-23T18:26:55.377600Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:26:55.377990Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:26:55.378040Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:26:55.378081Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:26:55.378120Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:26:55.378186Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp: ... 86224037888 read iterator# {[19:1126:2933], 0} exhausted 2025-12-23T18:29:32.082278Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:29:32.082314Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:29:32.082346Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:29:32.082382Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:29:32.082455Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:29:32.082488Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:29:32.082533Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2025-12-23T18:29:32.082621Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:29:32.082842Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:29:32.380277Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [19:1136:2943], Recipient [19:918:2781]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:32.380410Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:32.380517Z node 19 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [19:1135:2942], serverId# [19:1136:2943], sessionId# [0:0:0] 2025-12-23T18:29:32.381004Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [19:1134:2941], Recipient [19:918:2781]: NKikimrDataEvents.TEvWrite TxId: 0 TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } SendingShards: 72075186224037888 ReceivingShards: 72075186224037888 Op: Commit } LockMode: OPTIMISTIC_SNAPSHOT_ISOLATION 2025-12-23T18:29:32.381061Z node 19 :TX_DATASHARD TRACE: 
datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2025-12-23T18:29:32.381253Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [19:918:2781], Recipient [19:918:2781]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:29:32.381302Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:29:32.381449Z node 19 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2025-12-23T18:29:32.381690Z node 19 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxId: 0 TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } SendingShards: 72075186224037888 ReceivingShards: 72075186224037888 Op: Commit } LockMode: OPTIMISTIC_SNAPSHOT_ISOLATION 2025-12-23T18:29:32.381855Z node 19 :TX_DATASHARD TRACE: key_validator.cpp:33: -- AddReadRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2025-12-23T18:29:32.381995Z node 19 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2025-12-23T18:29:32.382161Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckWrite 2025-12-23T18:29:32.382268Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:29:32.382346Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckWrite 2025-12-23T18:29:32.382420Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:29:32.382473Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:29:32.382541Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-12-23T18:29:32.382635Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2025-12-23T18:29:32.382722Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:29:32.382909Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:29:32.382950Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BlockFailPoint 2025-12-23T18:29:32.382985Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BlockFailPoint 2025-12-23T18:29:32.383020Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:29:32.383050Z node 19 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BlockFailPoint 2025-12-23T18:29:32.383078Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteWrite 2025-12-23T18:29:32.383106Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteWrite 2025-12-23T18:29:32.383162Z node 19 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:7] at 72075186224037888 2025-12-23T18:29:32.383259Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-12-23T18:29:32.383463Z node 19 :TX_DATASHARD TRACE: datashard_kqp.cpp:834: KqpCommitLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true 2025-12-23T18:29:32.383554Z node 19 :TX_DATASHARD TRACE: datashard_user_db.cpp:517: Committing changes lockId# 281474976715661 in localTid# 1001 shard# 72075186224037888 2025-12-23T18:29:32.383694Z node 19 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:7] at 72075186224037888 2025-12-23T18:29:32.383778Z node 19 :TX_DATASHARD TRACE: locks.cpp:194: Lock 281474976715662 marked broken at v{min} 2025-12-23T18:29:32.384341Z node 19 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2025-12-23T18:29:32.384463Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:29:32.384518Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteWrite 2025-12-23T18:29:32.384593Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit FinishProposeWrite 2025-12-23T18:29:32.384655Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:29:32.384756Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:29:32.384807Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit FinishProposeWrite 2025-12-23T18:29:32.384874Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:29:32.384938Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:29:32.384993Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:29:32.385025Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:29:32.385068Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2025-12-23T18:29:32.385995Z node 19 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 
2025-12-23T18:29:32.386089Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:29:32.386188Z node 19 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 7 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-12-23T18:29:32.386325Z node 19 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:32.387023Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553218, Sender [19:1126:2933], Recipient [19:918:2781]: NKikimrTxDataShard.TEvReadAck ReadId: 0 SeqNo: 1 MaxRows: 1 MaxBytes: 4294967295 2025-12-23T18:29:32.387232Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3660: 72075186224037888 ReadAck for read iterator# {[19:1126:2933], 0}: { ReadId: 0 SeqNo: 1 MaxRows: 1 MaxBytes: 4294967295 }, read continued, bytesLeft# 4294967295, rowsLeft# 1 2025-12-23T18:29:32.387361Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553217, Sender [19:918:2781], Recipient [19:918:2781]: NKikimr::TEvDataShard::TEvReadContinue 2025-12-23T18:29:32.387480Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3081: 72075186224037888 ReadContinue for iterator# {[19:1126:2933], 0}, firstUnprocessedQuery# 0 2025-12-23T18:29:32.387602Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3174: 72075186224037888 ReadContinue: iterator# {[19:1126:2933], 0}, FirstUnprocessedQuery# 0 2025-12-23T18:29:32.387942Z node 19 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:3277: 72075186224037888 read iterator# {[19:1126:2933], 0} TTxReadContinue::Execute() found broken lock# 281474976715662 2025-12-23T18:29:32.388502Z node 19 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:3321: 72075186224037888 read iterator# {[19:1126:2933], 0} TTxReadContinue::Execute() finished with error, aborting: ReadId: 0 SeqNo: 2 Status { Code: ABORTED Issues { message: "Read conflict with concurrent transaction" severity: 1 } } BrokenTxLocks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 18446744073709551615 SchemeShard: 72057594046644480 PathId: 38 } NodeId: 19 |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot_isolation/unittest >> PartitionStats::Collector >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile >> PartitionStats::Collector [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] Test command err: RandomSeed# 1998316593201043915 2025-12-23T18:29:34.967470Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.967585Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.967628Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 
2025-12-23T18:29:34.967675Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.967721Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.967759Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.967800Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.967838Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.969148Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.969281Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.969349Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.969400Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.969453Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.969504Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.969560Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.969629Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.969718Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.969778Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.969818Z 7 
00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.969854Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.969891Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.969929Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.969985Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.970049Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:34.972307Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.972396Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.972460Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.972518Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.972591Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.972658Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.972710Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:34.972760Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:35.196826Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:1:0]} 2025-12-23T18:29:35.196922Z 1 00h01m30.011024s 
:BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:2:0]} 2025-12-23T18:29:35.196977Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:3:0]} 2025-12-23T18:29:35.197024Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:4:0]} 2025-12-23T18:29:35.197071Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:5:0]} 2025-12-23T18:29:35.197118Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:6:0]} 2025-12-23T18:29:35.197168Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:7:0]} |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:26.781438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:26.907181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:26.918947Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:26.919414Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:26.919498Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:27.215775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:27.215905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:27.299643Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514563664894 != 1766514563664898 2025-12-23T18:29:27.313429Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:27.362955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:27.490033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006105/r3tmp/tmp5SFYG2/pdisk_1.dat 2025-12-23T18:29:27.781587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:27.797559Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:27.910869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:27.963729Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:29:27.964057Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:28.021225Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:28.021398Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:28.023535Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:28.023657Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:28.023737Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:28.024174Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:28.024343Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:28.024466Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:29:28.035375Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:28.087608Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:28.087865Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:28.088030Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:29:28.088071Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:28.088114Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:28.088157Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:28.088707Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:28.088800Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:28.089314Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:28.089381Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:28.089439Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:28.089486Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:28.089563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:29:28.089700Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:28.090006Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:29:28.090126Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:29:28.092042Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:28.103479Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:28.103645Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:29:28.225253Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:29:28.239955Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:29:28.240057Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:28.241280Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:28.241341Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:28.241391Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:28.241643Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:28.241826Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:28.242234Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:28.242312Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:28.244377Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:28.244889Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:28.246550Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:28.246599Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:28.247103Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:28.247234Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:28.248638Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:28.248688Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:28.248752Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:28.248837Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:28.248901Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:28.248994Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:28.253928Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:28.255686Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:28.255746Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:33.970232Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:33.970454Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:33.970631Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:33.971521Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:33.971604Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:33.972090Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:33.972593Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:33.974305Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:33.974361Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:33.981654Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:33.981751Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:33.982514Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:33.982565Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:33.982613Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:33.982716Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:33.982806Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:33.984250Z 
node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:33.986099Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:33.987413Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:29:33.987547Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:33.987598Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:29:33.997234Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:33.997367Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:33.997876Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:33.998595Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:33.998815Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:34.003643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:34.010052Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:34.157670Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:34.160705Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:29:34.200938Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:34.307175Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1053:2872], serverId# [2:1054:2873], sessionId# [0:0:0] 2025-12-23T18:29:34.307737Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2025-12-23T18:29:34.307925Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-12-23T18:29:34.319041Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:34.324107Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1061:2879], serverId# [2:1062:2880], sessionId# [0:0:0] 2025-12-23T18:29:34.325237Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:34.336634Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:34.336731Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:34.337029Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:34.337076Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-12-23T18:29:34.337402Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:34.337450Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:34.337497Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:34.337559Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:34.337652Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1061:2879], serverId# [2:1062:2880], sessionId# [0:0:0] 2025-12-23T18:29:34.338696Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:34.339107Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:34.339310Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:34.339358Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:34.339415Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2025-12-23T18:29:34.339654Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:34.339724Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:34.340438Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-12-23T18:29:34.340749Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:29:34.340951Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-12-23T18:29:34.341008Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-12-23T18:29:34.345018Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:34.345085Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2025-12-23T18:29:34.345209Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:34.345251Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:34.345296Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-12-23T18:29:34.345439Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:34.345503Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:34.345549Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> TSequence::CreateSequenceParallel >> TSequence::CreateSequence >> KeyValueGRPCService::SimpleGetStorageChannelStatusV1 [GOOD] >> PartitionStats::CollectorOverload >> KeyValueGRPCService::SimpleGetStorageChannelStatusV2 >> PartitionStats::CollectorOverload [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 16996569368228358616 2025-12-23T18:29:36.618562Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 
VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.618687Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.618726Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.618804Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.618844Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.618885Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.618925Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.620326Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.620417Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.620469Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.620534Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.620607Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.620673Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.620747Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.620834Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.620896Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 
VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.620964Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.621042Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.621078Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.621113Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.621176Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:29:36.623267Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.623366Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.623421Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.623492Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.623554Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.623602Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:29:36.623654Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |90.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_snapshot_isolation/unittest |90.9%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:26.862635Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:26.977186Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:26.986318Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:26.986671Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:26.986732Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:27.285683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:27.285854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:27.377167Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514563403986 != 1766514563403990 2025-12-23T18:29:27.391233Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:27.445997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:27.540795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006106/r3tmp/tmp8jAtRJ/pdisk_1.dat 2025-12-23T18:29:27.868297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:27.882928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:27.993416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:28.037801Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:29:28.038157Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:28.086925Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:28.087093Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:28.088981Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:28.089089Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:28.089167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:28.089548Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:28.089679Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:28.089781Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:29:28.100642Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:28.141580Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:28.141820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:28.141958Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:29:28.142009Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:28.142056Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:28.142105Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:28.142666Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:28.142804Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:28.143306Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:28.143357Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:28.143407Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:28.143449Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:28.143538Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:29:28.143677Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:28.143952Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:29:28.144057Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:29:28.146095Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:28.157850Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:28.157961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:29:28.276324Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:29:28.288473Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:29:28.288594Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:28.289820Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:28.289891Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:28.289940Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:28.290240Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:28.290397Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:28.290908Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:28.290998Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:28.300056Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:28.300699Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:28.302892Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:28.302948Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:28.303509Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:28.303609Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:28.305319Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:28.305380Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:28.305462Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:28.305577Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:28.305638Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:28.305733Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:28.317260Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:28.319235Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:28.319321Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... ain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-12-23T18:29:35.821403Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-12-23T18:29:35.821459Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-12-23T18:29:35.861714Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1451:3221] 2025-12-23T18:29:35.861992Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:35.876095Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:35.876257Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:35.877868Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-12-23T18:29:35.877951Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037894 2025-12-23T18:29:35.878022Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037894 2025-12-23T18:29:35.878380Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:35.878763Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:35.878849Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037894 persisting started state actor id [2:1467:3221] in generation 1 2025-12-23T18:29:35.908583Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:35.908669Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037894 2025-12-23T18:29:35.908767Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:35.908862Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037894, actorId: [2:1469:3231] 2025-12-23T18:29:35.908897Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037894 2025-12-23T18:29:35.908929Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2025-12-23T18:29:35.908971Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037894 2025-12-23T18:29:35.909350Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037894 2025-12-23T18:29:35.909445Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2025-12-23T18:29:35.909514Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2025-12-23T18:29:35.909547Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:35.909584Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037894 TxInFly 0 2025-12-23T18:29:35.909620Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2025-12-23T18:29:35.909708Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1450:3220], serverId# [2:1454:3222], sessionId# [0:0:0] 2025-12-23T18:29:35.910151Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-12-23T18:29:35.910378Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2025-12-23T18:29:35.910449Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2025-12-23T18:29:35.910954Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-12-23T18:29:35.921786Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-12-23T18:29:35.921887Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037894 not sending time cast registration request in state WaitScheme 2025-12-23T18:29:36.074639Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1476:3238], serverId# [2:1477:3239], sessionId# [0:0:0] 2025-12-23T18:29:36.076312Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715663 at step 5000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 0 RawX2: 0 } } Step: 5000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2025-12-23T18:29:36.076366Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-12-23T18:29:36.076768Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2025-12-23T18:29:36.076816Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:36.076857Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [5000:281474976715663] in PlanQueue unit at 72075186224037894 2025-12-23T18:29:36.077116Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037894 loaded tx from db 5000:281474976715663 keys extracted: 0 2025-12-23T18:29:36.077247Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: 
GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:36.077662Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2025-12-23T18:29:36.077731Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 44] schema version# 1 2025-12-23T18:29:36.078175Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:36.078571Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:36.081840Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 4500 2025-12-23T18:29:36.081895Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-12-23T18:29:36.083796Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 5000} 2025-12-23T18:29:36.083869Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2025-12-23T18:29:36.084956Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-12-23T18:29:36.085068Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:29:36.085280Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-12-23T18:29:36.085353Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-12-23T18:29:36.085421Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-12-23T18:29:36.085752Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-12-23T18:29:36.085820Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:36.086156Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2025-12-23T18:29:36.086203Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037894 2025-12-23T18:29:36.086242Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037894 2025-12-23T18:29:36.086299Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [5000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:398:2397], exec latency: 0 ms, propose 
latency: 0 ms 2025-12-23T18:29:36.086346Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-12-23T18:29:36.086439Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-12-23T18:29:36.089693Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 5000 2025-12-23T18:29:36.092547Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2025-12-23T18:29:36.092613Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-12-23T18:29:36.099913Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1505:3261], serverId# [2:1506:3262], sessionId# [0:0:0] 2025-12-23T18:29:36.100200Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1505:3261], serverId# [2:1506:3262], sessionId# [0:0:0] 2025-12-23T18:29:36.101923Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1510:3266], serverId# [2:1511:3267], sessionId# [0:0:0] 2025-12-23T18:29:36.102183Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1510:3266], serverId# [2:1511:3267], sessionId# [0:0:0] 2025-12-23T18:29:36.104510Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1515:3271], serverId# [2:1516:3272], sessionId# [0:0:0] 2025-12-23T18:29:36.104766Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1515:3271], serverId# [2:1516:3272], sessionId# [0:0:0] |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites >> DataShardWrite::UncommittedUpdateLockMissingRow [GOOD] >> DataShardWrite::UncommittedUpdateLockNewRowAboveSnapshot >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] >> TCacheTest::MigrationDeletedPathNavigate [GOOD] >> TMLPWriterTests::WriteOneMessage [GOOD] >> TMLPWriterTests::WriteTwoMessage_OnePartition >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] >> ReadOnlyVDisk::TestStorageLoad >> 
EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] >> ReadOnlyVDisk::TestReads >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_all_types-pk_types3-all_types3-index3---] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2025-12-23T18:29:05.267942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:29:05.268030Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 
5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 
State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 
ted because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720669 2025-12-23T18:29:08.384724Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720670 2025-12-23T18:29:08.384783Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pool_classifiers', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.384911Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/nodes', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.385115Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720671 2025-12-23T18:29:08.385163Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_hour', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.385452Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720672 2025-12-23T18:29:08.385514Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720673 2025-12-23T18:29:08.385567Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_minute', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.385733Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720674 2025-12-23T18:29:08.385776Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_minute', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.385813Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720675 2025-12-23T18:29:08.385849Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_hour', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.386048Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_request_units_one_hour', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.386231Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720676 2025-12-23T18:29:08.386322Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly 
mode, at tablet: 72075186233409549 txid: 281474976720677 2025-12-23T18:29:08.386383Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_hour', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.386541Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/pg_tables', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.386644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720678 2025-12-23T18:29:08.386726Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_duration_one_minute', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.387008Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720679 2025-12-23T18:29:08.387085Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_group_members', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.387159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720680 2025-12-23T18:29:08.387222Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720681 2025-12-23T18:29:08.387284Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_users', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.387340Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pools', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.387424Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720682 2025-12-23T18:29:08.387483Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/streaming_queries', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.387543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720683 2025-12-23T18:29:08.387584Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_permissions', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.387644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 
72075186233409549 txid: 281474976720684 2025-12-23T18:29:08.387921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720685 2025-12-23T18:29:08.387977Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_minute', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.388037Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720686 2025-12-23T18:29:08.388082Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_effective_permissions', reason: Schema is in ReadOnly mode 2025-12-23T18:29:08.388177Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_groups', reason: Schema is in ReadOnly mode TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-12-23T18:29:08.389782Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5804: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 40] 2025-12-23T18:29:08.389841Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5804: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:29:08.390316Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__operation_upgrade_subdomain.cpp:1466: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-12-23T18:29:08.390498Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:573) TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-12-23T18:29:08.412519Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6291: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-12-23T18:29:08.413261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6291: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-12-23T18:29:08.464012Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72075186233409549 OwnerIdx: 4 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 2 
BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:778:2067] recipient: [2:775:2668] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:778:2067] recipient: [2:775:2668] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:780:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:780:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409550 is [2:782:2672] sender: [2:783:2067] recipient: [2:775:2668] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 TestWaitNotification: OK eventTxId 108 2025-12-23T18:29:11.032068Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:29:11.032147Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:11.091000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:29:11.091072Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded >> ReadOnlyVDisk::TestSync >> ReadOnlyVDisk::TestGarbageCollect |90.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:24.850168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:24.982716Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:24.997142Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:24.997550Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:24.997655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:25.344779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:25.344935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:25.476332Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514561644104 != 1766514561644108 2025-12-23T18:29:25.508427Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:25.559977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:25.669884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0060fd/r3tmp/tmpPM7tIN/pdisk_1.dat 2025-12-23T18:29:25.999436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:26.016293Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:26.132834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:26.185956Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:906:2780] 2025-12-23T18:29:26.186300Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.237115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.237460Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.239363Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:26.239468Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:26.239535Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:26.239912Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.240203Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:910:2782] 2025-12-23T18:29:26.240417Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.249658Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.249769Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:947:2780] in generation 1 2025-12-23T18:29:26.250468Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:914:2786] 2025-12-23T18:29:26.250675Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.260465Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.260620Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.262060Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:29:26.262152Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:29:26.262202Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:29:26.262480Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.262704Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.262804Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:955:2782] in generation 1 2025-12-23T18:29:26.263170Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.263275Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.264799Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-12-23T18:29:26.264863Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2025-12-23T18:29:26.264912Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2025-12-23T18:29:26.265209Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.265315Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.265378Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:956:2786] in generation 1 2025-12-23T18:29:26.276471Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.313327Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:26.313578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.313708Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:960:2812] 2025-12-23T18:29:26.313753Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate 
change sender: at tablet: 72075186224037888 2025-12-23T18:29:26.313790Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:26.313832Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:26.314180Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.314224Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:29:26.314288Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.314351Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:961:2813] 2025-12-23T18:29:26.314389Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:29:26.314426Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:29:26.314451Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:29:26.314843Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.314882Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2025-12-23T18:29:26.314935Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.314992Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:962:2814] 2025-12-23T18:29:26.315016Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2025-12-23T18:29:26.315040Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-12-23T18:29:26.315083Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:29:26.315377Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:26.315481Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:26.315695Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:26.315745Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:26.315797Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:26.315863Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:26.315915Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:29:26.315970Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:29:26.316129Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:897:2775], serverId# [1:911:2783], sessionId# [0:0:0] 2025-12-23T18:29:26.316177Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:29:26.316201Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:26.316223Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:29:26.316253Z no ... abletResult at 72075186224037888 time 0 2025-12-23T18:29:37.856597Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:37.857434Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:37.857532Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:37.860213Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:37.860280Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:37.860341Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:37.860426Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:37.860501Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:37.860614Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:37.862195Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:37.864945Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:29:37.865648Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:37.865714Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:29:37.876545Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:37.876699Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:37.876788Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:37.878160Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:37.878569Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:37.883236Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:37.890935Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:38.019565Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:38.023451Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:29:38.062714Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:38.166549Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2872], serverId# [3:1054:2873], sessionId# [0:0:0] 2025-12-23T18:29:38.167084Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2025-12-23T18:29:38.167272Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-12-23T18:29:38.179442Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:38.448200Z node 3 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] { items { uint64_value: 0 } } 2025-12-23T18:29:38.465252Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1093:2904], serverId# [3:1094:2905], sessionId# [0:0:0] 2025-12-23T18:29:38.466300Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:38.487645Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:38.487767Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:38.487854Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2572: Waiting for PlanStep# 2501 from mediator time cast 2025-12-23T18:29:38.488674Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 2501 at tablet 72075186224037888 2025-12-23T18:29:38.488759Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:38.488935Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:38.488983Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037888 2025-12-23T18:29:38.489361Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:38.489413Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:38.489470Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:38.489536Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:38.489620Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1093:2904], serverId# [3:1094:2905], sessionId# [0:0:0] 2025-12-23T18:29:38.577187Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 
2025-12-23T18:29:38.577355Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-12-23T18:29:38.585237Z node 3 :TX_DATASHARD INFO: datashard_write_operation.cpp:804: Write transaction 6 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2025-12-23T18:29:38.585493Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-12-23T18:29:38.585704Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-12-23T18:29:38.585777Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:38.586017Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [3:1115:2878], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [3:1060:2878]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:1115:2878].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-12-23T18:29:38.586594Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [3:1108:2878], SessionActorId: [3:1060:2878], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:1060:2878]. 2025-12-23T18:29:38.587006Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=3&id=YWFkNjJjYzktNDQ0MjQ5MDgtNDI3MDMxMzAtYmRmNGVkZDY=, ActorId: [3:1060:2878], ActorState: ExecuteState, TraceId: 01kd67jn1t60svkpe87b8sg4cr, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:1109:2878] from: [3:1108:2878] 2025-12-23T18:29:38.587176Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [3:1109:2878] TxId: 281474976715662. Ctx: { TraceId: 01kd67jn1t60svkpe87b8sg4cr, Database: , SessionId: ydb://session/3?node_id=3&id=YWFkNjJjYzktNDQ0MjQ5MDgtNDI3MDMxMzAtYmRmNGVkZDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-12-23T18:29:38.587588Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=YWFkNjJjYzktNDQ0MjQ5MDgtNDI3MDMxMzAtYmRmNGVkZDY=, ActorId: [3:1060:2878], ActorState: ExecuteState, TraceId: 01kd67jn1t60svkpe87b8sg4cr, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/table-1`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } 2025-12-23T18:29:38.588610Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:7] at 72075186224037888 2025-12-23T18:29:38.588679Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:7] at 72075186224037888 2025-12-23T18:29:38.588826Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> ReadOnlyVDisk::TestDiscover >> TestMalformedRequest::CompressedDeflateContentLengthCorrect >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows >> TestMalformedRequest::CompressedDeflateContentLengthLower ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:25.814205Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:25.988121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:26.002637Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:26.003158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:26.003247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:26.291258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:26.291416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:26.383476Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514562484715 != 1766514562484719 2025-12-23T18:29:26.401777Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:26.453835Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:26.548764Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006109/r3tmp/tmpmwxBzR/pdisk_1.dat 2025-12-23T18:29:26.875428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:26.892476Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:27.003791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:27.045677Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:29:27.045998Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:27.100359Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:27.100581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:27.102345Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:27.102436Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:27.102516Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:27.102952Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:27.103114Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:27.103204Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:29:27.114104Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:27.167424Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:27.167658Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:27.167817Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:29:27.167868Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:27.167915Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:27.167953Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.168454Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:27.168558Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:27.169135Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:27.169187Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:27.169232Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:27.169298Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:27.169356Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:29:27.169456Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:27.169685Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:29:27.169761Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:29:27.171180Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:27.182211Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:27.182365Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:29:27.308433Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:29:27.313540Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:29:27.313639Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.314849Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:27.314921Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:27.314983Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:27.315306Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:27.315489Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:27.315879Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:27.315981Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:27.318404Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:27.318838Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:27.320700Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:27.320756Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.321234Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:27.321348Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:27.323042Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:27.323097Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:27.323146Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:27.323228Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:27.323284Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:27.323381Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.327725Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:27.329673Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:27.329758Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:38.733620Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:38.733913Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:38.734053Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:38.734935Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:38.735012Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:38.735488Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:38.735940Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:38.738024Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:38.738082Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:38.738797Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:38.738880Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:38.740256Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:38.740307Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:38.740374Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:38.740446Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:38.740504Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:38.740599Z 
node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:38.741887Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:38.745650Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:29:38.746328Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:38.746404Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:29:38.756017Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.756127Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.756193Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.757371Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.757791Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.761964Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:38.768795Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:38.892381Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:38.896271Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:29:38.933722Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:39.070955Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2872], serverId# [3:1054:2873], sessionId# [0:0:0] 2025-12-23T18:29:39.071498Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2025-12-23T18:29:39.071717Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-12-23T18:29:39.082851Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:39.089099Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1061:2879], serverId# [3:1062:2880], sessionId# [0:0:0] 2025-12-23T18:29:39.090202Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:39.101822Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:39.101910Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:39.102260Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:39.102321Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-12-23T18:29:39.102512Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:39.102568Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:39.102628Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:39.102697Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:39.103003Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1061:2879], serverId# [3:1062:2880], sessionId# [0:0:0] 2025-12-23T18:29:39.104101Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:39.104495Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:39.104737Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:39.104811Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:39.104865Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2025-12-23T18:29:39.105101Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:39.105176Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:39.105907Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-12-23T18:29:39.106218Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:29:39.106441Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-12-23T18:29:39.106504Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-12-23T18:29:39.108596Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:39.108657Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2025-12-23T18:29:39.108814Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:39.108853Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:39.108895Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-12-23T18:29:39.109021Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:39.109075Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:39.109124Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:25.535704Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:25.673016Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:25.685458Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:25.685902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:25.685995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:26.016973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:26.017155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:26.119389Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514562170814 != 1766514562170818 2025-12-23T18:29:26.142128Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:26.197909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:26.310587Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00610b/r3tmp/tmpmKoJDA/pdisk_1.dat 2025-12-23T18:29:26.633030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:26.647305Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:26.762968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:26.818271Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:29:26.821700Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.884807Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.884995Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.887385Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:26.887487Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:26.887576Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:26.888049Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.888219Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.888358Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:29:26.899570Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.958758Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:26.959049Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.959224Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:29:26.959269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:26.959313Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:26.959358Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:26.959989Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:26.960119Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:26.960666Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:26.960718Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:26.960777Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:26.960850Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:26.960945Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:29:26.961110Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:26.961491Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:29:26.961620Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:29:26.964042Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:26.974940Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:26.975086Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:29:27.096773Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:29:27.111659Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:29:27.111763Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.112928Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:27.112994Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:27.113048Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:27.113346Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:27.113538Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:27.113951Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:27.114056Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:27.116527Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:27.117030Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:27.119381Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:27.119441Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.120064Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:27.120166Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:27.121736Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:27.121787Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:27.121841Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:27.121918Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:27.121971Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:27.122054Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.126841Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:27.129015Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:27.129096Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:38.686600Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:38.691370Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:38.691635Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:38.692530Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:38.692615Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:38.693159Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:38.693665Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:38.696077Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:38.696143Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:38.696910Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:38.696987Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:38.698917Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:38.698968Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:38.699019Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:38.699082Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:38.699151Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:38.699236Z 
node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:38.700418Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:38.703824Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:29:38.704450Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:38.704522Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:29:38.713619Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.713726Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.713795Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.715081Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.715516Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.719531Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:38.726260Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:38.854249Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:38.857762Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:29:38.892152Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:39.046925Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2872], serverId# [3:1054:2873], sessionId# [0:0:0] 2025-12-23T18:29:39.047491Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2025-12-23T18:29:39.047682Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-12-23T18:29:39.063407Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:39.068188Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1061:2879], serverId# [3:1062:2880], sessionId# [0:0:0] 2025-12-23T18:29:39.069392Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:39.084235Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:39.084336Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:39.084679Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:39.084739Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-12-23T18:29:39.084971Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:39.085028Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:39.085091Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:39.085170Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:39.085479Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1061:2879], serverId# [3:1062:2880], sessionId# [0:0:0] 2025-12-23T18:29:39.086609Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:39.087065Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:39.087289Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:39.087346Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:39.087403Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2025-12-23T18:29:39.087673Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:39.087746Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:39.088439Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-12-23T18:29:39.088803Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:29:39.089026Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-12-23T18:29:39.089087Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-12-23T18:29:39.091168Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:39.091229Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2025-12-23T18:29:39.091399Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:39.091442Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:39.091486Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-12-23T18:29:39.091628Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:39.091691Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:39.091742Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:26.056657Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:26.191883Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:26.204417Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:26.204903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:26.204997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:26.522992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:26.523173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:26.605164Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514562774436 != 1766514562774440 2025-12-23T18:29:26.621836Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:26.673447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:26.763668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006108/r3tmp/tmpYs6O5d/pdisk_1.dat 2025-12-23T18:29:27.078371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:27.094314Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:27.199348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:27.231591Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:29:27.231903Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:27.275405Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:27.275580Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:27.277125Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:27.277190Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:27.277244Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:27.277592Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:27.277707Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:27.277794Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:29:27.288676Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:27.363176Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:27.363419Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:27.363549Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:29:27.363599Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:27.363635Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:27.363674Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.364249Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:27.364362Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:27.364830Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:27.364874Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:27.364913Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:27.364979Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:27.365049Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:29:27.365171Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:27.365449Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:29:27.365556Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:29:27.367498Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:27.378498Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:27.378616Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:29:27.507355Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:29:27.511950Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:29:27.512033Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.513025Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:27.513106Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:27.513165Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:27.513451Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:27.513619Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:27.513967Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:27.514039Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:27.526876Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:27.527538Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:27.529876Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:27.529937Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.530573Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:27.530674Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:27.532334Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:27.532393Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:27.532460Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:27.532590Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:27.532666Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:27.532778Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.539484Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:27.541384Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:27.541462Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:38.879219Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:38.879543Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:38.879693Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:38.880552Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:38.880649Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:38.881160Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:38.881612Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:38.883875Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:38.883937Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:38.884722Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:38.884814Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:38.886394Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:38.886452Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:38.886506Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:38.886576Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:38.886635Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:38.886784Z 
node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:38.888099Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:38.890256Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:29:38.891702Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:38.891785Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:29:38.901737Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.901859Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.901953Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.903283Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.903772Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.908358Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:38.915816Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:39.048091Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:39.051684Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:29:39.092751Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:39.208438Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2872], serverId# [3:1054:2873], sessionId# [0:0:0] 2025-12-23T18:29:39.208945Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2025-12-23T18:29:39.209148Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=4 2025-12-23T18:29:39.221776Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:39.226366Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1061:2879], serverId# [3:1062:2880], sessionId# [0:0:0] 2025-12-23T18:29:39.227322Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:39.239090Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:39.239183Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:39.239434Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:39.239508Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-12-23T18:29:39.239705Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:39.239761Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:39.239829Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:39.239892Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:39.241601Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1061:2879], serverId# [3:1062:2880], sessionId# [0:0:0] 2025-12-23T18:29:39.242883Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:39.245160Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:39.245448Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:39.245505Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:39.245564Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2025-12-23T18:29:39.245818Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:39.245886Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:39.246575Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-12-23T18:29:39.246876Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:29:39.247073Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-12-23T18:29:39.247132Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-12-23T18:29:39.249024Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:39.249078Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2025-12-23T18:29:39.253140Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:39.253209Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:39.253255Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-12-23T18:29:39.253402Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:39.253473Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:39.253529Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Query |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:29:25.541116Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:25.720897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:25.747491Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:25.747925Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:25.748017Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:26.126384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:26.126584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:26.230734Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514562241091 != 1766514562241095 2025-12-23T18:29:26.247612Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:26.299504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:26.390809Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00610f/r3tmp/tmpFJ5Q3g/pdisk_1.dat 2025-12-23T18:29:26.714383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:26.729144Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:26.835499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:26.876467Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:29:26.876788Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.942967Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.943077Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.944614Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:26.944726Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:26.944792Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:26.945119Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.945281Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.945405Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:29:26.956374Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.983800Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:26.984031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.984145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:29:26.984182Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:26.984215Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:26.984249Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:26.984760Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:26.984865Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:26.985286Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:26.985332Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:26.985372Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:26.985425Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:26.985498Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:29:26.985629Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:26.985905Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:29:26.986006Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:29:26.987950Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:26.999647Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:26.999805Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:29:27.123575Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:29:27.134094Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:29:27.134205Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.135298Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:27.135358Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:27.135406Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:27.135679Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:27.135859Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:27.136213Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:27.136299Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:27.138275Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:27.138628Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:27.140513Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:27.140567Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.141059Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:27.141149Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:27.142568Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:27.142613Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:27.142674Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:27.142861Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:27.142921Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:27.143023Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.153651Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:27.156284Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:27.156348Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:38.968871Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:29:38.969247Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:29:38.969425Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:38.970399Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:38.970492Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:29:38.972599Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:38.973193Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:38.975890Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:29:38.975964Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:38.976930Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:29:38.977050Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:38.980546Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:38.980625Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:38.980692Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:29:38.980780Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:38.980852Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:29:38.980949Z 
node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:38.982476Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:38.984898Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:29:38.986025Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:29:38.986156Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:29:38.997974Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.998123Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.998246Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:38.999793Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:39.000306Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:39.005290Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:39.013185Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:39.141262Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:29:39.147149Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:29:39.184257Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:39.373384Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2872], serverId# [3:1054:2873], sessionId# [0:0:0] 2025-12-23T18:29:39.373947Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2025-12-23T18:29:39.374199Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-12-23T18:29:39.385473Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:39.389249Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1061:2879], serverId# [3:1062:2880], sessionId# [0:0:0] 2025-12-23T18:29:39.390350Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T18:29:39.404991Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T18:29:39.405058Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:39.405273Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:39.405314Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-12-23T18:29:39.405484Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:39.405534Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:39.405582Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:39.405631Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:39.405900Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1061:2879], serverId# [3:1062:2880], sessionId# [0:0:0] 2025-12-23T18:29:39.406946Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:39.407356Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:39.407601Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:39.407657Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:39.407712Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2025-12-23T18:29:39.407965Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:39.408036Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:39.408659Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-12-23T18:29:39.408994Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:29:39.409199Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-12-23T18:29:39.409260Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-12-23T18:29:39.475508Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:39.475577Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2025-12-23T18:29:39.475807Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:39.475873Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:39.475915Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-12-23T18:29:39.476076Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:39.476142Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:39.476202Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence >> JsonProtoConversion::JsonToProtoArray [GOOD] >> JsonProtoConversion::JsonToProtoMap [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] >> listing_paging.py::TestListingPaging::test_listing_paging_solomon [GOOD] >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD] |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] >> JsonProtoConversion::ProtoMapToJson [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile >> TestMalformedRequest::ContentLengthCorrect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2025-12-23T18:29:07.715293Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:29:07.715376Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 
FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... tep: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 
5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at 
step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2025-12-23T18:29:09.238873Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:29:25.351020Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:25.476020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:25.488440Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:25.488885Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:25.488983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:25.972886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:25.973085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:26.072792Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514562127012 != 1766514562127016 2025-12-23T18:29:26.097759Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:26.145502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:26.239247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006114/r3tmp/tmpJ8irgt/pdisk_1.dat 2025-12-23T18:29:26.571396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:26.586014Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:26.695261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:26.744704Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:906:2780] 2025-12-23T18:29:26.744959Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.791992Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.792251Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.793951Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:26.794040Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:26.794133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:26.794560Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.794923Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:910:2782] 2025-12-23T18:29:26.795137Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.804214Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.804324Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:947:2780] in generation 1 2025-12-23T18:29:26.805012Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:914:2786] 2025-12-23T18:29:26.805178Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.815221Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.815389Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.816966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:29:26.817042Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:29:26.817100Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:29:26.817430Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.817662Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.817749Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:955:2782] in generation 1 2025-12-23T18:29:26.818148Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.818242Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.819774Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-12-23T18:29:26.819855Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2025-12-23T18:29:26.819914Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2025-12-23T18:29:26.820279Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.820393Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.820458Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:956:2786] in generation 1 2025-12-23T18:29:26.832326Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.874728Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:26.874993Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.875157Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:960:2812] 2025-12-23T18:29:26.875222Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate 
change sender: at tablet: 72075186224037888 2025-12-23T18:29:26.875268Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:26.875310Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:26.875685Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.875732Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:29:26.875792Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.875858Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:961:2813] 2025-12-23T18:29:26.875883Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:29:26.875908Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:29:26.875934Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:29:26.876288Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.876339Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2025-12-23T18:29:26.876397Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.876451Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:962:2814] 2025-12-23T18:29:26.876476Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2025-12-23T18:29:26.876500Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-12-23T18:29:26.876528Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:29:26.876774Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:26.876889Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:26.877128Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:26.877195Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:26.877261Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:26.877334Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:26.877393Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:29:26.877443Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:29:26.877573Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:897:2775], serverId# [1:911:2783], sessionId# [0:0:0] 2025-12-23T18:29:26.877619Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:29:26.877643Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:26.877668Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:29:26.877698Z no ... node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T18:29:41.772541Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2025-12-23T18:29:41.772800Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T18:29:41.772830Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 8, finished edge# 0, front# 0 2025-12-23T18:29:41.775388Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T18:29:41.775436Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-12-23T18:29:41.775857Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:256: 72075186224037889 snapshot complete for split OpId 281474976715663 2025-12-23T18:29:41.776119Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715663 2025-12-23T18:29:41.776185Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715663 2025-12-23T18:29:41.776225Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715663 2025-12-23T18:29:41.776260Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715663 2025-12-23T18:29:41.776519Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715663 2025-12-23T18:29:41.776753Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715663 2025-12-23T18:29:41.776794Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 for split OpId 281474976715663 2025-12-23T18:29:41.776831Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: 
table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715663 2025-12-23T18:29:41.776868Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715663 2025-12-23T18:29:41.776985Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715663 2025-12-23T18:29:41.777569Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:424: 72075186224037889 Sending snapshots from src for split OpId 281474976715663 2025-12-23T18:29:41.777789Z node 3 :TX_DATASHARD DEBUG: datashard_impl.h:2361: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2025-12-23T18:29:41.777918Z node 3 :TX_DATASHARD DEBUG: datashard_impl.h:2361: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2025-12-23T18:29:41.778263Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [3:1393:3096], serverId# [3:1394:3097], sessionId# [0:0:0] 2025-12-23T18:29:41.778312Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037892, clientId# [3:1392:3095], serverId# [3:1395:3098], sessionId# [0:0:0] 2025-12-23T18:29:41.778459Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:175: 72075186224037891 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2025-12-23T18:29:41.779312Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:175: 72075186224037892 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2025-12-23T18:29:41.781075Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037891 ack snapshot OpId 281474976715663 2025-12-23T18:29:41.781298Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2025-12-23T18:29:41.781419Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:41.781527Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2025-12-23T18:29:41.781604Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [3:1398:3101] 2025-12-23T18:29:41.781644Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2025-12-23T18:29:41.781695Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037891 2025-12-23T18:29:41.781742Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-12-23T18:29:41.781887Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715663 2025-12-23T18:29:41.782675Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 3000 2025-12-23T18:29:41.782728Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-12-23T18:29:41.783004Z node 3 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2025-12-23T18:29:41.783046Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:41.783082Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2025-12-23T18:29:41.783116Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2025-12-23T18:29:41.783272Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1393:3096], serverId# [3:1394:3097], sessionId# [0:0:0] 2025-12-23T18:29:41.783337Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037892 ack snapshot OpId 281474976715663 2025-12-23T18:29:41.783443Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037892 2025-12-23T18:29:41.783518Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:29:41.783583Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2025-12-23T18:29:41.783634Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037892, actorId: [3:1400:3103] 2025-12-23T18:29:41.783661Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037892 2025-12-23T18:29:41.783696Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037892 2025-12-23T18:29:41.783722Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-12-23T18:29:41.783829Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715663 2025-12-23T18:29:41.784450Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 3000 2025-12-23T18:29:41.784487Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-12-23T18:29:41.784573Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2025-12-23T18:29:41.784603Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:41.784630Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2025-12-23T18:29:41.784662Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2025-12-23T18:29:41.784815Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1392:3095], serverId# [3:1395:3098], sessionId# [0:0:0] 2025-12-23T18:29:41.784969Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 2500 next step 3000 2025-12-23T18:29:41.785035Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037891: waitStep# 
3000 readStep# 3000 observedStep# 3000 2025-12-23T18:29:41.785321Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 2500 next step 3000 2025-12-23T18:29:41.785358Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037892: waitStep# 3000 readStep# 3000 observedStep# 3000 2025-12-23T18:29:41.810773Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037889 ack split to schemeshard 281474976715663 2025-12-23T18:29:41.815655Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976715663, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-12-23T18:29:41.817652Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2025-12-23T18:29:41.817717Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-12-23T18:29:41.818090Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:29:41.818153Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:21: Progress tx at non-ready tablet 72075186224037889 state 5 2025-12-23T18:29:41.818328Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1286:3016], serverId# [3:1287:3017], sessionId# [0:0:0] 2025-12-23T18:29:41.818504Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037889 ack split partitioning changed to schemeshard 281474976715663 2025-12-23T18:29:41.818577Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-12-23T18:29:41.818643Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 >> BSCReadOnlyPDisk::ReadOnlyNotAllowed >> TestMalformedRequest::CompressedGzipContentLengthLower |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable >> TestMalformedRequest::ContentLengthLower >> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD] >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> TestMalformedRequest::CompressedDeflateContentLengthNone >> TestMalformedRequest::ContentLengthNone >> ReadOnlyVDisk::TestWrites [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] Test command err: RandomSeed# 14553652825373029696 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-12-23T18:29:40.575638Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-12-23T18:29:40.580628Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-12-23T18:29:40.585812Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-12-23T18:29:40.588778Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-12-23T18:29:40.601085Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-12-23T18:29:40.605589Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-12-23T18:29:40.609103Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-12-23T18:29:40.612296Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only 
Sender# [1:5324:705] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-12-23T18:29:41.957038Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] 2025-12-23T18:29:41.957188Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2025-12-23T18:29:41.957345Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2025-12-23T18:29:41.958350Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: [67de3c327b0b3cf7] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 
Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2025-12-23T18:29:41.963064Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.475 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.476 sample PartId# [1:1:11:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.476 sample PartId# [1:1:11:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.476 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.477 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.477 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 1.619 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.692 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 1.875 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.912 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPutResult{ TimestampMs# 1.945 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-12-23T18:29:41.965711Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] 2025-12-23T18:29:41.965960Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2025-12-23T18:29:41.967541Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 
ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2025-12-23T18:29:41.969990Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:705] 2025-12-23T18:29:41.971272Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2025-12-23T18:29:41.972542Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { Order ... ey [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key 
[1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2025-12-23T18:29:45.123537Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2025-12-23T18:29:45.123713Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2025-12-23T18:29:45.127095Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2025-12-23T18:29:45.128208Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2025-12-23T18:29:45.132437Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2025-12-23T18:29:45.135572Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2025-12-23T18:29:45.135688Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2025-12-23T18:29:45.138827Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2025-12-23T18:29:45.138927Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2025-12-23T18:29:45.142037Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2025-12-23T18:29:45.142163Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] 
Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2025-12-23T18:29:45.145242Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] 2025-12-23T18:29:45.145583Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2025-12-23T18:29:45.149411Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2025-12-23T18:29:45.149541Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2025-12-23T18:29:45.152474Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:719] 2025-12-23T18:29:45.152631Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:712] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK 
Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test command err: RandomSeed# 10653613753528226015 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} 
=== Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-12-23T18:29:42.746212Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-12-23T18:29:42.754574Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-12-23T18:29:42.761369Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-12-23T18:29:42.765588Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-12-23T18:29:42.777886Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-12-23T18:29:42.782113Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-12-23T18:29:42.786563Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-12-23T18:29:42.790642Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK 
ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-12-23T18:29:45.473610Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2025-12-23T18:29:45.473751Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2025-12-23T18:29:45.473907Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2025-12-23T18:29:45.474905Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: [61b561383a419b46] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2025-12-23T18:29:45.479740Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.463 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.464 sample PartId# [1:1:11:0:0:32768:3] QueryCount# 1 
VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.464 sample PartId# [1:1:11:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.465 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.465 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.466 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 1.616 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.686 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 1.869 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.905 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPutResult{ TimestampMs# 1.937 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-12-23T18:29:45.482431Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2025-12-23T18:29:45.482991Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2025-12-23T18:29:45.484627Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 
2025-12-23T18:29:45.487338Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2025-12-23T18:29:45.488444Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2025-12-23T18:29:45.489660Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { Order ... { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2025-12-23T18:29:45.503386Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2025-12-23T18:29:45.503717Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2025-12-23T18:29:45.503805Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:17:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2025-12-23T18:29:45.506975Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2025-12-23T18:29:45.507309Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] 2025-12-23T18:29:45.507492Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] TEvPutResult: TEvPutResult {Id# 
[1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:18:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 7 Situations# SUUUUU } { OrderNumber# 0 Situations# UEUUUU } { OrderNumber# 1 Situations# UUEUUU } { OrderNumber# 2 Situations# UUUEUU } { OrderNumber# 3 Situations# UUUUSU } { OrderNumber# 4 Situations# UUUUUS } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2025-12-23T18:29:45.510891Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2025-12-23T18:29:45.511237Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2025-12-23T18:29:45.511372Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:19:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# UUEUUU } { OrderNumber# 1 Situations# UUUEUU } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } { OrderNumber# 4 Situations# UUSUUU } { OrderNumber# 5 Situations# UUUUSU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2025-12-23T18:29:45.514767Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:705] 2025-12-23T18:29:45.514958Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:719] 2025-12-23T18:29:45.515119Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:712] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:20:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { 
OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2025-12-23T18:29:45.523278Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2025-12-23T18:29:45.523560Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5334:713] 2025-12-23T18:29:45.523671Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5341:720] 2025-12-23T18:29:45.524669Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: [ff39b70039b76adf] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} Marker# BPG29 2025-12-23T18:29:45.525162Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: {BPG72@dsproxy_get.cpp:427} Query history GroupId# 2181038080 HandleClass# FastRead History# THistory { Entries# [ TEvVGet{ TimestampMs# 0.609 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVGet{ TimestampMs# 0.61 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVGet{ TimestampMs# 0.61 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVGet{ TimestampMs# 0.61 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 6 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVGet{ TimestampMs# 0.611 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 6 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVGet{ TimestampMs# 0.611 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVGet{ TimestampMs# 0.611 sample PartId# [1:1:11:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVGet{ TimestampMs# 0.611 sample PartId# [1:1:11:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVGetResult{ TimestampMs# 4.104 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# OK } TEvVGetResult{ TimestampMs# 5.145 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } TEvVGetResult{ TimestampMs# 5.205 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVGetResult{ TimestampMs# 5.244 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVGetResult{ TimestampMs# 5.283 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVGetResult{ TimestampMs# 5.686 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPut{ TimestampMs# 5.722 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 5.723 sample 
PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 5.723 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVGetResult{ TimestampMs# 6.655 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# OK } TEvVPut{ TimestampMs# 6.669 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVGetResult{ TimestampMs# 6.723 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# OK } TEvVPut{ TimestampMs# 6.728 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 6.772 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } PutAcceleration{ TimestampMs# 6.776 } PutAcceleration{ TimestampMs# 6.8 } TEvVPutResult{ TimestampMs# 7.134 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPutResult{ TimestampMs# 7.595 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } 2025-12-23T18:29:45.525337Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5334:713] 2025-12-23T18:29:45.525424Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5341:720] TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] Test command err: RandomSeed# 6917862968040273522 >> DataShardWrite::UncommittedUpdateLockNewRowAboveSnapshot [GOOD] >> DataShardWrite::UncommittedUpdateLockDeletedRowAboveSnapshot |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] Test command err: RandomSeed# 1247354513892806850 2025-12-23T18:29:36.251306Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:29:36.253713Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13132048656237624529] 2025-12-23T18:29:36.286581Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence |90.9%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 14437483717336010769 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-12-23T18:29:42.827489Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-12-23T18:29:43.292065Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] 2025-12-23T18:29:43.293532Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] 
Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-12-23T18:29:43.870389Z 3 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:718] 2025-12-23T18:29:43.871866Z 1 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] 2025-12-23T18:29:43.872747Z 2 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] 2025-12-23T18:29:43.873116Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: [c047672ad01b6ea7] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2025-12-23T18:29:43.873502Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.634 sample PartId# [1:1:5:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.636 sample PartId# [1:1:5:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.636 sample PartId# [1:1:5:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.637 sample PartId# [1:1:5:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPut{ TimestampMs# 0.637 sample PartId# [1:1:5:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPut{ TimestampMs# 0.638 sample PartId# [1:1:5:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 2.227 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 2.323 sample PartId# [1:1:5:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.206 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 3.26 sample PartId# [1:1:5:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 3.519 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPutResult{ TimestampMs# 3.541 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVPutResult{ TimestampMs# 3.557 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 3.576 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVPutResult{ TimestampMs# 3.621 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } 
TEvVPutResult{ TimestampMs# 3.832 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND 
TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} |90.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestMalformedRequest::CompressedDeflateContentLengthCorrect [GOOD] >> BSCReadOnlyPDisk::ReadOnlyOneByOne |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] >> TNebiusAccessServiceTest::Authorize >> TestMalformedRequest::CompressedDeflateContentLengthLower [GOOD] >> TNebiusAccessServiceTest::Authorize [GOOD] |90.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |90.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |90.9%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] |91.0%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> TestMalformedRequest::CompressedDeflateContentLengthHigher >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::InsertImmediate >> KeyValueGRPCService::SimpleGetStorageChannelStatusV2 [GOOD] >> KeyValueGRPCService::SimpleCreateAlterDropVolume ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] Test command err: 2025-12-23T18:29:49.532496Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ccfd7ce4750] Connect to grpc://localhost:15518 2025-12-23T18:29:49.545995Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ccfd7ce4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-12-23T18:29:49.556932Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ccfd7ce4750] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user_id" } } } } } 2025-12-23T18:29:49.558424Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ccfd7ce4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (79225CA9)" } } } 2025-12-23T18:29:49.561122Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ccfd7ce4750] Status 7 Permission Denied 2025-12-23T18:29:49.562036Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ccfd7ce4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "denied" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-12-23T18:29:49.564445Z node 3 :GRPC_CLIENT DEBUG: 
grpc_service_client.h:111: [7ccfd7ce4750] Status 7 Permission Denied 2025-12-23T18:29:49.566902Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ccfd7ce4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "p" } } iam_token: "**** (717F937C)" } } } 2025-12-23T18:29:49.568820Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ccfd7ce4750] Status 7 Permission Denied |91.0%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] Test command err: 2025-12-23T18:29:41.532858Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122355849300287:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:41.532937Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c55/r3tmp/tmp1FpUl1/pdisk_1.dat 2025-12-23T18:29:41.822316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:29:41.954656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:41.954766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:41.974046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:42.046183Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122355849300263:2081] 1766514581532071 != 1766514581532074 2025-12-23T18:29:42.048526Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18210, node 1 2025-12-23T18:29:42.115623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:29:42.234075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:29:42.234135Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:29:42.234154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:29:42.234248Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1562 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:29:42.552234Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514582021 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:29:42.643249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:29:42.650550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:1562 2025-12-23T18:29:42.910379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:29:42.928155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:29:42.936146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:29:42.985132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:43.136418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:29:43.213957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:29:43.273153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:43.319994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:43.363138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:43.405129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:43.449029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:43.493353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:43.526661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:45.761120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122373029171110:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:45.761254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:45.762828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122373029171122:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:45.763720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122373029171124:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:45.763826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:45.767343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:45.788266Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122373029171125:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:29:45.872942Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122373029171177:3108] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:46.368562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo un ... ersion" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:48.003133Z node 1 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:48.003149Z node 1 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 13ms 2025-12-23T18:29:48.003161Z node 1 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 15ms 2025-12-23T18:29:48.003476Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:48.003511Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-12-23T18:29:48.003582Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 16ms 2025-12-23T18:29:48.003617Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 
281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:48.003644Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-12-23T18:29:48.003735Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 16ms 2025-12-23T18:29:48.004049Z node 1 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:48.004245Z node 1 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 
} } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:48.161996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7587122381619106463:2437]: Pool not found 2025-12-23T18:29:48.162210Z node 1 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-12-23T18:29:48.484716Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7587122381619106461:2436]: Pool not found 2025-12-23T18:29:48.485499Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-12-23T18:29:48.488764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122385914073880:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:48.488864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7587122385914073881:2457], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:29:48.488915Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:48.493495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122385914073884:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:48.493579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:48.828729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7587122385914073878:2455]: Pool not found 2025-12-23T18:29:48.829378Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2025-12-23T18:29:48.879568Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:46236) incoming connection opened 2025-12-23T18:29:48.879706Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:46236) -> (POST /Root, 24 bytes) Http output full {"__type":"AccessDeniedException","message":"Failed to decode POST body"} 2025-12-23T18:29:48.879984Z node 1 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [387b:8ed:657b:0:207b:8ed:657b:0] request [CreateQueue] url [/Root] database [/Root] requestId: 9d69079-7e7493ff-189bbce6-d9757b90 2025-12-23T18:29:48.881091Z node 1 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [9d69079-7e7493ff-189bbce6-d9757b90] reply with status: BAD_REQUEST message: Failed to decode POST body 2025-12-23T18:29:48.881274Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:46236) <- (400 AccessDeniedException, 73 bytes) 2025-12-23T18:29:48.881332Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:46236) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: deflate Content-Length: 32 {"QueueName": "Example"} 2025-12-23T18:29:48.881366Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:46236) Response: HTTP/1.1 400 AccessDeniedException Connection: keep-alive x-amzn-requestid: 9d69079-7e7493ff-189bbce6-d9757b90 Content-Type: application/x-amz-json-1.1 Content-Length: 73 2025-12-23T18:29:48.885785Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:46236) connection closed |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |91.0%| [TA] $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:25.294965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:25.401846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:25.414157Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:25.414649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:25.414765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:25.753971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:25.754152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:25.846360Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514561821276 != 1766514561821280 2025-12-23T18:29:25.864917Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:25.913831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:26.013355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0060ff/r3tmp/tmpE2qsT8/pdisk_1.dat 2025-12-23T18:29:26.349767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:26.364733Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:26.471556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:26.531784Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:906:2780] 2025-12-23T18:29:26.532108Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.582512Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.582841Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.584541Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:26.584606Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:26.584695Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:26.585065Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.585344Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:910:2782] 2025-12-23T18:29:26.585528Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.593982Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.594084Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:947:2780] in generation 1 2025-12-23T18:29:26.594728Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:914:2786] 2025-12-23T18:29:26.594965Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:26.603741Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.603851Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.605220Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:29:26.605281Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:29:26.605320Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:29:26.605542Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.605775Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.605833Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:955:2782] in generation 1 2025-12-23T18:29:26.606129Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:26.606191Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:26.607462Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-12-23T18:29:26.607543Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2025-12-23T18:29:26.607593Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2025-12-23T18:29:26.607840Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:26.607926Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:26.608079Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:956:2786] in generation 1 2025-12-23T18:29:26.619276Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.650714Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:26.651136Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.651335Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:960:2812] 2025-12-23T18:29:26.651385Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate 
change sender: at tablet: 72075186224037888 2025-12-23T18:29:26.651426Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:26.651485Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:26.651906Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.651958Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:29:26.652042Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.652112Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:961:2813] 2025-12-23T18:29:26.652143Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:29:26.652169Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:29:26.652195Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:29:26.652649Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:26.652704Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2025-12-23T18:29:26.652765Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:26.652832Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:962:2814] 2025-12-23T18:29:26.652858Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2025-12-23T18:29:26.652884Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-12-23T18:29:26.652910Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:29:26.653196Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:26.653328Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:26.653557Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:26.653622Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:26.653699Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:26.653780Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:26.653850Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:29:26.653914Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:29:26.654103Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:897:2775], serverId# [1:911:2783], sessionId# [0:0:0] 2025-12-23T18:29:26.654179Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:29:26.654211Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:26.654240Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:29:26.654273Z no ... 037888 2025-12-23T18:29:49.100327Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2025-12-23T18:29:49.100390Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:29:49.100429Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3000 : 281474976715662] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1292:3021], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:29:49.100470Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 72075186224037890 {TEvReadSet step# 3000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 6} 2025-12-23T18:29:49.100496Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:29:49.100593Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2025-12-23T18:29:49.100638Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1292:3021] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 2 2025-12-23T18:29:49.100677Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1292:3021] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 2 2025-12-23T18:29:49.100718Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:165: [DistEraser] [3:1292:3021] Reply: txId# 281474976715662, status# OK, error# 2025-12-23T18:29:49.100980Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2025-12-23T18:29:49.101039Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-12-23T18:29:49.101282Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:29:49.101322Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:49.101362Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:29:49.101417Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:29:49.101577Z node 3 :TX_DATASHARD DEBUG: 
datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1286:3016], serverId# [3:1287:3017], sessionId# [0:0:0] 2025-12-23T18:29:49.102791Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:29:49.103188Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:29:49.103421Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:29:49.103479Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:49.103530Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037889 for WaitForStreamClearance 2025-12-23T18:29:49.103793Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:49.103865Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:29:49.104446Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 1 2025-12-23T18:29:49.104677Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037889, TxId: 281474976715664, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:29:49.104845Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037889, TxId: 281474976715664, PendingAcks: 0 2025-12-23T18:29:49.104911Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 0 2025-12-23T18:29:49.106967Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2025-12-23T18:29:49.107027Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715664, at: 72075186224037889 2025-12-23T18:29:49.107166Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:29:49.107215Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:49.107255Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037889 for ReadTableScan 2025-12-23T18:29:49.107375Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:49.107422Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:29:49.107461Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:29:49.140461Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:49.140810Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:29:49.140998Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: 
TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:49.141042Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:49.141086Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2025-12-23T18:29:49.141307Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:49.141366Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:49.141940Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2025-12-23T18:29:49.142179Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:29:49.142314Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2025-12-23T18:29:49.142362Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2025-12-23T18:29:49.144230Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:29:49.144290Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715665, at: 72075186224037888 2025-12-23T18:29:49.144481Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:49.144521Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:49.144562Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2025-12-23T18:29:49.144694Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:49.144750Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:49.144807Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:49.186694Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037890 2025-12-23T18:29:49.187159Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2025-12-23T18:29:49.187399Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-12-23T18:29:49.187454Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:49.187512Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715666] at 72075186224037890 for WaitForStreamClearance 2025-12-23T18:29:49.187777Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry 
run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:49.187860Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:29:49.188800Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2025-12-23T18:29:49.189076Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715666, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:29:49.189233Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2025-12-23T18:29:49.189294Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2025-12-23T18:29:49.191259Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2025-12-23T18:29:49.191317Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715666, at: 72075186224037890 2025-12-23T18:29:49.191439Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-12-23T18:29:49.191479Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:49.191522Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715666] at 72075186224037890 for ReadTableScan 2025-12-23T18:29:49.191659Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:49.191717Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:29:49.191772Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TestMalformedRequest::ContentLengthCorrect [GOOD] >> TUrlMatcherTest::MatchRecursive [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TestMalformedRequest::ContentLengthLower [GOOD] >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence >> TestMalformedRequest::CompressedGzipContentLengthLower [GOOD] >> TestMalformedRequest::CompressedGzipContentLengthNone >> TestMalformedRequest::CompressedDeflateContentLengthNone [GOOD] >> TestMalformedRequest::ContentLengthNone [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TUrlMatcherTest::MatchRecursive [GOOD] |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TestMalformedRequest::ContentLengthHigher >> TestMalformedRequest::CompressedGzipContentLengthHigher >> 
ReadOnlyVDisk::TestGarbageCollect [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthCorrect >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] |91.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |91.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |91.0%| [TA] {RESULT} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks+Volatile >> TTransferTests::Create_Disabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD] Test command err: RandomSeed# 2009035021306188235 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-12-23T18:29:42.448602Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-12-23T18:29:42.453935Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-12-23T18:29:43.566120Z 1 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] 2025-12-23T18:29:43.567173Z 2 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:711] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 
2025-12-23T18:29:44.393191Z 1 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] 2025-12-23T18:29:44.393465Z 2 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:711] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-12-23T18:29:44.919258Z 1 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] 2025-12-23T18:29:44.920716Z 2 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:711] 2025-12-23T18:29:44.922200Z 3 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:718] 2025-12-23T18:29:44.922589Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: [6489f60907c24165] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2025-12-23T18:29:44.927758Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.774 sample PartId# [1:1:4:0:0:131072:6] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.774 sample PartId# [1:1:4:0:0:131072:5] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.775 sample PartId# [1:1:4:0:0:131072:4] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.775 sample PartId# [1:1:4:0:0:131072:3] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.775 sample PartId# [1:1:4:0:0:131072:2] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPut{ TimestampMs# 0.776 sample PartId# [1:1:4:0:0:131072:1] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 1.595 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.669 sample PartId# [1:1:4:0:0:131072:6] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 3.394 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 3.461 sample PartId# [1:1:4:0:0:131072:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 3.903 
VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPutResult{ TimestampMs# 3.926 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVPutResult{ TimestampMs# 3.942 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 3.96 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVPutResult{ TimestampMs# 4.004 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } TEvVPutResult{ TimestampMs# 4.238 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} 2025-12-23T18:29:45.538430Z 1 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] 2025-12-23T18:29:45.538677Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:711] 2025-12-23T18:29:45.538762Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:718] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-12-23T18:29:46.622082Z 1 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] 2025-12-23T18:29:46.622393Z 2 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:711] 2025-12-23T18:29:46.622483Z 3 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:718] 2025-12-23T18:29:46.622549Z 4 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:725] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-12-23T18:29:47.051691Z 1 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] 2025-12-23T18:29:47.051947Z 2 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:711] 2025-12-23T18:29:47.052015Z 3 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:718] 2025-12-23T18:29:47.052074Z 4 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:725] 2025-12-23T18:29:47.052134Z 5 
00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:732] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-12-23T18:29:47.392947Z 1 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] 2025-12-23T18:29:47.393208Z 2 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:711] 2025-12-23T18:29:47.393274Z 3 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:718] 2025-12-23T18:29:47.393331Z 4 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:725] 2025-12-23T18:29:47.393388Z 5 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:732] 2025-12-23T18:29:47.393445Z 6 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:739] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-12-23T18:29:47.682038Z 1 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:704] 2025-12-23T18:29:47.682285Z 2 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:711] 2025-12-23T18:29:47.682344Z 3 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:718] 2025-12-23T18:29:47.682393Z 4 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:725] 2025-12-23T18:29:47.682442Z 5 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:732] 2025-12-23T18:29:47.682493Z 6 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:739] 2025-12-23T18:29:47.682541Z 7 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:746] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-12-23T18:29:47.970452Z 2 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5333:711] 2025-12-23T18:29:47.970547Z 3 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:718] 2025-12-23T18:29:47.970598Z 4 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:725] 2025-12-23T18:29:47.970651Z 5 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:732] 2025-12-23T18:29:47.970703Z 6 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:739] 2025-12-23T18:29:47.970780Z 7 
00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:746] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2025-12-23T18:29:48.298115Z 3 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5340:718] 2025-12-23T18:29:48.298223Z 4 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:725] 2025-12-23T18:29:48.298281Z 5 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:732] 2025-12-23T18:29:48.298337Z 6 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:739] 2025-12-23T18:29:48.298389Z 7 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:746] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2025-12-23T18:29:48.676257Z 4 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5347:725] 2025-12-23T18:29:48.676369Z 5 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:732] 2025-12-23T18:29:48.676429Z 6 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:739] 2025-12-23T18:29:48.676487Z 7 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:746] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-12-23T18:29:49.070280Z 5 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5354:732] 2025-12-23T18:29:49.070379Z 6 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:739] 2025-12-23T18:29:49.070434Z 7 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:746] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-12-23T18:29:50.153185Z 6 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5361:739] 2025-12-23T18:29:50.153264Z 7 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:746] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-12-23T18:29:50.639707Z 7 00h14m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5368:746] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk 
[82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ReadOnlyVDisk::TestReads [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TTransferTests::Create |91.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |91.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |91.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::ContentLengthNone [GOOD] Test command err: 2025-12-23T18:29:45.382805Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122374132938624:2263];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:45.382850Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c4f/r3tmp/tmpyrjXMy/pdisk_1.dat 2025-12-23T18:29:45.775842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:29:45.788596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:45.788708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:45.839369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:46.011337Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:46.033079Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 30125, node 1 2025-12-23T18:29:46.123255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:29:46.123281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:29:46.123287Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize 
from file: (empty maybe) 2025-12-23T18:29:46.123374Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:29:46.382754Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514585969 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:29:46.750829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:24487 2025-12-23T18:29:47.051590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:29:47.058235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:29:47.061247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:29:47.103771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.224267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:29:47.301717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:29:47.363924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-12-23T18:29:47.369565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.406811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.442918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.524398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.567944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.618403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:29:47.667767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:49.499088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122391312809222:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.499091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122391312809230:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.499237Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.499605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122391312809237:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.499678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.511681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:49.524118Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122391312809236:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:29:49.581970Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122391312809289:3106] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:50.008376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation ... Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:51.604386Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-12-23T18:29:51.604485Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 157ms 2025-12-23T18:29:51.604850Z node 1 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:51.608095Z node 1 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { 
Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:51.608123Z node 1 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 148ms 2025-12-23T18:29:51.608510Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:51.608540Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-12-23T18:29:51.608616Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 161ms 2025-12-23T18:29:51.609079Z node 1 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: 
"queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:51.753906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7587122399902744577:2437]: Pool not found 2025-12-23T18:29:51.754470Z node 1 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-12-23T18:29:52.051567Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7587122399902744583:2441]: Pool not found 2025-12-23T18:29:52.052007Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-12-23T18:29:52.055370Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7587122404197711988:2457], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:29:52.055864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122404197711987:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:52.055932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:52.056227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122404197711991:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:52.056299Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:52.335195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7587122404197711985:2455]: Pool not found 2025-12-23T18:29:52.335387Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2025-12-23T18:29:52.391086Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:56654) incoming connection opened 2025-12-23T18:29:52.391208Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:56654) -> (POST /Root) 2025-12-23T18:29:52.391572Z node 1 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [38ec:421a:87c:0:20ec:421a:87c:0] request [CreateQueue] url [/Root] database [/Root] requestId: bbfb5b21-7554ec0c-dc75798b-691a61cb 2025-12-23T18:29:52.392988Z node 1 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [bbfb5b21-7554ec0c-dc75798b-691a61cb] reply with status: BAD_REQUEST message: Empty body Http output full {"__type":"InvalidArgumentException","message":"Empty body"} 2025-12-23T18:29:52.393203Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:56654) <- (400 InvalidArgumentException, 60 bytes) 2025-12-23T18:29:52.393277Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:56654) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json 2025-12-23T18:29:52.393319Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:56654) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: bbfb5b21-7554ec0c-dc75798b-691a61cb Content-Type: application/x-amz-json-1.1 Content-Length: 60 2025-12-23T18:29:52.393723Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:56654) connection closed |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 3278794648429246688 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 
1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK 
Size# 131072 RequestedSize# 131072}} |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:26.324629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:26.451687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:26.466795Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:26.467272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:26.467368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:26.777947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:26.778125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:26.865939Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514562946646 != 1766514562946650 2025-12-23T18:29:26.881388Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:26.940322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:27.031069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006107/r3tmp/tmpOQ5KY4/pdisk_1.dat 2025-12-23T18:29:27.346608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:27.361790Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:27.479362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:27.524907Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:906:2780] 2025-12-23T18:29:27.525211Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:27.572227Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:27.572469Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:27.573927Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:27.573989Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:27.574044Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:27.574359Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:27.574617Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:910:2782] 2025-12-23T18:29:27.574798Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:27.585488Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:27.585735Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:947:2780] in generation 1 2025-12-23T18:29:27.586521Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:914:2786] 2025-12-23T18:29:27.586711Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:27.595819Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:27.595961Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:27.597338Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:29:27.597401Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:29:27.597453Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:29:27.597765Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:27.597958Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:27.598026Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:955:2782] in generation 1 2025-12-23T18:29:27.598362Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:27.598429Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:27.599671Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-12-23T18:29:27.599743Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2025-12-23T18:29:27.599840Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2025-12-23T18:29:27.600088Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:27.600172Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:27.600233Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:956:2786] in generation 1 2025-12-23T18:29:27.611582Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:27.652676Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:27.652894Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:27.652994Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:960:2812] 2025-12-23T18:29:27.653031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate 
change sender: at tablet: 72075186224037888 2025-12-23T18:29:27.653064Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:27.653133Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:27.653484Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:27.653524Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:29:27.653576Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:27.653630Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:961:2813] 2025-12-23T18:29:27.653651Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:29:27.653672Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:29:27.653693Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:29:27.653988Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:27.654018Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2025-12-23T18:29:27.654080Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:27.654142Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:962:2814] 2025-12-23T18:29:27.654169Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2025-12-23T18:29:27.654189Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-12-23T18:29:27.654208Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:29:27.654452Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:27.654553Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:27.654728Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:27.654779Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:27.654833Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:27.654902Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:27.654947Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:29:27.655000Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:29:27.655167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:897:2775], serverId# [1:911:2783], sessionId# [0:0:0] 2025-12-23T18:29:27.655208Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:29:27.655229Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:27.655250Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:29:27.655275Z no ... datashard.cpp:4020: Send RS 2 at 72075186224037891 from 72075186224037891 to 72075186224037893 txId 281474976715666 2025-12-23T18:29:52.455607Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2025-12-23T18:29:52.455710Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715666] from 72075186224037891 at tablet 72075186224037891 send result to client [3:1611:3235], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:29:52.455880Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037891, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 } 2025-12-23T18:29:52.455969Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-12-23T18:29:52.456434Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1611:3235] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037891, status# 2 2025-12-23T18:29:52.456780Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037893 step# 3500} 2025-12-23T18:29:52.456856Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2025-12-23T18:29:52.457123Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:215: TTxRequestChangeRecords Execute: at tablet# 72075186224037891 2025-12-23T18:29:52.457827Z node 3 :TX_DATASHARD DEBUG: datashard_change_sending.cpp:235: Send 3 change records: to# [3:1411:3117], at tablet# 72075186224037891 2025-12-23T18:29:52.457933Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:260: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037891 2025-12-23T18:29:52.458308Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037893 source 72075186224037891 dest 72075186224037893 producer 72075186224037891 txId 281474976715666 2025-12-23T18:29:52.458427Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037893 got read set: {TEvReadSet step# 3500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletProducer# 72075186224037891 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 
2025-12-23T18:29:52.458557Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037893 2025-12-23T18:29:52.458859Z node 3 :TX_DATASHARD DEBUG: datashard_change_receiving.cpp:468: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037891, generation# 1, at tablet# 72075186224037892 2025-12-23T18:29:52.459250Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2025-12-23T18:29:52.459310Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:29:52.459371Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [3500:281474976715666] at 72075186224037893 for LoadAndWaitInRS 2025-12-23T18:29:52.459874Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:52.472076Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2025-12-23T18:29:52.472174Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715666] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1611:3235], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:29:52.472273Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 72075186224037893 {TEvReadSet step# 3500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletConsumer# 72075186224037893 Flags# 0 Seqno# 2} 2025-12-23T18:29:52.472327Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-12-23T18:29:52.472436Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1611:3235] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037893, status# 2 2025-12-23T18:29:52.472497Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:165: [DistEraser] [3:1611:3235] Reply: txId# 281474976715666, status# OK, error# 2025-12-23T18:29:52.472672Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037891 2025-12-23T18:29:52.472704Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 4, at tablet: 72075186224037891 2025-12-23T18:29:52.472822Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 5, at tablet: 72075186224037891 2025-12-23T18:29:52.472869Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 6, at tablet: 72075186224037891 2025-12-23T18:29:52.473042Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037891 source 72075186224037891 dest 72075186224037893 consumer 72075186224037893 txId 281474976715666 2025-12-23T18:29:52.473266Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037891 2025-12-23T18:29:52.473311Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037891 2025-12-23T18:29:52.473578Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2025-12-23T18:29:52.473619Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 
planned 0 2025-12-23T18:29:52.473661Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2025-12-23T18:29:52.473770Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1606:3231], serverId# [3:1607:3232], sessionId# [0:0:0] 2025-12-23T18:29:52.475179Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037893 2025-12-23T18:29:52.475661Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037893 2025-12-23T18:29:52.475867Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2025-12-23T18:29:52.475917Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:52.475968Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715667] at 72075186224037893 for WaitForStreamClearance 2025-12-23T18:29:52.476212Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:52.476292Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2025-12-23T18:29:52.476918Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2025-12-23T18:29:52.477070Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2025-12-23T18:29:52.478883Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037893 2025-12-23T18:29:52.478946Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715667, at: 72075186224037893 2025-12-23T18:29:52.479188Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2025-12-23T18:29:52.479238Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:52.479290Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715667] at 72075186224037893 for ReadTableScan 2025-12-23T18:29:52.479456Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:52.479527Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2025-12-23T18:29:52.479591Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-12-23T18:29:52.480981Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037892 2025-12-23T18:29:52.481273Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037892 2025-12-23T18:29:52.481433Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2025-12-23T18:29:52.481467Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 
72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:52.481498Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715668] at 72075186224037892 for WaitForStreamClearance 2025-12-23T18:29:52.481660Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:52.481701Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2025-12-23T18:29:52.482211Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2025-12-23T18:29:52.482328Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2025-12-23T18:29:52.527899Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037892 2025-12-23T18:29:52.527986Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715668, at: 72075186224037892 2025-12-23T18:29:52.528164Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2025-12-23T18:29:52.528213Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:29:52.528263Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715668] at 72075186224037892 for ReadTableScan 2025-12-23T18:29:52.528417Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:52.528498Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2025-12-23T18:29:52.528549Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> DataShardWrite::UncommittedUpdateLockDeletedRowAboveSnapshot [GOOD] >> DataShardWrite::UncommittedUpdateLockUncommittedNewRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:29:38.014301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:29:38.014388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-12-23T18:29:38.014425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:29:38.014465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:29:38.014502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:29:38.014531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:29:38.014600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:29:38.014687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:29:38.015581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:29:38.015919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:29:38.107311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:29:38.107375Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:38.115003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:29:38.115240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:29:38.115374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:29:38.119424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:29:38.119616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:29:38.120379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:29:38.120601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:29:38.122848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:29:38.123045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:29:38.124296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:29:38.124358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:29:38.124475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2025-12-23T18:29:38.124524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:29:38.124566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:29:38.124697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:29:38.283514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.284656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.284812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.284893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.284965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.285069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.285148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.285264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.285344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.285437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.285513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.285594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.285686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.285776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.285883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... y parts: 3/4 2025-12-23T18:29:53.587981Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 3/4 2025-12-23T18:29:53.588053Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-12-23T18:29:53.588115Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-12-23T18:29:53.588843Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:53.588890Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2025-12-23T18:29:53.588976Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:749:2726] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-12-23T18:29:53.589307Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:29:53.589351Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:53.590241Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [7:128:2153], Recipient [7:128:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:29:53.590299Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:29:53.590376Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:29:53.590431Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:29:53.590762Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:29:53.590904Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:29:53.590949Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 4/4 2025-12-23T18:29:53.591017Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-12-23T18:29:53.591087Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 4/4 
2025-12-23T18:29:53.591120Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-12-23T18:29:53.591166Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-12-23T18:29:53.591258Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:820:2777] message: TxId: 102 2025-12-23T18:29:53.591325Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-12-23T18:29:53.591385Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:29:53.591437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:29:53.591589Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:29:53.591648Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2025-12-23T18:29:53.591676Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:1 2025-12-23T18:29:53.591716Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:29:53.591745Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:2 2025-12-23T18:29:53.591774Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:2 2025-12-23T18:29:53.591845Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:29:53.591889Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:3 2025-12-23T18:29:53.591916Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:3 2025-12-23T18:29:53.591980Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2025-12-23T18:29:53.592626Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435084, Sender [7:128:2153], Recipient [7:128:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-12-23T18:29:53.592689Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5438: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-12-23T18:29:53.592784Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:29:53.592855Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2025-12-23T18:29:53.592954Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:29:53.593867Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:29:53.593916Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:53.594025Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:29:53.594055Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:53.594105Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:29:53.594228Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:53.594292Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:29:53.594319Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:53.597775Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:29:53.597831Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:53.598232Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:53.600428Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:53.600558Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:820:2777] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-12-23T18:29:53.600829Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:29:53.600889Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:927:2876] 2025-12-23T18:29:53.601140Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [7:929:2878], Recipient [7:128:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:29:53.601191Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:29:53.601223Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 72057594046678944 2025-12-23T18:29:53.601885Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 
2025-12-23T18:29:53.604787Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [7:1009:2958], Recipient [7:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-12-23T18:29:53.604875Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:29:53.605059Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:29:53.605351Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 310us result status StatusPathDoesNotExist 2025-12-23T18:29:53.605581Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000039, drop txId: 102" Path: "/MyRoot/Table" PathId: 38 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sequence/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TTransferTests::Create_Disabled [GOOD] >> TTransferTests::CreateWithoutCredentials ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 3601359984485210540 |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple >> TTransferTests::Create [GOOD] >> TTransferTests::CreateSequential |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for 
TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:29:37.924016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:29:37.924124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:29:37.924172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:29:37.924210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:29:37.924252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:29:37.924284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:29:37.924344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:29:37.924450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:29:37.925374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:29:37.925670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:29:38.021973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:29:38.022057Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:38.029032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:29:38.029245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:29:38.029384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:29:38.032614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:29:38.032803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:29:38.033654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:29:38.033896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:29:38.035861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2025-12-23T18:29:38.036034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:29:38.037253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:29:38.037308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:29:38.037404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:29:38.037448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:29:38.037491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:29:38.037630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:29:38.195153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.196414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.196570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.196658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.196732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.196815Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.196896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.197004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.197086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.197194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.197262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.197333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.197458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.197547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:38.197633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Latency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1678 } } CommitVersion { Step: 5000050 TxId: 114 } debug: NTableState::TProposedWaitParts operationId# 114:0 2025-12-23T18:29:55.257685Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:29:55.263291Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [7:1462:3397], Recipient [7:135:2157]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:55.263358Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:55.263412Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:29:55.263690Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269551620, Sender [7:1397:3340], Recipient [7:135:2157]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 1397 RawX2: 30064774412 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-12-23T18:29:55.263739Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5279: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-12-23T18:29:55.263857Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1397 RawX2: 30064774412 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-12-23T18:29:55.263931Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-12-23T18:29:55.264126Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1397 RawX2: 30064774412 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-12-23T18:29:55.264210Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 
2025-12-23T18:29:55.264342Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 1397 RawX2: 30064774412 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-12-23T18:29:55.264450Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:29:55.264508Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2025-12-23T18:29:55.264573Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-12-23T18:29:55.264643Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 114:0 129 -> 240 2025-12-23T18:29:55.264861Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:29:55.266051Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:55.266263Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-12-23T18:29:55.266331Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:55.269533Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-12-23T18:29:55.269593Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:55.269740Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-12-23T18:29:55.269774Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:55.275218Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-12-23T18:29:55.275334Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:55.275411Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 114:0 2025-12-23T18:29:55.275556Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:1397:3340] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2025-12-23T18:29:55.276054Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [7:135:2157], Recipient [7:135:2157]: 
NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:29:55.276103Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:29:55.276167Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2025-12-23T18:29:55.276224Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 114:0 ProgressState 2025-12-23T18:29:55.276383Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:29:55.276423Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#114:0 progress is 1/1 2025-12-23T18:29:55.276486Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-12-23T18:29:55.276557Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#114:0 progress is 1/1 2025-12-23T18:29:55.276627Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-12-23T18:29:55.276685Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2025-12-23T18:29:55.276774Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:797:2763] message: TxId: 114 2025-12-23T18:29:55.276844Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-12-23T18:29:55.276914Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 114:0 2025-12-23T18:29:55.276976Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 114:0 2025-12-23T18:29:55.277153Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2025-12-23T18:29:55.280598Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:29:55.280722Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:797:2763] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2025-12-23T18:29:55.280966Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-12-23T18:29:55.281019Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1428:3363] 2025-12-23T18:29:55.281298Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [7:1430:3365], Recipient [7:135:2157]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:29:55.281338Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:29:55.281386Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 
72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2025-12-23T18:29:55.282681Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122432, Sender [7:1471:3406], Recipient [7:135:2157]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2025-12-23T18:29:55.282778Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5258: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-12-23T18:29:55.290047Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:29:55.290407Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 72057594046678944 2025-12-23T18:29:55.290983Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2025-12-23T18:29:55.291309Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:29:55.295041Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:29:55.295374Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2025-12-23T18:29:55.295451Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Query [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sequence/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |91.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |91.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |91.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TMLPWriterTests::WriteTwoMessage_OnePartition [GOOD] >> TMLPWriterTests::WriteTwoMessage_TwoPartition |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |91.1%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Table |91.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> TTransferTests::CreateWithoutCredentials [GOOD] >> TTransferTests::CreateWrongConfig >> DataShardTxOrder::RandomPointsAndRanges >> DataShardOutOfOrder::UncommittedReads >> TTransferTests::CreateSequential [GOOD] >> TTransferTests::CreateInParallel |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |91.1%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut >> TestMalformedRequest::CompressedGzipContentLengthNone [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite >> DataShardOutOfOrder::TestPlannedTimeoutSplit >> TTransferTests::CreateWrongConfig [GOOD] >> TTransferTests::CreateWrongBatchSize >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty >> TestMalformedRequest::CompressedGzipContentLengthCorrect [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink >> DataShardTxOrder::ImmediateBetweenOnline >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite >> DataShardTxOrder::RandomPoints_DelayRS >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate >> TTxDataShardPrefixKMeansScan::BuildToBuild [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuildWithOverlap >> DataShardWrite::DistributedInsertWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthNone [GOOD] Test command err: 2025-12-23T18:29:44.158455Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122368529322722:2134];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:44.158515Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:29:44.269942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c57/r3tmp/tmpGYei8H/pdisk_1.dat 2025-12-23T18:29:44.712819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:29:44.715927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:44.716031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:44.721640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8676, node 1 2025-12-23T18:29:44.891934Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:44.902064Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122368529322626:2081] 1766514584125943 != 1766514584125946 2025-12-23T18:29:45.019475Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:29:45.066669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:29:45.066700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:29:45.066708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:29:45.066819Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:29:45.230237Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5589 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514584856 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:29:45.558462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:29:45.568768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:5589 2025-12-23T18:29:45.867494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:29:45.881309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:29:45.887122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:29:45.910763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-12-23T18:29:45.919518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.066641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:29:46.138396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-12-23T18:29:46.144130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:29:46.211105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-12-23T18:29:46.217806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.259732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.322256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.366637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:29:46.402765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.465194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:46.499070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:48.550287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122385709193471:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:48.550823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122385709193483:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:48.551105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:48.551464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122385709193486:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:48.551550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:48.555939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:48.576542 ... ptional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:57.995550Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 25ms 2025-12-23T18:29:57.995718Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:57.995750Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-12-23T18:29:57.995820Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 20ms 2025-12-23T18:29:57.996006Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: 
SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:57.996056Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-12-23T18:29:57.996179Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 25ms 2025-12-23T18:29:57.996515Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:57.996961Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: 
"CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:58.185096Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122426178723262:2430]: Pool not found 2025-12-23T18:29:58.185827Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-12-23T18:29:58.426783Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122426178723271:2431]: Pool not found 2025-12-23T18:29:58.427422Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-12-23T18:29:58.430315Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122430473690715:2454], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:58.430395Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587122430473690716:2455], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:29:58.430446Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:58.435389Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122430473690719:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:58.435462Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:58.712976Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122430473690713:2453]: Pool not found 2025-12-23T18:29:58.713228Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2025-12-23T18:29:58.877753Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:58994) incoming connection opened 2025-12-23T18:29:58.877822Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:58994) -> (POST /Root) 2025-12-23T18:29:58.877977Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f84b:efc8:ad7b:0:e04b:efc8:ad7b:0] request [CreateQueue] url [/Root] database [/Root] requestId: 6050d8e1-db3403d-29dc27f0-609c1309 2025-12-23T18:29:58.878539Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [6050d8e1-db3403d-29dc27f0-609c1309] reply with status: BAD_REQUEST message: Empty body 2025-12-23T18:29:58.878728Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:58994) <- (400 InvalidArgumentException, 60 bytes) 2025-12-23T18:29:58.878827Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:58994) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: gzip 2025-12-23T18:29:58.878872Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:58994) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: 6050d8e1-db3403d-29dc27f0-609c1309 Content-Type: application/x-amz-json-1.1 Content-Length: 60 Http output full {"__type":"InvalidArgumentException","message":"Empty body"} 2025-12-23T18:29:58.882612Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:58994) connection closed |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> TTransferTests::CreateWrongBatchSize [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |91.1%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthCorrect [GOOD] Test command err: 2025-12-23T18:29:45.302622Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122371430257571:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:45.302682Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:29:45.361088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c51/r3tmp/tmpIUO8yF/pdisk_1.dat 2025-12-23T18:29:45.767251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-12-23T18:29:45.767355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:45.841927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:29:45.941909Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:45.978568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13323, node 1 2025-12-23T18:29:46.119925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:29:46.139334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:29:46.139359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:29:46.139367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:29:46.139448Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:29:46.352939Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7095 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514586032 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:29:46.526293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:7095 2025-12-23T18:29:46.820440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:29:46.850396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:29:46.852589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:29:46.884515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-12-23T18:29:46.891340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.067699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:29:47.116343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:29:47.186806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.230602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.294541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.332323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:29:47.407346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.457105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.514059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:49.347443Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122388610128287:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.347612Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.347939Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122388610128297:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.348424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122388610128301:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.348530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.352974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:49.370138Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122388610128302:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:29:49.468050Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122388610128355:3108] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:49.969673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, ... 0\013?j\003?l\000\t\351\000?n\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\320\003?\322\024BATCH_SIZE\003\022\000\003?p\000\003?r\000\006\004?v\003\203\014\000\003\203\014\000\003\003?x\000\277\007\003?D\000\002\001\000\006\002?\002\t\211\004\202\203\005@?@J\000\003?\372\022truncated\t\211\004?@?B\203\004F\000?\360\003?\001\002\002\002\001\000\003/" } Params { Bin: "\037\000\005\205\006\203\010\203\001H\203\001H(BATCH_SIZE(FROM_QUEUE$FROM_USER\003?\000\241\017\003?\002\000\003?\004\000\007/" } FlatMKQL: true } } ExecTimeoutPeriod: 60000 }. Params: {"FROM_USER": "", "FROM_QUEUE": "", "BATCH_SIZE": 1000} 2025-12-23T18:29:58.979032Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122427012211800:2438], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:58.979168Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587122427012211805:2441], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:29:58.981183Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:59.008057Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:59.008094Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 41ms 2025-12-23T18:29:59.008540Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: 
"Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:59.008577Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-12-23T18:29:59.008678Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 41ms 2025-12-23T18:29:59.009201Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:29:59.173153Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122427012211773:2434]: Pool not found 2025-12-23T18:29:59.173809Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-12-23T18:29:59.446037Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122427012211802:2439]: Pool not found 2025-12-23T18:29:59.446332Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 
2025-12-23T18:29:59.448997Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122431307179187:2455], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:59.449059Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587122431307179188:2456], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:29:59.449095Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:59.452734Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122431307179191:2457], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:59.452877Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:59.753148Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122431307179185:2454]: Pool not found 2025-12-23T18:29:59.753413Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2025-12-23T18:29:59.922137Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:41846) incoming connection opened 2025-12-23T18:29:59.922242Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:41846) -> (POST /Root, 44 bytes) Http output full {"__type":"InvalidArgumentException","message":"Can not parse request body from JSON"} 2025-12-23T18:29:59.922362Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [7894:594d:a7c:0:6094:594d:a7c:0] request [CreateQueue] url [/Root] database [/Root] requestId: 53e1e05b-a5243fe1-4b4790cd-80c628d 2025-12-23T18:29:59.923002Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [53e1e05b-a5243fe1-4b4790cd-80c628d] reply with status: BAD_REQUEST message: Can not parse request body from JSON 2025-12-23T18:29:59.923149Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:41846) <- (400 InvalidArgumentException, 86 bytes) 2025-12-23T18:29:59.923192Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:41846) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: gzip Content-Length: 44 nhV ,M-MKMURPrH-IU2j 2025-12-23T18:29:59.923217Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:41846) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: 53e1e05b-a5243fe1-4b4790cd-80c628d Content-Type: application/x-amz-json-1.1 Content-Length: 86 2025-12-23T18:29:59.924889Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:41846) connection closed |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> KeyValueGRPCService::SimpleCreateAlterDropVolume [GOOD] >> KeyValueGRPCService::SimpleListPartitions [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8 |91.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] Test command err: 2025-12-23T18:29:59.634301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:29:59.634366Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:59.636037Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:29:59.650795Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:29:59.651189Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:29:59.651491Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:59.700357Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:29:59.707717Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:59.707828Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:59.709571Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:29:59.709668Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:29:59.709754Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:29:59.710153Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:59.710247Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:59.710331Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:29:59.807446Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:59.845310Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:29:59.845515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:59.845631Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:29:59.845681Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:29:59.845723Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:29:59.845759Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:29:59.845921Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:59.845982Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:59.846306Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:29:59.846405Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:29:59.846561Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:29:59.846634Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:59.846687Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:29:59.846733Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:29:59.846783Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:29:59.846815Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:29:59.846859Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:29:59.846949Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:59.846983Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:59.847033Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:29:59.857532Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:29:59.857624Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:29:59.857768Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:29:59.857948Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:29:59.858006Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:29:59.858066Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:29:59.858128Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:29:59.858192Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:29:59.858233Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:29:59.858271Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying 
to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:29:59.858615Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:29:59.858675Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:29:59.858733Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:29:59.858792Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:29:59.858841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:29:59.858873Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:29:59.858909Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:29:59.858957Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:29:59.859000Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:29:59.875168Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:29:59.875243Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:29:59.875282Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:29:59.875326Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:29:59.875418Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:29:59.875879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:59.875923Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:59.875964Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:29:59.876070Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:29:59.876100Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:29:59.876262Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:29:59.876313Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:29:59.876367Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:29:59.876406Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:29:59.884430Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:29:59.884534Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:29:59.884760Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:59.884792Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:59.884854Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:29:59.884898Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:59.884928Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:29:59.884960Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:29:59.884998Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 30:01.247237Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:01.247382Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:30:01.247410Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:30:01.247433Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:30:01.247465Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:01.247494Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2025-12-23T18:30:01.247538Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:01.247599Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-12-23T18:30:01.247628Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:01.247756Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:01.247792Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2025-12-23T18:30:01.247818Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:01.247846Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 
9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-12-23T18:30:01.247861Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:01.247974Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:01.247986Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2025-12-23T18:30:01.248006Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:01.248035Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-12-23T18:30:01.248052Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:01.248179Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:01.248210Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:14] at 9437184 on unit FinishPropose 2025-12-23T18:30:01.248269Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-12-23T18:30:01.248365Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:01.248513Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:01.248540Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2025-12-23T18:30:01.248574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:01.248615Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-12-23T18:30:01.248639Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:01.248816Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:01.248867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2025-12-23T18:30:01.248972Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:01.249023Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-12-23T18:30:01.249060Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:01.249229Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:01.249256Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2025-12-23T18:30:01.249288Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:30:01.249314Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:01.249409Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:30:01.249437Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:01.249460Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2025-12-23T18:30:01.249492Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:01.249550Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-12-23T18:30:01.249593Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:01.249866Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-12-23T18:30:01.249919Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:01.249974Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2025-12-23T18:30:01.250097Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-12-23T18:30:01.250132Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:01.250159Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-12-23T18:30:01.250242Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-12-23T18:30:01.250270Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:01.250294Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-12-23T18:30:01.250353Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 
TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-12-23T18:30:01.250381Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:01.250429Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-12-23T18:30:01.250501Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-12-23T18:30:01.250529Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:01.250565Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-12-23T18:30:01.250656Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-12-23T18:30:01.250685Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:01.250711Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-12-23T18:30:01.250807Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-12-23T18:30:01.250835Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:01.250887Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-12-23T18:30:01.250950Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-12-23T18:30:01.250977Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:01.251019Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::ConsistencyLevel |91.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardWrite::UncommittedUpdateLockUncommittedNewRow [GOOD] >> DataShardWrite::UncommittedUpdateLockUncommittedDeleteRow |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore >> ReadOnlyVDisk::TestSync [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut >> TTransferTests::CreateWrongFlushIntervalIsBig |91.1%| [LD] {RESULT} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite >> TTransferTests::ConsistencyLevel [GOOD] >> TTabletResolver::TabletResolvePriority [GOOD] |91.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut >> TTransferTests::Alter >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] >> DataShardOutOfOrder::UncommittedReads [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::TabletResolvePriority [GOOD] >> DataShardOutOfOrder::LocksBrokenStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 8654524679033869264 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2025-12-23T18:29:42.269122Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:8825:947] 2025-12-23T18:29:42.269529Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8832:954] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-12-23T18:29:45.113321Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:8839:961] 2025-12-23T18:29:45.113517Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8832:954] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-12-23T18:29:50.882836Z 5 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8853:975] 2025-12-23T18:29:50.882971Z 4 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:8846:968] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-12-23T18:29:54.585613Z 6 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8860:982] 2025-12-23T18:29:54.585749Z 5 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8853:975] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-12-23T18:29:57.982448Z 7 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8867:989] 2025-12-23T18:29:57.982594Z 6 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8860:982] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting 
VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2025-12-23T18:30:01.370616Z 7 00h26m00.561536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8867:989] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 7 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/keyvalue/ut/unittest >> KeyValueGRPCService::SimpleListPartitions [GOOD] Test command err: 2025-12-23T18:25:27.804820Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587121264551056263:2088];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:25:27.811074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:25:28.025504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005959/r3tmp/tmpKaEkIz/pdisk_1.dat 2025-12-23T18:25:28.809692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:25:28.916592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:28.916667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:28.934734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:28.959182Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2025-12-23T18:25:28.959440Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:25:29.058828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:25:29.059299Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19874, node 1 2025-12-23T18:25:29.205842Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2025-12-23T18:25:29.290919Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2025-12-23T18:25:29.299217Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2025-12-23T18:25:29.314828Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2025-12-23T18:25:29.456417Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:425: Got proxy service configuration 2025-12-23T18:25:29.456447Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2025-12-23T18:25:29.456460Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2025-12-23T18:25:29.456501Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:425: Got proxy service configuration 2025-12-23T18:25:29.456507Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2025-12-23T18:25:29.456518Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2025-12-23T18:25:29.571134Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T18:25:29.571189Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T18:25:29.571198Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T18:25:29.571208Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T18:25:29.579365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:25:29.586819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:25:29.586848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:25:29.586952Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8791 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514329160 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:25:30.369630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:25:30.387145Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T18:25:30.387225Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T18:25:30.387234Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T18:25:30.387262Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T18:25:30.391016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:25:30.504045Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# MakeDirectoryRequest, traceId# 01kd67b2w38xmtrypz78eqx4j8, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:34698, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-12-23T18:25:30.514127Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T18:25:30.514199Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T18:25:30.514207Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T18:25:30.514239Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T18:25:30.534643Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T18:25:30.534725Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T18:25:30.534734Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T18:25:30.534786Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T18:25:30.566352Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# CreateVolumeRequest, traceId# 01kd67b2xk7pntf92w67h9pegh, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:34698, grpcInfo# grpc-c++/1.54.3 
grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-12-23T18:25:30.568967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSolomonVolume, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp:380) 2025-12-23T18:25:30.583195Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T18:25:30.583289Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T18:25:30.583307Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T18:25:30.583360Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T18:25:30.614848Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828672 event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:25:30.615611Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828673 event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:25:30.615996Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:65: KeyValue# 72075186224037888 TTxInit flat Execute 2025-12-23T18:25:30.616006Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:68: KeyValue# 72075186224037888 TTxInit flat BuildScheme 2025-12-23T18:25:30.616361Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:93: KeyValue# 72075186224037888 TTxInit flat Execute returns true 2025-12-23T18:25:30.616865Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828684 event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:25:30.626824Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 0 Channel# 2 Status# OK} 2025-12-23T18:25:30.626868Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 0 Channel# 2 Status# OK} 2025-12-23T18:25:30.626898Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 2 Status# OK} 2025-12-23T18:25:30.626914Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 2 Status# OK} 2025-12-23T18:25:30.627063Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 0 Channel# 2 Status# OK} 2025-12-23T18:25:30.627095Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 2 Status# OK} 2025-12-23T18:25:30.627131Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 
72075186224037888 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2025-12-23T18:25:30.627141Z node 1 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 Initi ... et::TEvTabletDead 2025-12-23T18:29:53.147621Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268828672 event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:29:53.151602Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268828673 event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:29:53.151684Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268828684 event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:29:53.151782Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828672 event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:29:53.153578Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828673 event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:29:53.153633Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828684 event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:29:53.172965Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:65: KeyValue# 72075186224037888 TTxInit flat Execute 2025-12-23T18:29:53.173025Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:78: KeyValue# 72075186224037888 TTxInit flat ReadDb Tree 2025-12-23T18:29:53.173521Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:93: KeyValue# 72075186224037888 TTxInit flat Execute returns true 2025-12-23T18:29:53.187383Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:65: KeyValue# 72075186224037889 TTxInit flat Execute 2025-12-23T18:29:53.187410Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:78: KeyValue# 72075186224037889 TTxInit flat ReadDb Tree 2025-12-23T18:29:53.187680Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:93: KeyValue# 72075186224037889 TTxInit flat Execute returns true 2025-12-23T18:29:53.188531Z node 65 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# DescribeVolumeRequest, traceId# 01kd67k3cv0f57tnvp2rygcp2k, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42016, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-12-23T18:29:53.197742Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2025-12-23T18:29:53.197897Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2025-12-23T18:29:53.197950Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2025-12-23T18:29:53.197979Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2025-12-23T18:29:53.198012Z node 65 :KEYVALUE 
DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2025-12-23T18:29:53.198039Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2025-12-23T18:29:53.198070Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2025-12-23T18:29:53.198099Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2025-12-23T18:29:53.198374Z node 65 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# DropVolumeRequest, traceId# 01kd67k3dd21frcrmgfxy3j34d, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42016, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-12-23T18:29:53.198623Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2025-12-23T18:29:53.198688Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2025-12-23T18:29:53.198728Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2025-12-23T18:29:53.198781Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2025-12-23T18:29:53.199098Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2025-12-23T18:29:53.199133Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2025-12-23T18:29:53.199221Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2025-12-23T18:29:53.199256Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 4 Marker# KV50 2025-12-23T18:29:53.199271Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2025-12-23T18:29:53.199281Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 3 Marker# KV50 
2025-12-23T18:29:53.199299Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2025-12-23T18:29:53.199308Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 2 Marker# KV50 2025-12-23T18:29:53.199319Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2025-12-23T18:29:53.199328Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 1 Marker# KV50 2025-12-23T18:29:53.199359Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:719: KeyValue# 72075186224037889 SendCutHistory Marker# KV51 2025-12-23T18:29:53.199473Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:324: KeyValue# 72075186224037889 TTxRegisterInitialGCCompletion Execute 2025-12-23T18:29:53.200145Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2025-12-23T18:29:53.200201Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2025-12-23T18:29:53.200244Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2025-12-23T18:29:53.200270Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2025-12-23T18:29:53.200340Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2025-12-23T18:29:53.200363Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 4 Marker# KV50 2025-12-23T18:29:53.200381Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2025-12-23T18:29:53.200389Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 3 Marker# KV50 2025-12-23T18:29:53.200401Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2025-12-23T18:29:53.200409Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 2 Marker# KV50 2025-12-23T18:29:53.200423Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2025-12-23T18:29:53.200431Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 1 Marker# KV50 2025-12-23T18:29:53.200452Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:719: KeyValue# 72075186224037888 SendCutHistory Marker# KV51 2025-12-23T18:29:53.200501Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:324: KeyValue# 72075186224037888 TTxRegisterInitialGCCompletion Execute 2025-12-23T18:29:53.209841Z node 65 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpDropSolomonVolume, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp:184) 2025-12-23T18:29:53.216178Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:332: KeyValue# 72075186224037888 TTxRegisterInitialGCCompletion Complete 2025-12-23T18:29:53.216280Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:332: KeyValue# 72075186224037889 TTxRegisterInitialGCCompletion Complete 2025-12-23T18:29:53.266291Z node 65 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ListDirectoryRequest, traceId# 01kd67k3fh4j4q59asz780bjgh, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:42016, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-12-23T18:29:53.272521Z node 65 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 65, TabletId: 72075186224037888 not found 2025-12-23T18:29:53.272749Z node 65 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 65, TabletId: 72075186224037889 not found 2025-12-23T18:29:53.272808Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:365: KeyValue# 72075186224037888 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-12-23T18:29:53.274924Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:365: KeyValue# 72075186224037889 OnTabletDead NKikimr::TEvTablet::TEvTabletDead >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> TTransferTests::Alter [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD] >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply >> TTabletPipeTest::TestConsumerSidePipeReset >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile |91.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:29:54.527471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:29:54.527577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:29:54.527619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:29:54.527660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:29:54.527700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:29:54.527737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:29:54.527831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:29:54.527928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:29:54.528817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:29:54.529148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:29:54.619391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:29:54.619463Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:54.626462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:29:54.626704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:29:54.626873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:29:54.630187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:29:54.630385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:29:54.631167Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:29:54.631445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:29:54.637167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:29:54.637391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:29:54.638832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:29:54.638928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:29:54.639096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:29:54.639155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:29:54.639210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:29:54.639354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:29:54.802587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.803878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.804041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.804180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:29:54.804296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.804374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.804466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.804581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.804689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.804744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.804785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.804834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.804922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.805014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:54.805086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 708:2694], Recipient [6:127:2152]: NKikimrTxColumnShard.TEvNotifyTxCompletionResult Origin: 72075186233409546 TxId: 101 2025-12-23T18:30:05.100435Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5290: StateWork, processing event TEvColumnShard::TEvNotifyTxCompletionResult 2025-12-23T18:30:05.100514Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6585: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-12-23T18:30:05.100576Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:30:05.100742Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-12-23T18:30:05.100916Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:30:05.102816Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:30:05.102881Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:30:05.102957Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 101:0 2025-12-23T18:30:05.103112Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [6:127:2152], Recipient [6:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:30:05.103152Z node 6 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:30:05.103208Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:30:05.103254Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:30:05.103403Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:30:05.103443Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:30:05.103488Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:30:05.103536Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:30:05.103580Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:30:05.103632Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-12-23T18:30:05.103719Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:739:2716] message: TxId: 101 2025-12-23T18:30:05.103783Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:30:05.103832Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:30:05.103872Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:30:05.104022Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:30:05.106421Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:30:05.106540Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [6:739:2716] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 101 at schemeshard: 72057594046678944 2025-12-23T18:30:05.106760Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:30:05.106818Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:740:2717] 2025-12-23T18:30:05.107082Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [6:742:2719], Recipient [6:127:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:30:05.107129Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:30:05.107168Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-12-23T18:30:05.107855Z 
node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122432, Sender [6:785:2754], Recipient [6:127:2152]: {TEvModifySchemeTransaction txid# 102 TabletId# 72057594046678944} 2025-12-23T18:30:05.107909Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5258: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-12-23T18:30:05.110994Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:30:05.111352Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_replication.cpp:361: [72057594046678944] TCreateReplication Propose: opId# 102:0, path# /MyRoot/Transfer 2025-12-23T18:30:05.111453Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, at schemeshard: 72057594046678944 2025-12-23T18:30:05.111702Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:30:05.114148Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 hours" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:30:05.114484Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2025-12-23T18:30:05.114550Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:30:05.114931Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:30:05.114988Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:30:05.115298Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [6:791:2760], Recipient [6:127:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:05.115352Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:05.115409Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:30:05.115580Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124996, Sender [6:739:2716], Recipient [6:127:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-12-23T18:30:05.115621Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-12-23T18:30:05.115707Z node 6 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:30:05.115812Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:30:05.115857Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:789:2758] 2025-12-23T18:30:05.116031Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [6:791:2760], Recipient [6:127:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:30:05.116072Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:30:05.116113Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-12-23T18:30:05.116470Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [6:792:2761], Recipient [6:127:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-12-23T18:30:05.116522Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:30:05.116626Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:30:05.116807Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 192us result status StatusPathDoesNotExist 2025-12-23T18:30:05.116972Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular |91.1%| [TM] {BAZEL_UPLOAD} ydb/services/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2025-12-23T18:30:00.615520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:00.615581Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2025-12-23T18:30:00.617525Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:00.632417Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:00.632757Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:30:00.633023Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:00.678642Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:00.686941Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:00.687033Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:00.688671Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:30:00.688813Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:30:00.688876Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:30:00.689202Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:00.689274Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:00.689350Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:30:00.779552Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:00.878977Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:30:00.879188Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:00.879288Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:30:00.879325Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:30:00.879358Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:30:00.879394Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:00.879563Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:00.879624Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:00.880017Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:30:00.880130Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:30:00.880299Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute 
at 9437184 2025-12-23T18:30:00.880351Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:00.880393Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:30:00.880432Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:30:00.880476Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:30:00.880508Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:30:00.880545Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:00.880638Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:00.880672Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:00.880733Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:30:00.905245Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:30:00.905316Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:00.905442Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:30:00.905604Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:30:00.905646Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:30:00.905707Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:30:00.905750Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:30:00.905795Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:30:00.905828Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:30:00.905860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:30:00.906136Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:00.906212Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:30:00.906265Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:30:00.906299Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:30:00.906339Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:30:00.906367Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:30:00.906402Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:30:00.906451Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:30:00.906476Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:00.924325Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:30:00.924398Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:30:00.924437Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:30:00.924477Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:30:00.924555Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:30:00.925150Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:00.925213Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:00.925273Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:30:00.925438Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:30:00.925476Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:30:00.925627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:30:00.925684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:30:00.925736Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:30:00.925782Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:30:00.929553Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:30:00.929626Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:00.937434Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:00.937507Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:00.937625Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:30:00.937676Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:30:00.937714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:30:00.937765Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:30:00.937824Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... SHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.728774Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-12-23T18:30:06.728905Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-12-23T18:30:06.728935Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.728958Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-12-23T18:30:06.729005Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-12-23T18:30:06.729030Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.729052Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-12-23T18:30:06.729113Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [1:242:2234], Recipient [1:464:2406]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-12-23T18:30:06.729154Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-12-23T18:30:06.729186Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2025-12-23T18:30:06.729252Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-12-23T18:30:06.729307Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] 
from=9437184 to=9437186origin=9437184 2025-12-23T18:30:06.729372Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:30:06.729470Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-12-23T18:30:06.729508Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.729544Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-12-23T18:30:06.729625Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:464:2406], Recipient [1:464:2406]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:06.729652Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:06.729688Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2025-12-23T18:30:06.729720Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:30:06.729752Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-12-23T18:30:06.729780Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-12-23T18:30:06.729814Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2025-12-23T18:30:06.729849Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-12-23T18:30:06.729888Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2025-12-23T18:30:06.729911Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit BlockFailPoint 2025-12-23T18:30:06.729947Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2025-12-23T18:30:06.729970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2025-12-23T18:30:06.729992Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-12-23T18:30:06.730012Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-12-23T18:30:06.730555Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-12-23T18:30:06.730606Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:30:06.730666Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-12-23T18:30:06.730698Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-12-23T18:30:06.730733Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-12-23T18:30:06.730780Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-12-23T18:30:06.730970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-12-23T18:30:06.730995Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-12-23T18:30:06.731034Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-12-23T18:30:06.731060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-12-23T18:30:06.731116Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2025-12-23T18:30:06.731139Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-12-23T18:30:06.731162Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2025-12-23T18:30:06.731187Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:06.731219Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2025-12-23T18:30:06.731251Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2025-12-23T18:30:06.731276Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2025-12-23T18:30:06.731467Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-12-23T18:30:06.731499Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.731526Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-12-23T18:30:06.731622Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-12-23T18:30:06.731652Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.731674Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-12-23T18:30:06.731760Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient 
[1:242:2234]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-12-23T18:30:06.731785Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.731807Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-12-23T18:30:06.731863Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:30:06.731896Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.731931Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-12-23T18:30:06.747328Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:06.747396Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-12-23T18:30:06.747453Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:06.747525Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:30:06.747580Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:06.747876Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:30:06.747918Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.747955Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:29:54.849215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:29:54.849329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:29:54.849370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:29:54.849410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:29:54.849451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:29:54.849480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:29:54.849579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:29:54.849672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:29:54.850709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:29:54.851102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:29:54.952540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:29:54.952615Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:54.959836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:29:54.960171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:29:54.960348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:29:54.964377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:29:54.964654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:29:54.967763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:29:54.968151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:29:54.970975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:29:54.971222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:29:54.972580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:29:54.972664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:29:54.972823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:29:54.972894Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:29:54.972951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:29:54.973136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:29:55.143268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.144577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.144738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.144839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.144920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.145017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.145107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.145205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.145283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.145382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.145463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.145557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.145653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.145746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:29:55.145845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... vProgressOperation 2025-12-23T18:30:06.468014Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:30:06.468067Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:196: [72057594046678944] TAlterReplication TPropose opId# 104:0 ProgressState 2025-12-23T18:30:06.468127Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:30:06.468185Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-12-23T18:30:06.468362Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:30:06.477200Z node 6 :TX_PROXY INFO: describe.cpp:354: Actor# [6:955:2898] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2025-12-23T18:30:06.477753Z node 6 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-12-23T18:30:06.477825Z node 6 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /MyRoot1/Table, status# SCHEME_ERROR, issues# , iteration# 0 2025-12-23T18:30:06.478030Z node 6 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186233409547] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2025-12-23T18:30:06.478208Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186233409547][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2025-12-23T18:30:06.478289Z node 6 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186233409547][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /MyRoot1/Table: SCHEME_ERROR () 2025-12-23T18:30:06.478617Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:30:06.478668Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-12-23T18:30:06.478804Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 104, partId: 
4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2025-12-23T18:30:06.479363Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269287424, Sender [6:139:2160], Recipient [6:271:2270] 2025-12-23T18:30:06.479430Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5267: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:30:06.479543Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:06.479703Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 25769805936 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:30:06.479790Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:210: [72057594046678944] TAlterReplication TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000041 2025-12-23T18:30:06.479945Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2025-12-23T18:30:06.480197Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:30:06.480294Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:30:06.480382Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:706: Ack tablet strongly msg opId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 2025-12-23T18:30:06.495966Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186233409547][TxDiscoveryTargetsResult] Complete 2025-12-23T18:30:06.496419Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:30:06.496462Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:394: Ack coordinator stepId#5000041 first txId#104 countTxs#1 2025-12-23T18:30:06.496602Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:364: Ack mediator stepId#5000041 2025-12-23T18:30:06.496646Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 104:0 FAKE_COORDINATOR: Erasing txId 104 2025-12-23T18:30:06.496862Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [6:127:2152], Recipient [6:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:30:06.496896Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:30:06.496954Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:30:06.496999Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:30:06.497229Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:06.497273Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:209:2210], at schemeshard: 72057594046678944, txId: 104, path id: 39 2025-12-23T18:30:06.497647Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:30:06.497710Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:30:06.497822Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:30:06.497863Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:30:06.497898Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:30:06.497941Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:30:06.497973Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:30:06.498013Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-12-23T18:30:06.498053Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:30:06.498092Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:30:06.498126Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:30:06.498893Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:30:06.498983Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-12-23T18:30:06.499031Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2025-12-23T18:30:06.499858Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 274137603, Sender [6:209:2210], Recipient [6:127:2152]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Version: 4 } 2025-12-23T18:30:06.499918Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5331: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-12-23T18:30:06.500037Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:30:06.500165Z node 6 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:30:06.500217Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:30:06.500268Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2025-12-23T18:30:06.500340Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:30:06.500454Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:30:06.500516Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:30:06.503921Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:30:06.505646Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:30:06.505725Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] Test command err: 2025-12-23T18:30:00.256289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:00.256351Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:00.257835Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:00.278811Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:00.279199Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:30:00.279473Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:00.331387Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:00.340211Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:00.340325Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:00.342453Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:30:00.342572Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 
2025-12-23T18:30:00.342652Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:30:00.343112Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:00.343202Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:00.343297Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:30:00.443484Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:00.483376Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:30:00.483567Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:00.483738Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:30:00.483783Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:30:00.483826Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:30:00.483875Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:00.484042Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:00.484109Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:00.484482Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:30:00.484601Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:30:00.484760Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:30:00.484834Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:00.484884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:30:00.484923Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:30:00.484956Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:30:00.484987Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:30:00.485030Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:00.485137Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:00.485190Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:00.485236Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# 
[1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:30:00.496660Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:30:00.496749Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:00.496881Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:30:00.497078Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:30:00.497130Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:30:00.497189Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:30:00.497233Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:30:00.497279Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:30:00.497316Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:30:00.497351Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:30:00.497666Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:00.497720Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:30:00.497773Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:30:00.497808Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:30:00.497848Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:30:00.497875Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:30:00.497907Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:30:00.497946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:30:00.497974Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:00.518663Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:30:00.518772Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:30:00.518818Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:30:00.518867Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:30:00.518942Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:30:00.519489Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:00.519545Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:00.519600Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:30:00.519727Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:30:00.519763Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:30:00.519911Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:30:00.519972Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:30:00.520028Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:30:00.520065Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:30:00.526258Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:30:00.526348Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:00.526595Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:00.526722Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:00.526814Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:30:00.526867Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:30:00.526931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:30:00.526975Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:30:00.527030Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
::Complete at 9437186 2025-12-23T18:30:06.709814Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-12-23T18:30:06.709847Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T18:30:06.709883Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:30:06.709913Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:06.710013Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:06.710048Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-12-23T18:30:06.710102Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T18:30:06.710167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:30:06.710223Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:06.710549Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-12-23T18:30:06.710598Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.710636Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-12-23T18:30:06.711105Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-12-23T18:30:06.711146Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.711176Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-12-23T18:30:06.711298Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-12-23T18:30:06.711330Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.711357Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-12-23T18:30:06.711456Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], 
Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-12-23T18:30:06.711490Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.711530Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-12-23T18:30:06.711625Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-12-23T18:30:06.711682Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.711721Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-12-23T18:30:06.711799Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:30:06.711833Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.711859Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-12-23T18:30:06.711939Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:30:06.711966Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.711993Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-12-23T18:30:06.712066Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:06.712123Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2025-12-23T18:30:06.712197Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T18:30:06.712286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-12-23T18:30:06.712331Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:06.712480Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:30:06.712511Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:30:06.712545Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:30:06.712582Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:06.712610Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-12-23T18:30:06.712650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T18:30:06.712699Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-12-23T18:30:06.712726Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:06.712872Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:06.712923Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-12-23T18:30:06.712965Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T18:30:06.713013Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-12-23T18:30:06.713041Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:06.713170Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:06.713209Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-12-23T18:30:06.713256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T18:30:06.713306Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-12-23T18:30:06.713337Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:06.713552Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-12-23T18:30:06.713592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.713651Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-12-23T18:30:06.713788Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-12-23T18:30:06.713820Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.713848Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-12-23T18:30:06.713932Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-12-23T18:30:06.713963Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.713990Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-12-23T18:30:06.714086Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-12-23T18:30:06.714122Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.714150Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:30:01.854472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:01.995932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:30:02.008933Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:02.009779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:02.009884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:02.407127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:02.407272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:02.492012Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514598455390 != 1766514598455394 2025-12-23T18:30:02.504646Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:02.553972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:02.661625Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004018/r3tmp/tmpNsYLlT/pdisk_1.dat 2025-12-23T18:30:02.972908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:02.987074Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:03.093840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:03.126642Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:03.127575Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:03.127886Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:30:03.128147Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:03.179584Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:03.180472Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:03.180609Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:03.182431Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:30:03.182524Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:30:03.182610Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:30:03.183046Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:03.183202Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:03.183307Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:30:03.183789Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:03.225647Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:30:03.225906Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:03.226083Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:30:03.226133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:30:03.226172Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:30:03.226253Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:30:03.226548Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:03.226616Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:03.226994Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:30:03.227130Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:30:03.227652Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:30:03.227707Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:03.227775Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:30:03.227825Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:30:03.227867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:30:03.227906Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:30:03.227985Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:30:03.228103Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:03.228141Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:03.228194Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:30:03.228312Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:30:03.228365Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:03.228492Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:30:03.228783Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:30:03.228862Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:30:03.228982Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:30:03.229043Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:30:03.229095Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:30:03.229168Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:30:03.229220Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:30:03.229593Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:03.229651Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:30:03.229694Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:30:03.229736Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:30:03.229796Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:30:03.229838Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:30:03.229875Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:30:03.229947Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:30:03.229976Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:03.231178Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:30:03.231244Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:30:03.231283Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0 ... 18:30:04.652842Z node 1 :TX_DATASHARD TRACE: datashard_write_operation.cpp:256: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-12-23T18:30:04.652888Z node 1 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:38:1] 2025-12-23T18:30:04.652972Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-12-23T18:30:04.653028Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:30:04.653067Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-12-23T18:30:04.653108Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:04.653153Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:30:04.653198Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2025-12-23T18:30:04.653260Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2025-12-23T18:30:04.653300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:30:04.653334Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:04.653364Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2025-12-23T18:30:04.653387Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2025-12-23T18:30:04.653424Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:30:04.653451Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2025-12-23T18:30:04.653474Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-12-23T18:30:04.653507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-12-23T18:30:04.653551Z node 1 :TX_DATASHARD 
DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2025-12-23T18:30:04.653602Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2025-12-23T18:30:04.653721Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:6] at 72075186224037888, row count=1 2025-12-23T18:30:04.653774Z node 1 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2025-12-23T18:30:04.653836Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:30:04.653863Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-12-23T18:30:04.653898Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-12-23T18:30:04.653929Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:30:04.653987Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-12-23T18:30:04.654016Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-12-23T18:30:04.654071Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:30:04.654104Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:30:04.654149Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:30:04.654171Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:30:04.654220Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished ... 
blocked commit for tablet 72075186224037888 2025-12-23T18:30:04.783223Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:1175:2957], Recipient [1:885:2764]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-12-23T18:30:04.783417Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:30:04.783479Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2025-12-23T18:30:04.783522Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v3500/18446744073709551615 2025-12-23T18:30:04.783583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-12-23T18:30:04.783671Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:30:04.783709Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:30:04.783772Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:04.783811Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:30:04.783861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2025-12-23T18:30:04.783907Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:30:04.783941Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:04.783975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:30:04.784003Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:30:04.784119Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-12-23T18:30:04.784357Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is DelayComplete 2025-12-23T18:30:04.784394Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:30:04.784430Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:30:04.784464Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 
72075186224037888 on unit CompletedOperations 2025-12-23T18:30:04.784511Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:30:04.784546Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:30:04.784596Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2025-12-23T18:30:04.784646Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:30:04.869464Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [1:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 4000 2025-12-23T18:30:04.869557Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [1:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} 2025-12-23T18:30:05.007475Z node 1 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-12-23T18:30:05.007555Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:30:05.007607Z node 1 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 1000 ms, status: STATUS_COMPLETED 2025-12-23T18:30:05.007702Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:30:05.007808Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:30:05.007860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:30:05.007915Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[1:1175:2957], 0} after executionsCount# 1 2025-12-23T18:30:05.007971Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[1:1175:2957], 0} sends rowCount# 4, bytes# 128, quota rows left# 997, quota bytes left# 5242752, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:30:05.008097Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[1:1175:2957], 0} finished in read 2025-12-23T18:30:05.010612Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:1175:2957], Recipient [1:885:2764]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:30:05.010702Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> TTabletPipeTest::TestConnectReject >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 
[GOOD] Test command err: 2025-12-23T18:30:00.051416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:00.051481Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:00.056269Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:00.082320Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:00.082734Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:30:00.083047Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:00.135895Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:00.151058Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:00.151175Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:00.153150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:30:00.153255Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:30:00.153314Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:30:00.153709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:00.153804Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:00.153906Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:30:00.243642Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:00.285609Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:30:00.285822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:00.285938Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:30:00.285978Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:30:00.286017Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:30:00.286061Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:00.286303Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:00.286375Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:00.286718Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 
2025-12-23T18:30:00.286865Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:30:00.287026Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:30:00.287087Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:00.287138Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:30:00.287190Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:30:00.287225Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:30:00.287259Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:30:00.287303Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:00.287384Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:00.287417Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:00.287482Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:30:00.295634Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:30:00.295723Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:00.295857Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:30:00.296030Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:30:00.296076Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:30:00.296152Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:30:00.296207Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:30:00.296252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:30:00.296289Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:30:00.296325Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:30:00.296658Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 
9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:00.296724Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:30:00.296769Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:30:00.296806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:30:00.296852Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:30:00.296883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:30:00.296923Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:30:00.296958Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:30:00.297001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:00.309454Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:30:00.310274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:30:00.310314Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:30:00.310358Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:30:00.310441Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:30:00.310994Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:00.311056Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:00.311157Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:30:00.311281Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-12-23T18:30:00.311312Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:30:00.311431Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-12-23T18:30:00.311488Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2025-12-23T18:30:00.311539Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:30:00.311584Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:30:00.320120Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned 
transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:30:00.320211Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:00.320461Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:00.320500Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:00.320570Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:30:00.320620Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:30:00.320658Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:30:00.320723Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-12-23T18:30:00.320760Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-12-23T18:30:00. ... cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-12-23T18:30:06.405310Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:06.405431Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:30:06.405458Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:06.405484Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:137] at 9437186 on unit CompleteOperation 2025-12-23T18:30:06.405516Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 137] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T18:30:06.405562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-12-23T18:30:06.405587Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:06.405676Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:30:06.405698Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:30:06.405719Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:06.405763Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:140] at 9437186 on unit CompleteOperation 2025-12-23T18:30:06.405812Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 140] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T18:30:06.406088Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 
TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-12-23T18:30:06.406155Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:06.406296Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:30:06.406584Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:30:06.406626Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:06.406657Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:143] at 9437186 on unit CompleteOperation 2025-12-23T18:30:06.406901Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 143] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T18:30:06.406961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-12-23T18:30:06.407010Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:06.407171Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:30:06.407213Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:06.407240Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:146] at 9437186 on unit CompleteOperation 2025-12-23T18:30:06.407276Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 146] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T18:30:06.407330Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-12-23T18:30:06.407373Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:06.407476Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:06.407505Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:149] at 9437186 on unit CompleteOperation 2025-12-23T18:30:06.407545Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T18:30:06.407583Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:30:06.407605Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:06.407709Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:06.407743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:152] at 9437186 on unit CompleteOperation 2025-12-23T18:30:06.407776Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 152] from 9437186 at tablet 9437186 send result 
to client [1:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T18:30:06.407815Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:30:06.407840Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:06.408102Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-12-23T18:30:06.408142Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.408175Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-12-23T18:30:06.408282Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-12-23T18:30:06.408308Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.408333Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-12-23T18:30:06.408466Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-12-23T18:30:06.408510Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.408558Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-12-23T18:30:06.408673Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-12-23T18:30:06.408702Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.408727Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-12-23T18:30:06.408796Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-12-23T18:30:06.408830Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.408888Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-12-23T18:30:06.408940Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 6 txid# 154 
TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-12-23T18:30:06.408962Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.408984Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-12-23T18:30:06.409089Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-12-23T18:30:06.409133Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.409219Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-12-23T18:30:06.409299Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-12-23T18:30:06.409331Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.409360Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-12-23T18:30:06.409435Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:30:06.409465Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.409491Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-12-23T18:30:06.409576Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:30:06.409607Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:06.409632Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_transfer/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_transfer/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |91.1%| [TM] {RESULT} ydb/services/keyvalue/ut/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TTabletPipeTest::TestConnectReject [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.1%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestConnectReject [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] Test command err: 2025-12-23T18:30:02.990984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:02.991052Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:02.992720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:03.012671Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:03.013077Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:30:03.013346Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:03.067890Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:03.081852Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:03.081958Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:03.084075Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:30:03.084183Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:30:03.084256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:30:03.084719Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:03.084802Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:03.084896Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:30:03.169642Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:03.200575Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:30:03.200792Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:03.200913Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:30:03.200955Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:30:03.201003Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:30:03.201049Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:03.201222Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:03.201288Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:03.201754Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:30:03.201912Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:30:03.202081Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:30:03.202142Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:03.202204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:30:03.202256Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:30:03.202293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:30:03.202332Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:30:03.202382Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:03.202485Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:03.202525Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:03.202593Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:30:03.218088Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:30:03.218201Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:03.218354Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:30:03.218546Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:30:03.218598Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:30:03.218674Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:30:03.218725Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:30:03.218916Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:30:03.218962Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:30:03.219002Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:30:03.219347Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:03.219423Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:30:03.219473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:30:03.219515Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:30:03.219566Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:30:03.219596Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:30:03.219653Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:30:03.219695Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:30:03.219725Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:03.234788Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:30:03.234876Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:30:03.234924Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:30:03.234979Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:30:03.235067Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:30:03.235658Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:03.235719Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:03.235787Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:30:03.235929Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:30:03.235979Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:30:03.236140Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:30:03.236197Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:30:03.236252Z 
node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:30:03.236300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:30:03.239672Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:30:03.239753Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:03.239955Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:03.240001Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:03.240059Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:30:03.240095Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:30:03.240127Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:30:03.240164Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:30:03.240218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-12-23T18:30:08.923512Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:08.923546Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-12-23T18:30:08.924049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-12-23T18:30:08.924097Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:08.924129Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-12-23T18:30:08.924324Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:08.924379Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2025-12-23T18:30:08.924445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:08.924509Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 
TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-12-23T18:30:08.924545Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:08.924699Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:30:08.924734Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:08.924760Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2025-12-23T18:30:08.924808Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:08.924881Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-12-23T18:30:08.924928Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:08.925050Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:30:08.925075Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:30:08.925105Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:30:08.925146Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:08.925174Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-12-23T18:30:08.925212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:08.925254Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-12-23T18:30:08.925280Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:08.925381Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:30:08.925426Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:08.925455Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-12-23T18:30:08.925521Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:08.925566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-12-23T18:30:08.925601Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:08.925719Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 
2025-12-23T18:30:08.925751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-12-23T18:30:08.925787Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:08.925829Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-12-23T18:30:08.925855Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:08.925968Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:08.925995Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-12-23T18:30:08.926027Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:08.926088Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:30:08.926123Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:08.926250Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:30:08.926278Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-12-23T18:30:08.926329Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:08.926378Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:30:08.926410Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:30:08.926648Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-12-23T18:30:08.926683Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:08.926714Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-12-23T18:30:08.926860Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-12-23T18:30:08.926894Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:08.926936Z node 1 
:TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-12-23T18:30:08.927050Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-12-23T18:30:08.927082Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:08.927109Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-12-23T18:30:08.927221Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-12-23T18:30:08.927275Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:08.927306Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-12-23T18:30:08.927390Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-12-23T18:30:08.927417Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:08.927442Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-12-23T18:30:08.927510Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:30:08.927537Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:08.927580Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-12-23T18:30:08.927663Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:30:08.927690Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:08.927713Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TTabletPipeTest::TestShutdown >> TInterconnectTest::OldFormat >> TInterconnectTest::TestNotifyUndelivered >> GroupWriteTest::WriteHardRateDispatcher >> TResourceBrokerInstant::TestMerge [GOOD] >> TTabletCountersAggregator::ColumnShardCounters >> TabletState::ExplicitUnsubscribe |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |91.1%| [TM] {BAZEL_UPLOAD} 
ydb/core/load_test/ut/unittest >> DataShardWrite::UncommittedUpdateLockUncommittedDeleteRow [GOOD] >> DataShardWrite::LocksBrokenStats >> TTabletPipeTest::TestShutdown [GOOD] >> TTabletPipeTest::TestTwoNodes |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |91.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart >> TInterconnectTest::OldFormat [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |91.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build >> DataShardOutOfOrder::LocksBrokenStats [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuildWithOverlap [GOOD] >> TTxDataShardRecomputeKMeansScan::BadRequest >> GroupWriteTest::Simple >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld >> TTabletPipeTest::TestTwoNodes [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes >> GroupWriteTest::SimpleRdma >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TabletState::ExplicitUnsubscribe [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::ExplicitUnsubscribe [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::LocksBrokenStats [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:30:02.269192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:02.482590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:30:02.532313Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:02.532813Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:02.532911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:02.873379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:02.873529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:02.955318Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514598985727 != 1766514598985731 2025-12-23T18:30:02.968279Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:03.015639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:03.114534Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ff1/r3tmp/tmp50enCU/pdisk_1.dat 2025-12-23T18:30:03.434526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:03.449859Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:03.562507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:03.597583Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:03.598589Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:03.598890Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:30:03.599139Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:03.646407Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:03.647144Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:03.647261Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:03.648963Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:30:03.649057Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:30:03.649132Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:30:03.649513Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:03.649664Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:03.649799Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:30:03.660487Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:03.693791Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:30:03.694106Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:03.694262Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:30:03.694309Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:30:03.694360Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:30:03.694401Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:30:03.694707Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:03.694803Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:03.695167Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:30:03.695335Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:30:03.695809Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:30:03.695870Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:03.695947Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:30:03.695984Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:30:03.696025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:30:03.696081Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:30:03.696133Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:30:03.696229Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:03.698835Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:03.698929Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:30:03.699108Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:30:03.699155Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:03.699313Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:30:03.699612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:30:03.699689Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:30:03.699805Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:30:03.699871Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:30:03.699949Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:30:03.700004Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:30:03.700062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:30:03.700434Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:03.700497Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:30:03.700545Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:30:03.700580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:30:03.700641Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:30:03.700671Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:30:03.700711Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:30:03.700748Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:30:03.700777Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:03.702704Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T18:30:03.702799Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... 116Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-12-23T18:30:11.073143Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2025-12-23T18:30:11.073170Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:11.073195Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:30:11.073231Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/18446744073709551615 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2025-12-23T18:30:11.073274Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715664] at 72075186224037888 2025-12-23T18:30:11.073303Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-12-23T18:30:11.073321Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:11.073336Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit BlockFailPoint 2025-12-23T18:30:11.073351Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit BlockFailPoint 2025-12-23T18:30:11.073365Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-12-23T18:30:11.073378Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BlockFailPoint 2025-12-23T18:30:11.073393Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-12-23T18:30:11.073406Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2025-12-23T18:30:11.073445Z node 2 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:269: Operation [0:281474976715664] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2025-12-23T18:30:11.073515Z node 2 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 
72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: false 2025-12-23T18:30:11.073567Z node 2 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:521: add locks to result: 0 2025-12-23T18:30:11.073606Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-12-23T18:30:11.073624Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-12-23T18:30:11.073637Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:30:11.073653Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-12-23T18:30:11.073677Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-12-23T18:30:11.073718Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2025-12-23T18:30:11.073744Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:30:11.073770Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:30:11.073800Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:30:11.073829Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-12-23T18:30:11.073851Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:30:11.073871Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2025-12-23T18:30:11.073907Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:30:11.073937Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-12-23T18:30:11.073969Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:30:11.075101Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:69:2116], Recipient [2:884:2764]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2025-12-23T18:30:11.075425Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1118:2928], Recipient [2:884:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:11.075466Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:11.075513Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# 
[2:1117:2927], serverId# [2:1118:2928], sessionId# [0:0:0] 2025-12-23T18:30:11.075676Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [2:1116:2926], Recipient [2:884:2764]: NKikimrTxDataShard.TEvGetInfoRequest 2025-12-23T18:30:11.076422Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1121:2931], Recipient [2:884:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:11.076463Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:11.076496Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1120:2930], serverId# [2:1121:2931], sessionId# [0:0:0] 2025-12-23T18:30:11.076614Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1119:2929], Recipient [2:884:2764]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-12-23T18:30:11.076683Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:30:11.076721Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/18446744073709551615 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2025-12-23T18:30:11.076750Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v2501/18446744073709551615 2025-12-23T18:30:11.076788Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-12-23T18:30:11.076855Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:30:11.076895Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:30:11.076928Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:11.076955Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:30:11.076982Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2025-12-23T18:30:11.077008Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:30:11.077027Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:11.077043Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:30:11.077063Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:30:11.077135Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 
Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-12-23T18:30:11.077304Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1119:2929], 1000} after executionsCount# 1 2025-12-23T18:30:11.077368Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1119:2929], 1000} sends rowCount# 1, bytes# 32, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:30:11.077425Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:1119:2929], 1000} finished in read 2025-12-23T18:30:11.077468Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:30:11.077486Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:30:11.077502Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:30:11.077518Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:30:11.077544Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:30:11.077557Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:30:11.077583Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2025-12-23T18:30:11.077615Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:30:11.077671Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> GroupWriteTest::ByTableName >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] >> TMLPWriterTests::WriteTwoMessage_TwoPartition [GOOD] >> TMLPWriterTests::WriteTwoMessage_Deduplicated |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] Test command err: 2025-12-23T18:30:11.454483Z node 4 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [4:22:2057] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-12-23T18:30:11.988420Z node 5 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [5:20:2058] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-12-23T18:30:12.524005Z node 8 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [8:22:2057] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-12-23T18:30:12.526763Z node 7 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [7:20:2058] [node 8] ICH09 Neither 
CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Table [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:30:02.902985Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:03.032554Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:30:03.046964Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:03.047335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:03.047421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:03.362277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:03.362399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:03.453039Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514599782520 != 1766514599782524 2025-12-23T18:30:03.465681Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:03.513145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:03.615865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fde/r3tmp/tmpeqRTxf/pdisk_1.dat 2025-12-23T18:30:03.933089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:03.950729Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:04.074345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:04.115689Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:04.116700Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:04.116999Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:30:04.117275Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:04.171599Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:04.172469Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:04.172612Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:04.174237Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:30:04.174326Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:30:04.174397Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:30:04.174800Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:04.174929Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:04.175017Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:30:04.185863Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:04.237336Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:30:04.237548Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:04.237664Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:30:04.237701Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:30:04.237752Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:30:04.237796Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:30:04.238033Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:04.238090Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:04.238374Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:30:04.238473Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:30:04.238925Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:30:04.238974Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:04.239034Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:30:04.239068Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:30:04.239106Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:30:04.239137Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:30:04.239198Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:30:04.239303Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:04.239357Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:04.239402Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:30:04.239509Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:30:04.239557Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:04.239672Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:30:04.239869Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:30:04.239931Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:30:04.240015Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:30:04.240071Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:30:04.240119Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:30:04.240164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:30:04.240204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:30:04.240494Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:04.240532Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:30:04.240580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:30:04.240613Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:30:04.240660Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:30:04.240686Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:30:04.240720Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:30:04.240752Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:30:04.240777Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:04.242352Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T18:30:04.242413Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... h:843: ActorId: [2:1133:2833] TxId: 281474976715662. Ctx: { TraceId: 01kd67kp16ar50mehzpcy42r47, Database: , SessionId: ydb://session/3?node_id=2&id=YTgxOTJhYjktNjljNzcyODEtNzliODMwMjUtYzYwYmExODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1136:2833], 2025-12-23T18:30:12.345361Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [2:1133:2833] TxId: 281474976715662. Ctx: { TraceId: 01kd67kp16ar50mehzpcy42r47, Database: , SessionId: ydb://session/3?node_id=2&id=YTgxOTJhYjktNjljNzcyODEtNzliODMwMjUtYzYwYmExODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1136:2833], 2025-12-23T18:30:12.345421Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:2227: ActorId: [2:1133:2833] TxId: 281474976715662. Ctx: { TraceId: 01kd67kp16ar50mehzpcy42r47, Database: , SessionId: ydb://session/3?node_id=2&id=YTgxOTJhYjktNjljNzcyODEtNzliODMwMjUtYzYwYmExODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-12-23T18:30:12.346759Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1133:2833] TxId: 281474976715662. Ctx: { TraceId: 01kd67kp16ar50mehzpcy42r47, Database: , SessionId: ydb://session/3?node_id=2&id=YTgxOTJhYjktNjljNzcyODEtNzliODMwMjUtYzYwYmExODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [2:1136:2833], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-12-23T18:30:12.346844Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [2:1133:2833] TxId: 281474976715662. Ctx: { TraceId: 01kd67kp16ar50mehzpcy42r47, Database: , SessionId: ydb://session/3?node_id=2&id=YTgxOTJhYjktNjljNzcyODEtNzliODMwMjUtYzYwYmExODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1136:2833], 2025-12-23T18:30:12.346920Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [2:1133:2833] TxId: 281474976715662. Ctx: { TraceId: 01kd67kp16ar50mehzpcy42r47, Database: , SessionId: ydb://session/3?node_id=2&id=YTgxOTJhYjktNjljNzcyODEtNzliODMwMjUtYzYwYmExODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1136:2833], 2025-12-23T18:30:12.347893Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1133:2833] TxId: 281474976715662. Ctx: { TraceId: 01kd67kp16ar50mehzpcy42r47, Database: , SessionId: ydb://session/3?node_id=2&id=YTgxOTJhYjktNjljNzcyODEtNzliODMwMjUtYzYwYmExODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1136:2833], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1161 Tasks { TaskId: 1 CpuTimeUs: 538 FinishTimeMs: 1766514612347 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 27 BuildCpuTimeUs: 511 HostName: "ghrun-ou4bsyjxme" NodeId: 2 CreateTimeMs: 1766514612345 UpdateTimeMs: 1766514612347 } MaxMemoryUsage: 1048576 } 2025-12-23T18:30:12.348028Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715662. Ctx: { TraceId: 01kd67kp16ar50mehzpcy42r47, Database: , SessionId: ydb://session/3?node_id=2&id=YTgxOTJhYjktNjljNzcyODEtNzliODMwMjUtYzYwYmExODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1136:2833] 2025-12-23T18:30:12.348102Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:214: ActorId: [2:1133:2833] TxId: 281474976715662. Ctx: { TraceId: 01kd67kp16ar50mehzpcy42r47, Database: , SessionId: ydb://session/3?node_id=2&id=YTgxOTJhYjktNjljNzcyODEtNzliODMwMjUtYzYwYmExODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor=[2:1132:2833] 2025-12-23T18:30:12.348192Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [2:1133:2833] TxId: 281474976715662. Ctx: { TraceId: 01kd67kp16ar50mehzpcy42r47, Database: , SessionId: ydb://session/3?node_id=2&id=YTgxOTJhYjktNjljNzcyODEtNzliODMwMjUtYzYwYmExODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval: ComputeTime: 0.001161s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-12-23T18:30:12.348714Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [2:1139:2833], Recipient [2:891:2767]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2025-12-23T18:30:12.348769Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2025-12-23T18:30:12.348923Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [2:891:2767], Recipient [2:891:2767]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:30:12.348972Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:30:12.349063Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2025-12-23T18:30:12.349236Z node 2 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2025-12-23T18:30:12.349328Z node 2 :TX_DATASHARD TRACE: datashard_write_operation.cpp:256: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 5) 2025-12-23T18:30:12.349386Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 5) table: [72057594046644480:38:1] 2025-12-23T18:30:12.349503Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckWrite 
2025-12-23T18:30:12.349587Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:30:12.349640Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckWrite 2025-12-23T18:30:12.349683Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:12.349725Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:30:12.349770Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-12-23T18:30:12.349970Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2025-12-23T18:30:12.350010Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:30:12.350043Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:12.350070Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BlockFailPoint 2025-12-23T18:30:12.350096Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BlockFailPoint 2025-12-23T18:30:12.350122Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:30:12.350148Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BlockFailPoint 2025-12-23T18:30:12.350172Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteWrite 2025-12-23T18:30:12.350213Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteWrite 2025-12-23T18:30:12.350273Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2025-12-23T18:30:12.350334Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-12-23T18:30:12.350503Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-12-23T18:30:12.350562Z node 2 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2025-12-23T18:30:12.350643Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:30:12.350682Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteWrite 2025-12-23T18:30:12.350730Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit FinishProposeWrite 2025-12-23T18:30:12.350801Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: 
Trying to execute [0:4] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:30:12.350911Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:12.350950Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit FinishProposeWrite 2025-12-23T18:30:12.351000Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:30:12.351038Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:30:12.351091Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:30:12.351115Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:30:12.351147Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2025-12-23T18:30:12.362259Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-12-23T18:30:12.362372Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:4] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:30:12.362444Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 4 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-12-23T18:30:12.362573Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:30:12.363515Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [2:1133:2833] TxId: 281474976715662. Ctx: { TraceId: 01kd67kp16ar50mehzpcy42r47, Database: , SessionId: ydb://session/3?node_id=2&id=YTgxOTJhYjktNjljNzcyODEtNzliODMwMjUtYzYwYmExODI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> GroupWriteTest::WithRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:30:01.156731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:01.283643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:30:01.295087Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:01.295432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:01.295504Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:01.616105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:01.616258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:01.706711Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514597984712 != 1766514597984716 2025-12-23T18:30:01.720437Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:01.771930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:01.909969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fef/r3tmp/tmpPFNfD5/pdisk_1.dat 2025-12-23T18:30:02.263819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:02.280512Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:02.411625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:02.758153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:02.974942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1023:2864], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:02.975066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1034:2869], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:02.975112Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:02.975929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1039:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:02.976096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:02.979224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:30:03.173956Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1037:2872], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:30:03.261159Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-12-23T18:30:05.065170Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=MTNhNWZiYzEtNDQ0N2U2ZDktMThkYzZlMmQtNzI3MTAzMmM=, ActorId: [1:1167:2960], ActorState: ExecuteState, TraceId: 01kd67key53axwqza7fb3jx1bv, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken, status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/table-1`" issue_code: 2001 severity: 1 } ... waiting for SysViewsRoster update finished 2025-12-23T18:30:09.263227Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:09.271607Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:09.271982Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:09.272089Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:09.578566Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:09.578698Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:09.604758Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766514605743571 != 1766514605743574 2025-12-23T18:30:09.613945Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:09.672076Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:09.762917Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fef/r3tmp/tmpXpvxYI/pdisk_1.dat 2025-12-23T18:30:10.101850Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:10.114891Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:10.218723Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:10.457988Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:10.686978Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1023:2864], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:10.687076Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1034:2869], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:10.687172Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:10.688177Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1039:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:10.688370Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:10.692934Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:30:10.848256Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1037:2872], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:30:10.886541Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1095:2911] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-12-23T18:30:12.561834Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=NDI1NDRjODMtM2EzZTA4MTItNDRhMzkzZDktYTYxOGJiYzY=, ActorId: [2:1158:2961], ActorState: ExecuteState, TraceId: 01kd67kp7x4vact5f33p3h97zz, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken, status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/table-1`" issue_code: 2001 severity: 1 } |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:30:03.102492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:03.213553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:30:03.224904Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:03.225282Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:03.225363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:03.530471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:03.530604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:03.612138Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514599867367 != 1766514599867371 2025-12-23T18:30:03.624504Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:03.671951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:03.761120Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fdc/r3tmp/tmpusPiqj/pdisk_1.dat 2025-12-23T18:30:04.108376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:04.122053Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:04.228175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:04.264357Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:04.265471Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:04.265779Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:30:04.266046Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:04.316379Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:04.317170Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:04.317305Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:04.319208Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:30:04.319297Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:30:04.319371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:30:04.319808Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:04.320001Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:04.320100Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:30:04.330850Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:04.373859Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:30:04.374053Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:04.374240Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:30:04.374288Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:30:04.374324Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:30:04.374371Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:30:04.374615Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:04.374697Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:04.374995Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:30:04.375147Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:30:04.375534Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:30:04.375589Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:04.375652Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:30:04.375689Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:30:04.375729Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:30:04.375766Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:30:04.375808Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:30:04.375922Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:04.375972Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:04.376035Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:30:04.376136Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:30:04.376176Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:04.376284Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:30:04.376495Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:30:04.376568Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:30:04.376660Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:30:04.376721Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:30:04.376782Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:30:04.376822Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:30:04.376860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:30:04.377156Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:04.377202Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:30:04.377240Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:30:04.377274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:30:04.377321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:30:04.377349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:30:04.377380Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:30:04.377413Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:30:04.377456Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:04.378950Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T18:30:04.379009Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... 108], sessionId# [0:0:0] 2025-12-23T18:30:12.329232Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2025-12-23T18:30:12.329269Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:12.329298Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037892 2025-12-23T18:30:12.329328Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-12-23T18:30:12.329355Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037892 2025-12-23T18:30:12.329385Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2025-12-23T18:30:12.329421Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2025-12-23T18:30:12.329791Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [2:26:2073], Recipient [2:1219:3005]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2025-12-23T18:30:12.329831Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-12-23T18:30:12.329866Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 3000 2025-12-23T18:30:12.329906Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037892: waitStep# 3000 readStep# 3000 observedStep# 3000 2025-12-23T18:30:12.329952Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2858: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-12-23T18:30:12.343332Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037888 ack split to schemeshard 281474976715664 2025-12-23T18:30:12.344227Z node 2 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037893 ack snapshot OpId 281474976715665 2025-12-23T18:30:12.344365Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037893 2025-12-23T18:30:12.344463Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:30:12.344539Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037893 
2025-12-23T18:30:12.344596Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037893, actorId: [2:1371:3116] 2025-12-23T18:30:12.344625Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037893 2025-12-23T18:30:12.344664Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037893 2025-12-23T18:30:12.344696Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-12-23T18:30:12.344916Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553157, Sender [2:1221:3007], Recipient [2:968:2823]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2025-12-23T18:30:12.344978Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2025-12-23T18:30:12.345284Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1221:3007], Recipient [2:1221:3007]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:12.345323Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:12.345605Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [2:1362:3107], Recipient [2:968:2823]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1362:3107] ServerId: [2:1364:3109] } 2025-12-23T18:30:12.345644Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-12-23T18:30:12.345938Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [2:26:2073], Recipient [2:1221:3007]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 3000} 2025-12-23T18:30:12.345974Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-12-23T18:30:12.346004Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 3000 2025-12-23T18:30:12.346036Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-12-23T18:30:12.346429Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2025-12-23T18:30:12.346472Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:12.346507Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037893 2025-12-23T18:30:12.346539Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-12-23T18:30:12.346570Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037893 2025-12-23T18:30:12.346599Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037893 TxInFly 0 2025-12-23T18:30:12.346641Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2025-12-23T18:30:12.346731Z node 2 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877764, Sender [2:1364:3109], Recipient [2:1221:3007]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:30:12.350916Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3190: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:30:12.350978Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1362:3107], serverId# [2:1364:3109], sessionId# [0:0:0] 2025-12-23T18:30:12.353352Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [2:26:2073], Recipient [2:1221:3007]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2025-12-23T18:30:12.353409Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-12-23T18:30:12.353442Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 3000 2025-12-23T18:30:12.353488Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037893: waitStep# 3000 readStep# 3000 observedStep# 3000 2025-12-23T18:30:12.353539Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2858: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-12-23T18:30:12.354073Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553158, Sender [2:398:2397], Recipient [2:887:2765] 2025-12-23T18:30:12.354146Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037888, state: SplitSrcWaitForPartitioningChanged 2025-12-23T18:30:12.356856Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037888 ack split partitioning changed to schemeshard 281474976715664 2025-12-23T18:30:12.356940Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-12-23T18:30:12.357594Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [2:876:2758], Recipient [2:884:2764]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-12-23T18:30:12.371388Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037889 ack split to schemeshard 281474976715665 2025-12-23T18:30:12.373920Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553158, Sender [2:398:2397], Recipient [2:975:2827] 2025-12-23T18:30:12.373993Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-12-23T18:30:12.376153Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2025-12-23T18:30:12.376222Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-12-23T18:30:12.376708Z 
node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [2:961:2818], Recipient [2:968:2823]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-12-23T18:30:12.832777Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [2:1168:2863], Recipient [2:884:2764]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1168 RawX2: 8589937455 } TxBody: " \0008\000`\200\200\200\005j\213\007\010\001\022\314\006\010\001\022\024\n\022\t\220\004\000\000\000\000\000\000\021/\013\000\000\002\000\000\000\032\262\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? L\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\ 2025-12-23T18:30:12.832871Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:12.832995Z node 2 :TX_DATASHARD NOTICE: datashard.cpp:3109: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-12-23T18:30:12.833475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-12-23T18:30:12.834013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:30:03.327793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:03.435688Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:30:03.446349Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:03.446715Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:03.446807Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:03.712331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:03.712471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:03.792693Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514600143984 != 1766514600143988 2025-12-23T18:30:03.808707Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:03.852298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:03.947573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fdb/r3tmp/tmpanSzT3/pdisk_1.dat 2025-12-23T18:30:04.275357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:04.290376Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:04.399025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:04.435264Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:04.436105Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:04.436333Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:30:04.436520Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:04.480478Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:04.481289Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:04.481429Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:04.482868Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:30:04.482933Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:30:04.482983Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:30:04.483254Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:04.483395Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:04.483453Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:30:04.494106Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:04.517975Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:30:04.518213Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:04.518328Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:30:04.518364Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:30:04.518396Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:30:04.518428Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:30:04.518649Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:04.518733Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:04.519286Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:30:04.519429Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:30:04.519820Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:30:04.519864Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:04.519902Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:30:04.519936Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:30:04.519972Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:30:04.520010Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:30:04.520054Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:30:04.520160Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:04.520198Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:04.520261Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:30:04.520361Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:30:04.520405Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:04.520506Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:30:04.520724Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:30:04.520800Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:30:04.520885Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:30:04.520932Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:30:04.521003Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:30:04.521046Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:30:04.521082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:30:04.521378Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:04.521434Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:30:04.521468Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:30:04.521503Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:30:04.521543Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:30:04.521576Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:30:04.521608Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:30:04.521640Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:30:04.521682Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:04.523189Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T18:30:04.523239Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... xNDM3YmI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2025-12-23T18:30:12.965670Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715662, task: 1. pass away 2025-12-23T18:30:12.965766Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:30:12.968003Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:30:12.968232Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1092:2905];scan_id=1;tx_id=281474976715662;fline=kqp_scan_fetcher_actor.cpp:106;event=TEvTerminateFromCompute;sender=[2:1089:2902];info={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:30:12.968318Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1092:2905];scan_id=1;tx_id=281474976715662;fline=kqp_scan_compute_manager.h:321;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:30:12.968889Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1084:2876] TxId: 281474976715662. Ctx: { TraceId: 01kd67knfg28pm30z13ttej690, Database: , SessionId: ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [2:1089:2902], task: 1, state: COMPUTE_STATE_FAILURE, stats: { CpuTimeUs: 243784 Tasks { TaskId: 1 CpuTimeUs: 241287 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 11 BuildCpuTimeUs: 241276 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-ou4bsyjxme" NodeId: 2 CreateTimeMs: 1766514612202 CurrentWaitInputTimeUs: 135617 UpdateTimeMs: 1766514612965 } MaxMemoryUsage: 1048576 } 2025-12-23T18:30:12.969006Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715662. Ctx: { TraceId: 01kd67knfg28pm30z13ttej690, Database: , SessionId: ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1089:2902] 2025-12-23T18:30:12.969100Z node 2 :KQP_EXECUTER INFO: kqp_executer_impl.h:1143: ActorId: [2:1084:2876] TxId: 281474976715662. Ctx: { TraceId: 01kd67knfg28pm30z13ttej690, Database: , SessionId: ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. task: 1, does not have the CA id yet or is already complete 2025-12-23T18:30:12.969171Z node 2 :KQP_EXECUTER INFO: kqp_executer_impl.h:1138: ActorId: [2:1084:2876] TxId: 281474976715662. Ctx: { TraceId: 01kd67knfg28pm30z13ttej690, Database: , SessionId: ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:1090:2903], task: 2 2025-12-23T18:30:12.969265Z node 2 :KQP_EXECUTER INFO: kqp_executer_impl.h:1138: ActorId: [2:1084:2876] TxId: 281474976715662. Ctx: { TraceId: 01kd67knfg28pm30z13ttej690, Database: , SessionId: ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:1091:2904], task: 3 2025-12-23T18:30:12.969450Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [2:1084:2876] TxId: 281474976715662. Ctx: { TraceId: 01kd67knfg28pm30z13ttej690, Database: , SessionId: ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:30:12.969586Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [2:1090:2903], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67knfg28pm30z13ttej690. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2025-12-23T18:30:12.969681Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [2:1090:2903], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67knfg28pm30z13ttej690. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1084:2876], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-12-23T18:30:12.969806Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715662, task: 2. pass away 2025-12-23T18:30:12.969904Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:30:12.992616Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:30:12.992859Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [2:1091:2904], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01kd67knfg28pm30z13ttej690. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2025-12-23T18:30:12.992946Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [2:1091:2904], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01kd67knfg28pm30z13ttej690. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1084:2876], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-12-23T18:30:12.993049Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715662, task: 3. pass away 2025-12-23T18:30:12.993118Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:30:12.995930Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:30:12.996413Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=, ActorId: [2:1058:2876], ActorState: ExecuteState, TraceId: 01kd67knfg28pm30z13ttej690, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } 2025-12-23T18:30:12.996822Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [2:67:2114] Handle TEvExecuteKqpTransaction 2025-12-23T18:30:12.996883Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [2:67:2114] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-12-23T18:30:12.997438Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down 2025-12-23T18:30:12.997512Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:67:2114] Handle TEvProposeTransaction 2025-12-23T18:30:12.997549Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:67:2114] TxId# 0 ProcessProposeTransaction 2025-12-23T18:30:12.997631Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [2:67:2114] Cookie# 0 userReqId# "" txid# 0 reqId# [2:1128:2936] SnapshotReq marker# P0 2025-12-23T18:30:12.998085Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [2:1130:2936] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-12-23T18:30:12.998460Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715664. Resolved key sets: 0 2025-12-23T18:30:12.998521Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [2:1130:2936] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-12-23T18:30:12.998621Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:580: TxId: 281474976715664. Ctx: { TraceId: 01kd67knfg28pm30z13ttej690, Database: , SessionId: ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-12-23T18:30:12.998703Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [2:1127:2876] TxId: 281474976715664. Ctx: { TraceId: 01kd67knfg28pm30z13ttej690, Database: , SessionId: ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:30:12.998850Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [2:1127:2876] TxId: 281474976715664. Ctx: { TraceId: 01kd67knfg28pm30z13ttej690, Database: , SessionId: ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:30:12.998930Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [2:1127:2876] TxId: 281474976715664. 
Ctx: { TraceId: 01kd67knfg28pm30z13ttej690, Database: , SessionId: ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-12-23T18:30:12.999041Z node 2 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [2:1128:2936] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-12-23T18:30:12.999348Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [2:829:2724], selfId: [2:65:2112], source: [2:1058:2876] 2025-12-23T18:30:12.999645Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1128:2936], Recipient [2:886:2765]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2025-12-23T18:30:13.000623Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=2&id=MWZkYmQzMTktZTc0OGYyYjItNGEwYWQzOTAtZTUxNDM3YmI=, workerId: [2:1058:2876], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 425 |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> GroupWriteTest::TwoTables |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> TestProtocols::TestResolveProtocol >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:30:01.411943Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:01.552216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:30:01.568309Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:01.568727Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:01.568821Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:01.908489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:01.908655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:02.004265Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514598182083 != 1766514598182087 2025-12-23T18:30:02.027330Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:02.080088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:02.213748Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fe3/r3tmp/tmpZEdjZ9/pdisk_1.dat 2025-12-23T18:30:02.590355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:02.608922Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:02.724686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:03.081334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:03.307787Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1023:2864], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:03.307943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1034:2869], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:03.308030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:03.309138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1039:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:03.309381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:03.314202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:30:03.509673Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1037:2872], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:30:03.599316Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... performing the first select { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets ... captured readset ... captured readset ... performing an upsert ... performing the second select ... performing the third select ... performing the last upsert and commit 2025-12-23T18:30:06.067094Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=ZTY4MGNlZDAtZDBjY2RkNzUtZTFlNWRjNTItNzY1YzNhOWI=, ActorId: [1:1167:2960], ActorState: ExecuteState, TraceId: 01kd67kfxdb9kj7cgx8h4bety9, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken, status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/table-1`" issue_code: 2001 severity: 1 } ... waiting for SysViewsRoster update finished 2025-12-23T18:30:10.216682Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:10.226509Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:10.226926Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:10.227020Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:10.470354Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:10.470481Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:10.492440Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766514606760701 != 1766514606760704 2025-12-23T18:30:10.500612Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:10.547364Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:10.616162Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fe3/r3tmp/tmpsh2tpK/pdisk_1.dat 2025-12-23T18:30:10.914161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:10.928581Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:11.036157Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:11.060038Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [2:876:2758], Recipient [2:884:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:11.061140Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [2:876:2758], Recipient [2:884:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:11.061451Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:884:2764] 2025-12-23T18:30:11.061670Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:11.106324Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [2:876:2758], Recipient [2:884:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:11.107172Z node 2 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:11.107307Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:11.109095Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:30:11.109183Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:30:11.109241Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:30:11.109580Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:11.109757Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:11.109834Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:14 ... # 4001 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-12-23T18:30:12.687789Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:12.687824Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2025-12-23T18:30:13.503605Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1281:3040], Recipient [2:884:2764]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-12-23T18:30:13.503766Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:30:13.503839Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-12-23T18:30:13.503952Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T18:30:13.504002Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:30:13.504042Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:13.504075Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:30:13.504117Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2025-12-23T18:30:13.504161Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T18:30:13.504183Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:13.504199Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:30:13.504216Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to 
execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:30:13.504324Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-12-23T18:30:13.504545Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T18:30:13.504589Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v5000/18446744073709551615 2025-12-23T18:30:13.504635Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1281:3040], 0} after executionsCount# 1 2025-12-23T18:30:13.504680Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1281:3040], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:30:13.504752Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:1281:3040], 0} finished in read 2025-12-23T18:30:13.504815Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T18:30:13.504835Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:30:13.504855Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:30:13.504876Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:30:13.504907Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T18:30:13.504924Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:30:13.504945Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2025-12-23T18:30:13.504977Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:30:13.505237Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:30:13.505736Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:69:2116], Recipient [2:884:2764]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2025-12-23T18:30:13.505873Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1283:3041], Recipient [2:968:2823]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 
ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-12-23T18:30:13.506169Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-12-23T18:30:13.506238Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-12-23T18:30:13.506295Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2025-12-23T18:30:13.506331Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-12-23T18:30:13.506365Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:13.506400Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-12-23T18:30:13.506445Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2025-12-23T18:30:13.506478Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2025-12-23T18:30:13.506505Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:13.506527Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2025-12-23T18:30:13.506551Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-12-23T18:30:13.506648Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-12-23T18:30:13.506854Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 39] 2025-12-23T18:30:13.506897Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v5000/18446744073709551615 2025-12-23T18:30:13.506934Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[2:1283:3041], 0} after executionsCount# 1 2025-12-23T18:30:13.506971Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[2:1283:3041], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:30:13.507030Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[2:1283:3041], 0} finished in read 2025-12-23T18:30:13.507086Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2025-12-23T18:30:13.507111Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] 
at 72075186224037889 executing on unit ExecuteRead 2025-12-23T18:30:13.507130Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-12-23T18:30:13.507150Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-12-23T18:30:13.507179Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2025-12-23T18:30:13.507197Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-12-23T18:30:13.507221Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2025-12-23T18:30:13.507253Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-12-23T18:30:13.507322Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-12-23T18:30:13.507492Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:69:2116], Recipient [2:968:2823]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2025-12-23T18:30:13.508327Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1281:3040], Recipient [2:884:2764]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:30:13.508384Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-12-23T18:30:13.509440Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1283:3041], Recipient [2:968:2823]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:30:13.509479Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |91.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |91.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |91.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TInterconnectTest::TestBlobEvent >> TInterconnectTest::TestConnectAndDisconnect >> GroupWriteTest::SimpleRdma [FAIL] >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Query >> TInterconnectTest::TestSimplePingPong >> IndexBuildTest::RejectsCreate >> CrossShardUniqIndexValidationTest::Validation [GOOD] >> VectorIndexBuildTest::Metering_Documentation_Formula [GOOD] >> TActorActivity::Basic [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndexUniq >> TInterconnectTest::TestConnectAndDisconnect [GOOD] >> TInterconnectTest::TestSimplePingPong [GOOD] >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent >> TInterconnectTest::TestBlobEventPreSerialized >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-false >> TInterconnectTest::TestBlobEvent [GOOD] >> TInterconnectTest::TestSubscribeByFlag >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> FulltextIndexBuildTest::Basic >> TInterconnectTest::TestBlobEvent220Bytes >> TInterconnectTest::TestSubscribeByFlag [GOOD] >> TActorTracker::Basic [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes >> TInterconnectTest::TestReconnect >> TInterconnectTest::TestAddressResolve >> VectorIndexBuildTest::TTxReply_DoExecute_Throws >> TestProtocols::TestHTTPCollectedVerySlow [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> DataShardWrite::LocksBrokenStats [GOOD] >> TInterconnectTest::TestReconnect [GOOD] |91.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent >> TInterconnectTest::TestAddressResolve [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] >> TInterconnectTest::OldNbs >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] >> TestProtocols::TestHTTPRequest Test command err: ASYNC_DESTROYER >> TTxDataShardRecomputeKMeansScan::BadRequest [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> TInterconnectTest::OldNbs [GOOD] >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit [GOOD] >> FulltextIndexBuildTest::Basic [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndexUniq [GOOD] >> TTxDataShardRecomputeKMeansScan::MainTable >> DataShardWrite::DistributedInsertWithoutLocks-Volatile >> TestProtocols::TestHTTPRequest [GOOD] >> IndexBuildTest::Lock >> FulltextIndexBuildTest::FlatRelevance >> VectorIndexBuildTest::TTxReply_DoExecute_Throws [GOOD] >> VectorIndexBuildTest::TTxProgress_Throws |91.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::SimpleRdma [FAIL] Test command err: RandomSeed# 9828103778056588953 2025-12-23T18:30:13.316198Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-12-23T18:30:13.336835Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-12-23T18:30:13.336933Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-12-23T18:30:13.340341Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-12-23T18:30:13.359601Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:30:13.362880Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} assertion failed at ydb/core/load_test/ut/group_test_ut.cpp:40, auto (anonymous namespace)::TTetsEnv::RunSingleLoadTest(const TString &, bool)::(anonymous class)::operator()(ui32, std::unique_ptr &) const: (!memReg.Empty()) unable to extract mem region from chunk TBackTrace::Capture()+28 (0xFAAFDAC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0xFF9F3AC) ??+0 (0xF72097D) void NKikimr::TTestActorSystem::Sim, std::__y1::allocator> const&)::'lambda'()>(NKikimr::TTestActorSystem::WaitForEdgeActorEvent(std::__y1::set, std::__y1::allocator> const&)::'lambda'()&&, std::__y1::function)+3731 (0xF70BCB3) NKikimr::TTestActorSystem::WaitForEdgeActorEvent(std::__y1::set, std::__y1::allocator> const&)+1088 (0xF70A240) TAutoPtr, TDelete> TEnvironmentSetup::WaitForEdgeActorEvent(NActors::TActorId const&, bool, TInstant)+1728 (0xF71F620) ??+0 (0xF6A3EA9) NTestSuiteGroupWriteTest::TTestCaseSimpleRdma::Execute_(NUnitTest::TTestContext&)+1008 (0xF6A6870) std::__y1::__function::__func, void ()>::operator()()+280 (0xF6B6A48) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0xFFCE80A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0xFFA6088) NTestSuiteGroupWriteTest::TCurrentTest::Execute()+1229 (0xF6B5B4D) NUnitTest::TTestFactory::Execute()+2176 (0xFFA7840) NUnitTest::RunMain(int, char**)+5805 (0xFFC866D) ??+0 (0x7F99A1C28D90) __libc_start_main+128 (0x7F99A1C28E40) _start+41 
(0xD0AB029) >> IndexBuildTest::RejectsCreate [GOOD] >> IndexBuildTest::RejectsCreateUniq |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:02.434194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:02.664876Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:02.675985Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:02.676389Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:02.676474Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:03.516282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:03.516434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:03.852163Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514538474424 != 1766514538474428 2025-12-23T18:29:03.882016Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:03.935297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:04.049341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004ef1/r3tmp/tmp4CscMr/pdisk_1.dat 2025-12-23T18:29:04.432043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:04.449500Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:04.579524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:04.653548Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:880:2762], Recipient [1:895:2772]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:29:04.655092Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:880:2762], Recipient [1:895:2772]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:29:04.655480Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:895:2772] 2025-12-23T18:29:04.655779Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:04.666522Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:880:2762], Recipient [1:895:2772]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:29:04.715233Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:882:2763], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:29:04.716776Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:04.717052Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:04.728002Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:04.728121Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:04.728215Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:04.728693Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:04.728838Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:882:2763], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:29:04.729168Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2025-12-23T18:29:04.729415Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:04.756658Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:04.756787Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2772] in generation 1 2025-12-23T18:29:04.757081Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:882:2763], Recipient [1:900:2775]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:29:04.758589Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:04.766819Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:04.768478Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:29:04.768563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:29:04.768629Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:29:04.768994Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:04.769158Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:04.769270Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:928:2775] in generation 1 2025-12-23T18:29:04.783268Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:04.840763Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:04.841011Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:04.841169Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:931:2793] 2025-12-23T18:29:04.841234Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 
2025-12-23T18:29:04.841293Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:04.841333Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:04.841667Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:895:2772], Recipient [1:895:2772]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:04.841726Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:04.841898Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:04.841943Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:29:04.842006Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:04.842099Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:932:2794] 2025-12-23T18:29:04.842136Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:29:04.842161Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:29:04.842195Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:29:04.842478Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:900:2775], Recipient [1:900:2775]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:04.842511Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:04.846523Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:04.846679Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:04.846962Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:04.847034Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:04.847079Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:29:04.847118Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:29:04.847153Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:29:04.847217Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:04.847267Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:04.847331Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:29:04.847401Z 
node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:29:04.847530Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:898:2773], Recipient [1:895:2772]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:04.847574Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:04.847637Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:889:2768], serverId# [1:898:2773], sessionId# [0:0:0] 2025-12-23T18:29:04.847699Z node 1 :TX_DATASHARD DEBUG: datashard__prog ... k, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:14.398219Z node 11 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 1234567890011 2025-12-23T18:30:14.398445Z node 11 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=1234567890011; 2025-12-23T18:30:14.398724Z node 11 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [3000 : 1234567890011] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-12-23T18:30:14.398926Z node 11 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:30:14.399071Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [11:920:2783], Recipient [11:917:2781]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2025-12-23T18:30:14.399109Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:14.399142Z node 11 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2025-12-23T18:30:14.631105Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [11:1165:2966], Recipient [11:917:2781]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-12-23T18:30:14.631327Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:30:14.631456Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-12-23T18:30:14.631605Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T18:30:14.631672Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:30:14.631741Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add 
[0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:14.631800Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:30:14.631860Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2025-12-23T18:30:14.632052Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T18:30:14.632093Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:14.632122Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:30:14.632148Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:30:14.632319Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-12-23T18:30:14.632683Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-12-23T18:30:14.632760Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[11:1165:2966], 0} after executionsCount# 1 2025-12-23T18:30:14.632837Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[11:1165:2966], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:30:14.632947Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[11:1165:2966], 0} finished in read 2025-12-23T18:30:14.633073Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T18:30:14.633103Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:30:14.633131Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:30:14.633162Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:30:14.633214Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T18:30:14.633241Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:30:14.633287Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2025-12-23T18:30:14.633353Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:30:14.633528Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: 
TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:30:14.634577Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [11:1165:2966], Recipient [11:917:2781]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:30:14.634665Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-12-23T18:30:14.635114Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [11:1165:2966], Recipient [11:920:2783]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-12-23T18:30:14.635292Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-12-23T18:30:14.635351Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-12-23T18:30:14.635412Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2025-12-23T18:30:14.635444Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-12-23T18:30:14.635491Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:14.635520Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-12-23T18:30:14.635563Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2025-12-23T18:30:14.635618Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2025-12-23T18:30:14.635648Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:14.635673Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2025-12-23T18:30:14.635698Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-12-23T18:30:14.635797Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-12-23T18:30:14.636000Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-12-23T18:30:14.636045Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[11:1165:2966], 1} after executionsCount# 1 2025-12-23T18:30:14.636085Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[11:1165:2966], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, 
hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:30:14.636161Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[11:1165:2966], 1} finished in read 2025-12-23T18:30:14.636216Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2025-12-23T18:30:14.636243Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-12-23T18:30:14.636270Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-12-23T18:30:14.636295Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-12-23T18:30:14.636356Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2025-12-23T18:30:14.636386Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-12-23T18:30:14.636411Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2025-12-23T18:30:14.636442Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-12-23T18:30:14.636539Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-12-23T18:30:14.637133Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [11:1165:2966], Recipient [11:920:2783]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-12-23T18:30:14.637188Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 11 } items { int32_value: 1002 } } |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::LocksBrokenStats [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:04.699373Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:04.836963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:04.850329Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:04.850892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:04.850984Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:05.191425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:05.191596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:05.284243Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514540521321 != 1766514540521325 2025-12-23T18:29:05.298208Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:05.357001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:05.457140Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004eea/r3tmp/tmpGxmoPW/pdisk_1.dat 2025-12-23T18:29:05.809367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:05.834029Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:05.973271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:06.060328Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:29:06.068608Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:29:06.069134Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:29:06.069492Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:06.133982Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:29:06.134911Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:06.135058Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:06.136868Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:06.136967Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:06.137036Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:06.137430Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:06.137586Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:06.137667Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:29:06.151383Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:06.202632Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:06.203096Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:06.203243Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:29:06.203286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:06.203334Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:06.203402Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:06.203696Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:06.203744Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:06.204062Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:06.204152Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:06.204621Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:06.204682Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:06.204732Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:29:06.204765Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:29:06.204820Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:29:06.204855Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:06.204926Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:06.205035Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:06.205079Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:06.205123Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:29:06.205220Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:29:06.205266Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:29:06.205369Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:06.205608Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:29:06.205677Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:29:06.205785Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:29:06.205845Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:29:06.205890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:29:06.205940Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:29:06.205978Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:29:06.206321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:29:06.206367Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:29:06.206400Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:29:06.206434Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:29:06.206488Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:29:06.206529Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:29:06.206568Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:29:06.206599Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:29:06.206638Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:29:06.208405Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T18:29:06.208477Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... 6715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2025-12-23T18:30:17.489335Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-12-23T18:30:17.489373Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:30:17.489398Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-12-23T18:30:17.489424Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:17.489450Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:30:17.489487Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/0 ImmediateWriteEdgeReplied# v2501/0 2025-12-23T18:30:17.489535Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2025-12-23T18:30:17.489567Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:30:17.489591Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:17.489613Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2025-12-23T18:30:17.489638Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2025-12-23T18:30:17.489663Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:30:17.489684Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2025-12-23T18:30:17.489707Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-12-23T18:30:17.489730Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-12-23T18:30:17.489760Z node 11 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2025-12-23T18:30:17.489860Z node 11 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 
72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2025-12-23T18:30:17.489934Z node 11 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:6] at 72075186224037888 2025-12-23T18:30:17.490002Z node 11 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2025-12-23T18:30:17.490082Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:30:17.490118Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-12-23T18:30:17.490167Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-12-23T18:30:17.490233Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:30:17.490269Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-12-23T18:30:17.490295Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-12-23T18:30:17.490322Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:30:17.490348Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:30:17.490391Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:30:17.490414Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:30:17.490439Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2025-12-23T18:30:17.490491Z node 11 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-12-23T18:30:17.490526Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:30:17.490563Z node 11 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-12-23T18:30:17.490631Z node 11 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:30:17.492299Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [11:69:2116], Recipient [11:885:2764]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 11 Status: STATUS_NOT_FOUND 2025-12-23T18:30:17.492900Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [11:1110:2913], Recipient [11:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:17.492951Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:17.493023Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [11:1109:2912], serverId# [11:1110:2913], 
sessionId# [0:0:0] 2025-12-23T18:30:17.493202Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [11:1108:2911], Recipient [11:885:2764]: NKikimrTxDataShard.TEvGetInfoRequest 2025-12-23T18:30:17.494624Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [11:1113:2916], Recipient [11:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:17.494698Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:17.494790Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [11:1112:2915], serverId# [11:1113:2916], sessionId# [0:0:0] 2025-12-23T18:30:17.495002Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [11:1111:2914], Recipient [11:885:2764]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-12-23T18:30:17.495151Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:30:17.495225Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/0 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2025-12-23T18:30:17.495284Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v2501/18446744073709551615 2025-12-23T18:30:17.495365Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-12-23T18:30:17.495498Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:30:17.495561Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:30:17.495619Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:17.495668Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:30:17.495717Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2025-12-23T18:30:17.495773Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:30:17.495802Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:17.495827Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:30:17.495852Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:30:17.495963Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: 
FORMAT_CELLVEC } 2025-12-23T18:30:17.496254Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[11:1111:2914], 1000} after executionsCount# 1 2025-12-23T18:30:17.496344Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[11:1111:2914], 1000} sends rowCount# 1, bytes# 32, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:30:17.496444Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[11:1111:2914], 1000} finished in read 2025-12-23T18:30:17.496527Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:30:17.496557Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:30:17.496586Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:30:17.496610Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:30:17.496654Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:30:17.496678Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:30:17.496705Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2025-12-23T18:30:17.496763Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:30:17.496890Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2025-12-23T18:30:17.534642Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @207 (null) -> PendingActivation 2025-12-23T18:30:17.534765Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:43: Proxy [6:10:2048] [node 5] ICP01 ready to work 2025-12-23T18:30:17.535022Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @207 (null) -> PendingActivation 2025-12-23T18:30:17.535055Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:43: Proxy [5:1:2048] [node 6] ICP01 ready to work 2025-12-23T18:30:17.535422Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-12-23T18:30:17.537177Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:120: Proxy [5:1:2048] [node 6] ICP02 configured for host ::1:28153 2025-12-23T18:30:17.537336Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @489 PendingNodeInfo -> PendingConnection 2025-12-23T18:30:17.538026Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:747: Handshake [5:21:2058] [node 6] ICH01 starting outgoing handshake 2025-12-23T18:30:17.538179Z node 5 
:INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2025-12-23T18:30:17.539106Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:751: Handshake [5:21:2058] [node 6] ICH05 connected to peer 2025-12-23T18:30:17.539602Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:42618 2025-12-23T18:30:17.540089Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1037: Handshake [6:23:2058] [node 0] ICH02 starting incoming handshake 2025-12-23T18:30:17.541362Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:21:2058] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 1124065 ProgramStartTime: 16106939070592 Serial: 163078957 ReceiverNodeId: 6 SenderActorId: "[5:163078957:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 1124065" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 1124065" AcceptUUID: "Cluster for process with id: 1124065" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\245\303\223\0350j\'ZC\375M\006\225\225\260wn\362\261\252\305$Y\370\336\361\342\201\227\363v\256" RequestXxhash: true RequestXdcShuffle: true 2025-12-23T18:30:17.541941Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [6:23:2058] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 1124065 ProgramStartTime: 16106939070592 Serial: 163078957 ReceiverNodeId: 6 SenderActorId: "[5:163078957:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 1124065" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 1124065" AcceptUUID: "Cluster for process with id: 1124065" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\245\303\223\0350j\'ZC\375M\006\225\225\260wn\362\261\252\305$Y\370\336\361\342\201\227\363v\256" RequestXxhash: true RequestXdcShuffle: true 2025-12-23T18:30:17.542013Z node 6 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [6:23:2058] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-12-23T18:30:17.542479Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-12-23T18:30:17.543826Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:120: Proxy [6:10:2048] [node 5] ICP02 configured for host ::1:28302 2025-12-23T18:30:17.543881Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:263: Proxy [6:10:2048] [node 5] ICP17 incoming handshake (actor [6:23:2058]) 2025-12-23T18:30:17.543927Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @489 PendingNodeInfo -> PendingConnection 2025-12-23T18:30:17.543978Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:211: Proxy [6:10:2048] [node 5] ICP07 issued incoming handshake reply 2025-12-23T18:30:17.544032Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:220: Proxy [6:10:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2025-12-23T18:30:17.544069Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @221 PendingConnection -> PendingConnection 2025-12-23T18:30:17.544419Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [6:23:2058] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 
1124065 ProgramStartTime: 16106955781096 Serial: 2422732751 SenderActorId: "[6:2422732751:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 1124065" AcceptUUID: "Cluster for process with id: 1124065" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true RdmaErr: "Rdma is not ready on the incomming side" } 2025-12-23T18:30:17.544835Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:21:2058] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 1124065 ProgramStartTime: 16106955781096 Serial: 2422732751 SenderActorId: "[6:2422732751:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 1124065" AcceptUUID: "Cluster for process with id: 1124065" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true RdmaErr: "Rdma is not ready on the incomming side" } 2025-12-23T18:30:17.544895Z node 5 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [5:21:2058] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-12-23T18:30:17.545006Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2025-12-23T18:30:17.545758Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:21:2058] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\245\303\223\0350j\'ZC\375M\006\225\225\260wn\362\261\252\305$Y\370\336\361\342\201\227\363v\256" 2025-12-23T18:30:17.545887Z node 5 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [5:21:2058] [node 6] ICH04 handshake succeeded 2025-12-23T18:30:17.546189Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:339: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-12-23T18:30:17.546261Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:461: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:21:2058] poison: false 2025-12-23T18:30:17.546304Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @351 PendingConnection -> StateWork 2025-12-23T18:30:17.546542Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:378: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:25:2048] 2025-12-23T18:30:17.546613Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [5:25:2048] [node 6] ICS09 handshake done sender: [5:21:2058] self: [5:163078957:0] peer: [6:2422732751:0] socket: 24 qp: -1 2025-12-23T18:30:17.546665Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [5:25:2048] [node 6] ICS10 traffic start 2025-12-23T18:30:17.546786Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [5:25:2048] [node 6] ICS11 registering socket in PollerActor 2025-12-23T18:30:17.546857Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 0 2025-12-23T18:30:17.546905Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [5:25:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-12-23T18:30:17.546953Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 0 2025-12-23T18:30:17.547043Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:241: Session [5:25:2048] 
[node 6] ICS04 subscribe for session state for [5:19:2057] 2025-12-23T18:30:17.547167Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:42630 2025-12-23T18:30:17.547647Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1037: Handshake [6:27:2059] [node 0] ICH02 starting incoming handshake 2025-12-23T18:30:17.547793Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [5:26:2048] [node 6] ICIS01 InputSession created 2025-12-23T18:30:17.547869Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:26:2048] [node 6] ICIS02 ReceiveData called 2025-12-23T18:30:17.547970Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:26:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-12-23T18:30:17.549149Z node 6 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [6:23:2058] [node 5] ICH04 handshake succeeded 2025-12-23T18:30:17.549497Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:335: Proxy [6:10:2048] [node 5] ICP19 incoming handshake succeeded 2025-12-23T18:30:17.549555Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:446: Proxy [6:10:2048] [node 5] ICP111 dropped incoming handshake: [6:23:2058] poison: false 2025-12-23T18:30:17.549596Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @351 PendingConnection -> StateWork 2025-12-23T18:30:17.549730Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:378: Proxy [6:10:2048] [node 5] ICP22 created new session: [6:28:2048] 2025-12-23T18:30:17.549777Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [6:28:2048] [node 5] ICS09 handshake done sender: [6:23:2058] self: [6:2422732751:0] peer: [5:163078957:0] socket: 25 qp: -1 2025-12-23T18:30:17.549815Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [6:28:2048] [node 5] ICS10 traffic start 2025-12-23T18:30:17.549881Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [6:28:2048] [node 5] ICS11 registering socket in PollerActor 2025-12-23T18:30:17.549950Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 0 2025-12-23T18:30:17.549987Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [6:28:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-12-23T18:30:17.550025Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 0 2025-12-23T18:30:17.550067Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:26:2048] [node 6] ICIS02 ReceiveData called 2025-12-23T18:30:17.550109Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:26:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-12-23T18:30:17.550247Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [6:29:2048] [node 5] ICIS01 InputSession created 2025-12-23T18:30:17.550307Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:29:2048] [node 5] ICIS02 ReceiveData called 2025-12-23T18:30:17.550368Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:29:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-12-23T18:30:17.550422Z node 5 
:INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:26:2048] [node 6] ICIS02 ReceiveD ... BUG: interconnect_tcp_proxy.cpp:235: Proxy [6:10:2048] [node 5] ICP09 (actor [6:35:2061]) from: [5:163078957:0] for: [6:2422732751:0] 2025-12-23T18:30:17.559713Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:264: Session [6:28:2048] [node 5] ICS08 incoming handshake Self# [5:163078957:0] Peer# [6:2422732751:0] Counter# 1 LastInputSerial# 1 2025-12-23T18:30:17.559741Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:211: Proxy [6:10:2048] [node 5] ICP07 issued incoming handshake reply 2025-12-23T18:30:17.560259Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2025-12-23T18:30:17.560860Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:32:2059] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "X\373\335Q\222\351|/W\352\345w\214\305\347\352\345\007\027\331\321\310{1 \260\376\373aZ\264\034" 2025-12-23T18:30:17.560949Z node 5 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [5:32:2059] [node 6] ICH04 handshake succeeded 2025-12-23T18:30:17.561147Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:339: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-12-23T18:30:17.561198Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:446: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:34:2060] poison: true 2025-12-23T18:30:17.561250Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:461: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:32:2059] poison: false 2025-12-23T18:30:17.561310Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @351 StateWork -> StateWork 2025-12-23T18:30:17.561353Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [5:25:2048] [node 6] ICS09 handshake done sender: [5:32:2059] self: [5:163078957:0] peer: [6:2422732751:0] socket: 30 qp: -1 2025-12-23T18:30:17.561402Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [5:25:2048] [node 6] ICS10 traffic start 2025-12-23T18:30:17.561464Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [5:25:2048] [node 6] ICS11 registering socket in PollerActor 2025-12-23T18:30:17.561500Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2025-12-23T18:30:17.561536Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [5:25:2048] [node 6] ICS06 rewind SendQueue size# 1 LastConfirmed# 1 NextSerial# 2 2025-12-23T18:30:17.561601Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2025-12-23T18:30:17.561667Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:42650 2025-12-23T18:30:17.562106Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1037: Handshake [6:38:2062] [node 0] ICH02 starting incoming handshake 2025-12-23T18:30:17.563432Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [5:37:2048] [node 6] ICIS01 InputSession created 2025-12-23T18:30:17.563516Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2025-12-23T18:30:17.563605Z node 5 :INTERCONNECT_SESSION DEBUG: 
interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-12-23T18:30:17.563904Z node 6 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [6:35:2061] [node 5] ICH04 handshake succeeded 2025-12-23T18:30:17.564100Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:335: Proxy [6:10:2048] [node 5] ICP19 incoming handshake succeeded 2025-12-23T18:30:17.564156Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:446: Proxy [6:10:2048] [node 5] ICP111 dropped incoming handshake: [6:35:2061] poison: false 2025-12-23T18:30:17.564204Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:461: Proxy [6:10:2048] [node 5] ICP052 dropped outgoing handshake: [6:30:2060] poison: true 2025-12-23T18:30:17.564240Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @351 StateWork -> StateWork 2025-12-23T18:30:17.564284Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [6:28:2048] [node 5] ICS09 handshake done sender: [6:35:2061] self: [6:2422732751:0] peer: [5:163078957:0] socket: 31 qp: -1 2025-12-23T18:30:17.564321Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [6:28:2048] [node 5] ICS10 traffic start 2025-12-23T18:30:17.564396Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [6:28:2048] [node 5] ICS11 registering socket in PollerActor 2025-12-23T18:30:17.564449Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2025-12-23T18:30:17.564511Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:62: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages 2025-12-23T18:30:17.564576Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:1027: Session [6:28:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes RdmaInflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 0 rdma bytes dropped 1 packets 2025-12-23T18:30:17.564649Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [6:28:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2025-12-23T18:30:17.564706Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2025-12-23T18:30:17.565462Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [6:39:2048] [node 5] ICIS01 InputSession created 2025-12-23T18:30:17.565625Z node 6 :INTERCONNECT NOTICE: interconnect_tcp_proxy.cpp:422: Proxy [6:10:2048] [node 5] ICP27 obsolete handshake fail ignored 2025-12-23T18:30:17.565671Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2025-12-23T18:30:17.565714Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-12-23T18:30:17.565848Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2025-12-23T18:30:17.565928Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 2025-12-23T18:30:17.566024Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-12-23T18:30:17.566089Z 
node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2025-12-23T18:30:17.566120Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-12-23T18:30:17.566167Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-12-23T18:30:17.566306Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2025-12-23T18:30:17.566341Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-12-23T18:30:17.566386Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2025-12-23T18:30:17.566427Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-12-23T18:30:17.566472Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2025-12-23T18:30:17.566511Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2025-12-23T18:30:17.566617Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2025-12-23T18:30:17.566643Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2025-12-23T18:30:17.566676Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2025-12-23T18:30:17.566702Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2025-12-23T18:30:17.566768Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:164: Session [6:28:2048] [node 5] ICS02 send event from: [6:20:2057] to: [5:19:2057] 2025-12-23T18:30:17.566857Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:961: Session [6:28:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 RdmaPayload# 0 InflightDataAmount# 84 RdmaInflightDataAmount# 0 2025-12-23T18:30:17.566936Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2025-12-23T18:30:17.566972Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2025-12-23T18:30:17.566994Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2025-12-23T18:30:17.567091Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:37:2048] [node 6] ICIS02 ReceiveData called 2025-12-23T18:30:17.567150Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err# 2025-12-23T18:30:17.567237Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:37:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-12-23T18:30:17.567320Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: 
Session [5:25:2048] [node 6] ICS23 confirm count: 2 2025-12-23T18:30:17.567353Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:62: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages 2025-12-23T18:30:17.567435Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:1027: Session [5:25:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes RdmaInflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 0 rdma bytes dropped 1 packets 2025-12-23T18:30:17.567487Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 2 2025-12-23T18:30:17.567558Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:105: Session [5:25:2048] [node 6] ICS01 socket: 30 reason# 2025-12-23T18:30:17.567601Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:543: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:25:2048] VirtualId# [5:163078957:0] 2025-12-23T18:30:17.567646Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @207 StateWork -> PendingActivation 2025-12-23T18:30:17.567690Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:571: Session [5:25:2048] [node 6] ICS25 shutdown socket, reason# 2025-12-23T18:30:17.567805Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:461: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! NotYetConfirmed size: 0, Queue size: 0 |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |91.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:30:03.915812Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:04.047103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:30:04.058055Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:04.058434Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:04.058500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:04.328527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:04.328635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:04.414681Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514600485824 != 1766514600485828 2025-12-23T18:30:04.426523Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:04.470304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:04.577202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fd4/r3tmp/tmpcoGdjB/pdisk_1.dat 2025-12-23T18:30:04.866974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:04.882300Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:04.989714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:05.027325Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:05.028405Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:05.028725Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:30:05.028982Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:05.084662Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:05.085462Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:05.085598Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:05.087633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:30:05.087719Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:30:05.087850Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:30:05.088239Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:05.088392Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:05.088466Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:30:05.099216Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:05.141362Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:30:05.141750Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:05.141922Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:30:05.141970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:30:05.142038Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:30:05.142093Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:30:05.142362Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:05.142417Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:05.142701Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:30:05.142871Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:30:05.143282Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:30:05.143342Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:05.143389Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:30:05.143435Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:30:05.143490Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:30:05.143534Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:30:05.143583Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:30:05.143672Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:05.143707Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:05.143765Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:30:05.143853Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:30:05.143894Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:05.143993Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:30:05.144217Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:30:05.144266Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:30:05.144401Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:30:05.144452Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:30:05.144509Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:30:05.144548Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:30:05.144591Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:30:05.144964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:05.145021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:30:05.145068Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:30:05.145107Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:30:05.145170Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:30:05.145218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:30:05.145260Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:30:05.145295Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:30:05.145321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:05.146966Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T18:30:05.147017Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... 4: ActorId: [2:1379:3095] TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shards nodes resolved, success: 1, failed: 0 2025-12-23T18:30:15.013338Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:277: ActorId: [2:1379:3095] TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037888] 2025-12-23T18:30:15.013409Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:580: TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-12-23T18:30:15.013738Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:867: TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:1383:3095] 2025-12-23T18:30:15.013819Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:859: TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:1383:3095], channels: 1 2025-12-23T18:30:15.013897Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [2:1379:3095] TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:30:15.013967Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [2:1379:3095] TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1383:3095], 2025-12-23T18:30:15.014034Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [2:1379:3095] TxId: 281474976715672. 
Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1383:3095], 2025-12-23T18:30:15.014084Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:2227: ActorId: [2:1379:3095] TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-12-23T18:30:15.014932Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1379:3095] TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [2:1383:3095], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-12-23T18:30:15.015005Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [2:1379:3095] TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1383:3095], 2025-12-23T18:30:15.015065Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [2:1379:3095] TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1383:3095], 2025-12-23T18:30:15.015477Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1385:3095], Recipient [2:1298:3048]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-12-23T18:30:15.015628Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:30:15.015691Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3002/281474976715665 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v4000/18446744073709551615 2025-12-23T18:30:15.015749Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2025-12-23T18:30:15.015818Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-12-23T18:30:15.015910Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:30:15.015955Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:30:15.016013Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:15.016049Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:30:15.016102Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2025-12-23T18:30:15.016148Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:30:15.016178Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:15.016203Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:30:15.016228Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:30:15.016333Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-12-23T18:30:15.016568Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1385:3095], 0} after executionsCount# 1 2025-12-23T18:30:15.016638Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1385:3095], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:30:15.016715Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read 
iterator# {[2:1385:3095], 0} finished in read 2025-12-23T18:30:15.016781Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:30:15.016811Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:30:15.016839Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:30:15.016868Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:30:15.016915Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:30:15.016940Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:30:15.016966Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2025-12-23T18:30:15.017072Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:30:15.017865Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1385:3095], Recipient [2:1298:3048]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:30:15.017933Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-12-23T18:30:15.018579Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1379:3095] TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [2:1383:3095], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 887 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 192 FinishTimeMs: 1766514615018 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 47 BuildCpuTimeUs: 145 HostName: "ghrun-ou4bsyjxme" NodeId: 2 StartTimeMs: 1766514615017 CreateTimeMs: 1766514615014 UpdateTimeMs: 1766514615018 } MaxMemoryUsage: 1048576 } 2025-12-23T18:30:15.018727Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1383:3095] 2025-12-23T18:30:15.018949Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [2:1379:3095] TxId: 281474976715672. Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:30:15.019008Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [2:1379:3095] TxId: 281474976715672. 
Ctx: { TraceId: 01kd67krkr25t2gxgt9qzg3hsd, Database: , SessionId: ydb://session/3?node_id=2&id=OThmNmE2YmUtNzIzNzkxODMtODJhODkyYzEtZmI4NTIwN2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval: ComputeTime: 0.000887s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] Test command err: 2025-12-23T18:30:04.730103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:04.730162Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:04.731660Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:04.745895Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:04.746216Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:30:04.746453Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:04.794707Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:04.810943Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:04.811043Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:04.812660Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:30:04.812738Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:30:04.812789Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:30:04.813062Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:04.813110Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:04.813168Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:30:04.898561Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:04.940434Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:30:04.940634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:04.940736Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:30:04.940792Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:30:04.940833Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:30:04.940876Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: 
at tablet# 9437184 2025-12-23T18:30:04.941038Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:04.941098Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:04.941477Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:30:04.941575Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:30:04.941740Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:30:04.941807Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:04.941849Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:30:04.941886Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:30:04.941920Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:30:04.941956Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:30:04.942000Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:04.942095Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:04.942138Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:04.942210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:30:04.952738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:30:04.952817Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:04.952954Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:30:04.953152Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:30:04.953201Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:30:04.953272Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:30:04.953323Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:30:04.953361Z node 
1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:30:04.953396Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:30:04.953429Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:30:04.953738Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:04.953788Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:30:04.953839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:30:04.953874Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:30:04.953930Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:30:04.953956Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:30:04.953987Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:30:04.954023Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:30:04.954064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:04.975564Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:30:04.975637Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:30:04.975680Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:30:04.975721Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:30:04.975823Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:30:04.976403Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:04.976457Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:04.976515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:30:04.976648Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:30:04.976690Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:30:04.976834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit 
WaitForPlan 2025-12-23T18:30:04.976895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:30:04.976956Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:30:04.977000Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:30:04.980865Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:30:04.980944Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:04.981156Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:04.981195Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:04.981276Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:30:04.981332Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:30:04.981372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:30:04.981415Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:30:04.981470Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:30:18.027586Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:18.027629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2025-12-23T18:30:18.027671Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:30:18.027753Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-12-23T18:30:18.027793Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:18.027918Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:30:18.027941Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:30:18.027961Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:30:18.028002Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:18.028035Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2025-12-23T18:30:18.028073Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:30:18.028120Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-12-23T18:30:18.028156Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:18.028285Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:30:18.028311Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:18.028333Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2025-12-23T18:30:18.028381Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:30:18.028438Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-12-23T18:30:18.028463Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:18.028605Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:18.028655Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2025-12-23T18:30:18.028694Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 511] from 
9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:30:18.028741Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-12-23T18:30:18.028767Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:18.028889Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:18.028914Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2025-12-23T18:30:18.028973Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:30:18.029033Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-12-23T18:30:18.029072Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:18.029199Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:18.029250Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:516] at 9437184 on unit FinishPropose 2025-12-23T18:30:18.029321Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-12-23T18:30:18.029423Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:18.029583Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:18.029610Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2025-12-23T18:30:18.029649Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:30:18.029713Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-12-23T18:30:18.029742Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:18.029864Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:18.029910Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2025-12-23T18:30:18.029955Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:30:18.029984Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:18.030220Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-12-23T18:30:18.030273Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:18.030326Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2025-12-23T18:30:18.030688Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-12-23T18:30:18.030730Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:18.030778Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2025-12-23T18:30:18.030939Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-12-23T18:30:18.030982Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:18.031015Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2025-12-23T18:30:18.031130Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-12-23T18:30:18.031191Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:18.031220Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2025-12-23T18:30:18.031400Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-12-23T18:30:18.031433Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:18.031459Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2025-12-23T18:30:18.031627Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-12-23T18:30:18.031690Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:18.031733Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2025-12-23T18:30:18.031878Z node 1 :TX_DATASHARD 
TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-12-23T18:30:18.031910Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:18.031937Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-12-23T18:30:18.032087Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-12-23T18:30:18.032120Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:18.032145Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - - |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:30:07.819682Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:07.934998Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:30:07.946576Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:07.946951Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:07.947028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:08.217687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:08.217822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:08.295171Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514604911148 != 1766514604911152 2025-12-23T18:30:08.309470Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:08.359242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:08.467629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fcd/r3tmp/tmpT1t31M/pdisk_1.dat 2025-12-23T18:30:08.808588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:08.827083Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:08.934304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:08.980675Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:08.981960Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:08.982336Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:30:08.982625Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:09.039586Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:09.040444Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:09.040583Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:09.042395Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:30:09.042474Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:30:09.042545Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:30:09.043003Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:09.043239Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:09.043330Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:30:09.054183Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:09.091665Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:30:09.091858Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:09.092020Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:30:09.092068Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:30:09.092102Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:30:09.092137Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:30:09.092414Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:09.092483Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:09.092752Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:30:09.092914Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:30:09.093364Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:30:09.093409Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:09.093463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:30:09.093498Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:30:09.093535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:30:09.093568Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:30:09.093621Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:30:09.093731Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:09.093771Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:09.093828Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:30:09.093932Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:30:09.093975Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:09.094104Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:30:09.094374Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:30:09.094430Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:30:09.094553Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:30:09.094636Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:30:09.094705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:30:09.096433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:30:09.096506Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:30:09.096842Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:09.096905Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:30:09.096946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:30:09.096981Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:30:09.097033Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:30:09.097067Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:30:09.097100Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:30:09.097150Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:30:09.097199Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:09.099110Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T18:30:09.099175Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... 4: ActorId: [2:1425:3133] TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shards nodes resolved, success: 1, failed: 0 2025-12-23T18:30:19.780161Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:277: ActorId: [2:1425:3133] TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037888] 2025-12-23T18:30:19.780241Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:580: TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-12-23T18:30:19.780584Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:867: TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:1429:3133] 2025-12-23T18:30:19.780671Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:859: TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:1429:3133], channels: 1 2025-12-23T18:30:19.780757Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [2:1425:3133] TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:30:19.780831Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [2:1425:3133] TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1429:3133], 2025-12-23T18:30:19.780910Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [2:1425:3133] TxId: 281474976715671. 
Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1429:3133], 2025-12-23T18:30:19.780968Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:2227: ActorId: [2:1425:3133] TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-12-23T18:30:19.781766Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1425:3133] TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [2:1429:3133], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-12-23T18:30:19.781842Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [2:1425:3133] TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1429:3133], 2025-12-23T18:30:19.781909Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [2:1425:3133] TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1429:3133], 2025-12-23T18:30:19.782394Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1431:3133], Recipient [2:1366:3100]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-12-23T18:30:19.782541Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:30:19.782604Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v5001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v5001/18446744073709551615 2025-12-23T18:30:19.782657Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v6000/18446744073709551615 2025-12-23T18:30:19.782727Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-12-23T18:30:19.782842Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T18:30:19.782897Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:30:19.782946Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:19.782991Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:30:19.783045Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2025-12-23T18:30:19.783092Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T18:30:19.783120Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:19.783149Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:30:19.783176Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:30:19.783299Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-12-23T18:30:19.783548Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1431:3133], 0} after executionsCount# 1 2025-12-23T18:30:19.783729Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1431:3133], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:30:19.783827Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read 
iterator# {[2:1431:3133], 0} finished in read 2025-12-23T18:30:19.783901Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T18:30:19.783935Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:30:19.783963Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:30:19.783994Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:30:19.784046Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T18:30:19.784074Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:30:19.784103Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2025-12-23T18:30:19.784155Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:30:19.784825Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1431:3133], Recipient [2:1366:3100]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:30:19.784912Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-12-23T18:30:19.785578Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1425:3133] TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [2:1429:3133], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 872 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 172 FinishTimeMs: 1766514619785 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 52 BuildCpuTimeUs: 120 HostName: "ghrun-ou4bsyjxme" NodeId: 2 StartTimeMs: 1766514619784 CreateTimeMs: 1766514619781 UpdateTimeMs: 1766514619785 } MaxMemoryUsage: 1048576 } 2025-12-23T18:30:19.785733Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1429:3133] 2025-12-23T18:30:19.785948Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [2:1425:3133] TxId: 281474976715671. Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:30:19.786019Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [2:1425:3133] TxId: 281474976715671. 
Ctx: { TraceId: 01kd67kx8p224wvswhjks7g6rc, Database: , SessionId: ydb://session/3?node_id=2&id=ZWEwNDkxMDItMjdkNjhmN2ItNmFiOTkxNTAtMzkxMGRjNDk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval: ComputeTime: 0.000872s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1 { items { uint32_value: 7 } items { uint32_value: 4 } } |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] >> IndexBuildTest::Lock [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted >> FulltextIndexBuildTest::FlatRelevance [GOOD] >> IndexBuildTest::BaseCase |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |91.2%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> VDiskRestart::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 9774191104327813720 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-12-23T18:29:48.741829Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:48.744762Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:48.748099Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:48.753966Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:48.754183Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:48.771929Z 1 00h02m38.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:48.792906Z 1 00h02m38.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:48.941027Z 1 00h02m38.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:49.210797Z 1 00h02m38.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:49.325102Z 1 00h02m38.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:49.339334Z 1 00h02m38.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:49.581530Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:49.582071Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:49.601409Z 1 00h02m39.200000s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:49.631901Z 1 00h02m39.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:49.841419Z 1 00h02m39.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.038263Z 1 00h02m39.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.052181Z 1 00h02m39.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.173778Z 1 00h02m40.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.355126Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.355917Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.372536Z 1 00h02m40.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.402782Z 1 00h02m40.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.704161Z 1 00h02m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.723148Z 1 00h02m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.745898Z 1 00h02m40.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.794315Z 1 00h02m40.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.809725Z 1 00h02m40.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.831957Z 1 00h02m40.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.852968Z 1 00h02m41.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.878291Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.880986Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:50.989752Z 1 00h02m41.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:51.031507Z 1 00h02m41.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:51.336828Z 1 00h02m41.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in 
read-only Sender# [1:5326:706] 2025-12-23T18:29:51.378928Z 1 00h02m41.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:51.535558Z 1 00h02m41.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:51.550297Z 1 00h02m42.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:51.617361Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:51.618270Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:51.656810Z 1 00h02m42.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:51.670986Z 1 00h02m42.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:51.913441Z 1 00h02m42.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.045136Z 1 00h02m42.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.077180Z 1 00h02m42.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.093944Z 1 00h02m43.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.194880Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.195704Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.386447Z 1 00h02m43.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.547775Z 1 00h02m43.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.565570Z 1 00h02m43.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.583280Z 1 00h02m43.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.603606Z 1 00h02m43.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.621263Z 1 00h02m43.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.683049Z 1 00h02m43.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.856383Z 1 00h02m43.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.871052Z 1 00h02m44.000000s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.894769Z 1 00h02m44.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:52.916725Z 1 00h02m44.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:53.016798Z 1 00h02m44.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:53.319049Z 1 00h02m44.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:53.336103Z 1 00h02m44.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:53.371909Z 1 00h02m44.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:53.388879Z 1 00h02m44.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:53.552158Z 1 00h02m45.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:53.735203Z 1 00h02m45.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:53.982654Z 1 00h02m45.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:54.032925Z 1 00h02m45.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:54.051832Z 1 00h02m45.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:54.117743Z 1 00h02m45.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:54.288976Z 1 00h02m45.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5326:706] 2025-12-23T18:29:54.528924Z 1 00h02m46.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1 ... 
k read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-12-23T18:30:09.058544Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:09.062380Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:09.064974Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:09.070817Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:09.071043Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:09.107047Z 8 00h20m54.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:09.123157Z 8 00h20m54.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:09.554148Z 8 00h20m54.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:09.580659Z 8 00h20m54.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:09.898842Z 8 00h20m55.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:09.996827Z 8 00h20m55.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.055990Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.057370Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.079891Z 8 00h20m55.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.130422Z 8 00h20m55.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.152081Z 8 00h20m55.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.221705Z 8 00h20m56.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.239370Z 8 00h20m56.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.493457Z 8 00h20m56.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.516176Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in 
read-only Sender# [1:5375:755] 2025-12-23T18:30:10.517351Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.649802Z 8 00h20m56.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.793943Z 8 00h20m56.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.805553Z 8 00h20m56.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.817567Z 8 00h20m56.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.829157Z 8 00h20m57.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.903873Z 8 00h20m57.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:10.918365Z 8 00h20m57.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.064595Z 8 00h20m57.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.162549Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.163924Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.320945Z 8 00h20m57.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.340799Z 8 00h20m57.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.376895Z 8 00h20m57.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.394933Z 8 00h20m58.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.428129Z 8 00h20m58.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.441874Z 8 00h20m58.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.772508Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.774290Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.903568Z 8 00h20m58.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:11.918459Z 8 00h20m58.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:12.255120Z 8 00h20m58.912560s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:12.274648Z 8 00h20m59.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:12.511926Z 8 00h20m59.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:12.570722Z 8 00h20m59.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:12.642265Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:12.643601Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:12.812183Z 8 00h20m59.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:12.831249Z 8 00h20m59.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:12.995664Z 8 00h20m59.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:13.030025Z 8 00h21m00.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:13.066266Z 8 00h21m00.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:13.088426Z 8 00h21m00.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:13.289547Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:13.290526Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:13.436863Z 8 00h21m00.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:13.660338Z 8 00h21m00.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:13.673728Z 8 00h21m00.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:13.687170Z 8 00h21m00.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:13.742547Z 8 00h21m00.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:13.779785Z 8 00h21m01.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:13.796926Z 8 00h21m01.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:14.085324Z 8 00h21m01.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in 
read-only Sender# [1:5375:755] 2025-12-23T18:30:14.122194Z 8 00h21m01.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:14.138170Z 8 00h21m01.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:14.514932Z 8 00h21m01.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:14.529775Z 8 00h21m01.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:14.562692Z 8 00h21m02.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:14.578708Z 8 00h21m02.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:14.837321Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] 2025-12-23T18:30:14.839276Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5375:755] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> IndexBuildTest::Metering_Documentation_Formula [GOOD] >> IndexBuildTest::LockUniq >> VectorIndexBuildTest::CreateAndDrop >> VectorIndexBuildTest::TTxProgress_Throws [GOOD] >> VectorIndexBuildTest::TTxInit_Throws |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> VDiskRestart::Simple [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> IndexBuildTest::ShadowDataNotAllowedByDefault >> IndexBuildTest::CancellationNotEnoughRetries >> VectorIndexBuildTest::RecreatedColumns |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> IndexBuildTest::RejectsCreateUniq [GOOD] >> IndexBuildTest::RejectsDropIndex |91.2%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> GroupWriteTest::WithRead [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] >> IndexBuildTest::IndexPartitioningIsPersistedUniq |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 2326040220033545359 2025-12-23T18:30:15.624346Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-12-23T18:30:15.649774Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-12-23T18:30:15.649860Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-12-23T18:30:15.651951Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-12-23T18:30:15.666891Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:30:15.670133Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-12-23T18:30:23.841016Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-12-23T18:30:23.841149Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:30:23.965764Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> TestProtocols::TestConnectProtocol >> TInterconnectTest::TestBlobEvent220BytesPreSerialized >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> IndexBuildTest::ShadowDataEdgeCases >> TInterconnectTest::TestManyEvents >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] >> IndexBuildTest::RejectsDropIndex [GOOD] >> IndexBuildTest::RejectsDropIndexUniq >> 
IndexBuildTest::LockUniq [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady >> TestProtocols::TestConnectProtocol [GOOD] |91.2%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} >> TestProtocols::TestHTTPCollected >> TabletState::ImplicitUnsubscribeOnDisconnect >> TTabletLabeledCountersAggregator::SimpleAggregation >> GroupWriteTest::TwoTables [GOOD] >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes >> TestProtocols::TestHTTPCollected [GOOD] >> TInterconnectTest::TestTraceIdPassThrough >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::HeavyAggregation >> VectorIndexBuildTest::TTxInit_Throws [GOOD] >> VectorIndexBuildTest::TTxInit_Checks_EnableVectorIndex >> TInterconnectTest::TestBlobEventDifferentSizes [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized >> IndexBuildTest::IndexPartitioningIsPersistedUniq [GOOD] >> IndexBuildTest::DropIndex >> TInterconnectTest::TestTraceIdPassThrough [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] Test command err: RandomSeed# 18270519306927111194 2025-12-23T18:30:16.137865Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-12-23T18:30:16.137979Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-12-23T18:30:16.162508Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-12-23T18:30:16.162598Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-12-23T18:30:16.162725Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-12-23T18:30:16.162775Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-12-23T18:30:16.167370Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-12-23T18:30:16.167483Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-12-23T18:30:16.189339Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 
Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:30:16.189467Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:30:16.194862Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-12-23T18:30:16.194989Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-12-23T18:30:25.342921Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-12-23T18:30:25.343036Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:30:25.343108Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:30:25.381067Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-12-23T18:30:25.381181Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:30:03.224479Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:03.347254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:30:03.358128Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:03.358499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:03.358571Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:03.644608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:03.644736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:03.718981Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514599760806 != 1766514599760810 2025-12-23T18:30:03.735069Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:03.780724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:03.893047Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fcf/r3tmp/tmp9tO6UR/pdisk_1.dat 2025-12-23T18:30:04.195886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:04.210684Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:04.313481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:04.614596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===== UPSERT initial rows 2025-12-23T18:30:04.843149Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1023:2864], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:04.843336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1034:2869], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:04.843421Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:04.844703Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1039:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:04.844965Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:04.849777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:30:05.049448Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1037:2872], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:30:05.154034Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===== Begin SELECT { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT { items { uint32_value: 3 } items { uint32_value: 2 } } ... waiting for SysViewsRoster update finished 2025-12-23T18:30:10.731764Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:10.743123Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:10.743532Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:10.743636Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:11.011444Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:11.011560Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:11.033515Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766514607475428 != 1766514607475431 2025-12-23T18:30:11.041014Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:11.085600Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:11.165477Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fcf/r3tmp/tmpSreXp5/pdisk_1.dat 2025-12-23T18:30:11.478547Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:11.492446Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:11.595917Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:11.827180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:12.046508Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1023:2864], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:12.046612Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1034:2869], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:12.046692Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:12.047662Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1039:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:12.047828Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:30:12.055627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:30:12.207308Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1037:2872], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:30:12.244804Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1095:2911] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for at least 2 blocked commits 2025-12-23T18:30:14.802009Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:30:14.802081Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 ... blocked commit for tablet 72075186224037889 ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 >> TabletState::SeqNoSubscriptionReplace >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TabletState::ImplicitUnsubscribeOnDisconnect [GOOD] >> TTxDataShardRecomputeKMeansScan::MainTable [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TTxDataShardRecomputeKMeansScan::BuildTable+WithForeign >> IndexBuildTest::RejectsDropIndexUniq [GOOD] >> IndexBuildTest::RejectsCancelUniq >> TabletState::SeqNoSubscriptionReplace [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> IndexBuildTest::ShadowDataEdgeCases [GOOD] >> IndexBuildTest::WithFollowers |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::ImplicitUnsubscribeOnDisconnect [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TInterconnectTest::TestManyEvents [GOOD] >> TInterconnectTest::TestCrossConnect |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscriptionReplace [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest |91.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |91.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |91.2%| [TA] {RESULT} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber >> VectorIndexBuildTest::TTxInit_Checks_EnableVectorIndex [GOOD] >> VectorIndexBuildTest::UnknownState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:29:07.080630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:29:07.280824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:29:07.306542Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:29:07.307121Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:29:07.307235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:29:07.724386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:07.724533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:07.919387Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514542067404 != 1766514542067408 2025-12-23T18:29:07.933308Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:07.981717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:08.121666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004ee8/r3tmp/tmpIgBJcH/pdisk_1.dat 2025-12-23T18:29:08.659325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:29:08.688229Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:08.827385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:08.901634Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:29:08.902858Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:29:08.903168Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:29:08.903426Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:09.059822Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:29:09.060713Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:09.060861Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:09.062734Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:29:09.066961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:29:09.067048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:29:09.067470Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:09.067645Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:09.067772Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:29:09.083617Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:09.149576Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:29:09.149815Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:09.149950Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:29:09.149990Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:29:09.150024Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:29:09.150057Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:29:09.150346Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:09.150430Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:09.150792Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:29:09.150962Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:29:09.151449Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:29:09.151497Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:09.151537Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:29:09.151574Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:29:09.151613Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:29:09.151644Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:29:09.151687Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:29:09.151813Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:09.151873Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:09.151928Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:29:09.152032Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:29:09.152071Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:29:09.152158Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:29:09.152401Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:29:09.152466Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:29:09.152550Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:29:09.152594Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:29:09.152646Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:29:09.152693Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:29:09.152745Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:29:09.153068Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:29:09.153117Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:29:09.153159Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:29:09.153205Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:29:09.153262Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:29:09.153290Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:29:09.153326Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:29:09.153359Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:29:09.153399Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:29:09.155476Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T18:29:09.155540Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... :2783], Recipient [12:918:2781]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2025-12-23T18:30:25.810728Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:25.810828Z node 12 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2025-12-23T18:30:25.810943Z node 12 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:30:25.811209Z node 12 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-12-23T18:30:25.811296Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [12:918:2781], Recipient [12:920:2783]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2025-12-23T18:30:25.811361Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:30:25.811402Z node 12 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 1234567890011 2025-12-23T18:30:26.074544Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [12:1166:2967], Recipient [12:918:2781]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-12-23T18:30:26.074818Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:30:26.074957Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-12-23T18:30:26.075121Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:30:26.075200Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:30:26.075264Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:26.075341Z node 12 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:30:26.075418Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2025-12-23T18:30:26.075488Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:30:26.075523Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:26.075556Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:30:26.075591Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:30:26.075772Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-12-23T18:30:26.076116Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-12-23T18:30:26.076194Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[12:1166:2967], 0} after executionsCount# 1 2025-12-23T18:30:26.076304Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[12:1166:2967], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:30:26.076424Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[12:1166:2967], 0} finished in read 2025-12-23T18:30:26.076539Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:30:26.076576Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:30:26.076607Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:30:26.076640Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:30:26.076698Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:30:26.076733Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:30:26.076776Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2025-12-23T18:30:26.076851Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:30:26.077030Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:30:26.078455Z node 12 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269553219, Sender [12:1166:2967], Recipient [12:918:2781]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:30:26.078550Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-12-23T18:30:26.079096Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [12:1166:2967], Recipient [12:920:2783]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-12-23T18:30:26.079317Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-12-23T18:30:26.079395Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-12-23T18:30:26.079472Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2025-12-23T18:30:26.079501Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-12-23T18:30:26.079530Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-12-23T18:30:26.079560Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-12-23T18:30:26.079615Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2025-12-23T18:30:26.079659Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2025-12-23T18:30:26.079691Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-12-23T18:30:26.079718Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-12-23T18:30:26.079745Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-12-23T18:30:26.079856Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-12-23T18:30:26.080101Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-12-23T18:30:26.080151Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[12:1166:2967], 1} after executionsCount# 1 2025-12-23T18:30:26.080193Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[12:1166:2967], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:30:26.080267Z node 12 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[12:1166:2967], 1} finished in read 2025-12-23T18:30:26.080322Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2025-12-23T18:30:26.080354Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-12-23T18:30:26.080385Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-12-23T18:30:26.080423Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-12-23T18:30:26.080478Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2025-12-23T18:30:26.080515Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-12-23T18:30:26.080544Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2025-12-23T18:30:26.080580Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-12-23T18:30:26.080675Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-12-23T18:30:26.081545Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [12:1166:2967], Recipient [12:920:2783]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-12-23T18:30:26.081616Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1003 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1004 } } |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest >> IndexBuildTest::DropIndex [GOOD] >> IndexBuildTest::DropIndexUniq >> TNetClassifierTest::TestInitFromFile >> IndexBuildTest::WithFollowers [GOOD] >> IndexBuildTest::WithFollowersUniq |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TMLPWriterTests::WriteTwoMessage_Deduplicated [GOOD] >> TMLPWriterTests::Deduplicated_Reboot |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TTabletPipeTest::TestPipeConnectToHint |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> TResourceBrokerInstant::Test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] >> 
TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Query [GOOD] >> TResourceBrokerInstant::Test [GOOD] >> TResourceBrokerInstant::TestErrors |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] >> GroupWriteTest::Simple [GOOD] >> TResourceBrokerInstant::TestErrors [GOOD] >> IndexBuildTest::WithFollowersUniq [GOOD] >> IndexBuildTest::RejectsOnDuplicatesUniq >> VectorIndexBuildTest::UnknownState [GOOD] >> IndexBuildTest::DropIndexUniq [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TxUsage::WriteToTopic_Demo_20_RestartNo_Table >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries [GOOD] >> TTabletCountersPercentile::SingleBucket [GOOD] >> TTabletCountersPercentile::StartFromZero [GOOD] >> test_select.py::TestDML::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> TResourceBroker::TestErrors >> VectorIndexBuildTest::RecreatedColumns [GOOD] >> VectorIndexBuildTest::SimpleDuplicates-Overlap-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 7961231906058196967 2025-12-23T18:30:13.062182Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-12-23T18:30:13.088384Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-12-23T18:30:13.088459Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-12-23T18:30:13.091363Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-12-23T18:30:13.108282Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:30:13.111362Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-12-23T18:30:30.833988Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-12-23T18:30:30.834077Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:30:30.893309Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestErrors [GOOD] Test command err: 2025-12-23T18:30:30.901217Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:1080: FinishTaskInstant 
failed for task 2: cannot finish unknown task >> VectorIndexBuildTest::CreateAndDrop [GOOD] >> VectorIndexBuildTest::Metering_CommonDB |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> TResourceBroker::TestErrors [GOOD] >> TResourceBroker::TestExecutionStat |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::UnknownState [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:30:18.280711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:30:18.280787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:18.280816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:30:18.280845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:30:18.280889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:30:18.280918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:30:18.280966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:18.281070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:30:18.281886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:30:18.282190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:30:18.369388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:18.369461Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:18.380820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:30:18.381066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 
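The paired TEvRead traces earlier in this log show how the per-request row quota is threaded through consecutive shard reads: the client starts with MaxRows 1001, the first shard (72075186224037888) returns 2 rows and reports "quota rows left# 999", and the follow-up request to 72075186224037889 is therefore issued with MaxRows 999, ending at 997 after another 2 rows. The C++ sketch below reproduces only that arithmetic as seen in the trace; the struct and function names here are illustrative assumptions and are not YDB's actual read-iterator API.

#include <cstdint>
#include <iostream>

// Illustrative model only (not the YDB API): remaining row/byte quota for one read session.
struct ReadQuota {
    uint64_t RowsLeft;   // corresponds to MaxRows / TotalRowsLimit in the request
    uint64_t BytesLeft;  // corresponds to MaxBytes in the request
};

// Subtract one shard's reply (rowCount / bytes as printed in the trace) from the quota.
ReadQuota ApplyReply(ReadQuota quota, uint64_t rowCount, uint64_t bytes) {
    quota.RowsLeft  = rowCount >= quota.RowsLeft  ? 0 : quota.RowsLeft  - rowCount;
    quota.BytesLeft = bytes    >= quota.BytesLeft ? 0 : quota.BytesLeft - bytes;
    return quota;
}

int main() {
    ReadQuota quota{1001, 5242880};       // initial request: MaxRows 1001, MaxBytes 5 MiB
    quota = ApplyReply(quota, 2, 64);     // shard 72075186224037888: rowCount# 2, bytes# 64
    std::cout << quota.RowsLeft << "\n";  // 999 -> MaxRows of the next TEvRead in the trace
                                          // (BytesLeft is now 5242816, matching "quota bytes left# 5242816")
    quota = ApplyReply(quota, 2, 64);     // shard 72075186224037889: rowCount# 2, bytes# 64
    std::cout << quota.RowsLeft << "\n";  // 997, matching "quota rows left# 997"
    return 0;
}
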
2025-12-23T18:30:18.381222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:30:18.393823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:30:18.394085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:30:18.394974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:18.395218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:30:18.402292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:18.402502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:30:18.404068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:30:18.404140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:18.404299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:30:18.404346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:30:18.404407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:30:18.404566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:30:18.568240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.569250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.569358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.569451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.569514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.569597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.569675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.569742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.569804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.569864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.569954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.570039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.570102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.570179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.570276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Id: 18] was 0 2025-12-23T18:30:30.837407Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2025-12-23T18:30:30.837424Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2025-12-23T18:30:30.837438Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2025-12-23T18:30:30.837452Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2025-12-23T18:30:30.837482Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2025-12-23T18:30:30.837497Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2025-12-23T18:30:30.837519Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2025-12-23T18:30:30.837545Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2025-12-23T18:30:30.837565Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2025-12-23T18:30:30.837584Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2025-12-23T18:30:30.837605Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2025-12-23T18:30:30.837631Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2025-12-23T18:30:30.837657Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2025-12-23T18:30:30.837683Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2025-12-23T18:30:30.837706Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2025-12-23T18:30:30.837729Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2025-12-23T18:30:30.837746Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2025-12-23T18:30:30.837761Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2025-12-23T18:30:30.837777Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2025-12-23T18:30:30.837995Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:2093: TTxInit for Columns, read records: 10, at schemeshard: 72057594046678944 2025-12-23T18:30:30.838157Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.838271Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-12-23T18:30:30.838326Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:30:30.838357Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:30:30.838380Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2025-12-23T18:30:30.838401Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 2025-12-23T18:30:30.838571Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 4, at schemeshard: 72057594046678944 2025-12-23T18:30:30.838867Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.839072Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 12, at schemeshard: 72057594046678944 2025-12-23T18:30:30.839450Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:30.839515Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:30:30.839713Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:30.840135Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.840210Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.840447Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.840623Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.840672Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.840763Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.840934Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.841006Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at 
schemeshard: 72057594046678944 2025-12-23T18:30:30.841365Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.841784Z node 5 :BUILD_INDEX DEBUG: schemeshard_index_build_info.h:626: Restored index build id# 102: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, State: Filling, SubState: None, IsBroken: 1, IsCancellationRequested: 0, Issue: Unknown build kind: 999999, SubscribersCount: 0, CreateSender: [0:0:0], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:30:30.841856Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:30.841960Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.842011Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.842259Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 1 tables: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:30.842352Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:30.842424Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:30.849031Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:30:30.859692Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:30:30.859811Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:30.860093Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:30:30.860188Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:30:30.860259Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:30:30.860440Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:1271:3171] 
sender: [5:1331:2058] recipient: [5:15:2062] 2025-12-23T18:30:30.933302Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-12-23T18:30:30.933595Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 Issues { message: "Unknown build kind: 999999" severity: 1 } State: STATE_TRANSFERING_DATA Settings { source_path: "/MyRoot/vectors" max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 Issues { message: "Unknown build kind: 999999" severity: 1 } State: STATE_TRANSFERING_DATA Settings { source_path: "/MyRoot/vectors" max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } 2025-12-23T18:30:30.934591Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:1703: Handle TEvRemoteHttpInfo: BuildIndexId=102&Page=BuildIndexInfo 2025-12-23T18:30:30.934718Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:441: TTxMonitoring.Execute: BuildIndexId=102&Page=BuildIndexInfo |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndexUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:30:17.423605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:30:17.423730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:17.423767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:30:17.423801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:30:17.423864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:30:17.423903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:30:17.423967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:17.424038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:30:17.424907Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:30:17.425243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:30:17.512462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:17.512520Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:17.518354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:30:17.518544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:30:17.518673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:30:17.521702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:30:17.521928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:30:17.522835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:17.523060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:30:17.526556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:17.526732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:30:17.527791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:30:17.527845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:17.527960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:30:17.528002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:30:17.528041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:30:17.528164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:30:17.672201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.673283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.673415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.673502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.673569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.673669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.673755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.673825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.673899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.673976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.674064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.674168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.674262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.674375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:17.674467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
7594046678944, cookie: 105 2025-12-23T18:30:30.871212Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:30:30.871245Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2025-12-23T18:30:30.871281Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 5 2025-12-23T18:30:30.871818Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:30:30.871897Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:30:30.871930Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:30:30.871963Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 44], version: 18446744073709551615 2025-12-23T18:30:30.871995Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2025-12-23T18:30:30.872864Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:30:30.872936Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:30:30.872963Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:30:30.872993Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 18 2025-12-23T18:30:30.873032Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:30:30.874125Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:30:30.874203Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 
Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:30:30.874244Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:30:30.875282Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:30:30.875359Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:30:30.875387Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:30:30.875932Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:30:30.876013Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:30:30.876042Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:30:30.876073Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 45], version: 18446744073709551615 2025-12-23T18:30:30.876105Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 4 2025-12-23T18:30:30.876183Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/3, is published: true 2025-12-23T18:30:30.878365Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:30:30.878423Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:30:30.878648Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 4 2025-12-23T18:30:30.878788Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 2/3 2025-12-23T18:30:30.878823Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-12-23T18:30:30.878856Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 2/3 2025-12-23T18:30:30.878885Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 105 ready parts: 2/3 2025-12-23T18:30:30.878917Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2025-12-23T18:30:30.879551Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-12-23T18:30:30.879602Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:30:30.879771Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 3 2025-12-23T18:30:30.879861Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:2 progress is 3/3 2025-12-23T18:30:30.879887Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-12-23T18:30:30.879919Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:2 progress is 3/3 2025-12-23T18:30:30.879945Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-12-23T18:30:30.879973Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2025-12-23T18:30:30.880027Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:816:2772] message: TxId: 105 2025-12-23T18:30:30.880067Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-12-23T18:30:30.880104Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2025-12-23T18:30:30.880132Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:0 2025-12-23T18:30:30.880224Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2025-12-23T18:30:30.880259Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:1 2025-12-23T18:30:30.880281Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:1 2025-12-23T18:30:30.880312Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 2 2025-12-23T18:30:30.880336Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:2 2025-12-23T18:30:30.880358Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:2 2025-12-23T18:30:30.880400Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 2 2025-12-23T18:30:30.881362Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 105 2025-12-23T18:30:30.881457Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:30:30.881528Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:30:30.881565Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:30:30.881601Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:30:30.886629Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:30:30.886975Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:30:30.887020Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:1349:3271] TestWaitNotification: OK eventTxId 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for boot2 ... waiting for client destroyed notification ... waiting for connect2 |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] >> TResourceBroker::TestExecutionStat [GOOD] >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck >> VDiskTest::HugeBlobWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2025-12-23T18:30:29.177795Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122562410684323:2212];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:29.178097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004ec9/r3tmp/tmpJ0fTok/pdisk_1.dat 2025-12-23T18:30:29.466313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:29.466456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:29.486158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:29.549943Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:29.551254Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:30:29.563864Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/004ec9/r3tmp/yandex3EhOdx.tmp 2025-12-23T18:30:29.563899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/004ec9/r3tmp/yandex3EhOdx.tmp 2025-12-23T18:30:29.564202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/004ec9/r3tmp/yandex3EhOdx.tmp 2025-12-23T18:30:29.564319Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:30:29.793153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:30:30.178183Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> IndexBuildTest::RejectsCancelUniq [GOOD] >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> TTabletPipeTest::TestSendAfterReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2025-12-23T18:30:29.608316Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122561467444957:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:29.608398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004ec6/r3tmp/tmpczBXw8/pdisk_1.dat 2025-12-23T18:30:29.957436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:29.957540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:29.958772Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:30:29.962261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:29.963367Z node 1 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:6083) connection closed with error: Connection refused 2025-12-23T18:30:29.963736Z node 1 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2025-12-23T18:30:30.005380Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:30.016750Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:30.016781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:30.016798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:30.016908Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:30:30.165867Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:30:30.629929Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] Test command err: 2025-12-23T18:30:25.999379Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [2:8:2055] 2025-12-23T18:30:25.999717Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:9:2056] worker 0 2025-12-23T18:30:25.999767Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:10:2057] worker 1 2025-12-23T18:30:25.999798Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:11:2058] worker 2 2025-12-23T18:30:25.999826Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:12:2059] worker 3 2025-12-23T18:30:25.999859Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:13:2060] worker 4 2025-12-23T18:30:25.999896Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:14:2061] worker 5 2025-12-23T18:30:25.999927Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:15:2062] worker 6 2025-12-23T18:30:25.999966Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: 
aggregator new request V2 Initiator [2:8:2055] self [2:16:2063] worker 7 2025-12-23T18:30:26.000010Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:17:2064] worker 8 2025-12-23T18:30:26.000057Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:18:2065] worker 9 Sending message to [2:10:2057] from [2:8:2055] id 1 Sending message to [2:11:2058] from [2:8:2055] id 2 Sending message to [2:12:2059] from [2:8:2055] id 3 Sending message to [2:13:2060] from [2:8:2055] id 4 Sending message to [2:14:2061] from [2:8:2055] id 5 Sending message to [2:15:2062] from [2:8:2055] id 6 Sending message to [2:16:2063] from [2:8:2055] id 7 Sending message to [2:17:2064] from [2:8:2055] id 8 Sending message to [2:18:2065] from [2:8:2055] id 9 Sending message to [2:9:2056] from [2:8:2055] id 10 2025-12-23T18:30:26.740360Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [2:17:2064] 2025-12-23T18:30:26.740437Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [2:18:2065] 2025-12-23T18:30:26.740592Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [2:9:2056] 2025-12-23T18:30:26.740646Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [2:10:2057] 2025-12-23T18:30:26.740680Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [2:11:2058] 2025-12-23T18:30:26.740716Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [2:12:2059] 2025-12-23T18:30:26.740764Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [2:13:2060] 2025-12-23T18:30:26.740817Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [2:14:2061] 2025-12-23T18:30:26.740852Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [2:15:2062] 2025-12-23T18:30:26.740885Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [2:16:2063] 2025-12-23T18:30:26.740919Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [2:15:2062] 2025-12-23T18:30:26.741955Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [2:15:2062] 2025-12-23T18:30:26.770013Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:15:2062] Initiator [2:8:2055] 2025-12-23T18:30:26.792336Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [2:16:2063] 2025-12-23T18:30:26.793954Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [2:16:2063] 2025-12-23T18:30:26.824054Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:16:2063] Initiator [2:8:2055] 2025-12-23T18:30:26.848433Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [2:17:2064] 2025-12-23T18:30:26.850049Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged 
response node 8 [2:17:2064] 2025-12-23T18:30:26.876587Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:17:2064] Initiator [2:8:2055] 2025-12-23T18:30:26.891243Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [2:18:2065] 2025-12-23T18:30:26.892517Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [2:18:2065] 2025-12-23T18:30:26.916927Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:18:2065] Initiator [2:8:2055] 2025-12-23T18:30:26.933767Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [2:8:2055] 2025-12-23T18:30:26.933880Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [2:8:2055] 2025-12-23T18:30:26.938724Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [2:9:2056] 2025-12-23T18:30:26.940374Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [2:9:2056] 2025-12-23T18:30:26.968929Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:9:2056] Initiator [2:8:2055] 2025-12-23T18:30:26.984736Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [2:10:2057] 2025-12-23T18:30:26.985902Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [2:10:2057] 2025-12-23T18:30:27.017250Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:10:2057] Initiator [2:8:2055] 2025-12-23T18:30:27.035237Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [2:11:2058] 2025-12-23T18:30:27.036545Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [2:11:2058] 2025-12-23T18:30:27.068213Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:11:2058] Initiator [2:8:2055] 2025-12-23T18:30:27.091274Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [2:12:2059] 2025-12-23T18:30:27.092810Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [2:12:2059] 2025-12-23T18:30:27.119932Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:12:2059] Initiator [2:8:2055] 2025-12-23T18:30:27.142437Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [2:13:2060] 2025-12-23T18:30:27.145333Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [2:13:2060] 2025-12-23T18:30:27.172595Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:13:2060] Initiator [2:8:2055] 2025-12-23T18:30:27.195746Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [2:14:2061] 2025-12-23T18:30:27.197469Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [2:14:2061] 2025-12-23T18:30:27.230897Z node 2 :TABLET_AGGREGATOR 
INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:14:2061] Initiator [2:8:2055] 2025-12-23T18:30:27.249617Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [2:8:2055] 2025-12-23T18:30:27.249725Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [2:8:2055] 2025-12-23T18:30:27.255573Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [2:8:2055] 2025-12-23T18:30:27.255731Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [2:8:2055] 2025-12-23T18:30:27.260487Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [2:8:2055] 2025-12-23T18:30:27.260643Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [2:8:2055] 2025-12-23T18:30:27.266524Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 0 [2:8:2055] 2025-12-23T18:30:27.266648Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 0 [2:8:2055] 2025-12-23T18:30:27.270615Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [2:8:2055] 2025-12-23T18:30:27.270735Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [2:8:2055] 2025-12-23T18:30:27.275991Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [2:8:2055] 2025-12-23T18:30:27.276146Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [2:8:2055] 2025-12-23T18:30:27.284633Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [2:8:2055] 2025-12-23T18:30:27.284794Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [2:8:2055] 2025-12-23T18:30:27.289920Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [2:8:2055] 2025-12-23T18:30:27.290033Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [2:8:2055] 2025-12-23T18:30:27.294441Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [2:8:2055] 2025-12-23T18:30:27.294586Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [2:8:2055] 2025-12-23T18:30:27.300037Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:8:2055] Initiator [2:7:2054] TEST 2 10 duration 1.460217s 2025-12-23T18:30:27.491017Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [3:8:2055] 2025-12-23T18:30:27.491551Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [3:8:2055] self [3:9:2056] worker 0 2025-12-23T18:30:27.491613Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [3:8:2055] self [3:10:2057] worker 1 2025-12-23T18:30:27.491634Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_ag ... 
or got response node 8 [3:8:2055] 2025-12-23T18:30:28.910336Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [3:8:2055] 2025-12-23T18:30:28.915664Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [3:8:2055] 2025-12-23T18:30:28.915823Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [3:8:2055] 2025-12-23T18:30:28.921320Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [3:8:2055] 2025-12-23T18:30:28.921473Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [3:8:2055] 2025-12-23T18:30:28.927599Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [3:8:2055] Initiator [3:7:2054] TEST 2 20 duration 1.586805s 2025-12-23T18:30:29.194573Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [4:8:2055] 2025-12-23T18:30:29.194665Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [4:8:2055] self [4:9:2056] worker 0 Sending message to [4:9:2056] from [4:8:2055] id 1 Sending message to [4:9:2056] from [4:8:2055] id 2 Sending message to [4:9:2056] from [4:8:2055] id 3 Sending message to [4:9:2056] from [4:8:2055] id 4 Sending message to [4:9:2056] from [4:8:2055] id 5 Sending message to [4:9:2056] from [4:8:2055] id 6 Sending message to [4:9:2056] from [4:8:2055] id 7 Sending message to [4:9:2056] from [4:8:2055] id 8 Sending message to [4:9:2056] from [4:8:2055] id 9 Sending message to [4:9:2056] from [4:8:2055] id 10 2025-12-23T18:30:29.971930Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [4:9:2056] 2025-12-23T18:30:29.971997Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [4:9:2056] 2025-12-23T18:30:29.972025Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [4:9:2056] 2025-12-23T18:30:29.972053Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [4:9:2056] 2025-12-23T18:30:29.972133Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [4:9:2056] 2025-12-23T18:30:29.972174Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [4:9:2056] 2025-12-23T18:30:29.972212Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [4:9:2056] 2025-12-23T18:30:29.972250Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [4:9:2056] 2025-12-23T18:30:29.972288Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [4:9:2056] 2025-12-23T18:30:29.972326Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [4:9:2056] 2025-12-23T18:30:29.972680Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [4:9:2056] 2025-12-23T18:30:29.974299Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [4:9:2056] 2025-12-23T18:30:30.004633Z node 4 :TABLET_AGGREGATOR INFO: 
tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [4:9:2056] 2025-12-23T18:30:30.006166Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [4:9:2056] 2025-12-23T18:30:30.042831Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [4:9:2056] 2025-12-23T18:30:30.044392Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [4:9:2056] 2025-12-23T18:30:30.082212Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [4:9:2056] 2025-12-23T18:30:30.083836Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [4:9:2056] 2025-12-23T18:30:30.117630Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [4:9:2056] 2025-12-23T18:30:30.119243Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [4:9:2056] 2025-12-23T18:30:30.161583Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [4:9:2056] 2025-12-23T18:30:30.162878Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [4:9:2056] 2025-12-23T18:30:30.188538Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [4:9:2056] 2025-12-23T18:30:30.190650Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [4:9:2056] 2025-12-23T18:30:30.223823Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [4:9:2056] 2025-12-23T18:30:30.225394Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [4:9:2056] 2025-12-23T18:30:30.256703Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [4:9:2056] 2025-12-23T18:30:30.258055Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [4:9:2056] 2025-12-23T18:30:30.289022Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [4:9:2056] 2025-12-23T18:30:30.290449Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [4:9:2056] 2025-12-23T18:30:30.339032Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [4:9:2056] Initiator [4:8:2055] 2025-12-23T18:30:30.740343Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 0 [4:8:2055] 2025-12-23T18:30:30.741295Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 0 [4:8:2055] 2025-12-23T18:30:30.801299Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [4:8:2055] Initiator [4:7:2054] TEST 2 1 duration 1.751594s 2025-12-23T18:30:31.144964Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [5:7:2054] self [5:8:2055] worker 0 Sending message to [5:8:2055] from [5:8:2055] id 1 Sending message to [5:8:2055] from [5:8:2055] id 2 Sending message to [5:8:2055] from [5:8:2055] id 3 Sending message 
to [5:8:2055] from [5:8:2055] id 4 Sending message to [5:8:2055] from [5:8:2055] id 5 Sending message to [5:8:2055] from [5:8:2055] id 6 Sending message to [5:8:2055] from [5:8:2055] id 7 Sending message to [5:8:2055] from [5:8:2055] id 8 Sending message to [5:8:2055] from [5:8:2055] id 9 Sending message to [5:8:2055] from [5:8:2055] id 10 2025-12-23T18:30:31.888673Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [5:8:2055] 2025-12-23T18:30:31.888743Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [5:8:2055] 2025-12-23T18:30:31.888772Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [5:8:2055] 2025-12-23T18:30:31.888815Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [5:8:2055] 2025-12-23T18:30:31.888844Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [5:8:2055] 2025-12-23T18:30:31.888931Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [5:8:2055] 2025-12-23T18:30:31.888973Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [5:8:2055] 2025-12-23T18:30:31.889013Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [5:8:2055] 2025-12-23T18:30:31.889052Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [5:8:2055] 2025-12-23T18:30:31.889081Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [5:8:2055] 2025-12-23T18:30:31.889307Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [5:8:2055] 2025-12-23T18:30:31.890838Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [5:8:2055] 2025-12-23T18:30:31.913379Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [5:8:2055] 2025-12-23T18:30:31.914967Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [5:8:2055] 2025-12-23T18:30:31.945134Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [5:8:2055] 2025-12-23T18:30:31.946819Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [5:8:2055] 2025-12-23T18:30:31.981269Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [5:8:2055] 2025-12-23T18:30:31.982831Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [5:8:2055] 2025-12-23T18:30:32.013120Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [5:8:2055] 2025-12-23T18:30:32.014757Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [5:8:2055] 2025-12-23T18:30:32.056168Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [5:8:2055] 2025-12-23T18:30:32.057882Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [5:8:2055] 
2025-12-23T18:30:32.090082Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [5:8:2055] 2025-12-23T18:30:32.091802Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [5:8:2055] 2025-12-23T18:30:32.126970Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [5:8:2055] 2025-12-23T18:30:32.128904Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [5:8:2055] 2025-12-23T18:30:32.164603Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [5:8:2055] 2025-12-23T18:30:32.166879Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [5:8:2055] 2025-12-23T18:30:32.203934Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [5:8:2055] 2025-12-23T18:30:32.205920Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [5:8:2055] 2025-12-23T18:30:32.282085Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [5:8:2055] Initiator [5:7:2054] TEST 2 1 duration 1.585989s |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> TTabletPipeTest::TestOpen |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2025-12-23T18:30:29.813226Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122562170587728:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:29.813722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004ec4/r3tmp/tmpGF6FGG/pdisk_1.dat 2025-12-23T18:30:30.033742Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:30:30.063446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:30.063580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:30.129800Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122562170587597:2081] 1766514629788314 != 1766514629788317 2025-12-23T18:30:30.133735Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:30.146138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:30.199606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/004ec4/r3tmp/yandexwtC1cE.tmp 2025-12-23T18:30:30.199634Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/004ec4/r3tmp/yandexwtC1cE.tmp 2025-12-23T18:30:30.199767Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:344: invalid NetData format 2025-12-23T18:30:30.199790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: /home/runner/.ya/build/build_root/8ptz/004ec4/r3tmp/yandexwtC1cE.tmp 2025-12-23T18:30:30.199903Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:30:30.214880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions >> TSubscriberCombinationsTest::CombinationsRootDomain >> TSubscriberTest::StrongNotificationAfterCommit >> TSubscriberSinglePathUpdateTest::OneRingGroup >> TSubscriberTest::NotifyDelete |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> TTabletPipeTest::TestOpen [GOOD] >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk2/unittest >> VDiskTest::HugeBlobWrite [GOOD] Test command err: Put id# [29:1:1:0:0:1048576:1] totalSize# 0 blobValueIndex# 45 Trim Put id# [25:1:1:0:0:1572864:1] totalSize# 1048576 blobValueIndex# 56 Put id# [81:1:1:0:0:589824:1] totalSize# 2621440 blobValueIndex# 33 Put id# [11:1:1:0:0:40960:1] totalSize# 3211264 blobValueIndex# 23 Change MinHugeBlobSize# 65536 Put id# [68:1:1:0:0:10:1] totalSize# 3252224 blobValueIndex# 8 Put id# [28:1:1:0:0:1572864:1] totalSize# 3252234 blobValueIndex# 54 Put id# [78:1:1:0:0:40960:1] totalSize# 4825098 blobValueIndex# 20 Put id# [4:1:1:0:0:40960:1] totalSize# 4866058 blobValueIndex# 25 Put id# [29:1:2:0:0:40960:1] totalSize# 4907018 blobValueIndex# 26 Put id# [55:1:1:0:0:1572864:1] totalSize# 4947978 blobValueIndex# 51 Put id# [21:1:1:0:0:1048576:1] totalSize# 6520842 blobValueIndex# 44 Put id# [25:1:2:0:0:589824:1] totalSize# 7569418 blobValueIndex# 37 Put id# [27:1:1:0:0:589824:1] totalSize# 8159242 blobValueIndex# 39 Change MinHugeBlobSize# 8192 Restart Put id# [90:1:1:0:0:1024:1] totalSize# 8749066 blobValueIndex# 19 Change MinHugeBlobSize# 61440 Trim Put id# [93:1:1:0:0:1572864:1] totalSize# 8750090 blobValueIndex# 52 Put id# [40:1:1:0:0:1024:1] totalSize# 10322954 blobValueIndex# 19 Trim Put id# [44:1:1:0:0:1048576:1] totalSize# 10323978 blobValueIndex# 45 Change MinHugeBlobSize# 8192 Put id# [23:1:1:0:0:10:1] totalSize# 11372554 blobValueIndex# 0 Trim Put id# [96:1:1:0:0:1048576:1] totalSize# 11372564 blobValueIndex# 48 Trim Put id# [63:1:1:0:0:589824:1] totalSize# 12421140 blobValueIndex# 30 Put id# [99:1:1:0:0:10:1] totalSize# 13010964 blobValueIndex# 8 Change MinHugeBlobSize# 12288 Put id# [73:1:1:0:0:1048576:1] totalSize# 13010974 blobValueIndex# 47 Change MinHugeBlobSize# 524288 Put id# [79:1:1:0:0:40960:1] totalSize# 14059550 blobValueIndex# 29 Put id# [18:1:1:0:0:40960:1] totalSize# 14100510 blobValueIndex# 27 Trim Put id# [68:1:2:0:0:10:1] totalSize# 14141470 blobValueIndex# 8 Trim Put id# [100:1:1:0:0:1024:1] totalSize# 14141480 blobValueIndex# 16 Put id# [34:1:1:0:0:589824:1] totalSize# 14142504 blobValueIndex# 37 Put id# [31:1:1:0:0:1024:1] totalSize# 14732328 blobValueIndex# 15 Put id# [98:1:1:0:0:1024:1] totalSize# 14733352 blobValueIndex# 11 Put id# [60:1:1:0:0:589824:1] totalSize# 14734376 blobValueIndex# 38 Change 
MinHugeBlobSize# 12288 Put id# [15:1:1:0:0:10:1] totalSize# 15324200 blobValueIndex# 5 Put id# [37:1:1:0:0:1048576:1] totalSize# 15324210 blobValueIndex# 40 Put id# [24:1:1:0:0:10:1] totalSize# 16372786 blobValueIndex# 6 Change MinHugeBlobSize# 65536 Put id# [84:1:1:0:0:1572864:1] totalSize# 16372796 blobValueIndex# 52 Put id# [56:1:1:0:0:1024:1] totalSize# 17945660 blobValueIndex# 15 Put id# [53:1:1:0:0:40960:1] totalSize# 17946684 blobValueIndex# 24 Restart Put id# [65:1:1:0:0:40960:1] totalSize# 17987644 blobValueIndex# 25 Put id# [68:1:3:0:0:10:1] totalSize# 18028604 blobValueIndex# 6 Put id# [2:1:1:0:0:1048576:1] totalSize# 18028614 blobValueIndex# 45 Put id# [76:1:1:0:0:589824:1] totalSize# 19077190 blobValueIndex# 36 Put id# [23:1:2:0:0:1024:1] totalSize# 19667014 blobValueIndex# 14 Put id# [20:1:1:0:0:1024:1] totalSize# 19668038 blobValueIndex# 18 Trim Put id# [59:1:1:0:0:589824:1] totalSize# 19669062 blobValueIndex# 36 Put id# [59:1:2:0:0:1048576:1] totalSize# 20258886 blobValueIndex# 41 Trim Put id# [18:1:2:0:0:10:1] totalSize# 21307462 blobValueIndex# 6 Put id# [27:1:2:0:0:1572864:1] totalSize# 21307472 blobValueIndex# 58 Change MinHugeBlobSize# 8192 Put id# [70:1:1:0:0:1572864:1] totalSize# 22880336 blobValueIndex# 52 Trim Put id# [86:1:1:0:0:1572864:1] totalSize# 24453200 blobValueIndex# 58 Change MinHugeBlobSize# 61440 Put id# [82:1:1:0:0:1024:1] totalSize# 26026064 blobValueIndex# 11 Put id# [71:1:1:0:0:589824:1] totalSize# 26027088 blobValueIndex# 32 Put id# [46:1:1:0:0:1024:1] totalSize# 26616912 blobValueIndex# 10 Put id# [29:1:3:0:0:1048576:1] totalSize# 26617936 blobValueIndex# 46 Restart Put id# [54:1:1:0:0:1048576:1] totalSize# 27666512 blobValueIndex# 40 Trim Put id# [93:1:2:0:0:589824:1] totalSize# 28715088 blobValueIndex# 32 Put id# [96:1:2:0:0:10:1] totalSize# 29304912 blobValueIndex# 0 Put id# [19:1:1:0:0:589824:1] totalSize# 29304922 blobValueIndex# 32 Change MinHugeBlobSize# 12288 Put id# [82:1:2:0:0:10:1] totalSize# 29894746 blobValueIndex# 0 Change MinHugeBlobSize# 65536 Put id# [11:1:2:0:0:40960:1] totalSize# 29894756 blobValueIndex# 26 Put id# [35:1:1:0:0:40960:1] totalSize# 29935716 blobValueIndex# 23 Put id# [52:1:1:0:0:1024:1] totalSize# 29976676 blobValueIndex# 14 Trim Put id# [26:1:1:0:0:10:1] totalSize# 29977700 blobValueIndex# 0 Put id# [21:1:2:0:0:1572864:1] totalSize# 29977710 blobValueIndex# 55 Put id# [12:1:1:0:0:40960:1] totalSize# 31550574 blobValueIndex# 23 Put id# [23:1:3:0:0:1048576:1] totalSize# 31591534 blobValueIndex# 47 Put id# [73:1:2:0:0:1572864:1] totalSize# 32640110 blobValueIndex# 55 Put id# [78:1:2:0:0:589824:1] totalSize# 34212974 blobValueIndex# 36 Put id# [40:1:2:0:0:589824:1] totalSize# 34802798 blobValueIndex# 31 Put id# [35:1:2:0:0:1572864:1] totalSize# 35392622 blobValueIndex# 51 Put id# [100:1:2:0:0:1024:1] totalSize# 36965486 blobValueIndex# 11 Put id# [72:1:1:0:0:1572864:1] totalSize# 36966510 blobValueIndex# 54 Put id# [94:1:1:0:0:10:1] totalSize# 38539374 blobValueIndex# 1 Put id# [21:1:3:0:0:10:1] totalSize# 38539384 blobValueIndex# 1 Put id# [61:1:1:0:0:589824:1] totalSize# 38539394 blobValueIndex# 31 Put id# [93:1:3:0:0:10:1] totalSize# 39129218 blobValueIndex# 2 Put id# [26:1:2:0:0:1572864:1] totalSize# 39129228 blobValueIndex# 50 Put id# [44:1:2:0:0:589824:1] totalSize# 40702092 blobValueIndex# 36 Put id# [94:1:2:0:0:589824:1] totalSize# 41291916 blobValueIndex# 35 Trim Put id# [36:1:1:0:0:1048576:1] totalSize# 41881740 blobValueIndex# 42 Put id# [8:1:1:0:0:10:1] totalSize# 42930316 blobValueIndex# 8 Change 
MinHugeBlobSize# 12288 Put id# [30:1:1:0:0:589824:1] totalSize# 42930326 blobValueIndex# 31 Restart Put id# [72:1:2:0:0:1572864:1] totalSize# 43520150 blobValueIndex# 52 Put id# [94:1:3:0:0:40960:1] totalSize# 45093014 blobValueIndex# 27 Put id# [92:1:1:0:0:10:1] totalSize# 45133974 blobValueIndex# 8 Trim Put id# [1:1:1:0:0:1048576:1] totalSize# 45133984 blobValueIndex# 48 Put id# [84:1:2:0:0:589824:1] totalSize# 46182560 blobValueIndex# 39 Trim Put id# [51:1:1:0:0:589824:1] totalSize# 46772384 blobValueIndex# 39 Change MinHugeBlobSize# 8192 Put id# [57:1:1:0:0:1024:1] totalSize# 47362208 blobValueIndex# 16 Put id# [53:1:2:0:0:1572864:1] totalSize# 47363232 blobValueIndex# 58 Change MinHugeBlobSize# 12288 Put id# [63:1:2:0:0:589824:1] totalSize# 48936096 blobValueIndex# 35 Put id# [20:1:2:0:0:1024:1] totalSize# 49525920 blobValueIndex# 16 Put id# [23:1:4:0:0:589824:1] totalSize# 49526944 blobValueIndex# 39 Put id# [29:1:4:0:0:10:1] totalSize# 50116768 blobValueIndex# 7 Put id# [40:1:3:0:0:1048576:1] totalSize# 50116778 blobValueIndex# 43 Put id# [92:1:2:0:0:1024:1] totalSize# 51165354 blobValueIndex# 11 Put id# [12:1:2:0:0:40960:1] totalSize# 51166378 blobValueIndex# 23 Put id# [93:1:4:0:0:1048576:1] totalSize# 51207338 blobValueIndex# 44 Put id# [91:1:1:0:0:1048576:1] totalSize# 52255914 blobValueIndex# 46 Change MinHugeBlobSize# 65536 Put id# [66:1:1:0:0:40960:1] totalSize# 53304490 blobValueIndex# 20 Put id# [96:1:3:0:0:589824:1] totalSize# 53345450 blobValueIndex# 36 Put id# [16:1:1:0:0:1024:1] totalSize# 53935274 blobValueIndex# 11 Put id# [59:1:3:0:0:1048576:1] totalSize# 53936298 blobValueIndex# 49 Change MinHugeBlobSize# 524288 Put id# [49:1:1:0:0:40960:1] totalSize# 54984874 blobValueIndex# 21 Trim Put id# [28:1:2:0:0:10:1] totalSize# 55025834 blobValueIndex# 3 Put id# [52:1:2:0:0:40960:1] totalSize# 55025844 blobValueIndex# 29 Put id# [65:1:2:0:0:1024:1] totalSize# 55066804 blobValueIndex# 15 Put id# [62:1:1:0:0:40960:1] totalSize# 55067828 blobValueIndex# 21 Trim Put id# [63:1:3:0:0:1048576:1] totalSize# 55108788 blobValueIndex# 41 Trim Put id# [5:1:1:0:0:40960:1] totalSize# 56157364 blobValueIndex# 28 Trim Put id# [67:1:1:0:0:589824:1] totalSize# 56198324 blobValueIndex# 37 Trim Put id# [13:1:1:0:0:589824:1] totalSize# 56788148 blobValueIndex# 35 Put id# [32:1:1:0:0:10:1] totalSize# 57377972 blobValueIndex# 1 Put id# [90:1:2:0:0:10:1] totalSize# 57377982 blobValueIndex# 6 Put id# [16:1:2:0:0:40960:1] totalSize# 57377992 blobValueIndex# 25 Put id# [6:1:1:0:0:1048576:1] totalSize# 57418952 blobValueIndex# 49 Put id# [55:1:2:0:0:1572864:1] totalSize# 58467528 blobValueIndex# 52 Trim Put id# [99:1:2:0:0:1024:1] totalSize# 60040392 blobValueIndex# 10 Put id# [43:1:1:0:0:589824:1] totalSize# 60041416 blobValueIndex# 30 Put id# [89:1:1:0:0:10:1] totalSize# 60631240 blobValueIndex# 6 Put id# [94:1:4:0:0:1024:1] totalSize# 60631250 blobValueIndex# 16 Put id# [47:1:1:0:0:1048576:1] totalSize# 60632274 blobValueIndex# 43 Put id# [1:1:2:0:0:10:1] totalSize# 61680850 blobValueIndex# 5 Change MinHugeBlobSize# 12288 Put id# [33:1:1:0:0:10:1] totalSize# 61680860 blobValueIndex# 2 Trim Put id# [1:1:3:0:0:589824:1] totalSize# 61680870 blobValueIndex# 34 Put id# [77:1:1:0:0:40960:1] totalSize# 62270694 blobValueIndex# 26 Put id# [34:1:2:0:0:1024:1] totalSize# 62311654 blobValueIndex# 19 Put id# [55:1:3:0:0:1572864:1] totalSize# 62312678 blobValueIndex# 56 Put id# [91:1:2:0:0:10:1] totalSize# 63885542 blobValueIndex# 1 Put id# [81:1:2:0:0:1572864:1] totalSize# 63885552 blobValueIndex# 53 Put id# 
[80:1:1:0:0:10:1] totalSize# 65458416 blobValueIndex# 3 Put id# [23:1:5:0:0:1572864:1] totalSize# 65458426 blobValueIndex# 58 Change MinHugeBlobSize# 65536 Put id# [32:1:2:0:0:10:1] totalSize# 67031290 blobValueIndex# 9 Put id# [31:1:2:0:0:40960:1] totalSize# 67031300 blobValueIndex# 23 Change MinHugeBlobSize# 12288 Trim Put id# [41:1:1:0:0:589824:1] totalSize# 67072260 blobValueIndex# 33 Put id# [43:1:2:0:0:1048576:1] totalSize# 67662084 blobValueIndex# 42 Put id# [40:1:4:0:0:1572864:1] totalSize# 68710660 blobValueIndex# 50 Trim Put id# [33:1:2:0:0:1048576:1] totalSize# 70283524 blobValueIndex# 45 Trim Restart Put id# [10:1:1:0:0:1572864:1] totalSize# 71332100 blobValueIndex# 58 Put id# [68:1:4:0:0:589824:1] totalSize# 72904964 blobValueIndex# 32 Put id# [49:1:2:0:0:10:1] totalSize# 73494788 blobValueIndex# 8 Change MinHugeBlobSize# 65536 Put id# [23:1:6:0:0:40960:1] totalSize# 73494798 blobValueIndex# 28 Put id# [58:1:1:0:0:1024:1] totalSize# 73535758 blobValueIndex# 16 Restart Put id# [19:1:2:0:0:10:1] totalSize# 73536782 blobValueIndex# 9 Put id# [81:1:3:0:0:10:1] totalSize# 73536792 blobValueIndex# 5 Put id# [68:1:5:0:0:1024:1] totalSize# 73536802 blobValueIndex# 14 Put id# [28:1:3:0:0:40960:1] totalSize# 73537826 blobValueIndex# 23 Put id# [26:1:3:0:0:1024:1] totalSize# 73578786 blobValueIndex# 19 Put id# [90:1:3:0:0:40960:1] totalSize# 73579810 blobValueIndex# 26 Put id# [37:1:2:0:0:589824:1] totalSize# 73620770 blobValueIndex# 34 Trim Put id# [3:1:1:0:0:1024:1] totalSize# 74210594 blobValueIndex# 18 Trim Put id# [28:1:4:0:0:1572864:1] totalSize# 74211618 blobValueIndex# 59 Put id# [100:1:3:0:0:1048576:1] totalSize# 75784482 blobValueIndex# 42 Trim Put id# [96:1:4:0:0:1048576:1] totalSize# 76833058 blobValueIndex# 47 Put id# [58:1:2:0:0:40960:1] totalSize# 77881634 blobValueIndex# 27 Put id# [62:1:2:0:0:1048576:1] totalSize# 77922594 blobValueIndex# 48 Put id# [72:1:3:0:0:10:1] totalSize# 78971170 blobValueIndex# 4 Put id# [15:1:2:0:0:1048576:1] totalSize# 78971180 blobValueIndex# 45 Restart Put id# [14:1:1:0:0:1572864:1] totalSize# 80019756 blobValueIndex# 51 Put id# [27:1:3:0:0:40960:1] totalSize# 81592620 blobValueIndex# 23 Put id# [32:1:3:0:0:589824:1] totalSize# 81633580 blobValueIndex# 30 Put id# [25:1:3:0:0:589824:1] totalSize# 82223404 blobValueIndex# 33 Restart Put id# [100:1:4:0:0:1024:1] totalSize# 82813228 blobValueIndex# 17 Put id# [34:1:3:0:0:10:1] totalSize# 82814252 blobValueIndex# 9 Put id# [28:1:5:0:0:1024:1] totalSize# 82814262 blobValueIndex# 15 Put id# [66:1:2:0:0:1024:1] totalSize# 82815286 blobValueIndex# 14 Change MinHugeBlobSize# 524288 Put id# [98:1:2:0:0:589824:1] tot ... 
:0:0:1572864:1] totalSize# 1259284184 blobValueIndex# 55 Trim Restart Put id# [89:1:19:0:0:589824:1] totalSize# 1260857048 blobValueIndex# 39 Put id# [55:1:29:0:0:589824:1] totalSize# 1261446872 blobValueIndex# 32 Put id# [98:1:22:0:0:1048576:1] totalSize# 1262036696 blobValueIndex# 43 Change MinHugeBlobSize# 524288 Put id# [69:1:24:0:0:1572864:1] totalSize# 1263085272 blobValueIndex# 55 Change MinHugeBlobSize# 65536 Put id# [20:1:24:0:0:589824:1] totalSize# 1264658136 blobValueIndex# 32 Trim Put id# [14:1:22:0:0:10:1] totalSize# 1265247960 blobValueIndex# 8 Put id# [40:1:24:0:0:40960:1] totalSize# 1265247970 blobValueIndex# 29 Trim Put id# [97:1:26:0:0:1048576:1] totalSize# 1265288930 blobValueIndex# 46 Put id# [15:1:19:0:0:589824:1] totalSize# 1266337506 blobValueIndex# 38 Put id# [64:1:21:0:0:10:1] totalSize# 1266927330 blobValueIndex# 8 Trim Put id# [55:1:30:0:0:10:1] totalSize# 1266927340 blobValueIndex# 4 Put id# [58:1:27:0:0:40960:1] totalSize# 1266927350 blobValueIndex# 28 Put id# [61:1:31:0:0:1048576:1] totalSize# 1266968310 blobValueIndex# 46 Put id# [57:1:20:0:0:40960:1] totalSize# 1268016886 blobValueIndex# 29 Put id# [100:1:21:0:0:1024:1] totalSize# 1268057846 blobValueIndex# 18 Put id# [56:1:15:0:0:40960:1] totalSize# 1268058870 blobValueIndex# 29 Put id# [11:1:22:0:0:40960:1] totalSize# 1268099830 blobValueIndex# 22 Change MinHugeBlobSize# 12288 Put id# [17:1:24:0:0:1048576:1] totalSize# 1268140790 blobValueIndex# 49 Put id# [57:1:21:0:0:1572864:1] totalSize# 1269189366 blobValueIndex# 57 Put id# [70:1:19:0:0:1024:1] totalSize# 1270762230 blobValueIndex# 17 Put id# [50:1:25:0:0:10:1] totalSize# 1270763254 blobValueIndex# 8 Trim Put id# [22:1:23:0:0:589824:1] totalSize# 1270763264 blobValueIndex# 37 Trim Put id# [71:1:37:0:0:10:1] totalSize# 1271353088 blobValueIndex# 6 Put id# [47:1:23:0:0:1024:1] totalSize# 1271353098 blobValueIndex# 13 Put id# [92:1:20:0:0:1048576:1] totalSize# 1271354122 blobValueIndex# 42 Put id# [91:1:16:0:0:1024:1] totalSize# 1272402698 blobValueIndex# 19 Put id# [36:1:17:0:0:40960:1] totalSize# 1272403722 blobValueIndex# 27 Put id# [35:1:29:0:0:1024:1] totalSize# 1272444682 blobValueIndex# 18 Put id# [38:1:16:0:0:1024:1] totalSize# 1272445706 blobValueIndex# 12 Put id# [22:1:24:0:0:1048576:1] totalSize# 1272446730 blobValueIndex# 46 Put id# [83:1:29:0:0:1024:1] totalSize# 1273495306 blobValueIndex# 14 Put id# [45:1:20:0:0:1572864:1] totalSize# 1273496330 blobValueIndex# 53 Change MinHugeBlobSize# 65536 Put id# [59:1:30:0:0:10:1] totalSize# 1275069194 blobValueIndex# 6 Put id# [96:1:24:0:0:10:1] totalSize# 1275069204 blobValueIndex# 1 Put id# [70:1:20:0:0:40960:1] totalSize# 1275069214 blobValueIndex# 23 Put id# [78:1:20:0:0:1024:1] totalSize# 1275110174 blobValueIndex# 19 Put id# [70:1:21:0:0:10:1] totalSize# 1275111198 blobValueIndex# 6 Put id# [22:1:25:0:0:10:1] totalSize# 1275111208 blobValueIndex# 7 Put id# [48:1:22:0:0:10:1] totalSize# 1275111218 blobValueIndex# 7 Put id# [7:1:23:0:0:1048576:1] totalSize# 1275111228 blobValueIndex# 45 Put id# [49:1:24:0:0:589824:1] totalSize# 1276159804 blobValueIndex# 32 Put id# [20:1:25:0:0:40960:1] totalSize# 1276749628 blobValueIndex# 27 Trim Restart Put id# [74:1:22:0:0:40960:1] totalSize# 1276790588 blobValueIndex# 29 Change MinHugeBlobSize# 61440 Restart Put id# [77:1:33:0:0:1572864:1] totalSize# 1276831548 blobValueIndex# 57 Trim Put id# [38:1:17:0:0:589824:1] totalSize# 1278404412 blobValueIndex# 36 Put id# [47:1:24:0:0:40960:1] totalSize# 1278994236 blobValueIndex# 28 Put id# [72:1:31:0:0:10:1] 
totalSize# 1279035196 blobValueIndex# 2 Trim Put id# [50:1:26:0:0:589824:1] totalSize# 1279035206 blobValueIndex# 33 Put id# [7:1:24:0:0:589824:1] totalSize# 1279625030 blobValueIndex# 38 Put id# [93:1:19:0:0:1572864:1] totalSize# 1280214854 blobValueIndex# 59 Put id# [62:1:24:0:0:1024:1] totalSize# 1281787718 blobValueIndex# 10 Put id# [53:1:30:0:0:1024:1] totalSize# 1281788742 blobValueIndex# 12 Change MinHugeBlobSize# 524288 Put id# [38:1:18:0:0:1572864:1] totalSize# 1281789766 blobValueIndex# 58 Put id# [13:1:20:0:0:589824:1] totalSize# 1283362630 blobValueIndex# 30 Put id# [69:1:25:0:0:10:1] totalSize# 1283952454 blobValueIndex# 9 Put id# [25:1:25:0:0:10:1] totalSize# 1283952464 blobValueIndex# 1 Put id# [78:1:21:0:0:40960:1] totalSize# 1283952474 blobValueIndex# 25 Put id# [29:1:29:0:0:1572864:1] totalSize# 1283993434 blobValueIndex# 51 Put id# [56:1:16:0:0:1024:1] totalSize# 1285566298 blobValueIndex# 17 Trim Put id# [85:1:21:0:0:10:1] totalSize# 1285567322 blobValueIndex# 8 Put id# [81:1:16:0:0:1048576:1] totalSize# 1285567332 blobValueIndex# 42 Put id# [70:1:22:0:0:10:1] totalSize# 1286615908 blobValueIndex# 3 Put id# [67:1:32:0:0:1024:1] totalSize# 1286615918 blobValueIndex# 12 Restart Put id# [13:1:21:0:0:10:1] totalSize# 1286616942 blobValueIndex# 5 Change MinHugeBlobSize# 8192 Put id# [43:1:26:0:0:40960:1] totalSize# 1286616952 blobValueIndex# 24 Put id# [59:1:31:0:0:1024:1] totalSize# 1286657912 blobValueIndex# 16 Trim Put id# [65:1:27:0:0:1572864:1] totalSize# 1286658936 blobValueIndex# 57 Put id# [62:1:25:0:0:10:1] totalSize# 1288231800 blobValueIndex# 1 Trim Put id# [57:1:22:0:0:589824:1] totalSize# 1288231810 blobValueIndex# 32 Put id# [23:1:37:0:0:1048576:1] totalSize# 1288821634 blobValueIndex# 42 Put id# [99:1:25:0:0:1048576:1] totalSize# 1289870210 blobValueIndex# 48 Change MinHugeBlobSize# 524288 Put id# [42:1:21:0:0:1048576:1] totalSize# 1290918786 blobValueIndex# 41 Change MinHugeBlobSize# 12288 Put id# [76:1:26:0:0:1048576:1] totalSize# 1291967362 blobValueIndex# 48 Put id# [89:1:20:0:0:40960:1] totalSize# 1293015938 blobValueIndex# 20 Put id# [80:1:21:0:0:1048576:1] totalSize# 1293056898 blobValueIndex# 46 Change MinHugeBlobSize# 61440 Put id# [73:1:18:0:0:40960:1] totalSize# 1294105474 blobValueIndex# 21 Put id# [96:1:25:0:0:1048576:1] totalSize# 1294146434 blobValueIndex# 40 Put id# [64:1:22:0:0:1048576:1] totalSize# 1295195010 blobValueIndex# 44 Put id# [86:1:22:0:0:40960:1] totalSize# 1296243586 blobValueIndex# 22 Put id# [89:1:21:0:0:1048576:1] totalSize# 1296284546 blobValueIndex# 45 Trim Put id# [73:1:19:0:0:1048576:1] totalSize# 1297333122 blobValueIndex# 41 Put id# [47:1:25:0:0:1572864:1] totalSize# 1298381698 blobValueIndex# 50 Put id# [39:1:20:0:0:589824:1] totalSize# 1299954562 blobValueIndex# 38 Put id# [27:1:37:0:0:40960:1] totalSize# 1300544386 blobValueIndex# 28 Put id# [28:1:32:0:0:10:1] totalSize# 1300585346 blobValueIndex# 4 Put id# [25:1:26:0:0:1024:1] totalSize# 1300585356 blobValueIndex# 11 Put id# [70:1:23:0:0:40960:1] totalSize# 1300586380 blobValueIndex# 25 Put id# [94:1:29:0:0:1572864:1] totalSize# 1300627340 blobValueIndex# 51 Put id# [58:1:28:0:0:10:1] totalSize# 1302200204 blobValueIndex# 4 Put id# [39:1:21:0:0:589824:1] totalSize# 1302200214 blobValueIndex# 31 Put id# [98:1:23:0:0:1024:1] totalSize# 1302790038 blobValueIndex# 17 Put id# [38:1:19:0:0:1048576:1] totalSize# 1302791062 blobValueIndex# 47 Put id# [26:1:23:0:0:40960:1] totalSize# 1303839638 blobValueIndex# 25 Restart Put id# [66:1:26:0:0:10:1] totalSize# 1303880598 
blobValueIndex# 3 Put id# [31:1:19:0:0:10:1] totalSize# 1303880608 blobValueIndex# 5 Put id# [36:1:18:0:0:40960:1] totalSize# 1303880618 blobValueIndex# 26 Trim Put id# [46:1:24:0:0:1024:1] totalSize# 1303921578 blobValueIndex# 17 Put id# [47:1:26:0:0:1572864:1] totalSize# 1303922602 blobValueIndex# 52 Put id# [30:1:16:0:0:10:1] totalSize# 1305495466 blobValueIndex# 7 Put id# [75:1:18:0:0:1024:1] totalSize# 1305495476 blobValueIndex# 13 Trim Put id# [60:1:25:0:0:1024:1] totalSize# 1305496500 blobValueIndex# 14 Put id# [45:1:21:0:0:1024:1] totalSize# 1305497524 blobValueIndex# 15 Put id# [13:1:22:0:0:40960:1] totalSize# 1305498548 blobValueIndex# 29 Change MinHugeBlobSize# 65536 Put id# [49:1:25:0:0:589824:1] totalSize# 1305539508 blobValueIndex# 31 Put id# [7:1:25:0:0:1048576:1] totalSize# 1306129332 blobValueIndex# 46 Put id# [79:1:36:0:0:1572864:1] totalSize# 1307177908 blobValueIndex# 56 Trim Put id# [39:1:22:0:0:1572864:1] totalSize# 1308750772 blobValueIndex# 50 Put id# [88:1:16:0:0:1024:1] totalSize# 1310323636 blobValueIndex# 13 Put id# [11:1:23:0:0:10:1] totalSize# 1310324660 blobValueIndex# 9 Put id# [30:1:17:0:0:1024:1] totalSize# 1310324670 blobValueIndex# 18 Put id# [25:1:27:0:0:10:1] totalSize# 1310325694 blobValueIndex# 7 Put id# [31:1:20:0:0:40960:1] totalSize# 1310325704 blobValueIndex# 28 Trim Put id# [23:1:38:0:0:1024:1] totalSize# 1310366664 blobValueIndex# 12 Put id# [82:1:37:0:0:1572864:1] totalSize# 1310367688 blobValueIndex# 50 Put id# [10:1:29:0:0:1024:1] totalSize# 1311940552 blobValueIndex# 11 Put id# [77:1:34:0:0:1024:1] totalSize# 1311941576 blobValueIndex# 14 Put id# [12:1:32:0:0:1048576:1] totalSize# 1311942600 blobValueIndex# 46 Put id# [100:1:22:0:0:40960:1] totalSize# 1312991176 blobValueIndex# 28 Put id# [25:1:28:0:0:1572864:1] totalSize# 1313032136 blobValueIndex# 59 Put id# [29:1:30:0:0:1048576:1] totalSize# 1314605000 blobValueIndex# 49 Trim Put id# [20:1:26:0:0:1024:1] totalSize# 1315653576 blobValueIndex# 14 Put id# [50:1:27:0:0:40960:1] totalSize# 1315654600 blobValueIndex# 26 Put id# [66:1:27:0:0:10:1] totalSize# 1315695560 blobValueIndex# 9 Trim Put id# [44:1:21:0:0:1024:1] totalSize# 1315695570 blobValueIndex# 14 Put id# [98:1:24:0:0:1024:1] totalSize# 1315696594 blobValueIndex# 19 Put id# [27:1:38:0:0:1572864:1] totalSize# 1315697618 blobValueIndex# 51 Restart Put id# [71:1:38:0:0:589824:1] totalSize# 1317270482 blobValueIndex# 35 Change MinHugeBlobSize# 524288 Put id# [34:1:26:0:0:10:1] totalSize# 1317860306 blobValueIndex# 1 Put id# [30:1:18:0:0:1572864:1] totalSize# 1317860316 blobValueIndex# 53 Put id# [93:1:20:0:0:1024:1] totalSize# 1319433180 blobValueIndex# 11 Trim Put id# [94:1:30:0:0:1572864:1] totalSize# 1319434204 blobValueIndex# 56 Put id# [70:1:24:0:0:1572864:1] totalSize# 1321007068 blobValueIndex# 55 Put id# [14:1:23:0:0:10:1] totalSize# 1322579932 blobValueIndex# 3 Put id# [37:1:24:0:0:10:1] totalSize# 1322579942 blobValueIndex# 5 Put id# [14:1:24:0:0:1048576:1] totalSize# 1322579952 blobValueIndex# 42 Trim Put id# [29:1:31:0:0:589824:1] totalSize# 1323628528 blobValueIndex# 33 Trim Put id# [86:1:23:0:0:40960:1] totalSize# 1324218352 blobValueIndex# 25 Put id# [62:1:26:0:0:1572864:1] totalSize# 1324259312 blobValueIndex# 59 Put id# [80:1:22:0:0:10:1] totalSize# 1325832176 blobValueIndex# 8 Put id# [35:1:30:0:0:40960:1] totalSize# 1325832186 blobValueIndex# 23 Put id# [19:1:22:0:0:40960:1] totalSize# 1325873146 blobValueIndex# 22 Put id# [89:1:22:0:0:1024:1] totalSize# 1325914106 blobValueIndex# 19 Trim Put id# 
[72:1:32:0:0:1572864:1] totalSize# 1325915130 blobValueIndex# 58 Put id# [11:1:24:0:0:1048576:1] totalSize# 1327487994 blobValueIndex# 45 Put id# [64:1:23:0:0:40960:1] totalSize# 1328536570 blobValueIndex# 22 Put id# [53:1:31:0:0:1048576:1] totalSize# 1328577530 blobValueIndex# 48 Trim Put id# [52:1:23:0:0:10:1] totalSize# 1329626106 blobValueIndex# 9 Put id# [52:1:24:0:0:1024:1] totalSize# 1329626116 blobValueIndex# 15 Put id# [41:1:21:0:0:40960:1] totalSize# 1329627140 blobValueIndex# 24 Trim Put id# [75:1:19:0:0:589824:1] totalSize# 1329668100 blobValueIndex# 31 Trim Put id# [72:1:33:0:0:589824:1] totalSize# 1330257924 blobValueIndex# 32 Put id# [88:1:17:0:0:589824:1] totalSize# 1330847748 blobValueIndex# 33 Put id# [22:1:26:0:0:1572864:1] totalSize# 1331437572 blobValueIndex# 56 Put id# [15:1:20:0:0:589824:1] totalSize# 1333010436 blobValueIndex# 30 Put id# [51:1:32:0:0:40960:1] totalSize# 1333600260 blobValueIndex# 21 Put id# [5:1:30:0:0:1572864:1] totalSize# 1333641220 blobValueIndex# 57 Put id# [95:1:17:0:0:1048576:1] totalSize# 1335214084 blobValueIndex# 49 Restart Put id# [30:1:19:0:0:10:1] totalSize# 1336262660 blobValueIndex# 4 Put id# [89:1:23:0:0:40960:1] totalSize# 1336262670 blobValueIndex# 20 Put id# [4:1:17:0:0:40960:1] totalSize# 1336303630 blobValueIndex# 22 Put id# [36:1:19:0:0:10:1] totalSize# 1336344590 blobValueIndex# 7 Restart |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk2/unittest >> TSubscriberSinglePathUpdateTest::OneRingGroup [GOOD] >> TSubscriberSinglePathUpdateTest::OneDisconnectedRingGroup |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterReboot [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] >> TSubscriberTest::Sync >> TSubscriberSinglePathUpdateTest::OneDisconnectedRingGroup [GOOD] >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup >> TSubscriberTest::NotifyDelete [GOOD] >> TSubscriberTest::InvalidNotification >> TSubscriberTest::SyncPartial >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] |91.3%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup [GOOD] >> TTxDataShardRecomputeKMeansScan::BuildTable+WithForeign [GOOD] >> TTxDataShardRecomputeKMeansScan::BuildTable-WithForeign >> TSubscriberTest::Sync [GOOD] >> TSubscriberSyncQuorumTest::ReplicaConfigMismatch >> TSubscriberTest::InvalidNotification [GOOD] >> TSubscriberTest::SyncPartial [GOOD] >> TSubscriberTest::SyncWithOutdatedReplica |91.3%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... results_accumulator.log} >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries [GOOD] >> TSubscriberTest::NotifyUpdate |91.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup [GOOD] Test command err: ... 
waiting for initial path lookups 2025-12-23T18:30:34.666779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:34.668928Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2025-12-23T18:30:34.669031Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2025-12-23T18:30:34.669079Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2025-12-23T18:30:34.669347Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2025-12-23T18:30:34.669518Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2025-12-23T18:30:34.669586Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:34.669629Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2025-12-23T18:30:34.669688Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:24339059:0] 2025-12-23T18:30:34.670174Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:4:2051] 2025-12-23T18:30:34.670267Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: 
sender# [1:20:2066] 2025-12-23T18:30:34.670325Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Update to strong state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:1099535966835:0] 2025-12-23T18:30:34.670622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:5:2052] 2025-12-23T18:30:34.670671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:21:2066] 2025-12-23T18:30:34.670729Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Path was updated to new version: owner# [1:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:2199047594611:0] 2025-12-23T18:30:34.671292Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:6:2053] 2025-12-23T18:30:34.671358Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:22:2066] 2025-12-23T18:30:34.671392Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Path was updated to new version: owner# [1:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2025-12-23T18:30:34.944726Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:34.945492Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:4:2051] 2025-12-23T18:30:34.945562Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:5:2052] 2025-12-23T18:30:34.945614Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2025-12-23T18:30:34.945886Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:29:2075] 2025-12-23T18:30:34.946015Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:30:2075] 2025-12-23T18:30:34.946086Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][2:28:2075][TestPath] Set up state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:34.946151Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:31:2075] 2025-12-23T18:30:34.946202Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:28:2075][TestPath] Ignore empty state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:24339059:0] 2025-12-23T18:30:34.946534Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:4:2051] 2025-12-23T18:30:34.946615Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:29:2075] 2025-12-23T18:30:34.946675Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Update to strong state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:1099535966835:0] 2025-12-23T18:30:34.947010Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:5:2052] 2025-12-23T18:30:34.947100Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:30:2075] 2025-12-23T18:30:34.947160Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: 
[main][2:28:2075][TestPath] Path was updated to new version: owner# [2:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:2199047594611:0] 2025-12-23T18:30:34.947369Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [2:6:2053] 2025-12-23T18:30:34.947441Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [2:31:2075] 2025-12-23T18:30:34.947493Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Path was updated to new version: owner# [2:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:3298559222387:0] Sending path update to replica: [2:4398070850163:0] Sending path update to replica: [2:5497582477939:0] ... waiting for initial path lookups 2025-12-23T18:30:35.248592Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[3:3298559222387:0], [3:4398070850163:0], [3:5497582477939:0]] WriteOnly: 1 State: 0}]} 2025-12-23T18:30:35.249472Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2025-12-23T18:30:35.249591Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2025-12-23T18:30:35.249681Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2025-12-23T18:30:35.249940Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2025-12-23T18:30:35.250086Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2025-12-23T18:30:35.250155Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.250224Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2025-12-23T18:30:35.250294Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:24339059:0] 2025-12-23T18:30:35.250654Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:4:2051] 2025-12-23T18:30:35.250809Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:29:2075] 2025-12-23T18:30:35.250884Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Update to strong state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:1099535966835:0] 2025-12-23T18:30:35.251291Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [3:5:2052] 2025-12-23T18:30:35.251363Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [3:30:2075] 2025-12-23T18:30:35.251434Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Path was updated to new version: owner# [3:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:2199047594611:0] 2025-12-23T18:30:35.251742Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: 
sender# [3:6:2053] 2025-12-23T18:30:35.251807Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [3:31:2075] 2025-12-23T18:30:35.251869Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Path was updated to new version: owner# [3:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:3298559222387:0] Sending path update to replica: [3:4398070850163:0] Sending path update to replica: [3:5497582477939:0] >> TSubscriberSyncQuorumTest::ReplicaConfigMismatch [GOOD] >> TSubscriberSyncQuorumTest::ReconfigurationWithDelayedSyncRequest >> IndexBuildTest::RejectsOnDuplicatesUniq [GOOD] >> VectorIndexBuildTest::BuildTableWithEmptyShard >> TSubscriberSinglePathUpdateTest::TwoRingGroups |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] >> TSubscriberSyncQuorumTest::ReconfigurationWithDelayedSyncRequest [GOOD] >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2025-12-23T18:30:34.689445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:34.691593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-12-23T18:30:34.691763Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-12-23T18:30:34.691806Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-12-23T18:30:34.691883Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2025-12-23T18:30:34.691996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2025-12-23T18:30:34.692053Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:34.692109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2025-12-23T18:30:34.692145Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:34.692490Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-12-23T18:30:34.692558Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2025-12-23T18:30:34.692609Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:36:2066][path] Update to strong state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:34.692790Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-12-23T18:30:34.692857Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2025-12-23T18:30:34.692924Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.185949Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:35.186523Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:3:2050] 2025-12-23T18:30:35.186588Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:6:2053] 2025-12-23T18:30:35.186629Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:9:2056] 2025-12-23T18:30:35.186683Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:38:2067] 2025-12-23T18:30:35.186760Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:39:2067] 2025-12-23T18:30:35.186833Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:37:2067][path] Set up state: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.186960Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:40:2067] 2025-12-23T18:30:35.187034Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: 
[main][3:37:2067][path] Path was already updated: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.187177Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:37:2067][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:35:2065], cookie# 1 2025-12-23T18:30:35.187316Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:38:2067], cookie# 1 2025-12-23T18:30:35.187418Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:39:2067], cookie# 1 2025-12-23T18:30:35.187463Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:40:2067], cookie# 1 2025-12-23T18:30:35.187586Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:3:2050], cookie# 1 2025-12-23T18:30:35.187639Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 1 2025-12-23T18:30:35.187682Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:9:2056], cookie# 1 2025-12-23T18:30:35.187761Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:38:2067], cookie# 1 2025-12-23T18:30:35.187839Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:37:2067][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:30:35.187931Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:39:2067], cookie# 1 2025-12-23T18:30:35.187972Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:37:2067][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:30:35.188061Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:40:2067], cookie# 1 2025-12-23T18:30:35.188089Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:37:2067][path] Sync cookie mismatch: sender# [3:40:2067], cookie# 1, current cookie# 0 |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSinglePathUpdateTest::TwoRingGroups [GOOD] >> TSubscriberSinglePathUpdateTest::ReplicaConfigMismatch >> TSubscriberTest::NotifyUpdate [GOOD] >> TSubscriberTest::ReconnectOnFailure >> TSubscriberSyncQuorumTest::TwoRingGroups 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2025-12-23T18:30:34.708650Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:34.711201Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-12-23T18:30:34.711305Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-12-23T18:30:34.711349Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-12-23T18:30:34.711417Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2067] 2025-12-23T18:30:34.711468Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:39:2067] 2025-12-23T18:30:34.711521Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:37:2067][path] Set up state: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:34.711648Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:40:2067] 2025-12-23T18:30:34.711714Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:37:2067][path] Path was already updated: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:34.712080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2025-12-23T18:30:34.712139Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2025-12-23T18:30:34.712227Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:38:2067] 2025-12-23T18:30:34.712301Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:37:2067][path] Path was updated to new version: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], 
Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:34.712357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:39:2067] 2025-12-23T18:30:34.712431Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:37:2067][path] Path was already updated: owner# [1:35:2065], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.258812Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:35.259599Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2025-12-23T18:30:35.259707Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2025-12-23T18:30:35.259781Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2025-12-23T18:30:35.259856Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:37:2066] 2025-12-23T18:30:35.259947Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:38:2066] 2025-12-23T18:30:35.260002Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:36:2066][path] Set up state: owner# [3:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.260053Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:39:2066] 2025-12-23T18:30:35.260099Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:36:2066][path] Ignore empty state: owner# [3:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.260228Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [3:35:2065] 2025-12-23T18:30:35.260276Z node 3 :SCHEME_BOARD_SUBSCRIBER ERROR: subscriber.cpp:818: [main][3:36:2066][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [3:35:2065] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for client destroyed notification ... waiting for boot2 ... waiting for connect2 >> TSubscriberSyncQuorumTest::OneRingGroup >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> TSubscriberSinglePathUpdateTest::ReplicaConfigMismatch [GOOD] |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |91.3%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk2/test-results/unittest/{meta.json ... results_accumulator.log} |91.3%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup >> TSubscriberSyncQuorumTest::TwoRingGroups [GOOD] >> TSubscriberTest::Boot >> TFlatMetrics::MaximumValue3 [GOOD] >> TFlatMetrics::MaximumValue4 [GOOD] >> TSubscriberSyncQuorumTest::OneRingGroup [GOOD] >> TSubscriberSyncQuorumTest::OneSynchronizedRingGroup >> TSubscriberTest::ReconnectOnFailure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2025-12-23T18:30:35.299726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:35.302041Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-12-23T18:30:35.302224Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-12-23T18:30:35.302299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-12-23T18:30:35.302394Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2025-12-23T18:30:35.302515Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2025-12-23T18:30:35.302575Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.302643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2025-12-23T18:30:35.302688Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 
2025-12-23T18:30:35.302944Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 1 2025-12-23T18:30:35.303134Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-12-23T18:30:35.303227Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-12-23T18:30:35.303286Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 1 2025-12-23T18:30:35.303423Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:6:2053], cookie# 1 2025-12-23T18:30:35.303499Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:9:2056], cookie# 1 2025-12-23T18:30:35.303558Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 1 2025-12-23T18:30:35.303606Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2025-12-23T18:30:35.303676Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2025-12-23T18:30:35.303749Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.303828Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:38:2066], cookie# 1 2025-12-23T18:30:35.303860Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2025-12-23T18:30:35.303900Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:39:2066], cookie# 1 2025-12-23T18:30:35.303943Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:36:2066][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2025-12-23T18:30:35.304091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 2 2025-12-23T18:30:35.304218Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 2 2025-12-23T18:30:35.304255Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 2, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2025-12-23T18:30:35.304304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 2 2025-12-23T18:30:35.304371Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 2 2025-12-23T18:30:35.304455Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:9:2056], cookie# 2 2025-12-23T18:30:35.304533Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:38:2066], cookie# 2 2025-12-23T18:30:35.304580Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:36:2066][path] Sync is done in the ring group: cookie# 2, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2025-12-23T18:30:35.304625Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:36:2066][path] Sync is incomplete in one of the ring groups: cookie# 2 2025-12-23T18:30:35.304679Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2025-12-23T18:30:35.304742Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.304799Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:39:2066], cookie# 2 2025-12-23T18:30:35.304824Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:36:2066][path] Sync cookie mismatch: sender# [1:39:2066], cookie# 2, current cookie# 0 2025-12-23T18:30:35.304920Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 3 2025-12-23T18:30:35.305031Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 3 2025-12-23T18:30:35.305062Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 3, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2025-12-23T18:30:35.305116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:38:2066], cookie# 3 2025-12-23T18:30:35.305152Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: 
[main][1:36:2066][path] Sync is done in the ring group: cookie# 3, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2025-12-23T18:30:35.305203Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:36:2066][path] Sync is incomplete in one of the ring groups: cookie# 3 2025-12-23T18:30:35.305246Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 3 2025-12-23T18:30:35.305370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:39:2066], cookie# 3 2025-12-23T18:30:35.305399Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:36:2066][path] Sync cookie mismatch: sender# [1:39:2066], cookie# 3, current cookie# 0 2025-12-23T18:30:35.305449Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2025-12-23T18:30:35.305492Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.795861Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:35.796435Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [3:3:2050] 2025-12-23T18:30:35.796494Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:6:2053] 2025-12-23T18:30:35.796523Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:9:2056] 2025-12-23T18:30:35.796583Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [3:38:2067] 2025-12-23T18:30:35.796651Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:39:2067] 2025-12-23T18:30:35.796705Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:37:2067][path] Set up state: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.796787Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:40:2067] 2025-12-23T18:30:35.796832Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:37:2067][path] 
Path was already updated: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.796924Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:37:2067][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:35:2065], cookie# 1 2025-12-23T18:30:35.797081Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:38:2067], cookie# 1 2025-12-23T18:30:35.797152Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:39:2067], cookie# 1 2025-12-23T18:30:35.797207Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:40:2067], cookie# 1 2025-12-23T18:30:35.797303Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [3:3:2050], cookie# 1 2025-12-23T18:30:35.797339Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 1 2025-12-23T18:30:35.797362Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:9:2056], cookie# 1 2025-12-23T18:30:35.797428Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [3:38:2067], cookie# 1 2025-12-23T18:30:35.797482Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:37:2067][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:30:35.797524Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:39:2067], cookie# 1 2025-12-23T18:30:35.797554Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:37:2067][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:30:35.797599Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:40:2067], cookie# 1 2025-12-23T18:30:35.797618Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:37:2067][path] Sync cookie mismatch: sender# [3:40:2067], cookie# 1, current cookie# 0 |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup [GOOD] >> TTabletCountersPercentile::WithoutZero [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation >> 
TSubscriberSyncQuorumTest::OneSynchronizedRingGroup [GOOD] >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer >> IndexBuildTest::BaseCase [GOOD] >> IndexBuildTest::BaseCaseUniq ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest [GOOD] Test command err: ... waiting for initial path lookups 2025-12-23T18:30:35.689665Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:35.691726Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:4:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:23:2066] 2025-12-23T18:30:35.691812Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:4:2051] Upsert description: path# TestPath 2025-12-23T18:30:35.692003Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:4:2051] Subscribe: subscriber# [1:23:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:30:35.692218Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:24:2066] 2025-12-23T18:30:35.692245Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:5:2052] Upsert description: path# TestPath 2025-12-23T18:30:35.692326Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:5:2052] Subscribe: subscriber# [1:24:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:30:35.692458Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:25:2066] 2025-12-23T18:30:35.692489Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:6:2053] Upsert description: path# TestPath 2025-12-23T18:30:35.692538Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:6:2053] Subscribe: subscriber# [1:25:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:30:35.692619Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2025-12-23T18:30:35.692697Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:4:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:23:2066] 2025-12-23T18:30:35.692758Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2025-12-23T18:30:35.692794Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:5:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:24:2066] 2025-12-23T18:30:35.692825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2025-12-23T18:30:35.692856Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:25:2066] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] 2025-12-23T18:30:35.693111Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12345 2025-12-23T18:30:35.693179Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2025-12-23T18:30:35.693321Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2025-12-23T18:30:35.693425Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.693479Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2025-12-23T18:30:35.693581Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.693712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12345 2025-12-23T18:30:35.693770Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:4:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:23:2066], cookie# 12345 2025-12-23T18:30:35.693825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12345 2025-12-23T18:30:35.693862Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:5:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:24:2066], cookie# 12345 2025-12-23T18:30:35.693899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:22:2066], cookie# 12345 2025-12-23T18:30:35.693980Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 
2025-12-23T18:30:35.694051Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2025-12-23T18:30:35.694211Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12345 2025-12-23T18:30:35.694280Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:30:35.694338Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12345 2025-12-23T18:30:35.694390Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:30:35.694452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12345 2025-12-23T18:30:35.694497Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:22:2066], cookie# 12345, current cookie# 0 2025-12-23T18:30:35.694544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2025-12-23T18:30:35.694610Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Updating cluster state generation on replica: [1:1099535966835:0] 2025-12-23T18:30:35.694793Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12346 2025-12-23T18:30:35.694917Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:1240: [1:5:2052] Handle {EvUpdateGroupConfig GroupConfig: empty BoardConfig: empty SchemeBoardConfig: {RingGroups# [0:{NToSelect# 3 Rings# [0:{[[1:24339059:0]]} 1:{[[1:1099535966835:0]]} 2:{[[1:2199047594611:0]]}}] StateStorageVersion# 0 CompatibleVersions# [] ClusterStateGeneration# 1 ClusterStateGuid# 0}} 2025-12-23T18:30:35.694987Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12346 2025-12-23T18:30:35.695035Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:4:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:23:2066], cookie# 12346 2025-12-23T18:30:35.695082Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12346 2025-12-23T18:30:35.695115Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: 
[1:5:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:24:2066], cookie# 12346 2025-12-23T18:30:35.695202Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12346 2025-12-23T18:30:35.695246Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2025-12-23T18:30:35.695299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12346 2025-12-23T18:30:35.695370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { Generation: 1 Guid: 0 } }: sender# [1:5:2052], cookie# 12346 2025-12-23T18:30:35.695415Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12346 2025-12-23T18:30:35.695445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2025-12-23T18:30:35.695488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersio ... [TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:36.285114Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2025-12-23T18:30:36.285234Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2025-12-23T18:30:36.285288Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2025-12-23T18:30:36.285523Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:18:2065], cookie# 12345 ... waiting for some sync responses 2025-12-23T18:30:36.285585Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:20:2066] 2025-12-23T18:30:36.285682Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:21:2066] 2025-12-23T18:30:36.285726Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:19:2066][TestPath] Set up state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.285827Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:22:2066] 2025-12-23T18:30:36.285863Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:19:2066][TestPath] Ignore empty state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.285959Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:20:2066], cookie# 12345 2025-12-23T18:30:36.286029Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:21:2066], cookie# 12345 2025-12-23T18:30:36.286056Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:22:2066], cookie# 12345 2025-12-23T18:30:36.286125Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2025-12-23T18:30:36.286169Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2025-12-23T18:30:36.286216Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... waiting for some sync responses (done) ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2025-12-23T18:30:36.286425Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:20:2066], cookie# 12345 2025-12-23T18:30:36.286462Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:30:36.286575Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:36.286612Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:1041: [main][3:19:2066][TestPath] Delay current sync request: 12345 ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2025-12-23T18:30:36.287074Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:21:2066], cookie# 12345 2025-12-23T18:30:36.287113Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:21:2066], cookie# 12345, current cookie# 0 2025-12-23T18:30:36.287145Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:22:2066], cookie# 12345 2025-12-23T18:30:36.287177Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:22:2066], cookie# 12345, current cookie# 0 2025-12-23T18:30:36.287479Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2025-12-23T18:30:36.287526Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2025-12-23T18:30:36.287565Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] 2025-12-23T18:30:36.287613Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:27:2066] 2025-12-23T18:30:36.287658Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:28:2066] 2025-12-23T18:30:36.287713Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:19:2066][TestPath] Set up state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.287771Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: 
[main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2066] 2025-12-23T18:30:36.287818Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:19:2066][TestPath] Ignore empty state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.287937Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:27:2066], cookie# 12345 2025-12-23T18:30:36.288024Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:28:2066], cookie# 12345 2025-12-23T18:30:36.288067Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2066], cookie# 12345 2025-12-23T18:30:36.288121Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2025-12-23T18:30:36.288163Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2025-12-23T18:30:36.288191Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 12345 2025-12-23T18:30:36.288239Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:27:2066], cookie# 12345 2025-12-23T18:30:36.288273Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:30:36.288310Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:28:2066], cookie# 12345 2025-12-23T18:30:36.288354Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:30:36.288412Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:29:2066], cookie# 12345 2025-12-23T18:30:36.288434Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:29:2066], cookie# 12345, current cookie# 0 |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] Test command err: RandomSeed# 4165688889164351611 Step = 0 SEND TEvPut with key [1:1:0:0:0:51943:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:51943:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:85877:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:85877:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:192081:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:192081:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:267203:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:267203:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 3 2025-12-23T18:28:23.201508Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 6 SEND TEvPut with key [1:1:6:0:0:377427:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:377427:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2025-12-23T18:28:23.511556Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 7 SEND TEvPut with key [1:1:7:0:0:48850:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:48850:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 8 SEND TEvPut with key [1:1:8:0:0:411812:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:411812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 9 SEND TEvPut with key [1:1:9:0:0:293766:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:293766:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start node 3 Step = 10 SEND TEvPut with key [1:1:10:0:0:127358:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:127358:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 11 SEND TEvPut with key [1:1:11:0:0:282945:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:282945:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 12 SEND TEvPut with key [1:1:12:0:0:34864:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:34864:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 13 SEND TEvPut with key [1:1:13:0:0:363096:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:363096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 15 SEND TEvPut with key [1:1:15:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 16 SEND TEvPut with key [1:1:16:0:0:136892:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:136892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 17 SEND TEvPut with key [1:1:17:0:0:517733:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:517733:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 18 SEND TEvPut with key [1:1:18:0:0:250802:0] TEvPutResult: 
TEvPutResult {Id# [1:1:18:0:0:250802:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 19 SEND TEvPut with key [1:1:19:0:0:199490:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:199490:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 20 SEND TEvPut with key [1:1:20:0:0:244269:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:244269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 21 SEND TEvPut with key [1:1:21:0:0:329606:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:329606:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 23 SEND TEvPut with key [1:1:23:0:0:519258:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:519258:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 25 SEND TEvPut with key [1:1:25:0:0:514591:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:514591:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Stop node 7 2025-12-23T18:28:24.520519Z 1 00h01m30.100512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 26 SEND TEvPut with key [1:1:26:0:0:5927:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:5927:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 28 SEND TEvPut with key [1:1:28:0:0:6043:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:6043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 30 SEND TEvPut with key [1:1:30:0:0:264716:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:264716:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Compact vdisk 3 Step = 31 SEND TEvPut with key [1:1:31:0:0:168116:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:168116:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 32 SEND TEvPut with key [1:1:32:0:0:444749:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:444749:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 33 SEND TEvPut with key [1:1:33:0:0:350254:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:350254:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 34 SEND TEvPut with key [1:1:34:0:0:145950:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:145950:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 35 SEND TEvPut with key [1:1:35:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 38 SEND TEvPut with key 
[1:1:38:0:0:185170:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:185170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 39 SEND TEvPut with key [1:1:39:0:0:297271:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:297271:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 40 SEND TEvPut with key [1:1:40:0:0:419670:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:419670:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 41 SEND TEvPut with key [1:1:41:0:0:218956:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:218956:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 42 SEND TEvPut with key [1:1:42:0:0:154723:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:154723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 43 SEND TEvPut with key [1:1:43:0:0:13332:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:13332:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 44 SEND TEvPut with key [1:1:44:0:0:448892:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:448892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 45 SEND TEvPut with key [1:1:45:0:0:103231:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:103231:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 46 SEND TEvPut with key [1:1:46:0:0:295973:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:295973:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 47 SEND TEvPut with key [1:1:47:0:0:402799:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:402799:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 48 SEND TEvPut with key [1:1:48:0:0:165045:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:165045:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 49 SEND TEvPut with key [1:1:49:0:0:360099:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:360099:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 50 SEND TEvPut with key [1:1:50:0:0:97222:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:97222:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 51 SEND TEvPut with key [1:1:51:0:0:303396:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:303396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 52 SEND TEvPut with key [1:1:52:0:0:304876:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:304876:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 53 SEND TEvPut with key [1:1:53:0:0:375063:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:375063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Start node 4 Step = 54 SEND TEvPut with key [1:1:54:0:0:288044:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:288044:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999878} Step = 55 SEND TEvPut with key [1:1:55:0:0:181559:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:181559:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999878} Step = 57 SEND TEvPut with key [1:1:57:0:0:424399:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:424399:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 58 SEND TEvPut with key [1:1:58:0:0:169341:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:169341:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999915} Step = 59 SEND TEvPut with key [1:1:59:0:0:405932:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:405932:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 60 SEND TEvPut with key [1:1:60:0:0:190148:0] TEvPutResult: TEvPutResult {Id# [1:1:60:0:0:190148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Stop node 3 2025-12-23T18:28:26.594982Z 1 00h02m00.150512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Wipe node 0 2025-12-23T18:28:26.812900Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:28:26.821709Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 10821803127893032065] 2025-12-23T18:28:26.915782Z 1 00h02m15.161024s :BS_PROXY ERROR: StateEstablishingSessions Wakeup TIMEOUT Marker# DSP12 GroupId# 2181038080 EstablishingSessionsStateTs# 130.161024s NumUnconnectedDisks# 3 Step = 61 SEND TEvPut with key [1:1:61:0:0:500240:0] TEvPutResult: TEvPutResult {Id# [1:1:61:0:0:500240:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4). GroupId# 2181038080 EstablishingSessionsStateTs# 130.161024s NumUnconnectedDisks# 3" ApproximateFreeSpaceShare# 0} Step = 62 SEND TEvPut with key [1:1:62:0:0:354994:0] TEvPutResult ... 1:945:0:0:76599:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99978} Compact vdisk 2 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 948 SEND TEvPut with key [1:1:948:0:0:112126:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:112126:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 949 SEND TEvPut with key [1:1:949:0:0:525378:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:525378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 950 SEND TEvPut with key [1:1:950:0:0:410875:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:410875:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 951 SEND TEvPut with key [1:1:951:0:0:113503:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:113503:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 952 SEND TEvPut with key [1:1:952:0:0:431140:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:431140:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 953 SEND TEvPut with key [1:1:953:0:0:509293:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:509293:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 3 2025-12-23T18:30:07.400219Z 1 00h25m01.150000s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:286395:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:286395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 1 2025-12-23T18:30:07.721560Z 1 00h25m11.150512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 955 SEND TEvPut with key [1:1:955:0:0:219270:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:219270:0] Status# OK StatusFlags# 
{ } ApproximateFreeSpaceShare# 0.999792} Start node 1 Step = 956 SEND TEvPut with key [1:1:956:0:0:274971:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:274971:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Step = 957 SEND TEvPut with key [1:1:957:0:0:487884:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:487884:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Start node 3 Step = 958 SEND TEvPut with key [1:1:958:0:0:327302:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:327302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 961 SEND TEvPut with key [1:1:961:0:0:61147:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:61147:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 962 SEND TEvPut with key [1:1:962:0:0:237906:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:237906:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 963 SEND TEvPut with key [1:1:963:0:0:347273:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:347273:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 964 SEND TEvPut with key [1:1:964:0:0:181317:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:181317:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 965 SEND TEvPut with key [1:1:965:0:0:456096:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:456096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 966 SEND TEvPut with key [1:1:966:0:0:93776:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:93776:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 967 SEND TEvPut with key [1:1:967:0:0:447659:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:447659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 969 SEND TEvPut with key [1:1:969:0:0:92781:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:92781:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 0 2025-12-23T18:30:09.223260Z 9 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [9:116263:357] ServerId# [1:117301:174] TabletId# 72057594037932033 PipeClientId# [9:116263:357] 2025-12-23T18:30:09.223625Z 8 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:146914:17] ServerId# [1:146923:4108] TabletId# 72057594037932033 PipeClientId# [8:146914:17] 2025-12-23T18:30:09.223813Z 7 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:145866:17] ServerId# [1:145872:3981] TabletId# 72057594037932033 PipeClientId# [7:145866:17] 2025-12-23T18:30:09.223968Z 6 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} 
Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:122897:17] ServerId# [1:122903:1014] TabletId# 72057594037932033 PipeClientId# [6:122897:17] 2025-12-23T18:30:09.224198Z 5 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:142941:17] ServerId# [1:142948:3599] TabletId# 72057594037932033 PipeClientId# [5:142941:17] 2025-12-23T18:30:09.224369Z 4 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:151899:17] ServerId# [1:151908:4704] TabletId# 72057594037932033 PipeClientId# [4:151899:17] 2025-12-23T18:30:09.224565Z 3 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:141860:17] ServerId# [1:141869:3476] TabletId# 72057594037932033 PipeClientId# [3:141860:17] 2025-12-23T18:30:09.224780Z 2 00h25m41.153072s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:150941:17] ServerId# [1:150947:4599] TabletId# 72057594037932033 PipeClientId# [2:150941:17] Step = 971 SEND TEvPut with key [1:1:971:0:0:439384:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:439384:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Step = 972 SEND TEvPut with key [1:1:972:0:0:252551:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:252551:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 973 SEND TEvPut with key [1:1:973:0:0:39982:0] TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:39982:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:526796:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:526796:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Start node 0 Step = 975 SEND TEvPut with key [1:1:975:0:0:337763:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:337763:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:475740:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:475740:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 977 SEND TEvPut with key [1:1:977:0:0:169780:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:169780:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 980 SEND TEvPut with key [1:1:980:0:0:159890:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:159890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 981 SEND TEvPut with key [1:1:981:0:0:111300:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:111300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 982 SEND TEvPut with key [1:1:982:0:0:355914:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:355914:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 983 SEND TEvPut with key [1:1:983:0:0:399106:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:399106:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 985 SEND TEvPut with key [1:1:985:0:0:261994:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:261994:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 987 SEND TEvPut with key [1:1:987:0:0:138774:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:138774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 988 SEND TEvPut with key [1:1:988:0:0:441913:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:441913:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 989 SEND TEvPut with key [1:1:989:0:0:134469:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:134469:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 990 SEND TEvPut with key [1:1:990:0:0:123825:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:123825:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 991 SEND TEvPut with key [1:1:991:0:0:40387:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:40387:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Stop node 7 2025-12-23T18:30:11.855819Z 1 00h26m21.163584s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 993 SEND TEvPut with key [1:1:993:0:0:455894:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:455894:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Compact vdisk 0 Step = 994 SEND TEvPut with key [1:1:994:0:0:54378:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:54378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Compact vdisk 6 Step = 995 SEND TEvPut with key [1:1:995:0:0:487669:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:487669:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 996 SEND TEvPut with key [1:1:996:0:0:194641:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:194641:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 997 SEND TEvPut with key [1:1:997:0:0:74188:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:74188:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 998 SEND TEvPut with key [1:1:998:0:0:136082:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:136082:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 999 SEND TEvPut with key [1:1:999:0:0:145518:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:145518:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Starting nodes Start compaction 1 Start checking |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2025-12-23T18:30:35.909195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:35.911468Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-12-23T18:30:35.911651Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-12-23T18:30:35.911697Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-12-23T18:30:35.911775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2025-12-23T18:30:35.911899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2025-12-23T18:30:35.911962Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.912024Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2025-12-23T18:30:35.912063Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:35.912580Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-12-23T18:30:35.912681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-12-23T18:30:35.912749Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:36:2066][path] Update to strong state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.432181Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][4:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:36.433157Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2025-12-23T18:30:36.433245Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2025-12-23T18:30:36.433290Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2025-12-23T18:30:36.433460Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle 
NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2025-12-23T18:30:36.433529Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 2025-12-23T18:30:36.433572Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][4:36:2066][path] Set up state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.433618Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2025-12-23T18:30:36.433668Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.434056Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2025-12-23T18:30:36.434111Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.434149Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 2025-12-23T18:30:36.434174Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.434194Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2025-12-23T18:30:36.434213Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.445540Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:47:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2025-12-23T18:30:36.445938Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2025-12-23T18:30:36.446035Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.446138Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:48:2066][path] Handle 
NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2025-12-23T18:30:36.446231Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:49:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2025-12-23T18:30:36.446386Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 2025-12-23T18:30:36.446430Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.446501Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2025-12-23T18:30:36.446536Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.447099Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:47:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:3:2050] 2025-12-23T18:30:36.447191Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [4:37:2066] 2025-12-23T18:30:36.447244Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][4:36:2066][path] Update to strong state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup [GOOD] Test command err: ... 
waiting for initial path lookups 2025-12-23T18:30:36.145129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0]] WriteOnly: 0 State: 1}]} 2025-12-23T18:30:36.148704Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2025-12-23T18:30:36.148830Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2025-12-23T18:30:36.148880Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2025-12-23T18:30:36.148930Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:13:2060] 2025-12-23T18:30:36.149010Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:14:2061] 2025-12-23T18:30:36.149057Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2025-12-23T18:30:36.149493Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:29:2075] 2025-12-23T18:30:36.149598Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2025-12-23T18:30:36.149642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2025-12-23T18:30:36.149704Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:32:2075] 2025-12-23T18:30:36.149897Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:33:2075] 2025-12-23T18:30:36.149958Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:28:2075][TestPath] Set up state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.150026Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2025-12-23T18:30:36.150077Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:24339059:0] 2025-12-23T18:30:36.150450Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:4:2051] 2025-12-23T18:30:36.150534Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:29:2075] 2025-12-23T18:30:36.150618Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Update to strong state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:1099535966835:0] 2025-12-23T18:30:36.150976Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:5:2052] 2025-12-23T18:30:36.151039Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:30:2075] 2025-12-23T18:30:36.151104Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new 
version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:2199047594611:0] 2025-12-23T18:30:36.151334Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:6:2053] 2025-12-23T18:30:36.151409Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:31:2075] 2025-12-23T18:30:36.151459Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:3298559222387:0] 2025-12-23T18:30:36.151711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 4 }: sender# [1:13:2060] 2025-12-23T18:30:36.151780Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 4 }: sender# [1:32:2075] 2025-12-23T18:30:36.151853Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 4) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:4398070850163:0] 2025-12-23T18:30:36.152092Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 5 }: sender# [1:14:2061] 2025-12-23T18:30:36.152163Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 5 }: sender# [1:33:2075] 2025-12-23T18:30:36.152211Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 4) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 5) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:5497582477939:0] 2025-12-23T18:30:36.152449Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: 
[OwnerId: 1, LocalPathId: 1] Version: 6 }: sender# [1:15:2062] 2025-12-23T18:30:36.152502Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 6 }: sender# [1:34:2075] 2025-12-23T18:30:36.152552Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 5) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 6) DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2025-12-23T18:30:36.446118Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:36.446533Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:4:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [2:23:2066] 2025-12-23T18:30:36.446563Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:4:2051] Upsert description: path# TestPath 2025-12-2 ... tion: 1 }: sender# [2:18:2065], cookie# 0, event size# 80 2025-12-23T18:30:36.457098Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:4:2051] Update description: path# TestPath, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:30:36.457172Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:4:2051] Upsert description: path# TestPath, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path TestPath, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 34} 2025-12-23T18:30:36.457346Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:4:2051] 2025-12-23T18:30:36.457405Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:4:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [2:23:2066] 2025-12-23T18:30:36.457539Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:520: [proxy][2:20:2066][TestPath] Cluster state mismatch in replica notification: sender# [2:23:2066], subscriber cluster state# {Generation: 0, GUID: 0}, replica cluster state# {Generation: 1 Guid: 0} 2025-12-23T18:30:36.457675Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:4:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: TestPath }: sender# [2:23:2066] 2025-12-23T18:30:36.457739Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:4:2051] Unsubscribe: subscriber# [2:23:2066], path# TestPath 2025-12-23T18:30:36.457799Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:20:2066] 2025-12-23T18:30:36.457882Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:19:2066][TestPath] Ignore empty state: owner# [2:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 
Version: DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2025-12-23T18:30:36.764658Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:36.765219Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2025-12-23T18:30:36.765297Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2025-12-23T18:30:36.765330Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [3:2199047594611:0] Poisoning replica: [3:3298559222387:0] Poisoning replica: [3:4398070850163:0] Poisoning replica: [3:5497582477939:0] 2025-12-23T18:30:36.765638Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12345 2025-12-23T18:30:36.765727Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2025-12-23T18:30:36.765896Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2025-12-23T18:30:36.766014Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.766075Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2025-12-23T18:30:36.766123Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.766275Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12345 2025-12-23T18:30:36.766385Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12345 2025-12-23T18:30:36.766432Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:31:2075], cookie# 12345 2025-12-23T18:30:36.766542Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2025-12-23T18:30:36.766598Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2025-12-23T18:30:36.766767Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:29:2075], cookie# 12345 2025-12-23T18:30:36.766827Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:30:36.766885Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12345 2025-12-23T18:30:36.766932Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:30:36.767025Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12345 2025-12-23T18:30:36.767056Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:31:2075], cookie# 12345, current cookie# 0 2025-12-23T18:30:36.767103Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2025-12-23T18:30:36.767159Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [3:1099535966835:0] whose ring group state is: 0 2025-12-23T18:30:36.767303Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12346 2025-12-23T18:30:36.767463Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12346 2025-12-23T18:30:36.767571Z node 3 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12346 2025-12-23T18:30:36.767652Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12346 2025-12-23T18:30:36.767701Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2025-12-23T18:30:36.767763Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12346 2025-12-23T18:30:36.767871Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:29:2075], cookie# 12346 2025-12-23T18:30:36.767901Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2025-12-23T18:30:36.767969Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:30:2075], cookie# 12346 2025-12-23T18:30:36.768010Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2025-12-23T18:30:36.768048Z node 3 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][3:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2025-12-23T18:30:36.768150Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2025-12-23T18:30:36.768219Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: ... 
waiting for initial path lookups 2025-12-23T18:30:36.500255Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0]] WriteOnly: 0 State: 1}]} 2025-12-23T18:30:36.503762Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2025-12-23T18:30:36.503864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2025-12-23T18:30:36.503891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2025-12-23T18:30:36.503920Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:13:2060] 2025-12-23T18:30:36.503962Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:14:2061] 2025-12-23T18:30:36.503996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] Poisoning replica: [1:5497582477939:0] 2025-12-23T18:30:36.504288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:27:2074], cookie# 12345 2025-12-23T18:30:36.504360Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:29:2075] 2025-12-23T18:30:36.504506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2025-12-23T18:30:36.504548Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2025-12-23T18:30:36.504609Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:32:2075] 2025-12-23T18:30:36.504657Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:33:2075] 2025-12-23T18:30:36.504760Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:28:2075][TestPath] Set up state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.504831Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2025-12-23T18:30:36.504870Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.505002Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:29:2075], cookie# 12345 2025-12-23T18:30:36.505084Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:30:2075], cookie# 12345 2025-12-23T18:30:36.505137Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:31:2075], cookie# 12345 2025-12-23T18:30:36.505232Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:32:2075], cookie# 12345 2025-12-23T18:30:36.505279Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:33:2075], cookie# 12345 2025-12-23T18:30:36.505327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:34:2075], 
cookie# 12345 2025-12-23T18:30:36.505430Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2025-12-23T18:30:36.505469Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2025-12-23T18:30:36.505545Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:13:2060], cookie# 12345 2025-12-23T18:30:36.505581Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:14:2061], cookie# 12345 2025-12-23T18:30:36.505772Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:29:2075], cookie# 12345 2025-12-23T18:30:36.505820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:30:36.505858Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:30:2075], cookie# 12345 2025-12-23T18:30:36.505897Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:30:36.505925Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 0, failures# 0 2025-12-23T18:30:36.505964Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:31:2075], cookie# 12345 2025-12-23T18:30:36.505992Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2025-12-23T18:30:36.506011Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 0, failures# 0 2025-12-23T18:30:36.506043Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2025-12-23T18:30:36.506087Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.506154Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { 
Version: 0 Partial: 0 Cluster State: { } }: sender# [1:32:2075], cookie# 12345 2025-12-23T18:30:36.506181Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2025-12-23T18:30:36.506206Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:30:36.506262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:33:2075], cookie# 12345 2025-12-23T18:30:36.506297Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2025-12-23T18:30:36.506359Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:30:36.506435Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:34:2075], cookie# 12345 2025-12-23T18:30:36.506466Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:28:2075][TestPath] Sync cookie mismatch: sender# [1:34:2075], cookie# 12345, current cookie# 0 2025-12-23T18:30:36.506507Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2025-12-23T18:30:36.506550Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [1:4398070850163:0] whose ring group state is: 1 2025-12-23T18:30:36.506694Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:27:2074], cookie# 12346 2025-12-23T18:30:36.507144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:29:2075], cookie# 12346 2025-12-23T18:30:36.507208Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:30:2075], cookie# 12346 2025-12-23T18:30:36.507285Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:31:2075], cookie# 12346 2025-12-23T18:30:36.507333Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2025-12-23T18:30:36.507381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:38:2075][TestPath] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:32:2075], cookie# 12346 2025-12-23T18:30:36.507431Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:33:2075], cookie# 12346 2025-12-23T18:30:36.507477Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:34:2075], cookie# 12346 2025-12-23T18:30:36.507512Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2025-12-23T18:30:36.507552Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12346 2025-12-23T18:30:36.507612Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12346 2025-12-23T18:30:36.507643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:13:2060], cookie# 12346 2025-12-23T18:30:36.507748Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:29:2075], cookie# 12346 2025-12-23T18:30:36.507784Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2025-12-23T18:30:36.507843Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:30:2075], cookie# 12346 2025-12-23T18:30:36.507886Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2025-12-23T18:30:36.507923Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 0, failures# 1 2025-12-23T18:30:36.507958Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:32:2075], cookie# 12346 2025-12-23T18:30:36.507989Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2025-12-23T18:30:36.508010Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 1, failures# 1 2025-12-23T18:30:36.508059Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: 
sender# [1:33:2075], cookie# 12346 2025-12-23T18:30:36.508093Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2025-12-23T18:30:36.508120Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2025-12-23T18:30:36.508156Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2025-12-23T18:30:36.508242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:33:2075] 2025-12-23T18:30:36.508308Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.797837Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:36.798580Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:3:2050] 2025-12-23T18:30:36.798649Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:6:2053] 2025-12-23T18:30:36.798704Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:9:2056] 2025-12-23T18:30:36.798803Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2066] 2025-12-23T18:30:36.798875Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:38:2066] 2025-12-23T18:30:36.798919Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][2:36:2066][path] Set up state: owner# [2:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.798964Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:39:2066] 2025-12-23T18:30:36.798999Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:36:2066][path] Ignore empty state: owner# [2:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup [GOOD] Test command err: ... 
waiting for initial path lookups 2025-12-23T18:30:36.538076Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:36.540601Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2025-12-23T18:30:36.540746Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2025-12-23T18:30:36.540807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] 2025-12-23T18:30:36.541157Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12345 2025-12-23T18:30:36.541241Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2025-12-23T18:30:36.541403Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2025-12-23T18:30:36.541484Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.541585Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2025-12-23T18:30:36.541657Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.541855Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12345 2025-12-23T18:30:36.541951Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12345 2025-12-23T18:30:36.542008Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:22:2066], cookie# 12345 2025-12-23T18:30:36.542147Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2025-12-23T18:30:36.542211Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2025-12-23T18:30:36.542416Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12345 2025-12-23T18:30:36.542477Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:30:36.542560Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12345 2025-12-23T18:30:36.542614Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:30:36.542677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12345 2025-12-23T18:30:36.542716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:22:2066], cookie# 12345, current cookie# 0 2025-12-23T18:30:36.542792Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2025-12-23T18:30:36.542848Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [1:24339059:0] whose ring group state is: 0 2025-12-23T18:30:36.543025Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12346 2025-12-23T18:30:36.543239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12346 2025-12-23T18:30:36.543323Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12346 
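The TEvSyncVersionResponse lines in this dump walk the per-ring-group quorum bookkeeping (size# 3, half# 1, successes#, failures#). Below is a minimal C++ sketch of the majority rule these counters appear to follow; TRingGroupSyncState and its members are hypothetical names chosen for illustration, not the actual subscriber.cpp implementation.

    #include <cstddef>
    #include <iostream>

    // Sketch of the quorum rule suggested by the log counters above:
    // a ring group is "done" once either full responses (successes#) or
    // partial/failed responses (failures#) exceed half# = size# / 2.
    struct TRingGroupSyncState {
        std::size_t Size = 3;       // replicas in the ring group (size# 3)
        std::size_t Successes = 0;  // responses with Partial: 0
        std::size_t Failures = 0;   // responses counted as failures (Partial: 1)

        std::size_t Half() const { return Size / 2; }  // 3 / 2 == 1, matching half# 1
        bool Done() const { return Successes > Half() || Failures > Half(); }
        // Done without a success quorum -> reported as partial# 1
        bool Partial() const { return Done() && Successes <= Half(); }
    };

    int main() {
        TRingGroupSyncState g;
        g.Failures = 1;   // successes# 0, failures# 1 -> sync still in progress
        std::cout << "done=" << g.Done() << "\n";                               // done=0
        g.Successes = 2;  // successes# 2, failures# 1 -> done in the ring group
        std::cout << "done=" << g.Done() << " partial=" << g.Partial() << "\n"; // done=1 partial=0
        return 0;
    }

With size# 3 the group needs two full responses to finish cleanly, while two failures end the sync as partial, which is what the "Sync is incomplete in one of the ring groups" warnings in these dumps report.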
2025-12-23T18:30:36.543397Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12346 2025-12-23T18:30:36.543465Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2025-12-23T18:30:36.543573Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12346 2025-12-23T18:30:36.543653Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:20:2066], cookie# 12346 2025-12-23T18:30:36.543697Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2025-12-23T18:30:36.543742Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:19:2066][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2025-12-23T18:30:36.543825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2025-12-23T18:30:36.543921Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.543999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12346 2025-12-23T18:30:36.544037Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:21:2066], cookie# 12346, current cookie# 0 ... 
waiting for initial path lookups 2025-12-23T18:30:36.824942Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[2:3298559222387:0], [2:4398070850163:0], [2:5497582477939:0]] WriteOnly: 0 State: 1}]} 2025-12-23T18:30:36.826064Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:4:2051] 2025-12-23T18:30:36.826130Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:5:2052] 2025-12-23T18:30:36.826170Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:6:2053] 2025-12-23T18:30:36.826217Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:13:2060] 2025-12-23T18:30:36.826288Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:14:2061] 2025-12-23T18:30:36.826340Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr:: ... 
0, failures# 1 2025-12-23T18:30:36.830214Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:30:2075] 2025-12-23T18:30:36.830290Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:28:2075][TestPath] Ignore empty state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:36.830342Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [2:32:2075], cookie# 12346 2025-12-23T18:30:36.830370Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2025-12-23T18:30:36.830393Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 1, failures# 1 2025-12-23T18:30:36.830444Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [2:33:2075], cookie# 12346 2025-12-23T18:30:36.830477Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2025-12-23T18:30:36.830517Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 2, failures# 1, partial# 1 2025-12-23T18:30:36.830567Z node 2 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][2:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 ... waiting for initial path lookups 2025-12-23T18:30:37.120190Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[3:3298559222387:0], [3:4398070850163:0], [3:5497582477939:0]] WriteOnly: 1 State: 0}]} 2025-12-23T18:30:37.121020Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2025-12-23T18:30:37.121119Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2025-12-23T18:30:37.121184Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [3:2199047594611:0] Poisoning replica: [3:3298559222387:0] Poisoning replica: [3:4398070850163:0] Poisoning replica: [3:5497582477939:0] 2025-12-23T18:30:37.121515Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12345 2025-12-23T18:30:37.121599Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2025-12-23T18:30:37.121786Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2025-12-23T18:30:37.121866Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:37.121938Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2025-12-23T18:30:37.121997Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:37.122145Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12345 2025-12-23T18:30:37.122220Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12345 2025-12-23T18:30:37.122330Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:31:2075], cookie# 12345 2025-12-23T18:30:37.122426Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2025-12-23T18:30:37.122489Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2025-12-23T18:30:37.122674Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster 
State: { } }: sender# [3:29:2075], cookie# 12345 2025-12-23T18:30:37.122770Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:30:37.122834Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12345 2025-12-23T18:30:37.122895Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:30:37.122965Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12345 2025-12-23T18:30:37.123002Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:31:2075], cookie# 12345, current cookie# 0 2025-12-23T18:30:37.123066Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2025-12-23T18:30:37.123121Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [3:24339059:0] whose ring group state is: 0 2025-12-23T18:30:37.123309Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12346 2025-12-23T18:30:37.123523Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12346 2025-12-23T18:30:37.123606Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12346 2025-12-23T18:30:37.123679Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12346 2025-12-23T18:30:37.123726Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2025-12-23T18:30:37.123837Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12346 2025-12-23T18:30:37.123913Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:29:2075], cookie# 12346 2025-12-23T18:30:37.123968Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, 
ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2025-12-23T18:30:37.124011Z node 3 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][3:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2025-12-23T18:30:37.124124Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2025-12-23T18:30:37.124195Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:37.124271Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12346 2025-12-23T18:30:37.124308Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:30:2075], cookie# 12346, current cookie# 0 |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestKillClientBeforServerIdKnown >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] >> VectorIndexBuildTest::Metering_CommonDB [GOOD] >> TResourceBroker::TestOverusage >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-false >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] >> TTabletPipeTest::TestInterconnectSession |91.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |91.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |91.3%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> TabletState::SeqNoSubscribeOutOfOrder |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TBlockBlobStorageTest::DelayedErrorsNotIgnored >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:109:2141] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:109:2141] Leader for TabletID 9437184 is [1:120:2148] sender: 
[1:121:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [1:123:2150] sender: [1:125:2057] recipient: [1:109:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:160:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:162:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:164:2057] recipient: [1:105:2139] Leader for TabletID 9437185 is [1:123:2150] sender: [1:166:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:169:2057] recipient: [1:168:2179] Leader for TabletID 9437185 is [1:170:2180] sender: [1:171:2057] recipient: [1:168:2179] Leader for TabletID 9437185 is [1:170:2180] sender: [1:200:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:120:2148] sender: [1:203:2057] recipient: [1:104:2138] Leader for TabletID 9437184 is [1:120:2148] sender: [1:206:2057] recipient: [1:205:2203] Leader for TabletID 9437184 is [1:207:2204] sender: [1:208:2057] recipient: [1:205:2203] Leader for TabletID 9437184 is [1:207:2204] sender: [1:236:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2139] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2139] Leader for TabletID 9437184 is [2:112:2143] sender: [2:113:2057] recipient: [2:106:2139] Leader for TabletID 9437184 is [2:112:2143] sender: [2:132:2057] recipient: [2:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [2:168:2057] recipient: [2:166:2171] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:168:2057] recipient: [2:166:2171] Leader for TabletID 9437185 is [2:172:2175] sender: [2:173:2057] recipient: [2:166:2171] Leader for TabletID 9437185 is [2:172:2175] sender: [2:208:2057] recipient: [2:14:2061] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TResourceBroker::TestQueueWithConfigure >> TTabletPipeTest::TestInterconnectSession [GOOD] >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] >> TTabletPipeTest::TestSendAfterOpen ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [3:167:2058] recipient: [3:165:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [3:167:2058] recipient: [3:165:2140] Leader for TabletID 9437184 is [3:173:2144] sender: [3:174:2058] recipient: [3:165:2140] Leader for TabletID 9437185 is [0:0:0] sender: [4:177:2049] recipient: [4:168:2097] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [4:177:2049] recipient: [4:168:2097] Leader for TabletID 9437185 is [4:189:2100] sender: [4:190:2049] recipient: [4:168:2097] Leader for TabletID 9437184 is [3:173:2144] sender: [3:217:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:189:2100] sender: [3:219:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:189:2100] sender: [4:221:2049] recipient: [4:45:2053] Leader for TabletID 9437185 is [4:189:2100] sender: [3:224:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:189:2100] sender: [4:222:2049] recipient: [4:162:2096] Leader for TabletID 9437185 is [4:189:2100] sender: [4:227:2049] recipient: [4:226:2113] Leader for TabletID 9437185 is [4:228:2114] sender: [4:229:2049] recipient: [4:226:2113] Leader for TabletID 9437185 is [4:228:2114] sender: [3:258:2058] recipient: [3:15:2062] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] >> 
TResourceBrokerConfig::UpdateQueues [GOOD] >> TResourceBrokerConfig::DefaultConfig [GOOD] >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:30:17.950800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:30:17.950890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:17.950927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:30:17.950967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:30:17.951037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:30:17.951084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:30:17.951145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:17.951292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:30:17.952231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:30:17.952530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:30:18.058534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:18.058602Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:18.078524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:30:18.078794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:30:18.079002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:30:18.091835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:30:18.092089Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:30:18.092658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:18.092836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:30:18.096800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:18.096953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:30:18.098045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:30:18.098092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:18.098221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:30:18.098268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:30:18.098299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:30:18.098424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:30:18.203888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.204776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.204858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.204900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.204942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.204976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.205026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.205082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.205157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.205247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.205331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.205409Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.205496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.205553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.205631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
DROP LOCK, path: /MyRoot/ServerLessDB/Table/test_index/indexImplTable 2025-12-23T18:30:38.386611Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7144: Handle: TEvModifySchemeTransactionResult: txId# 281474976725762, status# StatusPathDoesNotExist 2025-12-23T18:30:38.386675Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7146: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549 2025-12-23T18:30:38.386782Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3152: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 107, cookie: 107, record: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549, status: StatusPathDoesNotExist 2025-12-23T18:30:38.386935Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3157: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Rejection_Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index1=500, index2=500), SubscribersCount: 1, CreateSender: [6:1657:3528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725762, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 2, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 107, record: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549, status: StatusPathDoesNotExist 2025-12-23T18:30:38.387105Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2025-12-23T18:30:38.389044Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Rejected 2025-12-23T18:30:38.389184Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Execute: 107 Rejected TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Rejected, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/Table/test_index', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760, SubscribersCount: 1, CreateSender: [6:1657:3528], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725762, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 2, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:30:38.389241Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2025-12-23T18:30:38.389391Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-12-23T18:30:38.389463Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [6:1674:3545] TestWaitNotification: OK eventTxId 107 2025-12-23T18:30:38.390173Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2025-12-23T18:30:38.390544Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 107 Issues { message: "Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 107 Issues { message: "Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, 
LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2025-12-23T18:30:38.391427Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:30:38.391702Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 301us result status StatusSuccess 2025-12-23T18:30:38.392194Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index1" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "index2" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 
MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2025-12-23T18:30:38.392980Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__forget.cpp:18: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: DoExecute TxId: 108 DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2025-12-23T18:30:38.393272Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index_tx_base.h:102: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: Reply TxId: 108 Status: SUCCESS BUILDINDEX RESPONSE Forget: NKikimrIndexBuilder.TEvForgetResponse TxId: 108 Status: SUCCESS 2025-12-23T18:30:38.402822Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__list.cpp:23: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" PageSize: 100 PageToken: "" 2025-12-23T18:30:38.402971Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" >> TResourceBroker::TestQueueWithConfigure [GOOD] >> TResourceBroker::TestOverusageDifferentResources >> TKeyValueTest::TestIncorrectRequestThenResponseError >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] Test command err: 2025-12-23T18:30:38.713126Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:315: [9437185] Detach 2025-12-23T18:30:38.728101Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [9437185] Activate 2025-12-23T18:30:38.735826Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [9437185] Activate 2025-12-23T18:30:38.738972Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:132:2157] 2025-12-23T18:30:38.739033Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:132:2157] 2025-12-23T18:30:38.739373Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:132:2157] 2025-12-23T18:30:38.739432Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:132:2157] 2025-12-23T18:30:38.739548Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:132:2157] 2025-12-23T18:30:38.739589Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:132:2157] 2025-12-23T18:30:38.739663Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:132:2157] 2025-12-23T18:30:38.739818Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:132:2157] Type# 269877249 Reason# ActorUnknown 2025-12-23T18:30:38.739965Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:135:2159] 
2025-12-23T18:30:38.739991Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:135:2159] 2025-12-23T18:30:38.740099Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:135:2159] 2025-12-23T18:30:38.740148Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:135:2159] 2025-12-23T18:30:38.740208Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:135:2159] 2025-12-23T18:30:38.740259Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:135:2159] 2025-12-23T18:30:38.740313Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:135:2159] 2025-12-23T18:30:38.740401Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:135:2159] Type# 269877249 Reason# ActorUnknown 2025-12-23T18:30:38.740533Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:137:2161] 2025-12-23T18:30:38.740558Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:137:2161] 2025-12-23T18:30:38.740597Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:137:2161] 2025-12-23T18:30:38.740625Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:137:2161] 2025-12-23T18:30:38.740669Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:137:2161] 2025-12-23T18:30:38.740700Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:137:2161] 2025-12-23T18:30:38.740747Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:137:2161] 2025-12-23T18:30:38.740833Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:137:2161] Type# 269877249 Reason# ActorUnknown >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi >> TResourceBroker::TestOverusageDifferentResources [GOOD] >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2025-12-23T18:30:39.876379Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-12-23T18:30:39.876472Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:272} Unexpected EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1766514639875 ErrorReason# 2025-12-23T18:30:39.885494Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-12-23T18:30:39.885591Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:242} Received EvGetResult from an unexpected storage group. 
KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1766514639885 ErrorReason# 2025-12-23T18:30:39.891238Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-12-23T18:30:39.891341Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:224} Received EvGetResult with an unexpected cookie. KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1766514639891 ErrorReason# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::DefaultConfig [GOOD] Test command err: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 10737418240 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" 
DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 17179869184 } Total queues cpu: 90 |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2025-12-23T18:30:40.370824Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-12-23T18:30:40.374591Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-12-23T18:30:40.381717Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-12-23T18:30:40.381804Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-12-23T18:30:40.388721Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-12-23T18:30:40.388878Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:312} Unexpected EvGetResult. 
KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2025-12-23T18:30:40.388488Z ErrorReason# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2025-12-23T18:30:39.757950Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2025-12-23T18:30:39.758183Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' is required" 2025-12-23T18:30:39.758367Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TKeyValueTest::TestRenameWorks |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TKeyValueTest::TestWriteReadPatchRead >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-false [GOOD] >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-true >> TKeyValueTest::TestObtainLockNewApi >> TKeyValueCollectorTest::TestKeyValueCollectorSingle ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:105:2138] ... blocking block result NO_GROUP for [1:106:2138] ... blocking block result NO_GROUP for [1:107:2138] ... 
blocking block result NO_GROUP for [1:108:2138] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestRewriteThenLastValueNewApi >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError >> VectorIndexBuildTest::BuildTableWithEmptyShard [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany >> TKeyValueTest::TestRewriteThenLastValue |91.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |91.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple >> VectorIndexBuildTest::SimpleDuplicates-Overlap-false [GOOD] >> VectorIndexBuildTest::SimpleDuplicates-Overlap-true >> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD] >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> DataStreams::TestDeleteStream >> DataStreams::TestUpdateStream >> DataStreams::TestGetShardIterator >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> TTxDataShardRecomputeKMeansScan::BuildTable-WithForeign [GOOD] >> TTxDataShardRecomputeKMeansScan::EmptyCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2025-12-23T18:30:42.967248Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-12-23T18:30:42.969727Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BuildTableWithEmptyShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:30:23.849061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:30:23.849162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:23.849205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:30:23.849299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:30:23.849353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:30:23.849382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:30:23.849435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:23.849509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:30:23.850454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:30:23.850809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:30:23.940824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:23.940888Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:23.948049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:30:23.948287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:30:23.948433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:30:23.953046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:30:23.953245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:30:23.953985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:23.954185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:30:23.956552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:23.956741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:30:23.957988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:30:23.958048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:23.958184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:30:23.958253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:30:23.958323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:30:23.958468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:30:24.108306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.109349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.109476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.109564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.109634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.109730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.109817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.109890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.109977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.110062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.110132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.110256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.110336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.110432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:30:24.110519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1663:3534], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 42 UploadBytes: 812 ReadRows: 80 ReadBytes: 1270 CpuTimeUs: 11103, Billed: UploadRows: 42 UploadBytes: 812 ReadRows: 80 ReadBytes: 1270 CpuTimeUs: 11103} 2025-12-23T18:30:42.165088Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2025-12-23T18:30:42.165272Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-12-23T18:30:42.165351Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [6:2319:4096] TestWaitNotification: OK eventTxId 107 2025-12-23T18:30:42.166424Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:30:42.166803Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 398us result status StatusSuccess 2025-12-23T18:30:42.167477Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "prefix" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 33 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady 
KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 72075186233409549 DataSize: 0 IndexImplTableDescriptions { } IndexImplTableDescriptions { } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 4 } clusters: 20 levels: 2 overlap_clusters: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 34 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2025-12-23T18:30:42.168504Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72075186233409549 2025-12-23T18:30:42.168952Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" took 475us result status StatusSuccess 2025-12-23T18:30:42.170036Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 35 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976725758 CreateStep: 400 ParentPathId: 33 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "pool-kind-1" } Log { PreferredPoolKind: "pool-kind-1" } Data { PreferredPoolKind: "pool-kind-1" } } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 
39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 34 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 35 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2025-12-23T18:30:42.176876Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268830210, Sender [6:3431:5156], Recipient [6:1737:3601]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "(\n (let range \'(\'(\'__ydb_parent (Null) (Void)) \'(\'key (Null) (Void)) ))\n (let columns \'(\'__ydb_parent))\n (let result (SelectRange \'__user__indexImplPostingTable range columns \'()))\n (return (AsList (SetResult \'Result result) ))\n )" } } ... posting table contains 10 rows >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> DataStreams::TestPutRecordsOfAnauthorizedUser |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |91.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |91.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TxUsage::WriteToTopic_Demo_20_RestartNo_Table [GOOD] >> DataStreams::TestNonChargeableUser >> DataStreams::TestStreamStorageRetention >> DataStreams::TestUpdateStorage >> TxUsage::WriteToTopic_Demo_20_RestartNo_Query >> TMLPWriterTests::Deduplicated_Reboot [GOOD] >> DataStreams::TestReservedResourcesMetering >> DataStreams::TestGetRecordsStreamWithSingleShard |91.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy >> DataStreams::TestGetShardIterator [GOOD] >> DataStreams::TestGetRecordsWithoutPermission >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag >> DataStreams::TestUpdateStream [GOOD] >> DataStreams::Test_AutoPartitioning_Describe >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-true [GOOD] >> VectorIndexBuildTest::CancelBroken |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |91.4%| [LD] {RESULT} 
$(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut >> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> DataStreams::TestPutRecordsWithRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/mlp/ut/unittest >> TMLPWriterTests::Deduplicated_Reboot [GOOD] Test command err: 2025-12-23T18:25:53.356938Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587121378329982700:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:25:53.357129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056d8/r3tmp/tmpZARTZi/pdisk_1.dat 2025-12-23T18:25:53.381386Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:25:53.560033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:25:53.568856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:53.568972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:53.641844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:53.670163Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:53.672902Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587121378329982597:2081] 1766514353353689 != 1766514353353692 TServer::EnableGrpc on GrpcPort 65462, node 1 2025-12-23T18:25:53.731153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0056d8/r3tmp/yandexDxj9GV.tmp 2025-12-23T18:25:53.731177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0056d8/r3tmp/yandexDxj9GV.tmp 2025-12-23T18:25:53.731293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0056d8/r3tmp/yandexDxj9GV.tmp 2025-12-23T18:25:53.731364Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:25:53.764230Z INFO: TTestServer started on Port 20307 GrpcPort 65462 2025-12-23T18:25:53.805176Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20307 PQClient connected to localhost:65462 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514353695 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:25:53.984024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:25:54.004241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:25:54.363162Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:56.153452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121391214885508:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:56.153607Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:56.158870Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121391214885524:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:56.159009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587121391214885530:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:56.159204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:25:56.164527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:25:56.181732Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587121391214885532:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-12-23T18:25:56.395323Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587121391214885596:2640] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:25:56.422846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:56.454491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:56.579569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:56.597477Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587121391214885613:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:25:56.600440Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YjEwOWRjYjItZTc4MDFhMTUtOGY1YTMxNDItZGFjYzcxNzA=, ActorId: [1:7587121391214885505:2328], ActorState: ExecuteState, TraceId: 01kd67bvx6c0km1xegv7k3wnc7, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:25:56.602685Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7587121391214885899:2819] 2025-12-23T18:25:58.356263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587121378329982700:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:25:58.356341Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-12-23T18:26:02.705763Z :TODO INFO: TTopicSdkTestSetup started 2025-12-23T18:26:02.718327Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-12-23T18:26:02.730665Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7587121416984689914:2927] connected; active server actors: 1 2025-12-23T18:26:02.731060Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added pa ... 
sh AND Topic == $Topic AND ProducerId == $SourceId; 2025-12-23T18:30:44.184884Z node 19 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-12-23T18:30:44.184904Z node 19 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-12-23T18:30:44.184972Z node 19 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [19:7587122625896902136:2481] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-12-23T18:30:44.185854Z node 19 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|2a83b13c-2ccea700-6a434060-c2ae5619_0 generated for partition 0 topic 'topic1' owner src 2025-12-23T18:30:44.186415Z node 19 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: src|2a83b13c-2ccea700-6a434060-c2ae5619_0 2025-12-23T18:30:44.187425Z :INFO: [/Root] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1766514644187 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:30:44.187594Z :INFO: [/Root] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|2a83b13c-2ccea700-6a434060-c2ae5619_0" topic: "topic1" 2025-12-23T18:30:44.187883Z :INFO: [/Root] MessageGroupId [src] SessionId [src|2a83b13c-2ccea700-6a434060-c2ae5619_0] Write session: close. 
Timeout = 0 ms 2025-12-23T18:30:44.187954Z :INFO: [/Root] MessageGroupId [src] SessionId [src|2a83b13c-2ccea700-6a434060-c2ae5619_0] Write session will now close 2025-12-23T18:30:44.188044Z :DEBUG: [/Root] MessageGroupId [src] SessionId [src|2a83b13c-2ccea700-6a434060-c2ae5619_0] Write session: aborting 2025-12-23T18:30:44.189006Z :INFO: [/Root] MessageGroupId [src] SessionId [src|2a83b13c-2ccea700-6a434060-c2ae5619_0] Write session: gracefully shut down, all writes complete 2025-12-23T18:30:44.189092Z :DEBUG: [/Root] MessageGroupId [src] SessionId [src|2a83b13c-2ccea700-6a434060-c2ae5619_0] Write session: destroy 2025-12-23T18:30:44.190315Z node 19 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: src|2a83b13c-2ccea700-6a434060-c2ae5619_0 grpc read failed 2025-12-23T18:30:44.190404Z node 19 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 2 sessionId: src|2a83b13c-2ccea700-6a434060-c2ae5619_0 2025-12-23T18:30:44.190432Z node 19 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: src|2a83b13c-2ccea700-6a434060-c2ae5619_0 is DEAD 2025-12-23T18:30:44.192571Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:29: [[19:7587122625896902142:3044]] Start describe 2025-12-23T18:30:44.194867Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:40: [[19:7587122625896902142:3044]] Handle NDescriber::TEvDescribeTopicsResponse 2025-12-23T18:30:44.194898Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:115: [[19:7587122625896902142:3044]] Start write 2025-12-23T18:30:44.290340Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:385: [72075186224037894][0][MLP][mlp-consumer] Handle TEvPQ::TEvEndOffsetChanged. Offset: 1 2025-12-23T18:30:44.290352Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:174: [[19:7587122625896902142:3044]] Handle TEvPersQueue::TEvResponse 2025-12-23T18:30:44.290394Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:689: [72075186224037894][0][MLP][mlp-consumer] Fetching 100 messages from offset 0 from [19:7587122625896902110:2477] >>>>>> reload PQ tablet 2025-12-23T18:30:44.292081Z node 19 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'topic1' partition: 0 messageNo: 0 requestId: error: tablet will be restarted right now 2025-12-23T18:30:44.294665Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:708: [72075186224037894][0][MLP][mlp-consumer] Handle TEvPersQueue::TEvResponse 2025-12-23T18:30:44.296947Z node 19 :PQ_MLP_CONSUMER WARN: mlp_consumer.cpp:713: [72075186224037894][0][MLP][mlp-consumer] Fetch messages failed: Status: 128 ErrorReason: "tablet will be restarted right now" ErrorCode: INITIALIZING 2025-12-23T18:30:44.316101Z node 19 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:30:44.316167Z node 19 :PERSQUEUE INFO: pq_impl.cpp:599: [PQ: 72075186224037894] has a tx writes info 2025-12-23T18:30:44.317196Z node 19 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][0][StateInit] bootstrapping 0 [19:7587122625896902187:2484] 2025-12-23T18:30:44.325368Z node 19 :PERSQUEUE INFO: partition_init.cpp:1016: [topic1:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-12-23T18:30:44.325908Z node 19 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][0][StateInit] init complete for topic 'topic1' partition 0 generation 2 [19:7587122625896902187:2484] 2025-12-23T18:30:44.326091Z node 19 :PERSQUEUE INFO: partition_mlp.cpp:146: [72075186224037894][Partition][0][StateIdle] Creating MLP consumer 'mlp-consumer' 2025-12-23T18:30:44.326531Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:105: [72075186224037894][0][MLP][mlp-consumer] Start MLP consumer mlp-consumer 2025-12-23T18:30:44.326722Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:355: [72075186224037894][0][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: true DeadLetterPolicy: DEAD_LETTER_POLICY_DELETE MaxProcessingAttempts: 10 DefaultProcessingTimeoutSeconds: 0 2025-12-23T18:30:44.328122Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:185: [72075186224037894][0][MLP][mlp-consumer] HandleOnInit TEvKeyValue::TEvResponse 2025-12-23T18:30:44.328157Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:225: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2025-12-23T18:30:44.328184Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:278: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2025-12-23T18:30:44.328215Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:346: [72075186224037894][0][MLP][mlp-consumer] Try commit offset: 0 vs 0 2025-12-23T18:30:44.328266Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:689: [72075186224037894][0][MLP][mlp-consumer] Fetching 100 messages from offset 0 from [19:7587122625896902187:2484] 2025-12-23T18:30:44.328884Z node 19 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72075186224037894' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' size 152 2025-12-23T18:30:44.329816Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:702: [72075186224037894][0][MLP][mlp-consumer] Initialized 2025-12-23T18:30:44.329868Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:708: [72075186224037894][0][MLP][mlp-consumer] Handle TEvPersQueue::TEvResponse 2025-12-23T18:30:44.330024Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:758: [72075186224037894][0][MLP][mlp-consumer] Fetched 1 messages 2025-12-23T18:30:44.330061Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:670: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2025-12-23T18:30:44.330087Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:495: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2025-12-23T18:30:44.330129Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:570: [72075186224037894][0][MLP][mlp-consumer] Persist 2025-12-23T18:30:44.330439Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:639: [72075186224037894][0][MLP][mlp-consumer] Delete old WAL: c0000000000wmlp-consumer|0000000000000000 - c0000000000wmlp-consumer|0000000000000000 2025-12-23T18:30:44.330502Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:649: [72075186224037894][0][MLP][mlp-consumer] Write Snapshot Count: 1 Size: 69 cookie: 3 2025-12-23T18:30:44.331528Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:301: [72075186224037894][0][MLP][mlp-consumer] HandleOnWrite TEvKeyValue::TEvResponse Status: 1 Cookie: 3 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2025-12-23T18:30:44.331571Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:326: [72075186224037894][0][MLP][mlp-consumer] TX write finished 2025-12-23T18:30:44.331601Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:346: [72075186224037894][0][MLP][mlp-consumer] Try commit offset: 0 vs 0 2025-12-23T18:30:44.331633Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:495: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2025-12-23T18:30:44.331662Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:570: [72075186224037894][0][MLP][mlp-consumer] Persist 2025-12-23T18:30:44.331691Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:576: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2025-12-23T18:30:44.331715Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:670: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2025-12-23T18:30:44.936283Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:30:44.936340Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:45.142516Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:689: [72075186224037894][0][MLP][mlp-consumer] Fetching 100 messages from offset 0 from [19:7587122625896902110:2477] 2025-12-23T18:30:45.142610Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:495: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2025-12-23T18:30:45.142650Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:570: [72075186224037894][0][MLP][mlp-consumer] Persist 2025-12-23T18:30:45.142696Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:576: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2025-12-23T18:30:45.291957Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:29: [[19:7587122630191869523:3081]] Start describe 2025-12-23T18:30:45.292330Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:40: 
[[19:7587122630191869523:3081]] Handle NDescriber::TEvDescribeTopicsResponse 2025-12-23T18:30:45.292346Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:115: [[19:7587122630191869523:3081]] Start write 2025-12-23T18:30:45.293584Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:174: [[19:7587122630191869523:3081]] Handle TEvPersQueue::TEvResponse 2025-12-23T18:30:45.330874Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:670: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2025-12-23T18:30:45.330926Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:495: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2025-12-23T18:30:45.330975Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:570: [72075186224037894][0][MLP][mlp-consumer] Persist 2025-12-23T18:30:45.331009Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:576: [72075186224037894][0][MLP][mlp-consumer] Batch is empty |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |91.4%| [TM] {RESULT} ydb/core/persqueue/public/mlp/ut/unittest |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/public/mlp/ut/unittest >> GroupWriteTest::ByTableName [GOOD] |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |91.4%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> TInterconnectTest::TestCrossConnect [GOOD] >> TInterconnectTest::TestManyEventsWithReconnect |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 12927096868924421764 2025-12-23T18:30:14.502352Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-12-23T18:30:14.529668Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-12-23T18:30:14.529753Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-12-23T18:30:14.532889Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-12-23T18:30:14.551126Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:30:14.554277Z 1 
00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-12-23T18:30:49.597810Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-12-23T18:30:49.597974Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:30:49.669287Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> TInterconnectTest::TestManyEventsWithReconnect [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] >> DataStreams::TestNonChargeableUser [GOOD] >> DataStreams::TestPutEmptyMessage >> DataStreams::TestUpdateStorage [GOOD] >> DataStreams::TestStreamTimeRetention >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] >> TTxDataShardRecomputeKMeansScan::EmptyCluster [GOOD] >> TTxDataShardReshuffleKMeansScan::BadRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:78:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:81:2057] recipient: [5:80:2112] Leader for TabletID 72057594037927937 is [5:82:2113] sender: [5:83:2057] recipient: [5:80:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:82:2113] Leader for TabletID 72057594037927937 is [5:82:2113] sender: [5:198:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:79:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:83:2113] sender: [6:84:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:83:2113] Leader for TabletID 72057594037927937 is [6:83:2113] sender: [6:199:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> DataStreams::TestControlPlaneAndMeteringData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 
255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 >> IndexBuildTest::BaseCaseUniq [GOOD] >> IndexBuildTest::CancelBuild |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS >> VectorIndexBuildTest::CancelBroken [GOOD] >> VectorIndexBuildTest::CreateBuildProposeReject |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move >> KeyValueReadStorage::ReadRangeOk1Key >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk [GOOD] >> KeyValueReadStorage::ReadRangeNoData [GOOD] |91.4%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |91.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> DataStreams::TestGetRecordsWithCount [GOOD] >> DataStreams::TestInvalidRetentionCombinations >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2025-12-23T18:30:52.890452Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-12-23T18:30:52.892768Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-12-23T18:30:52.898032Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-12-23T18:30:52.898106Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-12-23T18:30:52.903925Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:124} Inline read request KeyValue# 1 Status# OK 2025-12-23T18:30:52.903996Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:136} Expected OK or UNKNOWN and given OK readCount# 0 2025-12-23T18:30:52.904039Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> DataStreams::Test_AutoPartitioning_Describe [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled >> DataStreams::TestPutRecordsWithRead [GOOD] >> DataStreams::TestPutRecordsCornerCases >> TKeyValueTest::TestWrite200KDeleteThenResponseError >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueTest::TestWriteLongKey |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> VectorIndexBuildTest::CreateBuildProposeReject [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReadyUniq >> DataStreams::TestPutEmptyMessage [GOOD] >> DataStreams::TestListStreamConsumers |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |91.4%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.4%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DataStreams::TestReservedResourcesMetering [GOOD] >> DataStreams::TestReservedStorageMetering |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::CreateBuildProposeReject [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:30:23.751038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:30:23.751130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:23.751195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:30:23.751249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:30:23.751396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:30:23.751431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:30:23.751496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:23.751574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:30:23.752518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:30:23.752853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:30:23.835323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:23.835397Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:23.841843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:30:23.842007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:30:23.842120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2025-12-23T18:30:23.844960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:30:23.845148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:30:23.845814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:23.846007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:30:23.848253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:23.848449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:30:23.849811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:30:23.849887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:23.850049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:30:23.850122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:30:23.850180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:30:23.850364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:30:23.985862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.986987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.987124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.987213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.987293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.987407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.987492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.987553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.987620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.987684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.987785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.987882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.987980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.988086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.988191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1164:3111], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 254, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:30:55.344918Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710766:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710766 msg type: 269090816 2025-12-23T18:30:55.345033Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710766, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:30:55.345279Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710766, at schemeshard: 72057594046678944 2025-12-23T18:30:55.345322Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710766, ready parts: 0/1, is published: true 2025-12-23T18:30:55.345376Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710766, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710766 at step: 5000047 FAKE_COORDINATOR: advance: minStep5000047 State->FrontStep: 5000046 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710766 at step: 5000047 2025-12-23T18:30:55.345641Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000047, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:55.345766Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710766 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 25769805936 } } Step: 5000047 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:30:55.345834Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710766:0 HandleReply TEvOperationPlan: step# 5000047 2025-12-23T18:30:55.345890Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710766:0 128 -> 240 2025-12-23T18:30:55.348313Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710766:0, at schemeshard: 72057594046678944 2025-12-23T18:30:55.348373Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710766:0 ProgressState 2025-12-23T18:30:55.348459Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: 
Part operation is done id#281474976710766:0 progress is 1/1 2025-12-23T18:30:55.348510Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710766 ready parts: 1/1 2025-12-23T18:30:55.348558Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710766:0 progress is 1/1 2025-12-23T18:30:55.348591Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710766 ready parts: 1/1 2025-12-23T18:30:55.348629Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710766, ready parts: 1/1, is published: true 2025-12-23T18:30:55.348689Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:127:2152] message: TxId: 281474976710766 2025-12-23T18:30:55.348750Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710766 ready parts: 1/1 2025-12-23T18:30:55.348802Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710766:0 2025-12-23T18:30:55.348850Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710766:0 2025-12-23T18:30:55.348933Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710766 2025-12-23T18:30:55.350944Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710766 2025-12-23T18:30:55.351010Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710766 2025-12-23T18:30:55.351073Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 103, txId# 281474976710766 2025-12-23T18:30:55.351201Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1164:3111], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 254, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710766 2025-12-23T18:30:55.358304Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2025-12-23T18:30:55.358519Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1164:3111], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 254, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:30:55.358593Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-12-23T18:30:55.361148Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done 2025-12-23T18:30:55.361331Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1164:3111], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 254, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:30:55.361396Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 103, subscribers count# 1 2025-12-23T18:30:55.361588Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:30:55.361665Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [6:1165:3112] TestWaitNotification: OK eventTxId 103 2025-12-23T18:30:55.362352Z node 6 :BUILD_INDEX 
DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 103 2025-12-23T18:30:55.362784Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 103 Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: INVALID UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 103 Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: INVALID UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination >> IndexBuildTest::CancelBuild [GOOD] >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> DataStreams::ChangeBetweenRetentionModes |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> DataStreams::TestInvalidRetentionCombinations [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:30:17.912911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:30:17.913021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:17.913067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:30:17.913130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:30:17.913245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:30:17.913284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:30:17.913359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:17.913459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:30:17.914503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:30:17.914853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:30:18.012988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:18.013047Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:18.029666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:30:18.029891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:30:18.030093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:30:18.052114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:30:18.052397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:30:18.053215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:18.053437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:30:18.058420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:18.058639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:30:18.060296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:30:18.060377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:18.060579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:30:18.060638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:30:18.060687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:30:18.060825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:30:18.196012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.196935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.197930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-12-23T18:30:57.303671Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:128:2153] message: TxId: 281474976710760 2025-12-23T18:30:57.303731Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:30:57.303767Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2025-12-23T18:30:57.303798Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710760:0 2025-12-23T18:30:57.303883Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-12-23T18:30:57.306454Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-12-23T18:30:57.306562Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710760 2025-12-23T18:30:57.306635Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2025-12-23T18:30:57.306790Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [5:1562:3424], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2025-12-23T18:30:57.308867Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking 2025-12-23T18:30:57.309012Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [5:1562:3424], 
AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:30:57.309103Z node 5 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-12-23T18:30:57.311043Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled 2025-12-23T18:30:57.311206Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [5:1562:3424], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:30:57.311263Z node 5 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-12-23T18:30:57.311443Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:30:57.311504Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:1661:3512] TestWaitNotification: OK eventTxId 102 2025-12-23T18:30:57.315049Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-12-23T18:30:57.315365Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: 
STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2025-12-23T18:30:57.318660Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:30:57.318981Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 354us result status StatusSuccess 2025-12-23T18:30:57.319527Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:30:57.321876Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:30:57.322138Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 292us result status StatusPathDoesNotExist 2025-12-23T18:30:57.322393Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 39 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TTxDataShardReshuffleKMeansScan::BadRequest [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPosting |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TxUsage::WriteToTopic_Demo_20_RestartNo_Query [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD] Test command err: 2025-12-23T18:30:43.801719Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122621760054075:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:43.801841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b2c/r3tmp/tmp9Urfc2/pdisk_1.dat 2025-12-23T18:30:44.042586Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:44.105965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:44.106088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:44.130302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:44.172390Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4951, node 1 2025-12-23T18:30:44.257847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:44.257870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:44.257896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:44.258006Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:30:44.326426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4483 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514644188 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:44.531563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:44.585150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:4483 2025-12-23T18:30:44.756067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:30:44.817627Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:48.357770Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587122643408554631:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:48.357837Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:30:48.399930Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b2c/r3tmp/tmpQJT8J0/pdisk_1.dat 2025-12-23T18:30:48.554697Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:48.581129Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:48.598764Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:48.598866Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:48.610164Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24491, node 4 2025-12-23T18:30:48.786705Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:30:48.848357Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:48.848378Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:48.848383Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:48.848455Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:65261 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514648731 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:49.151985Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:49.246421Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:30:49.389236Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:65261 2025-12-23T18:30:49.480081Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:30:49.718323Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:30:49.748964Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7587122647703523535:3178], for# user2@builtin, access# DescribeSchema 2025-12-23T18:30:49.757332Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7587122647703523538:3179], for# user2@builtin, access# DescribeSchema 2025-12-23T18:30:49.772084Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:30:53.780275Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7587122665142560304:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:53.780365Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b2c/r3tmp/tmpZjxXiS/pdisk_1.dat 2025-12-23T18:30:53.833647Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:54.005310Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:54.025163Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:54.025300Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:54.033803Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17159, node 7 2025-12-23T18:30:54.140501Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:30:54.150581Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:54.150605Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:54.150613Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:54.150711Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19083 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514654107 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:54.421643Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:54.485133Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:19083 2025-12-23T18:30:54.700937Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting...
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 10, code: 500080
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 144, storage 0, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 130048, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 1049600, code: 500080 2025-12-23T18:30:54.810805Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |91.4%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] Test command err: 2025-12-23T18:30:43.632203Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122621395622671:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:43.632380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b32/r3tmp/tmp1ypNnG/pdisk_1.dat 2025-12-23T18:30:43.863404Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:43.888815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:43.888949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:43.931385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:44.013801Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7330, node 1 2025-12-23T18:30:44.033838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:30:44.114101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:44.114130Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:44.114145Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:44.114242Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62665 WaitRootIsUp 'Root'... 
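[editor's note] The four validation errors above spell out the limits this test environment enforces when creating a data stream: retention hours and storage (MiB) must fall into one of the listed windows ({ hours : [0, 24], storage : [0, 0] } or { hours : [0, 168], storage : [51200, 1048576] }), and the per-partition write_speed must be exactly 131072, 524288, or 1048576 bytes/s. The following is a minimal, self-contained sketch of an equivalent client-side pre-check; the struct and function names are hypothetical and not part of any YDB or DataStreams API, and the numeric limits are copied verbatim from the error messages above.

// Hypothetical pre-check mirroring the limits quoted in the log above.
// Nothing here is real YDB API; the constants come from the error text.
#include <cstdint>
#include <iostream>
#include <set>
#include <vector>

struct RetentionWindow {
    int64_t maxHours;      // inclusive upper bound on retention hours
    int64_t minStorageMb;  // inclusive lower bound on storage, MiB
    int64_t maxStorageMb;  // inclusive upper bound on storage, MiB
};

// Windows as printed in the log:
// { hours : [0, 24], storage : [0, 0] } and { hours : [0, 168], storage : [51200, 1048576] }.
static const std::vector<RetentionWindow> kWindows = {
    {24, 0, 0},
    {168, 51200, 1048576},
};

// Allowed per-partition write speeds (bytes per second) from the log.
static const std::set<int64_t> kWriteSpeeds = {131072, 524288, 1048576};

bool RetentionFits(int64_t hours, int64_t storageMb) {
    for (const auto& w : kWindows) {
        if (hours >= 0 && hours <= w.maxHours &&
            storageMb >= w.minStorageMb && storageMb <= w.maxStorageMb) {
            return true;
        }
    }
    return false;
}

bool WriteSpeedAllowed(int64_t bytesPerSecond) {
    return kWriteSpeeds.count(bytesPerSecond) > 0;
}

int main() {
    // The exact combinations rejected in the log: hours 168 / storage 10,
    // hours 144 / storage 0, write_speed 130048 and 1049600.
    std::cout << RetentionFits(168, 10) << "\n";     // 0 -> rejected, code 500080
    std::cout << RetentionFits(144, 0) << "\n";      // 0 -> rejected, code 500080
    std::cout << RetentionFits(168, 51200) << "\n";  // 1 -> accepted
    std::cout << WriteSpeedAllowed(130048) << "\n";  // 0 -> rejected, code 500080
    std::cout << WriteSpeedAllowed(524288) << "\n";  // 1 -> accepted
    return 0;
}

Under these assumptions, the sketch reproduces the accept/reject outcomes shown in the log; the remainder of the output below continues the original test run unchanged.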
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514644034 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:44.382053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:44.433173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:62665 2025-12-23T18:30:44.600515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:30:44.643999Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:44.844727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:45.086385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:48.752511Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587122642643394399:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:48.752618Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b32/r3tmp/tmpA4l3oK/pdisk_1.dat 2025-12-23T18:30:48.874569Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:49.072632Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:49.075054Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:49.083758Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:49.083859Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:49.093594Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8944, node 4 2025-12-23T18:30:49.279517Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:49.279549Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:49.279557Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:49.279657Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:30:49.511855Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8341 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514649200 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:49.602886Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:49.696774Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:30:49.860302Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:8341 2025-12-23T18:30:49.909727Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } ALTER_SCHEME: { Name: "test-topic" Split { Partition: 1 SplitBoundary: "a" } } 2025-12-23T18:30:51.128643Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 107:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:52.432927Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:52.556110Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:52.764371Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:52.979767Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 
281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:54.685081Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7587122670465800803:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:54.685159Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b32/r3tmp/tmpJaK7PY/pdisk_1.dat 2025-12-23T18:30:54.728192Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:54.871749Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:54.891839Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:54.891927Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:54.912990Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:54.941514Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 24483, node 7 2025-12-23T18:30:55.032005Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:55.032031Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:55.032039Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:55.032141Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1911 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514654989 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:30:55.292313Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:55.351807Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:1911 2025-12-23T18:30:55.611707Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:30:55.625661Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-12-23T18:30:55.693346Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |91.4%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Table >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameToLongKey |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> DataStreams::TestListStreamConsumers [GOOD] >> DataStreams::TestListShards1Shard >> TSchemeShardMoveTest::Reject >> DataStreams::TestPutRecordsCornerCases [GOOD] >> DataStreams::TestPutRecords >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |91.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> TSchemeShardMoveTest::MoveMigratedTable >> DataStreams::TestShardPagination [GOOD] |91.4%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |91.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool >> TSchemeShardMoveTest::Boot >> TSchemeShardMoveTest::MoveIndexSameDst |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |91.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |91.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] |91.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |91.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> TSchemeShardMoveTest::MoveIndex >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable >> TSchemeShardMoveTest::Replace ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2025-12-23T18:30:46.326253Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122634511937076:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:46.326998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b24/r3tmp/tmpvmxSOG/pdisk_1.dat 2025-12-23T18:30:46.681835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:46.733927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:46.734056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:46.745479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:46.898227Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:46.933265Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 1029, node 1 2025-12-23T18:30:47.030165Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.016023s 2025-12-23T18:30:47.082071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:47.082115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:47.082128Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:47.082298Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26479 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:30:47.333404Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514646869 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:47.374536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:47.442132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:26479 2025-12-23T18:30:47.629789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting...
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2025-12-23T18:30:48.010949Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122643101873863:3582] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestStreamStorageRetention\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:30:51.373743Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587122655550201341:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:51.373815Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b24/r3tmp/tmpiXBJJP/pdisk_1.dat 2025-12-23T18:30:51.402497Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:51.532670Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:51.554039Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:51.554135Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:51.583568Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3007, node 4 2025-12-23T18:30:51.689788Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:51.689820Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:51.689834Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:51.689947Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:30:51.693302Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26383 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514651671 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:51.995199Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:52.063553Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:26383 2025-12-23T18:30:52.312984Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:30:52.397028Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:57.942226Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7587122681648397934:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:57.942300Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b24/r3tmp/tmpFPtetf/pdisk_1.dat 2025-12-23T18:30:57.972247Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:58.054385Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:58.072310Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:58.072403Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:58.105922Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11434, node 7 2025-12-23T18:30:58.222049Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:58.222077Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:58.222086Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:58.222174Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:30:58.238338Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514658181 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:30:58.448005Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:58.493488Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:23266 2025-12-23T18:30:58.708170Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:30:58.962182Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |91.5%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |91.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk >> TSchemeShardMoveTest::Boot [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly >> TSchemeShardMoveTest::MoveTableForBackup >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:79:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:84:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:83:2113] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:199:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:83:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:105:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:85:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:88:2057] recipient: [10:87:2117] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:90:2057] recipient: [10:87:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:89:2118] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:205:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:86:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:90:2118] sender: [11:91:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:90:2118] Leader for TabletID 72057594037927937 is [11:90:2118] sender: [11:206:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:87:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:89:2118] Leader for TabletID 72057594037927937 is [12:91:2119] sender: [12:92:2057] recipient: [12:89:2118] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:91:2119] Leader for TabletID 72057594037927937 is [12:91:2119] sender: [12:111:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:88:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:91:2057] recipient: [13:90:2119] Leader for TabletID 72057594037927937 is [13:92:2120] sender: [13:93:2057] recipient: [13:90:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:92:2120] Leader for TabletID 72057594037927937 is [13:92:2120] sender: [13:112:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:91:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:94:2057] recipient: [14:93:2122] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:96:2057] recipient: [14:93:2122] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:95:2123] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:211:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:91:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:94:2057] recipient: [15:93:2122] Leader for TabletID 72057594037927937 is [15:95:2123] sender: [15:96:2057] recipient: [15:93:2122] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! 
new actor is[15:95:2123] Leader for TabletID 72057594037927937 is [15:95:2123] sender: [15:211:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> DataStreams::TestUnsupported [GOOD] >> DataStreams::TestReservedStorageMetering [GOOD] >> DataStreams::TestReservedConsumersMetering >> TSchemeShardMoveTest::MoveMigratedTable [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex >> TSchemeShardMoveTest::MoveIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted >> TSchemeShardMoveTest::OneTable [GOOD] >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD] Test command err: 2025-12-23T18:30:46.335455Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122635183924351:2157];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:46.335555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b25/r3tmp/tmpkw3LG3/pdisk_1.dat 2025-12-23T18:30:46.578304Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:46.615160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:46.615275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:46.620925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:46.839004Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:46.853705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 21578, node 1 2025-12-23T18:30:46.935365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:46.935387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:46.935397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:46.935508Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1333 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514646841 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:47.219411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:47.276824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:30:47.344919Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1333 2025-12-23T18:30:47.456222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:30:47.630231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:51.345186Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587122655253793667:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:51.345284Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b25/r3tmp/tmpIkf6Oi/pdisk_1.dat 2025-12-23T18:30:51.393489Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:51.540631Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:51.562580Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:51.562786Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:51.598251Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:51.600243Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18787, node 4 2025-12-23T18:30:51.744230Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:51.744267Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:51.744275Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:51.744401Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4945 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-12-23T18:30:52.009046Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514651685 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:52.013697Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:52.091640Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:4945 2025-12-23T18:30:52.321777Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:30:52.364122Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:52.583701Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:52.638218Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_ ... 
r: "46" shard_id: "shard-000000" } records { sequence_number: "47" shard_id: "shard-000000" } records { sequence_number: "48" shard_id: "shard-000000" } records { sequence_number: "49" shard_id: "shard-000000" } records { sequence_number: "50" shard_id: "shard-000000" } records { sequence_number: "51" shard_id: "shard-000000" } records { sequence_number: "52" shard_id: "shard-000000" } records { sequence_number: "53" shard_id: "shard-000000" } records { sequence_number: "54" shard_id: "shard-000000" } records { sequence_number: "55" shard_id: "shard-000000" } records { sequence_number: "56" shard_id: "shard-000000" } records { sequence_number: "57" shard_id: "shard-000000" } records { sequence_number: "58" shard_id: "shard-000000" } records { sequence_number: "59" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "60" shard_id: "shard-000000" } records { sequence_number: "61" shard_id: "shard-000000" } records { sequence_number: "62" shard_id: "shard-000000" } records { sequence_number: "63" shard_id: "shard-000000" } records { sequence_number: "64" shard_id: "shard-000000" } records { sequence_number: "65" shard_id: "shard-000000" } records { sequence_number: "66" shard_id: "shard-000000" } records { sequence_number: "67" shard_id: "shard-000000" } records { sequence_number: "68" shard_id: "shard-000000" } records { sequence_number: "69" shard_id: "shard-000000" } records { sequence_number: "70" shard_id: "shard-000000" } records { sequence_number: "71" shard_id: "shard-000000" } records { sequence_number: "72" shard_id: "shard-000000" } records { sequence_number: "73" shard_id: "shard-000000" } records { sequence_number: "74" shard_id: "shard-000000" } records { sequence_number: "75" shard_id: "shard-000000" } records { sequence_number: "76" shard_id: "shard-000000" } records { sequence_number: "77" shard_id: "shard-000000" } records { sequence_number: "78" shard_id: "shard-000000" } records { sequence_number: "79" shard_id: "shard-000000" } records { sequence_number: "80" shard_id: "shard-000000" } records { sequence_number: "81" shard_id: "shard-000000" } records { sequence_number: "82" shard_id: "shard-000000" } records { sequence_number: "83" shard_id: "shard-000000" } records { sequence_number: "84" shard_id: "shard-000000" } records { sequence_number: "85" shard_id: "shard-000000" } records { sequence_number: "86" shard_id: "shard-000000" } records { sequence_number: "87" shard_id: "shard-000000" } records { sequence_number: "88" shard_id: "shard-000000" } records { sequence_number: "89" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "90" shard_id: "shard-000000" } records { sequence_number: "91" shard_id: "shard-000000" } records { sequence_number: "92" shard_id: "shard-000000" } records { sequence_number: "93" shard_id: "shard-000000" } records { sequence_number: "94" shard_id: "shard-000000" } records { sequence_number: "95" shard_id: "shard-000000" } records { sequence_number: "96" shard_id: "shard-000000" } records { sequence_number: "97" shard_id: "shard-000000" } records { sequence_number: "98" shard_id: "shard-000000" } records { sequence_number: "99" shard_id: "shard-000000" } records { sequence_number: "100" shard_id: "shard-000000" } records { sequence_number: "101" shard_id: "shard-000000" } records { sequence_number: "102" shard_id: "shard-000000" } records { sequence_number: "103" shard_id: "shard-000000" } records { sequence_number: "104" shard_id: "shard-000000" } records { sequence_number: "105" shard_id: 
"shard-000000" } records { sequence_number: "106" shard_id: "shard-000000" } records { sequence_number: "107" shard_id: "shard-000000" } records { sequence_number: "108" shard_id: "shard-000000" } records { sequence_number: "109" shard_id: "shard-000000" } records { sequence_number: "110" shard_id: "shard-000000" } records { sequence_number: "111" shard_id: "shard-000000" } records { sequence_number: "112" shard_id: "shard-000000" } records { sequence_number: "113" shard_id: "shard-000000" } records { sequence_number: "114" shard_id: "shard-000000" } records { sequence_number: "115" shard_id: "shard-000000" } records { sequence_number: "116" shard_id: "shard-000000" } records { sequence_number: "117" shard_id: "shard-000000" } records { sequence_number: "118" shard_id: "shard-000000" } records { sequence_number: "119" shard_id: "shard-000000" } 2025-12-23T18:30:56.345358Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587122655253793667:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:56.345429Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "120" shard_id: "shard-000000" } records { sequence_number: "121" shard_id: "shard-000000" } records { sequence_number: "122" shard_id: "shard-000000" } records { sequence_number: "123" shard_id: "shard-000000" } records { sequence_number: "124" shard_id: "shard-000000" } records { sequence_number: "125" shard_id: "shard-000000" } records { sequence_number: "126" shard_id: "shard-000000" } records { sequence_number: "127" shard_id: "shard-000000" } records { sequence_number: "128" shard_id: "shard-000000" } records { sequence_number: "129" shard_id: "shard-000000" } records { sequence_number: "130" shard_id: "shard-000000" } records { sequence_number: "131" shard_id: "shard-000000" } records { sequence_number: "132" shard_id: "shard-000000" } records { sequence_number: "133" shard_id: "shard-000000" } records { sequence_number: "134" shard_id: "shard-000000" } records { sequence_number: "135" shard_id: "shard-000000" } records { sequence_number: "136" shard_id: "shard-000000" } records { sequence_number: "137" shard_id: "shard-000000" } records { sequence_number: "138" shard_id: "shard-000000" } records { sequence_number: "139" shard_id: "shard-000000" } records { sequence_number: "140" shard_id: "shard-000000" } records { sequence_number: "141" shard_id: "shard-000000" } records { sequence_number: "142" shard_id: "shard-000000" } records { sequence_number: "143" shard_id: "shard-000000" } records { sequence_number: "144" shard_id: "shard-000000" } records { sequence_number: "145" shard_id: "shard-000000" } records { sequence_number: "146" shard_id: "shard-000000" } records { sequence_number: "147" shard_id: "shard-000000" } records { sequence_number: "148" shard_id: "shard-000000" } records { sequence_number: "149" shard_id: "shard-000000" } 2025-12-23T18:31:00.683712Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7587122695117242586:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:00.684775Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b25/r3tmp/tmp4lUiDv/pdisk_1.dat 2025-12-23T18:31:00.731121Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:00.851635Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:00.875224Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:00.875313Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:00.905003Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29036, node 7 2025-12-23T18:31:01.018857Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:01.023923Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:01.023955Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:01.023963Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:01.024090Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514660988 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:01.312904Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:31:01.448693Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:31:01.692461Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24383 2025-12-23T18:31:01.738898Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:31:01.758826Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 |91.5%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:31:02.885046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:02.885185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:02.885226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:02.885267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:02.885307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:02.885335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:02.885395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:02.885456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:02.886457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:02.886797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:02.987606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:02.987688Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:02.995680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:02.996034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:02.996230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:03.000544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:31:03.000791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:03.001732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:03.002024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:03.004585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:03.004803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:03.006217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:03.006310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:03.006517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:03.006577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:31:03.006626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:31:03.006819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:31:03.015377Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:31:03.168235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:31:03.168506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:03.168750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:31:03.168821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:31:03.169076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:31:03.169151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:03.172207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:03.172425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:31:03.172658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:03.172747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:31:03.172794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:31:03.172827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:31:03.175165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:03.175258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:03.175303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:31:03.177248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:03.177297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:03.177356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:03.177406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:31:03.181420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:31:03.183656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:31:03.183866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:31:03.185034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:03.185181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:03.185225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:03.185485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:31:03.185536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:03.185770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:31:03.185859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:31:03.188203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:03.188265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
ecute, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:31:06.240310Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:31:06.240404Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:31:06.240459Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:31:06.240507Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 129 -> 240 2025-12-23T18:31:06.241610Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:31:06.241729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:31:06.241773Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-12-23T18:31:06.241823Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2025-12-23T18:31:06.241875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:31:06.243471Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:31:06.243563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:31:06.243594Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-12-23T18:31:06.243627Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-12-23T18:31:06.243662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-12-23T18:31:06.243746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-12-23T18:31:06.246331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:31:06.246405Z 
node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:06.246757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-12-23T18:31:06.246911Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2025-12-23T18:31:06.246959Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-12-23T18:31:06.247004Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2025-12-23T18:31:06.247043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-12-23T18:31:06.247084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-12-23T18:31:06.247162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:339:2317] message: TxId: 108 2025-12-23T18:31:06.247217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-12-23T18:31:06.247261Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2025-12-23T18:31:06.247305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 108:0 2025-12-23T18:31:06.247420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-12-23T18:31:06.251091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-12-23T18:31:06.251652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-12-23T18:31:06.253363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-12-23T18:31:06.253426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:838:2794] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-12-23T18:31:06.254271Z node 2 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-12-23T18:31:06.254354Z node 2 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2025-12-23T18:31:06.271880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 313 RawX2: 8589936890 } TabletId: 72075186233409546 State: 4 2025-12-23T18:31:06.271995Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 
72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-12-23T18:31:06.274089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-12-23T18:31:06.274205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:31:06.274695Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-12-23T18:31:06.274906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:06.275203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-12-23T18:31:06.275466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:31:06.275520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-12-23T18:31:06.275598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 Forgetting tablet 72075186233409546 2025-12-23T18:31:06.283128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:31:06.283223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:31:06.283333Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2025-12-23T18:31:06.284211Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:31:06.284409Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 227us result status StatusSuccess 2025-12-23T18:31:06.284848Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] >> TSchemeShardMoveTest::MoveTableForBackup [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence >> TSchemeShardMoveTest::Replace [GOOD] >> TSchemeShardMoveTest::ReplaceVectorIndex >> TKeyValueTest::TestWriteLongKey [GOOD] >> DataStreams::TestListShards1Shard [GOOD] >> TBackupCollectionTests::HiddenByFeatureFlag >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:31:04.274311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:04.274463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:04.274514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:04.274560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:04.274604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:04.274632Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:04.274694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:04.274832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:04.276040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:04.276354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:04.380604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:04.380714Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:04.392600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:04.392840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:04.392972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:04.396426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:31:04.396607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:04.397347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:04.397545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:04.399314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:04.399468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:04.400356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:04.400406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:04.400521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:04.400554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:31:04.400585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:31:04.400667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:31:04.556981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.558063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.558227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.558351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.558447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.558519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.558588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.558697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.558830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.558914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.559000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.559104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.559200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.559301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.559396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR 
... 5186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-12-23T18:31:06.910872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:06.910919Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:31:06.910961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:31:06.911010Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2025-12-23T18:31:06.911979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 726 RawX2: 8589937300 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-12-23T18:31:06.912019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 2 2025-12-23T18:31:06.912126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: Source { RawX1: 726 RawX2: 8589937300 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-12-23T18:31:06.912169Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:31:06.912221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 726 RawX2: 8589937300 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-12-23T18:31:06.912261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:06.912286Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2025-12-23T18:31:06.912306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:31:06.912329Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:2 129 -> 240 2025-12-23T18:31:06.917365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:31:06.917610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 
2025-12-23T18:31:06.920223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:31:06.920420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:31:06.920473Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:06.920526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2025-12-23T18:31:06.920647Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 2/3 2025-12-23T18:31:06.920687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-12-23T18:31:06.920743Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 2/3 2025-12-23T18:31:06.920783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-12-23T18:31:06.920823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2025-12-23T18:31:06.921326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-12-23T18:31:06.921680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-12-23T18:31:06.921723Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:06.921756Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 40], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2025-12-23T18:31:06.921819Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 3/3 2025-12-23T18:31:06.921840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-12-23T18:31:06.921869Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 3/3 2025-12-23T18:31:06.921889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-12-23T18:31:06.921915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2025-12-23T18:31:06.921964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-12-23T18:31:06.922015Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all 
the parts is done, operation id: 103:0 2025-12-23T18:31:06.922078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:31:06.922202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 4 2025-12-23T18:31:06.922242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:31:06.922278Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:1 2025-12-23T18:31:06.922299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:1 2025-12-23T18:31:06.922343Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2025-12-23T18:31:06.922365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:31:06.922385Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:2 2025-12-23T18:31:06.922403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:2 2025-12-23T18:31:06.922445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2025-12-23T18:31:06.922466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:31:06.922860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:31:06.922915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2025-12-23T18:31:06.922995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:31:06.923044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:31:06.923081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:31:06.923109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:31:06.923147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:31:06.934767Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: 
TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:31:06.935374Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [2:676:2665] Handle TEvGetProxyServicesRequest TestWaitNotification wait txId: 103 2025-12-23T18:31:06.988992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:31:06.989048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:31:06.989488Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:31:06.989607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:31:06.989651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:1081:2963] TestWaitNotification: OK eventTxId 103 |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |91.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:31:04.360821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:04.360931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:04.360981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:04.361015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:04.361046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:04.361072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:04.361127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:04.361181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:04.361881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:04.362104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:04.459980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:04.460044Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:04.480479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:04.480756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:04.480932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:04.497997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:31:04.498190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:04.498850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:04.499056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:04.502874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:04.503085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:04.504615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:04.504731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:04.504938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:04.504992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:31:04.505051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:31:04.505202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:31:04.515296Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:243:2058] recipient: [1:15:2062] 2025-12-23T18:31:04.650518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:31:04.650793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:04.651061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:31:04.651118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:31:04.651360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:31:04.651432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:04.654685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:04.654990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:31:04.655220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:04.655288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:31:04.655334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:31:04.655372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:31:04.658933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:04.659017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:04.659085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:31:04.661397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:04.661458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:04.661526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:04.661580Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:31:04.665804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:31:04.668345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:31:04.668543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:31:04.669692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:04.669842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:04.669888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:04.670188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:31:04.670253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:04.670464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:31:04.670573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:31:04.672917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:04.672969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
94046678944 2025-12-23T18:31:07.301195Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:07.301291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:07.301346Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2025-12-23T18:31:07.301392Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2025-12-23T18:31:07.303169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-12-23T18:31:07.303218Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-12-23T18:31:07.303290Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2025-12-23T18:31:07.303319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:31:07.303349Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2025-12-23T18:31:07.303376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:31:07.303429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-12-23T18:31:07.303479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:128:2153] message: TxId: 281474976710760 2025-12-23T18:31:07.303514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:31:07.303543Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2025-12-23T18:31:07.303567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710760:0 2025-12-23T18:31:07.303620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-12-23T18:31:07.305252Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-12-23T18:31:07.305305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710760 2025-12-23T18:31:07.305378Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2025-12-23T18:31:07.305491Z node 2 :BUILD_INDEX 
DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:855:2814], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2025-12-23T18:31:07.306996Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2025-12-23T18:31:07.307116Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:855:2814], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:31:07.307169Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-12-23T18:31:07.308639Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2025-12-23T18:31:07.308755Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:855:2814], AlterMainTableTxId: 0, 
AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:31:07.308801Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-12-23T18:31:07.308902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:31:07.308944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1042:2989] TestWaitNotification: OK eventTxId 102 2025-12-23T18:31:07.309490Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:31:07.309714Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 239us result status StatusSuccess 2025-12-23T18:31:07.310257Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 41 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> DataStreams::TestPutRecords [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:31:05.086959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:05.087060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:05.087121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:05.087169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:05.087206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:05.087242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:05.087301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:05.087364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:05.088232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:05.088480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:05.183687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:05.183745Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:05.194970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:05.195157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:05.195304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:05.211990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:31:05.212221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:05.212953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:05.213215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:05.217571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:05.217766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:05.219193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:05.219266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:05.219459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:05.219506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:31:05.219564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:31:05.219695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:31:05.226139Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:243:2058] recipient: [1:15:2062] 2025-12-23T18:31:05.395756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:31:05.396023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:05.396280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:31:05.396342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:31:05.396596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:31:05.396671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:05.399661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:05.399933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:31:05.400145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:05.400248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:31:05.400305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:31:05.400347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:31:05.403279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:05.403378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:05.403446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:31:05.405703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:05.405770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:05.405840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:05.405896Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:31:05.410167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:31:05.415760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:31:05.415985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:31:05.417254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:05.417430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:05.417505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:05.417836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:31:05.417902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:05.418127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:31:05.418240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:31:05.430835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:05.430899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:07.323344Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:31:07.323660Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 335us result status StatusSuccess 2025-12-23T18:31:07.324720Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 
ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:07.325534Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:31:07.325840Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 329us result status StatusSuccess 2025-12-23T18:31:07.326905Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:79:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:84:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:83:2113] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:199:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:83:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:203:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] >> TSchemeShardMoveTest::TwoTables [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |91.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:31:03.736312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:03.736421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:03.736459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:03.736503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:03.736544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:03.736573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:03.736622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:03.736705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:03.737571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:03.737861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:03.825632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:03.825720Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:03.831796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:03.832038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:03.832175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:03.835799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:31:03.836012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:03.836842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:03.837111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:03.839421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:03.839601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:03.840847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:03.840926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:03.841099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:03.841157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:31:03.841208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:31:03.841403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2025-12-23T18:31:04.006445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.007532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.007693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.007768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.007842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.007897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.007971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.008077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.008173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.008241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.008326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.008418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.008519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.008621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:04.008712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
hemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:31:07.802839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 727 RawX2: 8589937302 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:31:07.802925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:07.802992Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2025-12-23T18:31:07.803039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:31:07.803091Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:2 129 -> 240 2025-12-23T18:31:07.803985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 8589937307 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:31:07.804020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-12-23T18:31:07.804092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 8589937307 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:31:07.804121Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:31:07.804170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 734 RawX2: 8589937307 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:31:07.804207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:07.804229Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:31:07.804250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:31:07.804276Z node 2 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:31:07.806682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-12-23T18:31:07.807685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:31:07.811927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-12-23T18:31:07.812495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-12-23T18:31:07.812557Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:07.812651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 40], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2025-12-23T18:31:07.812781Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 2/3 2025-12-23T18:31:07.812843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-12-23T18:31:07.812901Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 2/3 2025-12-23T18:31:07.812938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-12-23T18:31:07.812980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2025-12-23T18:31:07.813314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:31:07.813665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:31:07.813712Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:07.813749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2025-12-23T18:31:07.813818Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 3/3 2025-12-23T18:31:07.813842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-12-23T18:31:07.813874Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 3/3 2025-12-23T18:31:07.813898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 
2025-12-23T18:31:07.813925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2025-12-23T18:31:07.814002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:785:2752] message: TxId: 102 2025-12-23T18:31:07.814039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-12-23T18:31:07.814072Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:31:07.814104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:31:07.814206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 4 2025-12-23T18:31:07.814233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:31:07.814262Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2025-12-23T18:31:07.814278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:1 2025-12-23T18:31:07.814298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2025-12-23T18:31:07.814336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:31:07.814353Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:2 2025-12-23T18:31:07.814368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:2 2025-12-23T18:31:07.814396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2025-12-23T18:31:07.814412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:31:07.814765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:31:07.814831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2025-12-23T18:31:07.814928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:31:07.814986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:31:07.815043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:31:07.815087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:31:07.815132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:31:07.820374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:31:07.820438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:887:2846] 2025-12-23T18:31:07.820523Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestListShards1Shard [GOOD] Test command err: 2025-12-23T18:30:46.344859Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122635245168675:2083];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:46.346305Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b28/r3tmp/tmpsA0HYp/pdisk_1.dat 2025-12-23T18:30:46.613226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:46.638033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:46.638194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:46.700347Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:46.823976Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64400, node 1 2025-12-23T18:30:46.873663Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.009862s 2025-12-23T18:30:46.918833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:30:46.981998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:46.982021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:46.982028Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:46.982115Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514646876 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:47.293896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:47.358366Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:47.411184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:24294 2025-12-23T18:30:47.596960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:30:47.935533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } 2025-12-23T18:30:48.047589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:48.180323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2025-12-23T18:30:48.207491Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-12-23T18:30:48.207739Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-12-23T18:30:48.208355Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-12-23T18:30:48.220731Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet 
(72057594046644480,2) wasn't found 2025-12-23T18:30:48.220830Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-12-23T18:30:48.220865Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1766514647800-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":1,"unit":"second","start":1766514647,"finish":1766514648},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514648}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1766514648116-2","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1766514648,"finish":1766514648},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514648}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1766514648120-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1766514648,"finish":1766514648},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037890","source_wt":1766514648}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1766514647800-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":1,"unit":"second","start":1766514647,"finish":1766514648},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514648}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1766514648116-2","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1766514648,"finish":1766514648},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514648}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id ... 
mestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1766514657.858867 1133695 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-12-23T18:30:57.878418Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:57.926826Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1766514657.989051 1133695 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1766514657.989192 1133695 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-12-23T18:30:58.000893Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1766514658.046387 1133695 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1766514658.046521 1133695 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-12-23T18:30:58.056236Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1766514658.120980 1133695 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1766514658.121119 1133695 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1766514658.144914 1133695 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1766514658.145103 1133695 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-12-23T18:30:58.211861Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2025-12-23T18:30:58.237969Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-12-23T18:30:58.238010Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037892 not found 2025-12-23T18:30:58.238023Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2025-12-23T18:30:58.238035Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037893 not found 2025-12-23T18:30:58.238062Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-12-23T18:30:58.238082Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-12-23T18:30:58.251865Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2025-12-23T18:30:58.251944Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-12-23T18:30:58.252024Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-12-23T18:30:58.252062Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-12-23T18:30:58.252099Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-12-23T18:30:58.252159Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found E0000 00:00:1766514658.267551 1133695 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1766514658.267699 1133695 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-12-23T18:31:02.693462Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587122704386005688:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:02.693543Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b28/r3tmp/tmpbRxqVn/pdisk_1.dat 2025-12-23T18:31:02.747038Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:02.881691Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2025-12-23T18:31:02.908147Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:02.908246Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:02.936928Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19319, node 10 2025-12-23T18:31:03.038826Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:03.047868Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:03.047902Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:03.047912Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:03.048195Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4390 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514663004 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:03.367327Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
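The recurring "E0000 ... message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn" lines in this test output are protobuf's standard diagnostic for a proto2 message parsed from bytes that lack its required fields: the strict parse call returns false and the library logs exactly this message from message_lite.cc. A minimal C++ sketch of that behaviour, assuming a hypothetical proto2 schema compiled with protoc (this is not the real NKikimrPQ.TYdsNextToken definition):

    // next_token.proto (hypothetical):
    //   syntax = "proto2";
    //   package demo;
    //   message TNextToken {
    //     required uint64 CreationTimestamp = 1;
    //     required uint32 MaxResults        = 2;
    //   }
    #include <iostream>
    #include <string>
    #include "next_token.pb.h"  // generated by protoc from the sketch above

    int main() {
        demo::TNextToken token;
        const std::string payload;  // empty: none of the required fields are present

        // Strict parse fails (and the protobuf library logs the
        // "missing required fields" error seen in this test output),
        // because the parsed bytes leave required fields unset.
        if (!token.ParseFromString(payload)) {
            std::cout << "strict parse failed\n";
        }

        // A partial parse accepts the same bytes, but the message is still
        // uninitialized; InitializationErrorString() lists the missing fields.
        token.ParsePartialFromString(payload);
        std::cout << "initialized: " << token.IsInitialized() << "\n"
                  << "missing: " << token.InitializationErrorString() << "\n";
        return 0;
    }

Since the surrounding unittests still finish with [GOOD], the failed strict parse appears to be tolerated by the code under test; the E0000 lines are noise in the log rather than the cause of a test failure.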
2025-12-23T18:31:03.487058Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:31:03.701854Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:4390 2025-12-23T18:31:03.728412Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... E0000 00:00:1766514663.860519 1135146 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1766514663.871603 1135146 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1766514663.879074 1135146 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1766514663.889112 1135146 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1766514663.903918 1135146 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn |91.5%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TTxDataShardReshuffleKMeansScan::MainToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPostingWithOverlap >> TBackupCollectionTests::DisallowedPath >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TBackupCollectionTests::ParallelCreate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:31:07.886538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:07.886731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:07.886806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:07.886857Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:07.886906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:07.886942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:07.887006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:07.887082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:07.888093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:07.888409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:07.980340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:07.980413Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:07.987901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:07.988191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:07.988376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:07.992320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:31:07.992537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:07.993397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:07.993661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:07.996029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:07.996238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:07.997408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:07.997481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:07.997648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:07.997702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-12-23T18:31:07.997746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:31:07.997897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:31:08.006169Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:31:08.149360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:31:08.149609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:08.149835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:31:08.149907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:31:08.150181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:31:08.150264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:08.156073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:08.156346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:31:08.156601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:08.156678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:31:08.156729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:31:08.156790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:31:08.162252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:08.162375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:08.162438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:31:08.169189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:08.169273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:08.169360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:08.169431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:31:08.173718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:31:08.176291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:31:08.176522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:31:08.177902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:08.178107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:08.178168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:08.178506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:31:08.178577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:08.178845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:31:08.178964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:31:08.181943Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:08.182003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 05482Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:31:08.805765Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 255us result status StatusPathDoesNotExist 2025-12-23T18:31:08.806055Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:31:08.806637Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:31:08.806937Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove1" took 296us result status StatusSuccess 2025-12-23T18:31:08.807425Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 
LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:08.808234Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:31:08.808427Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 189us result status StatusPathDoesNotExist 2025-12-23T18:31:08.808576Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:31:08.809086Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:31:08.809355Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 225us result status StatusSuccess 2025-12-23T18:31:08.809864Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:08.810698Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:31:08.810960Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 261us result status StatusSuccess 2025-12-23T18:31:08.811454Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true 
CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecords [GOOD] Test command err: 2025-12-23T18:30:44.736574Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122626151927982:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:44.736652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b2b/r3tmp/tmpeMMBpZ/pdisk_1.dat 2025-12-23T18:30:44.973936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:44.994647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:44.994730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:45.052674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:45.170191Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:45.189588Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 5708, node 1 2025-12-23T18:30:45.327386Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:45.327441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:45.327452Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:45.327569Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24903 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514645196 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:45.654771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:45.724830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:30:45.751277Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24903 2025-12-23T18:30:45.885391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:30:49.541833Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587122648629306886:2084];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:49.542150Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:30:49.614846Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.006104s test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b2b/r3tmp/tmp5YJTbx/pdisk_1.dat 2025-12-23T18:30:49.621446Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:49.812177Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:49.831162Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:49.831296Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:49.848932Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:49.862130Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 22143, node 4 2025-12-23T18:30:49.953923Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:49.953955Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:49.953997Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:49.954127Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514649942 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:30:50.209140Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:50.272930Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:30:50.447024Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4225 2025-12-23T18:30:50.494988Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:30:50.509024Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-12-23T18:30:50.590603Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:50.669140Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:50.735827Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) encryption_type: NONE sequence_number: "0" shard_id: "shard-000000" encryption_type: NONE records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "3" 
shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shar ... 880Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {5, 0} (5-5) 2025-12-23T18:31:00.873947Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {6, 0} (6-6) 2025-12-23T18:31:00.882858Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {7, 0} (7-7) 2025-12-23T18:31:00.919295Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {8, 0} (8-8) 2025-12-23T18:31:00.919407Z :DEBUG: [/Root/] [/Root/] [dc19de34-172caa5d-b456e6d7-1f85a5b7] [null] The application data is transferred to the client. Number of messages 9, size 8388611 bytes 2025-12-23T18:31:00.920153Z :DEBUG: [/Root/] [/Root/] [dc19de34-172caa5d-b456e6d7-1f85a5b7] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:31:00.920787Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 4 (0-1) 2025-12-23T18:31:00.920991Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2025-12-23T18:31:00.921068Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (0-2) 2025-12-23T18:31:00.921219Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2025-12-23T18:31:00.934898Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {0, 0} (0-0) 2025-12-23T18:31:00.934972Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {1, 0} (1-1) 2025-12-23T18:31:00.935009Z :DEBUG: [/Root/] [/Root/] [dc19de34-172caa5d-b456e6d7-1f85a5b7] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-12-23T18:31:00.935222Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-12-23T18:31:00.935255Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-12-23T18:31:00.935280Z :DEBUG: [/Root/] [/Root/] [dc19de34-172caa5d-b456e6d7-1f85a5b7] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-12-23T18:31:00.935391Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 0} (0-0) 2025-12-23T18:31:00.935426Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (1-1) 2025-12-23T18:31:00.936462Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (2-2) 2025-12-23T18:31:00.936503Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {3, 0} (3-3) 2025-12-23T18:31:00.936538Z :DEBUG: [/Root/] [/Root/] [dc19de34-172caa5d-b456e6d7-1f85a5b7] [null] The application data is transferred to the client. Number of messages 4, size 1049088 bytes 2025-12-23T18:31:00.937189Z :INFO: [/Root/] [/Root/] [dc19de34-172caa5d-b456e6d7-1f85a5b7] Closing read session. Close timeout: 0.000000s 2025-12-23T18:31:00.937261Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:4:5:1:0 null:stream_TestPutRecordsCornerCases:0:4:1:0 null:stream_TestPutRecordsCornerCases:3:3:3:0 null:stream_TestPutRecordsCornerCases:1:2:8:0 null:stream_TestPutRecordsCornerCases:2:1:0:0 2025-12-23T18:31:00.937315Z :INFO: [/Root/] [/Root/] [dc19de34-172caa5d-b456e6d7-1f85a5b7] Counters: { Errors: 0 CurrentSessionLifetimeMs: 310 BytesRead: 9437699 MessagesRead: 17 BytesReadCompressed: 9437699 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:31:00.937425Z :NOTICE: [/Root/] [/Root/] [dc19de34-172caa5d-b456e6d7-1f85a5b7] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T18:31:00.937474Z :DEBUG: [/Root/] [/Root/] [dc19de34-172caa5d-b456e6d7-1f85a5b7] [null] Abort session to cluster 2025-12-23T18:31:00.938061Z :NOTICE: [/Root/] [/Root/] [dc19de34-172caa5d-b456e6d7-1f85a5b7] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:31:00.946944Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer user1 session user1_7_1_15059657150441161421_v1 grpc read failed 2025-12-23T18:31:00.946995Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer user1 session user1_7_1_15059657150441161421_v1 grpc closed 2025-12-23T18:31:00.947057Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer user1 session user1_7_1_15059657150441161421_v1 is DEAD 2025-12-23T18:31:02.918780Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587122704557202377:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:02.919165Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:31:02.963593Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b2b/r3tmp/tmpxnvxYW/pdisk_1.dat 2025-12-23T18:31:03.168261Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:03.189084Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:03.189194Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:03.207750Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21827, node 10 2025-12-23T18:31:03.284585Z node 12 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.012212s 2025-12-23T18:31:03.292599Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:03.351563Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:03.351593Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:03.351602Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:03.351687Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1912 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514663291 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:03.678570Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:03.809832Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:31:03.924166Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1912 2025-12-23T18:31:04.082409Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:31:04.528461Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101)
: Error: Access for stream /Root/stream_TestPutRecords is denied for subject user2@builtin, code: 500018 2025-12-23T18:31:04.732853Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) PutRecordsResponse = encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } PutRecord response = encryption_type: NONE sequence_number: "7" shard_id: "shard-000004" |91.5%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TSchemeShardMoveTest::ResetCachedPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:31:05.803074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:05.804115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:05.804176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:05.804224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:05.804267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:05.804308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:05.804368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:05.804440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:05.805419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:05.805735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:05.911126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:05.911198Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:05.926246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:05.926425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:05.926515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:05.928724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:31:05.928857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:05.929335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:05.929534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:05.931294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:05.931461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:05.932581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:05.932658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:05.932803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:05.932855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:31:05.932896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:31:05.933039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:31:06.100282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.104759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.104963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.105036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.105102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.105158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.105221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.105328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.105429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.105497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.105591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.105696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.105787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:06.105859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:31:06.105934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... EvNotifyTxCompletionResult to actorId: [2:778:2746] message: TxId: 102 2025-12-23T18:31:09.262690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-12-23T18:31:09.262755Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:31:09.262793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:31:09.262943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2025-12-23T18:31:09.262997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:31:09.263054Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2025-12-23T18:31:09.263077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:1 2025-12-23T18:31:09.263133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:31:09.263157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:31:09.272651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:31:09.272731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:31:09.272838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:31:09.272889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:31:09.272926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:31:09.275991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:31:09.276063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:877:2831] 2025-12-23T18:31:09.276528Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done 
PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-12-23T18:31:09.284876Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:31:09.285134Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 300us result status StatusPathDoesNotExist 2025-12-23T18:31:09.285303Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:31:09.285786Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:31:09.285962Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 192us result status StatusPathDoesNotExist 2025-12-23T18:31:09.286098Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:31:09.286513Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:31:09.286784Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 262us result status StatusSuccess 2025-12-23T18:31:09.287251Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:09.287908Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:31:09.288092Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 213us result status StatusSuccess 2025-12-23T18:31:09.288377Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { 
Name: "myseq" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TSchemeShardMoveTest::Chain >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::DropTwice |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TBackupCollectionTests::DisallowedPath [GOOD] >> TBackupCollectionTests::CreateAbsolutePath |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> VectorIndexBuildTest::SimpleDuplicates-Overlap-true [GOOD] >> VectorIndexBuildTest::PrefixedDuplicates |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> TResourcePoolTest::CreateResourcePool >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes >> TBackupCollectionTests::DropTwice [GOOD] >> TBackupCollectionTests::TableWithSystemColumns >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] >> TSchemeShardMoveTest::ResetCachedPath [GOOD] >> TBackupCollectionTests::CreateAbsolutePath [GOOD] >> TBackupCollectionTests::Create |91.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> TSchemeShardMoveTest::Chain [GOOD] >> TSchemeShardMoveTest::Index >> DataStreams::ListStreamsValidation [GOOD] >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] >> KqpQueryPerf::RangeLimitRead+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:31:10.829328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:10.829428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:10.829464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:10.829494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:10.829525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:10.829550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:10.829593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:10.829643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:10.830375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:10.830609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:10.905013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:10.905103Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:10.911754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:10.911972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:10.912107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:10.923792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: 
TTxUpgradeSchema.Complete 2025-12-23T18:31:10.924033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:10.924892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:10.925173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:10.927825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:10.928052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:10.929289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:10.929369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:10.929586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:10.929642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:31:10.929696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:31:10.929837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:31:11.062840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.064057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.064241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.064331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.064438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.064508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.064575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.064688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.064793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.064861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.064933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:31:11.065024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.065109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.065210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:11.065317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
44 2025-12-23T18:31:12.562346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-12-23T18:31:12.562471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:31:12.566984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2025-12-23T18:31:12.567098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000040 2025-12-23T18:31:12.567473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:12.567584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:12.567660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:373: TAlterTable TPropose operationId# 105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000040, at schemeshard: 72057594046678944 2025-12-23T18:31:12.567893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 129 2025-12-23T18:31:12.568044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000040 2025-12-23T18:31:12.575424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:12.575481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:31:12.575730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:12.575775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 105, path id: 39 2025-12-23T18:31:12.576217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:31:12.576277Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 105 2025-12-23T18:31:12.577170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:31:12.577272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:31:12.577313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:31:12.577347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 8 2025-12-23T18:31:12.577390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:31:12.577474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-12-23T18:31:12.580033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1511 } } CommitVersion { Step: 5000040 TxId: 105 } 2025-12-23T18:31:12.580093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-12-23T18:31:12.580312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1511 } } CommitVersion { Step: 5000040 TxId: 105 } 2025-12-23T18:31:12.580478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1511 } } CommitVersion { Step: 5000040 TxId: 105 } debug: NTableState::TProposedWaitParts operationId# 105:0 2025-12-23T18:31:12.582164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1074 RawX2: 4294970309 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-12-23T18:31:12.582221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-12-23T18:31:12.582411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1074 RawX2: 4294970309 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-12-23T18:31:12.582504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:31:12.582632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 1074 RawX2: 4294970309 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-12-23T18:31:12.582712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:12.582846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:31:12.582948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-12-23T18:31:12.583004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 129 -> 240 2025-12-23T18:31:12.583850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:31:12.585672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:31:12.585802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:31:12.586045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:31:12.586101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2025-12-23T18:31:12.586222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:31:12.586254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:31:12.586284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:31:12.586327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:31:12.586356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 
2025-12-23T18:31:12.586410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2715] message: TxId: 105 2025-12-23T18:31:12.586461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:31:12.586506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2025-12-23T18:31:12.586547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:0 2025-12-23T18:31:12.586674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:31:12.588813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:31:12.588896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1243:3163] TestWaitNotification: OK eventTxId 105 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TResourcePoolTest::CreateResourcePool [GOOD] >> KqpQueryPerf::IndexReplace+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] Test command err: 2025-12-23T18:30:43.622043Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122621020328404:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:43.624387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b31/r3tmp/tmpCcXC37/pdisk_1.dat 2025-12-23T18:30:43.849239Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:43.885242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:43.885373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:43.920586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:44.003493Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11140, node 1 2025-12-23T18:30:44.033110Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.005782s 2025-12-23T18:30:44.091259Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:30:44.114992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:44.115028Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:44.115038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:44.115220Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18641 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514644006 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:44.404724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:44.449365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:18641 2025-12-23T18:30:44.603644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:30:44.631104Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:44.828565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2025-12-23T18:30:44.843328Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-12-23T18:30:44.844701Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-12-23T18:30:44.844731Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-12-23T18:30:44.844746Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-12-23T18:30:44.863760Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-12-23T18:30:44.863886Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-12-23T18:30:44.863925Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-12-23T18:30:44.863954Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-12-23T18:30:48.460217Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587122641893043620:2175];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:48.460483Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b31/r3tmp/tmpERduyN/pdisk_1.dat 2025-12-23T18:30:48.515992Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:48.714684Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:48.740291Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:48.740407Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:48.775593Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:48.802924Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 18449, node 4 
2025-12-23T18:30:48.935240Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:48.935301Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:48.935311Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:48.935411Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6250 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514648857 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:49.232571Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:49.336195Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:30:49.497296Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6250 2025-12-23T18:30:49.559972Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:30:49.759594Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:49.813857Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: ( ... tus: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:53.995030Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:54.015724Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:54.015826Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:54.047047Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:54.068045Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 15908, node 7 2025-12-23T18:30:54.239663Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:54.239690Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:54.239703Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:54.239797Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5657 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514654135 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-12-23T18:30:54.523245Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:54.592732Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:30:54.804218Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5657 2025-12-23T18:30:54.820568Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:30:55.015628Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:55.089442Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:55.152423Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2025-12-23T18:30:55.190480Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-12-23T18:30:55.190519Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2025-12-23T18:30:55.190537Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-12-23T18:30:55.193532Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-12-23T18:30:55.197988Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-12-23T18:30:55.198044Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-12-23T18:30:55.198082Z node 7 :HIVE WARN: 
tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-12-23T18:30:59.241000Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587122691182671933:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:59.241104Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b31/r3tmp/tmpfFCfl3/pdisk_1.dat 2025-12-23T18:30:59.267749Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:59.421472Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:59.438402Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:59.446113Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:59.446213Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:59.455170Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7124, node 10 2025-12-23T18:30:59.626816Z node 12 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007497s 2025-12-23T18:30:59.672094Z node 12 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.032097s 2025-12-23T18:30:59.819828Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:59.819857Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:59.819870Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:59.819975Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:00.026952Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6178 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-12-23T18:31:00.251345Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514659574 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:00.266099Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:00.380520Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:6178 2025-12-23T18:31:00.664333Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:31:04.241519Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7587122691182671933:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:04.241643Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Insert-QueryService-UseSink |91.5%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> KqpQueryPerf::MultiRead-QueryService >> TBackupCollectionTests::Create [GOOD] >> TBackupCollectionTests::CreateTwice >> TSchemeShardMoveTest::Index [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> KqpQueryPerf::Delete-QueryService-UseSink >> TBackupCollectionTests::TableWithSystemColumns [GOOD] >> TBackupCollectionTests::DropEmptyBackupCollection |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |91.5%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] Test command err: 2025-12-23T18:30:52.290816Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122661038848092:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:52.297151Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b1f/r3tmp/tmpb7QsGn/pdisk_1.dat 2025-12-23T18:30:52.583533Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:52.621818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:52.621922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:52.660642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:52.769811Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:52.816144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 14753, node 1 2025-12-23T18:30:52.927192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:52.927222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:52.927243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:52.927352Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22989 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514652791 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:53.206835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:53.283538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:30:53.331753Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22989 2025-12-23T18:30:53.468640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:30:55.715476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:30:55.957834Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037890:1][1:7587122673923751950:2351] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:42:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-12-23T18:30:56.153834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:56.361781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2025-12-23T18:30:56.398549Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-12-23T18:30:56.398585Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-12-23T18:30:56.398598Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-12-23T18:30:56.398610Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-12-23T18:30:56.398625Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-12-23T18:30:56.398637Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-12-23T18:30:56.398649Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-12-23T18:30:56.398660Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-12-23T18:30:56.398671Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-12-23T18:30:56.398683Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-12-23T18:30:56.399269Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-12-23T18:30:56.399311Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-12-23T18:30:56.399324Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-12-23T18:30:56.401657Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-12-23T18:30:56.404084Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-12-23T18:30:56.407931Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-12-23T18:30:58.196568Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587122687363297088:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:58.196671Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b1f/r3tmp/tmpQyH7Gz/pdisk_1.dat 2025-12-23T18:30:58.218427Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:58.322037Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:58.337461Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:58.337546Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:58.356464Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19882, node 4 2025-12-23T18:30:58.423909Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:58.423930Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:58.423935Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:58.424006Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:30:58.429283Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19311 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: tr ... operation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:30:58.946252Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-12-23T18:30:59.183602Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:59.205401Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:59.269233Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:30:59.366913Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:31:03.133789Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7587122708775839560:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:03.133856Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b1f/r3tmp/tmpPO2tgy/pdisk_1.dat 2025-12-23T18:31:03.165193Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:03.267385Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:03.297440Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:03.297540Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:03.304840Z node 7 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:03.328527Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 6896, node 7 2025-12-23T18:31:03.403073Z node 8 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.006003s 2025-12-23T18:31:03.510730Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:03.510772Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:03.510780Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:03.510880Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:03.781209Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514663403 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:03.800903Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:03.864005Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:17864 2025-12-23T18:31:04.109345Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:31:04.143477Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:04.451035Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7587122713070809022:3556] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/stream_TestCreateExistingStream\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:08.437906Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587122727840620507:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:08.437996Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:31:08.479650Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b1f/r3tmp/tmplTF9c9/pdisk_1.dat 2025-12-23T18:31:08.612671Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:08.628382Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:08.628503Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:08.658099Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31836, node 10 2025-12-23T18:31:08.806324Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:08.859472Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:08.859506Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:08.859514Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:08.862790Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63768 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514668730 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:09.209464Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:09.348525Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:31:09.473955Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:63768 2025-12-23T18:31:09.628053Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
|91.5%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::CreateResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:31:12.624543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:12.624670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:12.624723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:12.624770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:12.625042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:12.625080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:12.625197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:12.625282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:12.626306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:12.626680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:12.724453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:12.724542Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:12.734102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:12.734370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:12.734514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:12.739289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:31:12.739555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:12.740319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 
1, at schemeshard: 72057594046678944 2025-12-23T18:31:12.740697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:12.743125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:12.743339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:12.744700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:12.744763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:12.744921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:12.744969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:31:12.745010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:31:12.745153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:31:12.928307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.929616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.929814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.929947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.930061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.930156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.930232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.930354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.930455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.930537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.930614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.930715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.931049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.931167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:12.931292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:13.849982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_resource_pool.cpp:19: [72057594046678944] TCreateResourcePool TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2025-12-23T18:31:13.850158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2025-12-23T18:31:13.850389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:31:13.850471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:31:13.851755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:31:13.853609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:31:13.855444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:31:13.855877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:13.855915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2025-12-23T18:31:13.856091Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2025-12-23T18:31:13.856195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2025-12-23T18:31:13.856318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:13.856367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 40 2025-12-23T18:31:13.856413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 41 2025-12-23T18:31:13.856437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 41 2025-12-23T18:31:13.856636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:31:13.856682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:31:13.856793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:31:13.856833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:31:13.856882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:31:13.856935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:31:13.856982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:31:13.857031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:31:13.857082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:31:13.857141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:31:13.857242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2025-12-23T18:31:13.857289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:31:13.857326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 40], 5 2025-12-23T18:31:13.857350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication 
details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 41], 2 2025-12-23T18:31:13.858407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:31:13.858520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:31:13.858561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:31:13.858604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 5 2025-12-23T18:31:13.858662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:31:13.862279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:31:13.862394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:31:13.862423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:31:13.862449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2025-12-23T18:31:13.862488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:31:13.862556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:31:13.866822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:31:13.867231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:31:13.867473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:31:13.867562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:31:13.867949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at 
schemeshard: 72057594046678944 2025-12-23T18:31:13.868140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:31:13.868187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:746:2736] TestWaitNotification: OK eventTxId 102 2025-12-23T18:31:13.868700Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:31:13.868934Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 262us result status StatusSuccess 2025-12-23T18:31:13.869373Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TSchemeShardMoveTest::ReplaceVectorIndex [GOOD] >> DataStreams::TestReservedConsumersMetering [GOOD] >> KqpQueryPerf::UpdateOn+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is 
[1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:31:11.434628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:11.434792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:11.434846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:11.434888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:11.434930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:11.434964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:11.435066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:11.435143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:11.436157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:11.436482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:11.531918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:11.532001Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:11.538629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:11.538929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:11.539110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:11.542936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:31:11.543159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:11.544035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:11.544284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:11.546570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:11.546799Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:11.548105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:11.548179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:11.548371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:11.548431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:31:11.548483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:31:11.548649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:31:11.557514Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:31:11.702842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:31:11.702997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:11.703141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:31:11.703185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:31:11.703346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:31:11.703401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:11.705498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:11.705648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:31:11.705808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-12-23T18:31:11.705859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:31:11.705890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:31:11.705914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:31:11.708035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:11.708160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:11.708213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:31:11.710351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:11.710413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:11.710471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:11.710525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:31:11.714583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:31:11.716936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:31:11.717154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:31:11.718398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:11.718555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:11.718608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:11.718930Z node 
1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:31:11.718997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:11.719240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:31:11.719347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:31:11.721822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:11.721873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 57594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:14.470129Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:31:14.470368Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 244us result status StatusSuccess 2025-12-23T18:31:14.471195Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" 
PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 
PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:14.471929Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:31:14.472153Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 248us result status StatusSuccess 2025-12-23T18:31:14.472840Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 
NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestVacuumWithMockDisk >> TBackupCollectionTests::CreateTwice [GOOD] >> TBackupCollectionTests::Drop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ReplaceVectorIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:31:05.134453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:05.134589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:05.134662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:05.134718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:05.134789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:05.134829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:05.134900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:05.134984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:05.136065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:05.136441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:05.233133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:05.233237Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:05.245012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:05.245355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:05.245504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:05.253048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:31:05.253276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:05.254138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:05.254425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:05.256886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:05.257077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:05.258287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:05.258390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-12-23T18:31:05.258578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:05.258638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:31:05.258697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:31:05.258883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:31:05.266313Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:31:05.406723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:31:05.406981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:05.407188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:31:05.407260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:31:05.407514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:31:05.407593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:05.410289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:05.410494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:31:05.410657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:05.410731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:31:05.410796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2025-12-23T18:31:05.410832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:31:05.412562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:05.412644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:05.412686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:31:05.414506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:05.414556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:31:05.414600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:05.414649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:31:05.418471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:31:05.420656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:31:05.420819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:31:05.422090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:05.422243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:31:05.422296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:05.422594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:31:05.422676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:31:05.422932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:31:05.423031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:31:05.425253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:05.425323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 25-12-23T18:31:14.662111Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 65], version: 18446744073709551615 2025-12-23T18:31:14.662135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 4 2025-12-23T18:31:14.662464Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 66 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-12-23T18:31:14.662534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 66 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-12-23T18:31:14.662564Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-12-23T18:31:14.662587Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 66], version: 18446744073709551615 2025-12-23T18:31:14.662607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 66] was 3 2025-12-23T18:31:14.662899Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 67 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-12-23T18:31:14.662956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 67 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-12-23T18:31:14.662975Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-12-23T18:31:14.662992Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 67], version: 18446744073709551615 2025-12-23T18:31:14.663009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 67] was 5 2025-12-23T18:31:14.663062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 4/6, is published: true 2025-12-23T18:31:14.663949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:4, at schemeshard: 72057594046678944 2025-12-23T18:31:14.664019Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:4 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:14.669668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 4 2025-12-23T18:31:14.669867Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:4 progress is 5/6 2025-12-23T18:31:14.669910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 5/6 2025-12-23T18:31:14.669949Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:4 progress is 5/6 2025-12-23T18:31:14.669991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 5/6 2025-12-23T18:31:14.670038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 5/6, is published: true 2025-12-23T18:31:14.670882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:31:14.670952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:31:14.671015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:31:14.671187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2025-12-23T18:31:14.671234Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:2 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:31:14.671447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 3 2025-12-23T18:31:14.671530Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 6/6 2025-12-23T18:31:14.671577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2025-12-23T18:31:14.671629Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 6/6 2025-12-23T18:31:14.671666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2025-12-23T18:31:14.671698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 6/6, is published: true 2025-12-23T18:31:14.671779Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:739:2717] message: TxId: 107 2025-12-23T18:31:14.671874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2025-12-23T18:31:14.671926Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2025-12-23T18:31:14.671965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 107:0 2025-12-23T18:31:14.672092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2025-12-23T18:31:14.672133Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:1 2025-12-23T18:31:14.672153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 107:1 2025-12-23T18:31:14.672181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 64] was 4 2025-12-23T18:31:14.672201Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:2 2025-12-23T18:31:14.672239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 107:2 2025-12-23T18:31:14.672316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 2 2025-12-23T18:31:14.672361Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:3 2025-12-23T18:31:14.672387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 107:3 2025-12-23T18:31:14.672431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 66] was 2 2025-12-23T18:31:14.672454Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:4 2025-12-23T18:31:14.672475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 107:4 2025-12-23T18:31:14.672516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 3 2025-12-23T18:31:14.672537Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:5 2025-12-23T18:31:14.672556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 107:5 2025-12-23T18:31:14.672612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 68] was 1 2025-12-23T18:31:14.673164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:31:14.673224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: 
TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 68], at schemeshard: 72057594046678944 2025-12-23T18:31:14.673301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 2 2025-12-23T18:31:14.677986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:31:14.678108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:31:14.678161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:31:14.678298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:31:14.681899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:31:14.682017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:31:14.682264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-12-23T18:31:14.682312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:4124:5875] 2025-12-23T18:31:14.682430Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TBackupCollectionTests::DropEmptyBackupCollection [GOOD] >> TBackupCollectionTests::DropNonExistentCollection >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] Test command err: 2025-12-23T18:30:47.529270Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122640995743304:2082];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:47.534072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b22/r3tmp/tmptVt4nu/pdisk_1.dat 2025-12-23T18:30:47.822428Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:47.853006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:47.853384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-12-23T18:30:47.889702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:47.986491Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2684, node 1 2025-12-23T18:30:48.160574Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:30:48.184544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:48.184577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:48.184585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:48.184665Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:30:48.541400Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514647975 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:48.565308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:48.678931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:4834 2025-12-23T18:30:48.914632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:30:49.223710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "1" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000007" } records { sequence_number: "1" shard_id: "shard-000007" } records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000007" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000009" } records { sequence_number: "1" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "5" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000008" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000009" } records { sequence_number: "2" shard_id: "shard-000006" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "7" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000008" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000006" } records { sequence_number: "6" shard_id: "shard-000000" } 
records { sequence_number: "8" shard_id: "shard-000009" } records { sequence_number: "8" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000009" } records { sequence_number: "9" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "10" shard_id: "shard-000001" } records { sequence_number: "10" shard_id: "shard-000009" } records { sequence_number: "10" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000005" } records { sequence_number: "4" shard_id: "shard-000008" } records { sequence_number: "11" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000005" } records { sequence_number: "11" shard_id: "shard-000001" } records { sequence_number: "11" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000006" } records { sequence_number: "12" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000007" } records { sequence_number: "7" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000007" } records { sequence_number: "13" shard_id: "shard-000004" } records { sequence_number: "8" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "12" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000008" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000006" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000009" } records { sequence_number: "13" shard_id: "shard-000001" } records { sequence_number: "14" shard_id: "shard-000009" } records { sequence_number: "14" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000001" } 2025-12-23T18:30:52.608756Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122640995743304:2082];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:52.611119Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "15" shard_id: "shard-000001" } records { sequence_number: "15" shard_id: "shard-000009" } records { sequence_number: "15" shard_id: "shard-000004" } records { sequence_number: "9" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000008" } records { sequence_number: "16" shard_id: "shard-000004" } records { sequence_number: "17" shard_id: "shard-000004" } records { sequence_number: "10" shard_id: "shard-000005" } records { sequence_number: "16" shard_id: "shard-000001" } records { sequence_number: "16" shard_id: "shard-000009" } records { sequence_number: "6" shard_id: "shard-000006" } records { sequence_number: "17" shard_id: "shard-000001" } records { seque ... 
":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514667429-105","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1766514667,"finish":1766514667},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514667}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514667464-106","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":1},"usage":{"quantity":0,"unit":"second","start":1766514667,"finish":1766514667},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514667}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514667464-107","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1766514667,"finish":1766514667},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514667}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514667505-108","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":1},"usage":{"quantity":0,"unit":"second","start":1766514667,"finish":1766514667},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514667}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514667505-109","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1766514667,"finish":1766514667},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514667}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514667577-110","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":0,"unit":"second","start":1766514667,"finish":1766514667},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514667}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514667577-111","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1766514667,"finish":1766514667},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514667}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1766514667655-112","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1766514667,"finish":1766514668},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514668}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514667655-113","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1766514667,"finish":1766514668},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514668}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514667655-114","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1766514667,"finish":1766514668},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514668}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1766514668700-115","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1766514668,"finish":1766514669},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514669}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514668700-116","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1766514668,"finish":1766514669},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514669}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514668700-117","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1766514668,"finish":1766514669},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514669}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1766514669728-118","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1766514669,"finish":1766514670},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514670}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514669728-119","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1766514669,"finish":1766514670},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514670}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514669728-120","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1766514669,"finish":1766514670},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514670}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1766514670752-121","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1766514670,"finish":1766514671},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514671}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514670752-122","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1766514670,"finish":1766514671},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514671}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514670752-123","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1766514670,"finish":1766514671},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514671}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1766514671777-124","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1766514671,"finish":1766514672},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514672}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514671777-125","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1766514671,"finish":1766514672},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514672}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1766514671777-126","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1766514671,"finish":1766514672},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1766514672}' |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Table [GOOD] |91.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |91.5%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results >> KqpQueryPerf::ComputeLength+QueryService >> KqpQueryPerf::UpdateOn+QueryService-UseSink >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::BackupAbsentCollection >> KqpQueryPerf::RangeLimitRead-QueryService >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestReadRequestInFlightLimit >> TBackupCollectionTests::DropNonExistentCollection [GOOD] >> TBackupCollectionTests::DropCollectionWithMultipleBackups >> TKeyValueTest::TestReadRequestInFlightLimit [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPostingWithOverlap [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuild >> KqpQueryPerf::Delete+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestReadRequestInFlightLimit [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! 
new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:84:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:87:2057] recipient: [7:86:2117] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:89:2057] recipient: [7:86:2117] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:88:2118] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:204:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:84:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:86:2117] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:89:2057] recipient: [8:86:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:88:2118] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:204:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2119] Leader for TabletID 72057594037927937 is [9:90:2120] sender: [9:91:2057] recipient: [9:88:2119] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2120] Leader for TabletID 72057594037927937 is [9:90:2120] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2119] Leader for TabletID 72057594037927937 is [10:90:2120] sender: [10:91:2057] recipient: [10:88:2119] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:90:2120] Leader for TabletID 72057594037927937 is [10:90:2120] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2121] Leader for TabletID 72057594037927937 is [11:92:2122] sender: [11:93:2057] recipient: [11:90:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2122] Leader for TabletID 72057594037927937 is [11:92:2122] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2121] Leader for TabletID 72057594037927937 is [12:92:2122] sender: [12:93:2057] recipient: [12:90:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2122] Leader for TabletID 72057594037927937 is [12:92:2122] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:90:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:93:2057] recipient: [13:92:2123] Leader for TabletID 72057594037927937 is [13:94:2124] sender: [13:95:2057] recipient: [13:92:2123] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! 
new actor is[13:94:2124] Leader for TabletID 72057594037927937 is [13:94:2124] sender: [13:210:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:90:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:93:2057] recipient: [14:92:2123] Leader for TabletID 72057594037927937 is [14:94:2124] sender: [14:95:2057] recipient: [14:92:2123] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:94:2124] Leader for TabletID 72057594037927937 is [14:94:2124] sender: [14:210:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:91:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:94:2057] recipient: [15:93:2123] Leader for TabletID 72057594037927937 is [15:95:2124] sender: [15:96:2057] recipient: [15:93:2123] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:95:2124] Leader for TabletID 72057594037927937 is [15:95:2124] sender: [15:211:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:93:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:96:2057] recipient: [16:95:2125] Leader for TabletID 72057594037927937 is [16:97:2126] sender: [16:98:2057] recipient: [16:95:2125] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! 
new actor is[16:97:2126] Leader for TabletID 72057594037927937 is [16:97:2126] sender: [16:213:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:93:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:96:2057] recipient: [17:95:2125] Leader for TabletID 72057594037927937 is [17:97:2126] sender: [17:98:2057] recipient: [17:95:2125] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:97:2126] Leader for TabletID 72057594037927937 is [17:97:2126] sender: [17:213:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:94:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:97:2057] recipient: [18:96:2125] Leader for TabletID 72057594037927937 is [18:98:2126] sender: [18:99:2057] recipient: [18:96:2125] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! 
new actor is[18:98:2126] Leader for TabletID 72057594037927937 is [18:98:2126] sender: [18:214:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> TBackupCollectionTests::BackupDroppedCollection >> KqpQueryPerf::Upsert-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes [GOOD] Test command err: RandomSeed# 10549692868034271340 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] Test command err: RandomSeed# 11012328342811229512 Reassign# 5 -- VSlotId { NodeId: 6 PDiskId: 1000 VSlotId: 1000 } GroupId: 2181038080 GroupGeneration: 1 VDiskKind: "Default" FailDomainIdx: 5 VDiskMetrics { SatisfactionRank: 0 VSlotId { NodeId: 6 PDiskId: 1000 VSlotId: 1000 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } Status: "READY" Ready: true Put# [1:1:1:0:0:66:0] Put# [1:1:2:0:0:70:0] Put# [1:1:3:0:0:64:0] Put# [1:1:4:0:0:2:0] Put# [1:1:5:0:0:25:0] Put# [1:1:6:0:0:57:0] 2025-12-23T18:28:04.430030Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:28:04.433689Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4885498993739759485] 2025-12-23T18:28:04.469131Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:1:0:0:66:1] 2025-12-23T18:28:04.469239Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:2:0:0:70:2] 2025-12-23T18:28:04.469285Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:3:0:0:64:3] 2025-12-23T18:28:04.469318Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:4:0:0:2:3] 2025-12-23T18:28:04.469367Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:5:0:0:25:4] 2025-12-23T18:28:04.469402Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:6:0:0:57:5] 2025-12-23T18:28:04.469841Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 6 PartsResurrected# 6 Put# [1:1:7:0:0:92:0] Put# [1:1:8:0:0:93:0] Put# [1:1:9:0:0:2:0] Put# [1:1:10:0:0:52:0] Put# [1:1:11:0:0:27:0] Put# [1:1:12:0:0:32:0] Put# [1:1:13:0:0:79:0] Put# [1:1:14:0:0:85:0] Put# [1:1:15:0:0:92:0] Put# [1:1:16:0:0:97:0] Put# [1:1:17:0:0:44:0] Put# 
[1:1:18:0:0:64:0] Put# [1:1:19:0:0:99:0] Put# [1:1:20:0:0:83:0] Put# [1:1:21:0:0:71:0] Put# [1:1:22:0:0:18:0] Put# [1:1:23:0:0:12:0] Put# [1:1:24:0:0:63:0] Put# [1:1:25:0:0:38:0] Put# [1:1:26:0:0:33:0] Put# [1:1:27:0:0:52:0] Put# [1:1:28:0:0:80:0] Put# [1:1:29:0:0:2:0] Put# [1:1:30:0:0:2:0] Put# [1:1:31:0:0:31:0] Put# [1:1:32:0:0:99:0] Put# [1:1:33:0:0:76:0] Put# [1:1:34:0:0:21:0] Put# [1:1:35:0:0:44:0] Put# [1:1:36:0:0:2:0] Put# [1:1:37:0:0:10:0] Put# [1:1:38:0:0:8:0] Put# [1:1:39:0:0:88:0] Put# [1:1:40:0:0:19:0] Put# [1:1:41:0:0:53:0] Put# [1:1:42:0:0:69:0] Put# [1:1:43:0:0:60:0] Put# [1:1:44:0:0:13:0] Put# [1:1:45:0:0:10:0] Put# [1:1:46:0:0:70:0] Put# [1:1:47:0:0:91:0] Put# [1:1:48:0:0:43:0] Put# [1:1:49:0:0:86:0] Put# [1:1:50:0:0:32:0] Put# [1:1:51:0:0:25:0] Put# [1:1:52:0:0:29:0] Put# [1:1:53:0:0:55:0] Put# [1:1:54:0:0:43:0] Put# [1:1:55:0:0:33:0] Put# [1:1:56:0:0:100:0] Put# [1:1:57:0:0:94:0] Put# [1:1:58:0:0:16:0] Put# [1:1:59:0:0:18:0] Put# [1:1:60:0:0:90:0] Put# [1:1:61:0:0:97:0] Put# [1:1:62:0:0:2:0] Put# [1:1:63:0:0:19:0] Put# [1:1:64:0:0:44:0] Put# [1:1:65:0:0:36:0] Put# [1:1:66:0:0:35:0] Put# [1:1:67:0:0:72:0] Put# [1:1:68:0:0:48:0] Put# [1:1:69:0:0:95:0] Put# [1:1:70:0:0:30:0] Put# [1:1:71:0:0:94:0] Put# [1:1:72:0:0:44:0] Put# [1:1:73:0:0:99:0] Put# [1:1:74:0:0:5:0] Put# [1:1:75:0:0:85:0] Put# [1:1:76:0:0:17:0] Put# [1:1:77:0:0:89:0] Put# [1:1:78:0:0:59:0] Put# [1:1:79:0:0:77:0] Put# [1:1:80:0:0:87:0] Put# [1:1:81:0:0:23:0] Put# [1:1:82:0:0:7:0] Put# [1:1:83:0:0:84:0] Put# [1:1:84:0:0:22:0] Put# [1:1:85:0:0:52:0] Put# [1:1:86:0:0:30:0] Put# [1:1:87:0:0:54:0] Put# [1:1:88:0:0:91:0] Put# [1:1:89:0:0:21:0] Put# [1:1:90:0:0:65:0] Put# [1:1:91:0:0:43:0] Put# [1:1:92:0:0:18:0] Put# [1:1:93:0:0:82:0] Put# [1:1:94:0:0:93:0] Put# [1:1:95:0:0:5:0] Put# [1:1:96:0:0:84:0] Put# [1:1:97:0:0:15:0] Put# [1:1:98:0:0:9:0] Put# [1:1:99:0:0:52:0] Put# [1:1:100:0:0:88:0] Put# [1:1:101:0:0:84:0] Put# [1:1:102:0:0:74:0] Put# [1:1:103:0:0:99:0] Put# [1:1:104:0:0:38:0] Put# [1:1:105:0:0:2:0] Put# [1:1:106:0:0:44:0] Put# [1:1:107:0:0:27:0] Put# [1:1:108:0:0:2:0] Put# [1:1:109:0:0:68:0] Put# [1:1:110:0:0:22:0] Put# [1:1:111:0:0:97:0] Put# [1:1:112:0:0:20:0] Put# [1:1:113:0:0:25:0] Put# [1:1:114:0:0:25:0] Put# [1:1:115:0:0:88:0] Put# [1:1:116:0:0:90:0] Put# [1:1:117:0:0:6:0] Put# [1:1:118:0:0:22:0] Put# [1:1:119:0:0:74:0] Put# [1:1:120:0:0:45:0] Put# [1:1:121:0:0:68:0] Put# [1:1:122:0:0:10:0] Put# [1:1:123:0:0:26:0] Put# [1:1:124:0:0:70:0] Put# [1:1:125:0:0:34:0] Put# [1:1:126:0:0:60:0] Put# [1:1:127:0:0:35:0] Put# [1:1:128:0:0:66:0] Put# [1:1:129:0:0:61:0] Put# [1:1:130:0:0:97:0] Put# [1:1:131:0:0:88:0] Put# [1:1:132:0:0:59:0] Put# [1:1:133:0:0:11:0] Put# [1:1:134:0:0:1:0] Put# [1:1:135:0:0:21:0] Put# [1:1:136:0:0:36:0] Put# [1:1:137:0:0:12:0] Put# [1:1:138:0:0:82:0] Put# [1:1:139:0:0:7:0] Put# [1:1:140:0:0:60:0] Put# [1:1:141:0:0:8:0] Put# [1:1:142:0:0:40:0] Put# [1:1:143:0:0:19:0] Put# [1:1:144:0:0:73:0] Put# [1:1:145:0:0:8:0] Put# [1:1:146:0:0:99:0] Put# [1:1:147:0:0:87:0] Put# [1:1:148:0:0:43:0] Put# [1:1:149:0:0:81:0] Put# [1:1:150:0:0:45:0] Put# [1:1:151:0:0:5:0] Put# [1:1:152:0:0:45:0] Put# [1:1:153:0:0:3:0] Put# [1:1:154:0:0:84:0] Put# [1:1:155:0:0:84:0] Put# [1:1:156:0:0:30:0] Put# [1:1:157:0:0:7:0] Put# [1:1:158:0:0:89:0] Put# [1:1:159:0:0:48:0] Put# [1:1:160:0:0:51:0] Put# [1:1:161:0:0:50:0] Put# [1:1:162:0:0:43:0] Put# [1:1:163:0:0:69:0] Put# [1:1:164:0:0:21:0] Put# [1:1:165:0:0:86:0] Put# [1:1:166:0:0:38:0] Put# [1:1:167:0:0:68:0] Put# [1:1:168:0:0:7:0] Put# [1:1:169:0:0:22:0] Put# 
[1:1:170:0:0:57:0] Put# [1:1:171:0:0:57:0] Put# [1:1:172:0:0:69:0] Put# [1:1:173:0:0:90:0] Put# [1:1:174:0:0:90:0] Put# [1:1:175:0:0:35:0] Put# [1:1:176:0:0:16:0] Put# [1:1:177:0:0:99:0] Put# [1:1:178:0:0:100:0] Put# [1:1:179:0:0:61:0] Put# [1:1:180:0:0:18:0] Put# [1:1:181:0:0:10:0] Put# [1:1:182:0:0:48:0] Put# [1:1:183:0:0:2:0] Put# [1:1:184:0:0:66:0] Put# [1:1:185:0:0:54:0] Put# [1:1:186:0:0:43:0] Put# [1:1:187:0:0:69:0] Put# [1:1:188:0:0:94:0] Put# [1:1:189:0:0:12:0] Put# [1:1:190:0:0:14:0] Put# [1:1:191:0:0:72:0] Put# [1:1:192:0:0:10:0] Put# [1:1:193:0:0:10:0] Put# [1:1:194:0:0:59:0] Put# [1:1:195:0:0:50:0] Put# [1:1:196:0:0:98:0] Put# [1:1:197:0:0:25:0] Put# [1:1:198:0:0:76:0] Put# [1:1:199:0:0:2:0] Put# [1:1:200:0:0:64:0] Put# [1:1:201:0:0:41:0] Put# [1:1:202:0:0:48:0] Put# [1:1:203:0:0:5:0] Put# [1:1:204:0:0:43:0] Put# [1:1:205:0:0:70:0] Put# [1:1:206:0:0:20:0] Put# [1:1:207:0:0:1:0] Put# [1:1:208:0:0:91:0] Put# [1:1:209:0:0:34:0] Put# [1:1:210:0:0:53:0] Put# [1:1:211:0:0:34:0] Put# [1:1:212:0:0:88:0] Put# [1:1:213:0:0:77:0] Put# [1:1:214:0:0:38:0] Put# [1:1:215:0:0:20:0] Put# [1:1:216:0:0:20:0] Put# [1:1:217:0:0:49:0] Put# [1:1:218:0:0:71:0] Put# [1:1:219:0:0:22:0] Put# [1:1:220:0:0:61:0] Put# [1:1:221:0:0:59:0] Put# [1:1:222:0:0:1:0] Put# [1:1:223:0:0:87:0] Put# [1:1:224:0:0:87:0] Put# [1:1:225:0:0:35:0] Put# [1:1:226:0:0:20:0] Put# [1:1:227:0:0:55:0] Put# [1:1:228:0:0:48:0] Put# [1:1:229:0:0:29:0] Put# [1:1:230:0:0:6:0] Put# [1:1:231:0:0:89:0] Put# [1:1:232:0:0:59:0] Put# [1:1:233:0:0:31:0] Put# [1:1:234:0:0:71:0] Put# [1:1:235:0:0:70:0] Put# [1:1:236:0:0:85:0] Put# [1:1:237:0:0:30:0] Put# [1:1:238:0:0:22:0] Put# [1:1:239:0:0:39:0] Put# [1:1:240:0:0:26:0] Put# [1:1:241:0:0:72:0] Put# [1:1:242:0:0:53:0] Put# [1:1:243:0:0:68:0] Put# [1:1:244:0:0:22:0] Put# [1:1:245:0:0:5:0] Put# [1:1:246:0:0:54:0] Put# [1:1:247:0:0:68:0] Put# [1:1:248:0:0:37:0] Put# [1:1:249:0:0:5:0] Put# [1:1:250:0:0:36:0] Put# [1:1:251:0:0:86:0] Put# [1:1:252:0:0:73:0] Put# [1:1:253:0:0:40:0] Put# [1:1:254:0:0:58:0] Put# [1:1:255:0:0:21:0] Put# [1:1:256:0:0:34:0] Put# [1:1:257:0:0:69:0] Put# [1:1:258:0:0:33:0] Put# [1:1:259:0:0:33:0] Put# [1:1:260:0:0:3:0] Put# [1:1:261:0:0:4:0] Put# [1:1:262:0:0:95:0] Put# [1:1:263:0:0:51:0] Put# [1:1:264:0:0:12:0] Put# [1:1:265:0:0:97:0] Put# [1:1:266:0:0:15:0] Put# [1:1:267:0:0:98:0] Put# [1:1:268:0:0:74:0] Put# [1:1:269:0:0:87:0] Put# [1:1:270:0:0:8:0] Put# [1:1:271:0:0:67:0] Put# [1:1:272:0:0:18:0] Put# [1:1:273:0:0:77:0] Put# [1:1:274:0:0:50:0] Put# [1:1:275:0:0:65:0] Put# [1:1:276:0:0:90:0] Put# [1:1:277:0:0:30:0] Put# [1:1:278:0:0:61:0] Put# [1:1:279:0:0:27:0] Put# [1:1:280:0:0:20:0] Put# [1:1:281:0:0:96:0] Put# [1:1:282:0:0:30:0] Put# [1:1:283:0:0:29:0] Put# [1:1:284:0:0:73:0] Put# [1:1:285:0:0:21:0] Put# [1:1:286:0:0:71:0] Put# [1:1:287:0:0:45:0] Put# [1:1:288:0:0:43:0] Put# [1:1:289:0:0:38:0] Put# [1:1:290:0:0:12:0] Put# [1:1:291:0:0:9:0] Put# [1:1:292:0:0:46:0] Put# [1:1:293:0:0:94:0] Put# [1:1:294:0:0:26:0] Put# [1:1:295:0:0:10:0] Put# [1:1:296:0:0:38:0] Put# [1:1:297:0:0:61:0] Put# [1:1:298:0:0:48:0] Put# [1:1:299:0:0:15:0] Put# [1:1:300:0:0:89:0] Put# [1:1:301:0:0:46:0] Put# [1:1:302:0:0:20:0] Put# [1:1:303:0:0:59:0] Put# [1:1:304:0:0:32:0] Put# [1:1:305:0:0:40:0] Put# [1:1:306:0:0:48:0] Put# [1:1:307:0:0:46:0] Put# [1:1:308:0:0:1:0] Put# [1:1:309:0:0:62:0] Put# [1:1:310:0:0:32:0] Put# [1:1:311:0:0:92:0] Put# [1:1:312:0:0:71:0] Put# [1:1:313:0:0:88:0] Put# [1:1:314:0:0:15:0] Put# [1:1:315:0:0:49:0] Put# [1:1:316:0:0:43:0] Put# [1:1:317:0:0:4:0] Put# 
[1:1:318:0:0:20:0] Put# [1:1:319:0:0:42:0] Put# [1:1:320:0:0:10:0] Put# [1:1:321:0:0:13:0] Put# [1:1:322:0:0:64:0] Put# [1:1:323:0:0:8:0] Put# [1:1:324:0:0:22:0] Put# [1:1:325:0:0:11:0] Put# [1:1:326:0:0:52:0] Put# [1:1:327:0:0:28:0] Put# [1:1:328:0:0:53:0] Put# [1:1:329:0:0:89:0] Put# [1:1:330:0:0:97:0] Put# [1:1:331:0:0:58:0] Put# [1:1:332:0:0:89:0] Put# [1:1:333:0:0:84:0] Put# [1:1:334:0:0:18:0] Put# [1:1:335:0:0:33:0] Put# [1:1:336:0:0:73:0] Put# [1:1:337:0:0:78:0] Put# [1:1:338:0:0:44:0] Put# [1:1:339:0:0:17:0] Put# [1:1:340:0:0:31:0] Put# [1:1:341:0:0:61:0] Put# [1:1:342:0:0:21:0] Put# [1:1:343:0:0:24:0] Put# [1:1:344:0:0:2:0] Put# [1:1:345:0:0:77:0] Put# [1:1:346:0:0:83:0] Put# [1:1:347:0:0:29:0] Put# [1:1:348:0:0:51:0] Put# [1:1:349:0:0:53:0] Put# [1:1:350:0:0:71:0] Put# [1:1:351:0:0:16:0] Put# [1:1:352:0:0:61:0] Put# [1:1:353:0:0:100:0] Put# [1:1:354:0:0:66:0] Put# [1:1:355:0:0:47:0] Put# [1:1:356:0:0:57:0] Put# [1:1:357:0:0:52:0] Put# [1:1:358:0:0:16:0] Put# [1:1:359:0:0:22:0] Put# [1:1:360:0:0:32:0] Put# [1:1:361:0:0:43:0] Put# [1:1:362:0:0:48:0] Put# [1:1:363:0:0:21:0] Put# [1:1:364:0:0:100:0] Put# [1:1:365:0:0:46:0] Put# [1:1:366:0:0:87:0] Put# [1:1:367:0:0:97:0] Put# [1:1:368:0:0:44:0] Put# [1:1:369:0:0:97:0] Put# [1:1:370:0:0:97:0] Put# [1:1:371:0:0:23:0] Put# [1:1:372:0:0:81:0] Put# [1:1:373:0:0:71:0] Put# [1:1:374:0:0:79:0] Put# [1:1:375:0:0:17:0] Put# [1:1:376:0:0:55:0] Put# [1:1:377:0:0:42:0] Put# [1:1:378:0:0:57:0] Put# [1:1:379:0:0:96:0] Put# [1:1:380:0:0:83:0] Put# [1:1:381:0:0:43:0] Put# [1:1:382:0:0:95:0] Put# [1:1:383:0:0:50:0] Put# [1:1:384:0:0:52:0] Put# [1:1:385:0:0:26:0] Put# [1:1:386:0:0:2:0] Put# [1:1:387:0:0:46:0] Put# [1:1:388:0:0:7:0] Put# [1:1:389:0:0:60:0] Put# [1:1:390:0:0:100:0] Put# [1:1:391:0:0:76:0] Put# [1:1:392:0:0:97:0] Put# [1:1:393:0:0:95:0] Put# [1:1:394:0:0:98:0] Put# [1:1:395:0:0:94:0] Put# [1:1:396:0:0:73:0] Put# [1:1:397:0:0:28:0] Put# [1:1:398:0:0:6:0] Put# [1:1:399:0:0:4:0] Put# [1:1:400:0:0:6:0] Put# [1:1:401:0:0:59:0] Put# [1:1:402:0:0:42:0] Put# [1:1:403:0:0:80:0] Put# [1:1:404:0:0:85:0] Put# [1:1:405:0:0:76:0] Put# [1:1:406:0:0:62:0] Put# [1:1:407:0:0:91:0] Put# [1:1:408:0:0:47:0] Put# [1:1:409:0:0:35:0] Put# [1:1:410:0:0:72:0] Put# [1:1:411:0:0:98:0] Put# [1:1:412:0:0:21:0] Put# [1:1:413:0:0:69:0] Put# [1:1:414:0:0:81:0] Put# [1:1:415:0:0:66:0] Put# [1:1:416:0:0:65:0] Put# [1:1:417:0:0:48:0] Put# [1:1:418:0:0:42:0] Put# [1:1:419:0:0:22:0] Put# [1:1:420:0:0:90:0] Put# [1:1:421:0:0:29:0] Put# [1:1:422:0:0:68:0] Put# [1:1:423:0:0:32:0] Put# [1:1:424:0:0:45:0] Put# [1:1:425:0:0:8 ... 
0] Put# [1:3:9520:0:0:6:0] Put# [1:3:9521:0:0:80:0] Put# [1:3:9522:0:0:59:0] Put# [1:3:9523:0:0:59:0] Put# [1:3:9524:0:0:72:0] Put# [1:3:9525:0:0:55:0] Put# [1:3:9526:0:0:40:0] Put# [1:3:9527:0:0:9:0] Put# [1:3:9528:0:0:50:0] Put# [1:3:9529:0:0:78:0] Put# [1:3:9530:0:0:87:0] Put# [1:3:9531:0:0:72:0] Put# [1:3:9532:0:0:76:0] Put# [1:3:9533:0:0:92:0] Put# [1:3:9534:0:0:36:0] Put# [1:3:9535:0:0:90:0] Put# [1:3:9536:0:0:62:0] Put# [1:3:9537:0:0:95:0] Put# [1:3:9538:0:0:12:0] Put# [1:3:9539:0:0:40:0] Put# [1:3:9540:0:0:45:0] Put# [1:3:9541:0:0:23:0] Put# [1:3:9542:0:0:43:0] Put# [1:3:9543:0:0:83:0] Put# [1:3:9544:0:0:37:0] Put# [1:3:9545:0:0:11:0] Put# [1:3:9546:0:0:50:0] Put# [1:3:9547:0:0:15:0] Put# [1:3:9548:0:0:64:0] Put# [1:3:9549:0:0:82:0] Put# [1:3:9550:0:0:68:0] Put# [1:3:9551:0:0:44:0] Put# [1:3:9552:0:0:87:0] Put# [1:3:9553:0:0:42:0] Put# [1:3:9554:0:0:63:0] Put# [1:3:9555:0:0:61:0] Put# [1:3:9556:0:0:97:0] Put# [1:3:9557:0:0:20:0] Put# [1:3:9558:0:0:36:0] Put# [1:3:9559:0:0:65:0] Put# [1:3:9560:0:0:48:0] Put# [1:3:9561:0:0:31:0] Put# [1:3:9562:0:0:13:0] Put# [1:3:9563:0:0:53:0] Put# [1:3:9564:0:0:90:0] Put# [1:3:9565:0:0:13:0] Put# [1:3:9566:0:0:74:0] Put# [1:3:9567:0:0:58:0] Put# [1:3:9568:0:0:82:0] Put# [1:3:9569:0:0:3:0] Put# [1:3:9570:0:0:62:0] Put# [1:3:9571:0:0:16:0] Put# [1:3:9572:0:0:36:0] Put# [1:3:9573:0:0:19:0] Put# [1:3:9574:0:0:34:0] Put# [1:3:9575:0:0:26:0] Put# [1:3:9576:0:0:36:0] Put# [1:3:9577:0:0:94:0] Put# [1:3:9578:0:0:53:0] Put# [1:3:9579:0:0:100:0] Put# [1:3:9580:0:0:81:0] Put# [1:3:9581:0:0:58:0] Put# [1:3:9582:0:0:12:0] Put# [1:3:9583:0:0:4:0] Put# [1:3:9584:0:0:47:0] Put# [1:3:9585:0:0:15:0] Put# [1:3:9586:0:0:93:0] Put# [1:3:9587:0:0:11:0] Put# [1:3:9588:0:0:38:0] Put# [1:3:9589:0:0:60:0] Put# [1:3:9590:0:0:8:0] Put# [1:3:9591:0:0:39:0] Put# [1:3:9592:0:0:83:0] Put# [1:3:9593:0:0:89:0] Put# [1:3:9594:0:0:49:0] Put# [1:3:9595:0:0:89:0] Put# [1:3:9596:0:0:9:0] Put# [1:3:9597:0:0:100:0] Put# [1:3:9598:0:0:82:0] Put# [1:3:9599:0:0:15:0] Put# [1:3:9600:0:0:56:0] Put# [1:3:9601:0:0:55:0] Put# [1:3:9602:0:0:85:0] Put# [1:3:9603:0:0:87:0] Put# [1:3:9604:0:0:24:0] Put# [1:3:9605:0:0:97:0] Put# [1:3:9606:0:0:19:0] Put# [1:3:9607:0:0:56:0] Put# [1:3:9608:0:0:27:0] Put# [1:3:9609:0:0:35:0] Put# [1:3:9610:0:0:53:0] Put# [1:3:9611:0:0:25:0] Put# [1:3:9612:0:0:24:0] Put# [1:3:9613:0:0:37:0] Put# [1:3:9614:0:0:22:0] Put# [1:3:9615:0:0:4:0] Put# [1:3:9616:0:0:31:0] Put# [1:3:9617:0:0:85:0] Put# [1:3:9618:0:0:82:0] Put# [1:3:9619:0:0:11:0] Put# [1:3:9620:0:0:14:0] Put# [1:3:9621:0:0:41:0] Put# [1:3:9622:0:0:28:0] Put# [1:3:9623:0:0:56:0] Put# [1:3:9624:0:0:55:0] Put# [1:3:9625:0:0:16:0] Put# [1:3:9626:0:0:23:0] Put# [1:3:9627:0:0:44:0] Put# [1:3:9628:0:0:61:0] Put# [1:3:9629:0:0:28:0] Put# [1:3:9630:0:0:50:0] Put# [1:3:9631:0:0:100:0] Put# [1:3:9632:0:0:63:0] Put# [1:3:9633:0:0:59:0] Put# [1:3:9634:0:0:44:0] Put# [1:3:9635:0:0:72:0] Put# [1:3:9636:0:0:18:0] Put# [1:3:9637:0:0:44:0] Put# [1:3:9638:0:0:68:0] Put# [1:3:9639:0:0:20:0] Put# [1:3:9640:0:0:81:0] Put# [1:3:9641:0:0:63:0] Put# [1:3:9642:0:0:91:0] Put# [1:3:9643:0:0:26:0] Put# [1:3:9644:0:0:47:0] Put# [1:3:9645:0:0:17:0] Put# [1:3:9646:0:0:86:0] Put# [1:3:9647:0:0:67:0] Put# [1:3:9648:0:0:24:0] Put# [1:3:9649:0:0:32:0] Put# [1:3:9650:0:0:32:0] Put# [1:3:9651:0:0:100:0] Put# [1:3:9652:0:0:64:0] Put# [1:3:9653:0:0:82:0] Put# [1:3:9654:0:0:28:0] Put# [1:3:9655:0:0:71:0] Put# [1:3:9656:0:0:27:0] Put# [1:3:9657:0:0:73:0] Put# [1:3:9658:0:0:27:0] Put# [1:3:9659:0:0:6:0] Put# [1:3:9660:0:0:10:0] Put# [1:3:9661:0:0:38:0] Put# 
[1:3:9662:0:0:20:0] Put# [1:3:9663:0:0:89:0] Put# [1:3:9664:0:0:24:0] Put# [1:3:9665:0:0:10:0] Put# [1:3:9666:0:0:32:0] Put# [1:3:9667:0:0:38:0] Put# [1:3:9668:0:0:7:0] Put# [1:3:9669:0:0:41:0] Put# [1:3:9670:0:0:82:0] Put# [1:3:9671:0:0:68:0] Put# [1:3:9672:0:0:18:0] Put# [1:3:9673:0:0:8:0] Put# [1:3:9674:0:0:2:0] Put# [1:3:9675:0:0:31:0] Put# [1:3:9676:0:0:33:0] Put# [1:3:9677:0:0:12:0] Put# [1:3:9678:0:0:78:0] Put# [1:3:9679:0:0:41:0] Put# [1:3:9680:0:0:93:0] Put# [1:3:9681:0:0:70:0] Put# [1:3:9682:0:0:15:0] Put# [1:3:9683:0:0:33:0] Put# [1:3:9684:0:0:96:0] Put# [1:3:9685:0:0:52:0] Put# [1:3:9686:0:0:70:0] Put# [1:3:9687:0:0:95:0] Put# [1:3:9688:0:0:98:0] Put# [1:3:9689:0:0:13:0] Put# [1:3:9690:0:0:78:0] Put# [1:3:9691:0:0:7:0] Put# [1:3:9692:0:0:36:0] Put# [1:3:9693:0:0:30:0] Put# [1:3:9694:0:0:64:0] Put# [1:3:9695:0:0:90:0] Put# [1:3:9696:0:0:86:0] Put# [1:3:9697:0:0:7:0] Put# [1:3:9698:0:0:99:0] Put# [1:3:9699:0:0:76:0] Put# [1:3:9700:0:0:63:0] Put# [1:3:9701:0:0:82:0] Put# [1:3:9702:0:0:1:0] Put# [1:3:9703:0:0:64:0] Put# [1:3:9704:0:0:55:0] Put# [1:3:9705:0:0:26:0] Put# [1:3:9706:0:0:17:0] Put# [1:3:9707:0:0:54:0] Put# [1:3:9708:0:0:37:0] Put# [1:3:9709:0:0:73:0] Put# [1:3:9710:0:0:35:0] Put# [1:3:9711:0:0:23:0] Put# [1:3:9712:0:0:1:0] Put# [1:3:9713:0:0:76:0] Put# [1:3:9714:0:0:53:0] Put# [1:3:9715:0:0:42:0] Put# [1:3:9716:0:0:73:0] Put# [1:3:9717:0:0:48:0] Put# [1:3:9718:0:0:56:0] Put# [1:3:9719:0:0:19:0] Put# [1:3:9720:0:0:87:0] Put# [1:3:9721:0:0:92:0] Put# [1:3:9722:0:0:68:0] Put# [1:3:9723:0:0:57:0] Put# [1:3:9724:0:0:19:0] Put# [1:3:9725:0:0:17:0] Put# [1:3:9726:0:0:76:0] Put# [1:3:9727:0:0:71:0] Put# [1:3:9728:0:0:3:0] Put# [1:3:9729:0:0:18:0] Put# [1:3:9730:0:0:63:0] Put# [1:3:9731:0:0:81:0] Put# [1:3:9732:0:0:87:0] Put# [1:3:9733:0:0:45:0] Put# [1:3:9734:0:0:57:0] Put# [1:3:9735:0:0:52:0] Put# [1:3:9736:0:0:32:0] Put# [1:3:9737:0:0:94:0] Put# [1:3:9738:0:0:93:0] Put# [1:3:9739:0:0:18:0] Put# [1:3:9740:0:0:98:0] Put# [1:3:9741:0:0:87:0] Put# [1:3:9742:0:0:14:0] Put# [1:3:9743:0:0:92:0] Put# [1:3:9744:0:0:20:0] Put# [1:3:9745:0:0:73:0] Put# [1:3:9746:0:0:68:0] Put# [1:3:9747:0:0:13:0] Put# [1:3:9748:0:0:24:0] Put# [1:3:9749:0:0:22:0] Put# [1:3:9750:0:0:68:0] Put# [1:3:9751:0:0:94:0] Put# [1:3:9752:0:0:92:0] Put# [1:3:9753:0:0:56:0] Put# [1:3:9754:0:0:24:0] Put# [1:3:9755:0:0:23:0] Put# [1:3:9756:0:0:2:0] Put# [1:3:9757:0:0:83:0] Put# [1:3:9758:0:0:3:0] Put# [1:3:9759:0:0:67:0] Put# [1:3:9760:0:0:53:0] Put# [1:3:9761:0:0:32:0] Put# [1:3:9762:0:0:16:0] Put# [1:3:9763:0:0:31:0] Put# [1:3:9764:0:0:64:0] Put# [1:3:9765:0:0:23:0] Put# [1:3:9766:0:0:44:0] Put# [1:3:9767:0:0:63:0] Put# [1:3:9768:0:0:60:0] Put# [1:3:9769:0:0:38:0] Put# [1:3:9770:0:0:50:0] Put# [1:3:9771:0:0:38:0] Put# [1:3:9772:0:0:19:0] Put# [1:3:9773:0:0:32:0] Put# [1:3:9774:0:0:23:0] Put# [1:3:9775:0:0:83:0] Put# [1:3:9776:0:0:50:0] Put# [1:3:9777:0:0:8:0] Put# [1:3:9778:0:0:58:0] Put# [1:3:9779:0:0:60:0] Put# [1:3:9780:0:0:1:0] Put# [1:3:9781:0:0:75:0] Put# [1:3:9782:0:0:84:0] Put# [1:3:9783:0:0:11:0] Put# [1:3:9784:0:0:79:0] Put# [1:3:9785:0:0:88:0] Put# [1:3:9786:0:0:48:0] Put# [1:3:9787:0:0:52:0] Put# [1:3:9788:0:0:52:0] Put# [1:3:9789:0:0:89:0] Put# [1:3:9790:0:0:86:0] Put# [1:3:9791:0:0:41:0] Put# [1:3:9792:0:0:44:0] Put# [1:3:9793:0:0:34:0] Put# [1:3:9794:0:0:59:0] Put# [1:3:9795:0:0:33:0] Put# [1:3:9796:0:0:24:0] Put# [1:3:9797:0:0:35:0] Put# [1:3:9798:0:0:3:0] Put# [1:3:9799:0:0:64:0] Put# [1:3:9800:0:0:21:0] Put# [1:3:9801:0:0:47:0] Put# [1:3:9802:0:0:77:0] Put# [1:3:9803:0:0:69:0] Put# 
[1:3:9804:0:0:93:0] Put# [1:3:9805:0:0:100:0] Put# [1:3:9806:0:0:29:0] Put# [1:3:9807:0:0:62:0] Put# [1:3:9808:0:0:18:0] Put# [1:3:9809:0:0:57:0] Put# [1:3:9810:0:0:38:0] Put# [1:3:9811:0:0:22:0] Put# [1:3:9812:0:0:42:0] Put# [1:3:9813:0:0:34:0] Put# [1:3:9814:0:0:9:0] Put# [1:3:9815:0:0:87:0] Put# [1:3:9816:0:0:87:0] Put# [1:3:9817:0:0:8:0] Put# [1:3:9818:0:0:16:0] Put# [1:3:9819:0:0:28:0] Put# [1:3:9820:0:0:76:0] Put# [1:3:9821:0:0:14:0] Put# [1:3:9822:0:0:34:0] Put# [1:3:9823:0:0:34:0] Put# [1:3:9824:0:0:4:0] Put# [1:3:9825:0:0:39:0] Put# [1:3:9826:0:0:35:0] Put# [1:3:9827:0:0:55:0] Put# [1:3:9828:0:0:71:0] Put# [1:3:9829:0:0:27:0] Put# [1:3:9830:0:0:77:0] Put# [1:3:9831:0:0:85:0] Put# [1:3:9832:0:0:86:0] Put# [1:3:9833:0:0:69:0] Put# [1:3:9834:0:0:69:0] Put# [1:3:9835:0:0:6:0] Put# [1:3:9836:0:0:50:0] Put# [1:3:9837:0:0:42:0] Put# [1:3:9838:0:0:41:0] Put# [1:3:9839:0:0:78:0] Put# [1:3:9840:0:0:56:0] Put# [1:3:9841:0:0:76:0] Put# [1:3:9842:0:0:20:0] Put# [1:3:9843:0:0:54:0] Put# [1:3:9844:0:0:58:0] Put# [1:3:9845:0:0:56:0] Put# [1:3:9846:0:0:65:0] Put# [1:3:9847:0:0:44:0] Put# [1:3:9848:0:0:13:0] Put# [1:3:9849:0:0:31:0] Put# [1:3:9850:0:0:2:0] Put# [1:3:9851:0:0:42:0] Put# [1:3:9852:0:0:95:0] Put# [1:3:9853:0:0:65:0] Put# [1:3:9854:0:0:94:0] Put# [1:3:9855:0:0:58:0] Put# [1:3:9856:0:0:96:0] Put# [1:3:9857:0:0:5:0] Put# [1:3:9858:0:0:20:0] Put# [1:3:9859:0:0:34:0] Put# [1:3:9860:0:0:64:0] Put# [1:3:9861:0:0:76:0] Put# [1:3:9862:0:0:83:0] Put# [1:3:9863:0:0:24:0] Put# [1:3:9864:0:0:24:0] Put# [1:3:9865:0:0:55:0] Put# [1:3:9866:0:0:38:0] Put# [1:3:9867:0:0:53:0] Put# [1:3:9868:0:0:23:0] Put# [1:3:9869:0:0:75:0] Put# [1:3:9870:0:0:67:0] Put# [1:3:9871:0:0:89:0] Put# [1:3:9872:0:0:98:0] Put# [1:3:9873:0:0:12:0] Put# [1:3:9874:0:0:99:0] Put# [1:3:9875:0:0:81:0] Put# [1:3:9876:0:0:32:0] Put# [1:3:9877:0:0:62:0] Put# [1:3:9878:0:0:95:0] Put# [1:3:9879:0:0:38:0] Put# [1:3:9880:0:0:11:0] Put# [1:3:9881:0:0:38:0] Put# [1:3:9882:0:0:100:0] Put# [1:3:9883:0:0:33:0] Put# [1:3:9884:0:0:24:0] Put# [1:3:9885:0:0:56:0] Put# [1:3:9886:0:0:22:0] Put# [1:3:9887:0:0:60:0] Put# [1:3:9888:0:0:30:0] Put# [1:3:9889:0:0:21:0] Put# [1:3:9890:0:0:32:0] Put# [1:3:9891:0:0:47:0] Put# [1:3:9892:0:0:16:0] Put# [1:3:9893:0:0:28:0] Put# [1:3:9894:0:0:89:0] Put# [1:3:9895:0:0:73:0] Put# [1:3:9896:0:0:51:0] Put# [1:3:9897:0:0:25:0] Put# [1:3:9898:0:0:15:0] Put# [1:3:9899:0:0:55:0] Put# [1:3:9900:0:0:18:0] Put# [1:3:9901:0:0:65:0] Put# [1:3:9902:0:0:39:0] Put# [1:3:9903:0:0:92:0] Put# [1:3:9904:0:0:73:0] Put# [1:3:9905:0:0:21:0] Put# [1:3:9906:0:0:71:0] Put# [1:3:9907:0:0:7:0] Put# [1:3:9908:0:0:22:0] Put# [1:3:9909:0:0:80:0] Put# [1:3:9910:0:0:98:0] Put# [1:3:9911:0:0:20:0] Put# [1:3:9912:0:0:77:0] Put# [1:3:9913:0:0:24:0] Put# [1:3:9914:0:0:94:0] Put# [1:3:9915:0:0:12:0] Put# [1:3:9916:0:0:63:0] Put# [1:3:9917:0:0:15:0] Put# [1:3:9918:0:0:71:0] Put# [1:3:9919:0:0:68:0] Put# [1:3:9920:0:0:10:0] Put# [1:3:9921:0:0:7:0] Put# [1:3:9922:0:0:16:0] Put# [1:3:9923:0:0:35:0] Put# [1:3:9924:0:0:54:0] Put# [1:3:9925:0:0:92:0] Put# [1:3:9926:0:0:11:0] Put# [1:3:9927:0:0:72:0] Put# [1:3:9928:0:0:5:0] Put# [1:3:9929:0:0:49:0] Put# [1:3:9930:0:0:16:0] Put# [1:3:9931:0:0:49:0] Put# [1:3:9932:0:0:85:0] Put# [1:3:9933:0:0:24:0] Put# [1:3:9934:0:0:10:0] Put# [1:3:9935:0:0:31:0] Put# [1:3:9936:0:0:64:0] Put# [1:3:9937:0:0:94:0] Put# [1:3:9938:0:0:59:0] Put# [1:3:9939:0:0:90:0] Put# [1:3:9940:0:0:55:0] Put# [1:3:9941:0:0:15:0] Put# [1:3:9942:0:0:31:0] Put# [1:3:9943:0:0:9:0] Put# [1:3:9944:0:0:43:0] Put# [1:3:9945:0:0:19:0] Put# 
[1:3:9946:0:0:63:0] Put# [1:3:9947:0:0:43:0] Put# [1:3:9948:0:0:41:0] Put# [1:3:9949:0:0:7:0] Put# [1:3:9950:0:0:67:0] Put# [1:3:9951:0:0:24:0] Put# [1:3:9952:0:0:77:0] Put# [1:3:9953:0:0:89:0] Put# [1:3:9954:0:0:93:0] Put# [1:3:9955:0:0:50:0] Put# [1:3:9956:0:0:27:0] Put# [1:3:9957:0:0:54:0] Put# [1:3:9958:0:0:8:0] Put# [1:3:9959:0:0:18:0] Put# [1:3:9960:0:0:92:0] Put# [1:3:9961:0:0:61:0] Put# [1:3:9962:0:0:28:0] Put# [1:3:9963:0:0:37:0] Put# [1:3:9964:0:0:61:0] Put# [1:3:9965:0:0:62:0] Put# [1:3:9966:0:0:91:0] Put# [1:3:9967:0:0:89:0] Put# [1:3:9968:0:0:58:0] Put# [1:3:9969:0:0:22:0] Put# [1:3:9970:0:0:35:0] Put# [1:3:9971:0:0:53:0] Put# [1:3:9972:0:0:72:0] Put# [1:3:9973:0:0:42:0] Put# [1:3:9974:0:0:37:0] Put# [1:3:9975:0:0:61:0] Put# [1:3:9976:0:0:57:0] Put# [1:3:9977:0:0:1:0] Put# [1:3:9978:0:0:49:0] Put# [1:3:9979:0:0:59:0] Put# [1:3:9980:0:0:21:0] Put# [1:3:9981:0:0:69:0] Put# [1:3:9982:0:0:60:0] Put# [1:3:9983:0:0:22:0] Put# [1:3:9984:0:0:8:0] Put# [1:3:9985:0:0:76:0] Put# [1:3:9986:0:0:16:0] Put# [1:3:9987:0:0:53:0] Put# [1:3:9988:0:0:42:0] Put# [1:3:9989:0:0:12:0] Put# [1:3:9990:0:0:32:0] Put# [1:3:9991:0:0:76:0] Put# [1:3:9992:0:0:32:0] Put# [1:3:9993:0:0:40:0] Put# [1:3:9994:0:0:75:0] Put# [1:3:9995:0:0:70:0] Put# [1:3:9996:0:0:47:0] Put# [1:3:9997:0:0:28:0] Put# [1:3:9998:0:0:86:0] Put# [1:3:9999:0:0:65:0] Put# [1:3:10000:0:0:26:0] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Query |91.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> TBackupCollectionTests::BackupDroppedCollection [GOOD] >> TBackupCollectionTests::BackupAbsentDirs >> TBackupCollectionTests::DropCollectionWithMultipleBackups [GOOD] >> TBackupCollectionTests::DropCollectionWithNestedTables >> TKeyValueTest::TestRenameToLongKey [GOOD] >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:84:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:88:2117] sender: [8:89:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:88:2117] Leader for TabletID 72057594037927937 is [8:88:2117] sender: [8:108:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:109:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:88:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:91:2057] recipient: [10:90:2120] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:93:2057] recipient: [10:90:2120] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:92:2121] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:208:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2120] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:93:2057] recipient: [11:90:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2121] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:78:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:81:2057] recipient: [15:80:2112] Leader for TabletID 72057594037927937 is [15:82:2113] sender: [15:83:2057] recipient: [15:80:2112] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:82:2113] Leader for TabletID 72057594037927937 is [15:82:2113] sender: [15:198:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:79:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:82:2057] recipient: [17:81:2112] Leader for TabletID 72057594037927937 is [17:83:2113] sender: [17:84:2057] recipient: [17:81:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:83:2113] Leader for TabletID 72057594037927937 is [17:83:2113] sender: [17:199:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:85:2057] recipient: [18:84:2115] Leader for TabletID 72057594037927937 is [18:86:2116] sender: [18:87:2057] recipient: [18:84:2115] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:86:2116] Leader for TabletID 72057594037927937 is [18:86:2116] sender: [18:202:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:83:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:86:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:87:2116] sender: [20:88:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:87:2116] Leader for TabletID 72057594037927937 is [20:87:2116] sender: [20:203:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:89:2057] recipient: [21:88:2118] Leader for TabletID 72057594037927937 is [21:90:2119] sender: [21:91:2057] recipient: [21:88:2118] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:90:2119] Leader for TabletID 72057594037927937 is [21:90:2119] sender: [21:206:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:86:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:89:2057] recipient: [22:88:2118] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:91:2057] recipient: [22:88:2118] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! 
new actor is[22:90:2119] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:206:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:87:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:90:2057] recipient: [23:89:2118] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:92:2057] recipient: [23:89:2118] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:91:2119] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:207:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> KqpQueryPerf::Replace+QueryService+UseSink >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::MigratedPathRecreation >> KqpQueryPerf::MultiRead-QueryService [GOOD] >> KqpQueryPerf::Insert-QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink >> KqpQueryPerf::IndexUpsert+QueryService-UseSink >> TBackupCollectionTests::BackupAbsentDirs [GOOD] >> TBackupCollectionTests::BackupNonIncrementalCollection >> KqpQueryPerf::Delete-QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService+UseSink >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath >> KqpQueryPerf::Upsert+QueryService-UseSink >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 9668, MsgBus: 24273 2025-12-23T18:31:13.990942Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122750880265605:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:13.990994Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00401b/r3tmp/tmp6LJAAv/pdisk_1.dat 2025-12-23T18:31:14.321046Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:14.419750Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:14.419857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:14.433582Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122750880265578:2081] 1766514673989249 != 1766514673989252 2025-12-23T18:31:14.447085Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:14.454964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9668, node 1 2025-12-23T18:31:14.621710Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:14.643928Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:14.643960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:14.643973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:14.644095Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24273 2025-12-23T18:31:15.005901Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24273 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514674519 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:15.287610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:31:15.296790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:15.319707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:15.490959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:15.709479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:15.805605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:18.128364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122772355103935:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.128521Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.134937Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122772355103945:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.135035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.525195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:18.558070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:18.593608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:18.629963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:18.671042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:18.703622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:18.749182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:18.795293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:18.880067Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122772355104815:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.880163Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.880388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122772355104820:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.880437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122772355104821:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.880563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.885447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:18.898587Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122772355104824:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:18.991418Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122750880265605:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:18.991513Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:31:18.997413Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122772355104875:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TBackupCollectionTests::DropCollectionWithNestedTables [GOOD] >> TBackupCollectionTests::DropLargeBackupCollection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 29289, MsgBus: 24632 2025-12-23T18:31:14.708410Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122755821697147:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:14.708690Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004010/r3tmp/tmpykQ46w/pdisk_1.dat 2025-12-23T18:31:15.132027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:15.132393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:15.132542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:15.137781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:15.224082Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122755821696936:2081] 1766514674667164 != 1766514674667167 2025-12-23T18:31:15.244602Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29289, node 1 2025-12-23T18:31:15.367105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:15.367159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:15.367173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:15.367314Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:15.398496Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24632 2025-12-23T18:31:15.706269Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24632 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514675394 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:15.964285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:15.971720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:15.980818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:16.111425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:16.295796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:16.372404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:18.705639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122773001567989:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.705765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.706314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122773001567999:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.706381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.084045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.117155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.152004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.213801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.240908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.313898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.346807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.386699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.453930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122777296536170:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.454002Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.454243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122777296536176:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.454256Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122777296536175:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.454279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.457731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:19.468440Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122777296536179:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:19.555422Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122777296536230:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:19.710837Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122755821697147:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:19.710920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-false [GOOD] >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-true >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> KqpQueryPerf::ComputeLength+QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19436, MsgBus: 3723 2025-12-23T18:31:15.896764Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122758225911606:2061];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:15.898764Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004008/r3tmp/tmpQhnCxH/pdisk_1.dat 2025-12-23T18:31:16.182600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:16.182720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:16.236689Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:16.248042Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122758225911585:2081] 1766514675895023 != 1766514675895026 2025-12-23T18:31:16.260617Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:16.262495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 19436, node 1 2025-12-23T18:31:16.395409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:16.395442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:16.395450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:16.395552Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:16.477729Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3723 TClient is connected to server localhost:3723 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-12-23T18:31:16.977265Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514676318 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:17.180100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:17.190549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:31:17.215159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:17.365646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:17.543123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:17.618037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.660970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122775405782644:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.661097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.664272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122775405782654:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.664372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:20.021049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:20.073530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:20.100656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:20.130455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:20.158093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:20.188965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:20.220151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:20.266842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:20.341276Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122779700750816:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:20.341366Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:20.341688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122779700750821:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:20.341721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122779700750822:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:20.342035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:20.345843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:20.373046Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122779700750825:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:31:20.463753Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122779700750876:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:20.898841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122758225911606:2061];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:20.898928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService+UseSink >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] >> TBackupCollectionTests::DropCollectionWithFullBackup >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] Test command err: RandomSeed# 3575276556187260083 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] Test command err: 2025-12-23T18:30:01.106152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:01.106236Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:01.108997Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:30:01.132642Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:30:01.133061Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:30:01.133412Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:30:01.185951Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:30:01.197059Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:30:01.197172Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:30:01.198983Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:30:01.199077Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:30:01.199133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:30:01.199475Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:30:01.199549Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:30:01.199626Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:30:01.305256Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:30:01.339722Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:30:01.339933Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:30:01.340055Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:30:01.340103Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:30:01.340150Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:30:01.340189Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:01.340376Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:01.340445Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:01.340837Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:30:01.340985Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:30:01.341146Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:30:01.341226Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:30:01.341286Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:30:01.341333Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:30:01.341369Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:30:01.341401Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:30:01.341454Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:30:01.341555Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: 
StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:01.341597Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:01.341648Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:30:01.354549Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:30:01.354654Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:30:01.354830Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:30:01.355052Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:30:01.355116Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:30:01.355190Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:30:01.355246Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:30:01.355293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:30:01.355342Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:30:01.355386Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:30:01.355728Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:30:01.355788Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:30:01.355834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:30:01.355903Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:30:01.355958Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:30:01.355991Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:30:01.356028Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:30:01.356068Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:30:01.356099Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 
9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:30:01.372866Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:30:01.372967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:30:01.373014Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:30:01.373059Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:30:01.373141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:30:01.373716Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:01.373776Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:30:01.373827Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:30:01.373982Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:30:01.374017Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:30:01.374143Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:30:01.374205Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:30:01.374261Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:30:01.374452Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:30:01.378485Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:30:01.378650Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:30:01.382166Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:01.382246Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:30:01.382334Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:30:01.382386Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:30:01.382429Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:30:01.382475Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:30:01.382529Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 2:347:2314]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2025-12-23T18:31:23.702077Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.702106Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2025-12-23T18:31:23.702174Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2025-12-23T18:31:23.702197Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.702220Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2025-12-23T18:31:23.702295Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2025-12-23T18:31:23.702321Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.702353Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2025-12-23T18:31:23.702417Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-12-23T18:31:23.702440Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.702465Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2025-12-23T18:31:23.702524Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-12-23T18:31:23.702556Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.702579Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2025-12-23T18:31:23.702640Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-12-23T18:31:23.702663Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.702689Z node 32 :TX_DATASHARD DEBUG: 
datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2025-12-23T18:31:23.702782Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-12-23T18:31:23.702818Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.702850Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2025-12-23T18:31:23.702933Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-12-23T18:31:23.702956Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.702981Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2025-12-23T18:31:23.703086Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-12-23T18:31:23.703120Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.703151Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2025-12-23T18:31:23.703217Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-12-23T18:31:23.703240Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.703261Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2025-12-23T18:31:23.703340Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-12-23T18:31:23.703374Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.703404Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2025-12-23T18:31:23.703496Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-12-23T18:31:23.703527Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.703557Z node 32 
:TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2025-12-23T18:31:23.703644Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-12-23T18:31:23.703678Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.703709Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2025-12-23T18:31:23.703778Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-12-23T18:31:23.703812Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.703835Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2025-12-23T18:31:23.703925Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-12-23T18:31:23.703953Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.703977Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2025-12-23T18:31:23.704059Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-12-23T18:31:23.704094Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.704125Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2025-12-23T18:31:23.704187Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-12-23T18:31:23.704212Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.704234Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2025-12-23T18:31:23.704302Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-12-23T18:31:23.704332Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2025-12-23T18:31:23.704354Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2025-12-23T18:31:23.704435Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-12-23T18:31:23.704459Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.704480Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2025-12-23T18:31:23.704524Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-12-23T18:31:23.704546Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:31:23.704570Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 30 28 31 30 24 30 30 21 31 26 31 26 29 30 26 30 31 11 31 12 31 31 31 30 - - - 21 18 21 21 - actual 30 28 31 30 24 30 30 21 31 26 31 26 29 30 26 30 31 11 31 12 31 31 31 30 - - - 21 18 21 21 - interm - 5 5 3 5 3 4 3 5 - - 3 2 2 3 - - 2 - 2 - - - - - - - - - - - - |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] |91.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryPerf::Delete+QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete+QueryService+UseSink |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 64956, MsgBus: 1528 2025-12-23T18:31:17.840670Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122768418823079:2173];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:17.840725Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004003/r3tmp/tmpr0NnXX/pdisk_1.dat 2025-12-23T18:31:18.390076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:18.390199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:18.416449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:18.446804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:18.470111Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:18.472952Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122768418822926:2081] 1766514677818165 != 1766514677818168 TServer::EnableGrpc on GrpcPort 64956, node 1 2025-12-23T18:31:18.606624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:18.606665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:18.606706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:18.606853Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:18.637146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1528 2025-12-23T18:31:18.852760Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for 
task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514678551 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:19.147186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:19.155349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:19.171725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.301356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.447213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.520461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:21.472730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122785598693982:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.472863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.473178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122785598693992:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.473235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.854621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.904422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.955879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.003442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.042571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.098813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.131156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.191487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.287125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122789893662159:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.287188Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.287313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122789893662164:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.287352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122789893662166:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.287382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.291274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:22.314438Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122789893662168:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:22.406383Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122789893662219:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:22.840041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122768418823079:2173];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:22.840142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 24028, MsgBus: 6982 2025-12-23T18:31:18.298683Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122773447878957:2242];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:18.298841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:31:18.357904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004002/r3tmp/tmpQW6puy/pdisk_1.dat 2025-12-23T18:31:18.706954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:18.721054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:18.721197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:18.788213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:18.835074Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122773447878752:2081] 1766514678267198 != 1766514678267201 2025-12-23T18:31:18.840110Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24028, node 1 2025-12-23T18:31:18.915461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:18.915484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:18.915495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:18.915594Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:18.950829Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6982 2025-12-23T18:31:19.298608Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6982 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514678859 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:19.459631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:19.481816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.628057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.781768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.857304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:21.816636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122786332782512:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.816725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.817379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122786332782522:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.817432Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.148972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.186152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.217194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.254028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.290115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.344763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.398759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.456706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.547909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122790627750690:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.548001Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.548111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122790627750695:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.548400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122790627750697:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.548469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.552185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:22.564364Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122790627750698:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:22.645323Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122790627750750:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:23.299369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122773447878957:2242];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:23.299466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> TBackupCollectionTests::DropCollectionWithFullBackup [GOOD] >> TBackupCollectionTests::DropCollectionWithIncrementalBackup |91.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} >> VectorIndexBuildTest::PrefixedDuplicates [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-true |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9138, MsgBus: 1303 2025-12-23T18:31:19.624762Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122776570432059:2228];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:19.624841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ffd/r3tmp/tmpvHCWED/pdisk_1.dat 2025-12-23T18:31:19.934518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:19.934623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:19.993333Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:19.993504Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:19.994590Z node 1 :CONFIGS_DISPATCHER 
ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122776570431869:2081] 1766514679609630 != 1766514679609633 2025-12-23T18:31:20.004714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9138, node 1 2025-12-23T18:31:20.081614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:20.081646Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:20.081655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:20.081738Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:20.179555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1303 TClient is connected to server localhost:1303 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:31:20.623254Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514680056 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:20.641931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:20.647752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:20.656196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:20.799416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:20.967411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:21.037803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:23.114072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122793750302922:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.114197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.114509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122793750302932:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.114556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.511769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.552966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.589563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.620616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.660860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.695834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.752720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.797288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.875269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122793750303805:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.875349Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.875454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122793750303810:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.875672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122793750303812:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.875732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.879828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:23.893847Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122793750303814:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:23.958412Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122793750303865:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:24.621955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122776570432059:2228];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:24.622074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuildWithOverlap >> TBackupCollectionTests::DropLargeBackupCollection [GOOD] >> TBackupCollectionTests::DropSpecificCollectionAmongMultiple |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict 
[GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReadyUniq [GOOD] >> IndexBuildTest::RejectsCancel |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> KqpQueryPerf::IdxLookupJoin+QueryService |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] >> TBackupCollectionTests::DropCollectionWithIncrementalBackup [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveBackup >> KqpQueryPerf::KvRead+QueryService |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |91.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut >> KqpQueryPerf::Upsert+QueryService+UseSink >> TBackupCollectionTests::DropSpecificCollectionAmongMultiple [GOOD] >> KqpQueryPerf::Update+QueryService-UseSink >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalBackup >> KqpQueryPerf::IndexInsert+QueryService-UseSink >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink >> KqpQueryPerf::RangeRead+QueryService >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::Update-QueryService+UseSink >> KqpQueryPerf::ComputeLength-QueryService [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveBackup [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink >> TBackupCollectionTests::DropCollectionVerifyCDCCleanup |91.5%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi >> KqpQueryPerf::IndexUpsert+QueryService+UseSink >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalBackup [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalDrop >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] >> TBackupCollectionTests::DropCollectionVerifyCDCCleanup [GOOD] >> TBackupCollectionTests::DropCollectionRollbackOnFailure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 13636, MsgBus: 13868 2025-12-23T18:31:17.253185Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122768382397032:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:17.261968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004004/r3tmp/tmpFj9UdG/pdisk_1.dat 2025-12-23T18:31:17.634917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:17.635503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:17.635597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:17.642532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:17.778892Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122768382396989:2081] 1766514677230590 != 1766514677230593 2025-12-23T18:31:17.791420Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13636, node 1 2025-12-23T18:31:17.993846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:17.994412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:17.994419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:17.994425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:17.994519Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13868 2025-12-23T18:31:18.255298Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13868 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514677851 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:18.651528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:18.661501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:31:18.686257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:18.820239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.003321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.082588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:21.012462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122785562268053:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.012619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.018038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122785562268063:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.018166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.394076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.449032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.506614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.566630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.638891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.681400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.739086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.790420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.908541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122785562268941:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.908617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.910992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122785562268945:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.911060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.911368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122785562268948:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.916011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sc ... : Notification cookie mismatch for subscription [2:7587122797638635248:2081] 1766514684904887 != 1766514684904890 2025-12-23T18:31:25.031924Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:25.031997Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:25.036406Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20526, node 2 2025-12-23T18:31:25.173830Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:25.173865Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:25.173874Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:25.173958Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:25.189132Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9854 TClient is connected to server localhost:9854 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514685089 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:25.665462Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:25.697025Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:31:25.756152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:25.890460Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:25.916930Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:25.958891Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:28.415018Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122814818506300:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.415126Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.415616Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122814818506309:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.415699Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.490622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.523193Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.558900Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.595969Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.635628Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.675158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.726926Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.795004Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.887447Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122814818507184:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.887545Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.887705Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122814818507190:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.887717Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122814818507189:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.887739Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.891805Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:28.906820Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122814818507193:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:28.994164Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122814818507246:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:29.906245Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122797638635275:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:29.906333Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |91.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] >> IndexBuildTest::RejectsCancel [GOOD] >> IndexBuildTest::NullsAreUniq >> TBackupCollectionTests::DropCollectionRollbackOnFailure [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalDrop [GOOD] >> TBackupCollectionTests::DropErrorRecoveryTest >> TTxDataShardReshuffleKMeansScan::MainToBuildWithOverlap [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Query [GOOD] >> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD] >> TBackupCollectionTests::DropCollectionValidationCases >> TTxDataShardReshuffleKMeansScan::BuildToPosting >> KqpQueryPerf::IdxLookupJoin-QueryService >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] >> KqpQueryPerf::KvRead+QueryService [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Table >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-true [FAIL] >> TBackupCollectionTests::DropCollectionValidationCases [GOOD] >> TBackupCollectionTests::DropCollectionVerifyLocalDatabaseCleanup >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] >> KqpQueryPerf::RangeRead+QueryService [GOOD] >> KqpQueryPerf::KvRead-QueryService >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] >> TBackupCollectionTests::DropErrorRecoveryTest [GOOD] >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] >> VectorIndexBuildTest::Shard_Build_Error >> TBackupCollectionTests::RestorePathStatePersistenceAcrossRestart |91.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |91.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 20413, MsgBus: 20069 2025-12-23T18:31:31.191187Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122827091791077:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:31.191291Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ff0/r3tmp/tmpljidKk/pdisk_1.dat 2025-12-23T18:31:31.427103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:31.484828Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:31.486177Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122827091791054:2081] 1766514691190401 != 1766514691190404 2025-12-23T18:31:31.500317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:31.500478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:31.504422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20413, node 1 2025-12-23T18:31:31.598950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:31.598976Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:31.598986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:31.599111Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:31.603643Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20069 TClient is connected to server localhost:20069 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514691550 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:32.097748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:32.117839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.206706Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:32.284584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.409752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.473816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:33.869694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122835681727518:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.869805Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.870145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122835681727528:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.870228Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.181820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.206178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.232286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.255950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.280754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.307339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.360563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.393285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.450345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122839976695696:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.450436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122839976695701:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.450442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.450640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122839976695703:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.450687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.453478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:34.461612Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122839976695704:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:34.517562Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122839976695756:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24146, MsgBus: 13912 2025-12-23T18:31:23.237090Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122794679423112:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:23.237268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ff9/r3tmp/tmpBgnMJV/pdisk_1.dat 2025-12-23T18:31:23.554853Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:23.608729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:23.608845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:23.610636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:23.685115Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24146, node 1 2025-12-23T18:31:23.774118Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:23.774146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:23.774152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:23.774272Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:23.819193Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13912 TClient is connected to server localhost:13912 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:31:24.254897Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514683703 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:24.262073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:24.272407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:24.282384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:24.466920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:24.614006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:24.682289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:26.734413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122807564326813:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.734542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.736380Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122807564326823:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.736456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:27.037702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:27.078935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:27.128132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:27.173979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:27.217946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:27.268572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:27.301495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:27.344458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:27.415052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122811859294990:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:27.415135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:27.415141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122811859294995:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:27.415341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122811859294997:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:27.415381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:27.418653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:27.428054Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122811859294999:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:27.497119Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122811859295050:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:28.235131Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122794679423112:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:28.235342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28944, MsgBus: 63709 2025-12-23T18:31:30.433536Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122824557222281:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:30.433595Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ff3/r3tmp/tmpuNkILG/pdisk_1.dat 2025-12-23T18:31:30.655307Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:30.708444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:30.708575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:30.719789Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:30.730408Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122824557222255:2081] 1766514690431854 != 1766514690431857 2025-12-23T18:31:30.731244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28944, node 1 2025-12-23T18:31:30.807869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:30.807897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:30.807904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:30.807982Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:30.925982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:63709 TClient is connected to 
server localhost:63709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514690780 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:31.264138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:31.285219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:31.396682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:31.444779Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:31.543217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:31.602963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:33.621914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122837442126021:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.622014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.622284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122837442126031:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.622328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.964721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.993078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.019524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.046316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.074025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.104801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.161483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.194854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.254952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122841737094199:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.255093Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.255175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122841737094204:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.255325Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122841737094206:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.255385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.259048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:34.268834Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122841737094208:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:31:34.351717Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122841737094259:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:35.433769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122824557222281:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:35.433862Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5827, MsgBus: 15084 2025-12-23T18:31:22.603321Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122789552413224:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:22.603567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ffb/r3tmp/tmpAx5TbD/pdisk_1.dat 2025-12-23T18:31:22.826892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:22.838285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:22.838422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:22.897329Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:22.915794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5827, node 1 2025-12-23T18:31:23.012447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:23.012472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:23.012480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:23.012578Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:23.081221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15084 TClient is connected to server localhost:15084 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514682975 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:23.571369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:23.586824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:31:23.597156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.619195Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:23.729619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:23.870772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:23.934373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:26.142502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122806732284247:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.142642Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.145390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122806732284257:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.145460Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.480960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.511386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.538379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.565164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.599236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.632503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.663120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.706064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.775678Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122806732285125:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.775765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.775987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122806732285130:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.776027Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122806732285131:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.776069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.780475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:26.792227Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122806732285134:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:26.851981Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122806732285185:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:27.603733Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122789552413224:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:27.603831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10831, MsgBus: 16587 2025-12-23T18:31:30.220933Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122824535463399:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:30.221390Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ff5/r3tmp/tmp4kgViR/pdisk_1.dat 2025-12-23T18:31:30.451805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:30.451939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:30.485504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:30.502448Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:30.510360Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:30.518506Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122824535463296:2081] 1766514690216941 != 1766514690216944 TServer::EnableGrpc on GrpcPort 10831, node 1 2025-12-23T18:31:30.578877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:30.578910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:30.578919Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:30.579086Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:30.732801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16587 TClient is connected to 
server localhost:16587 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514690570 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:30.995498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:31.005311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:31.107777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:31.233251Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:31.243362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:31.313712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:33.056188Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122837420367057:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.056288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.056526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122837420367067:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.056576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.311375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.335642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.361888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.385517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.408024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.434522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.488661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.524283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.587316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122837420367938:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.587397Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.587502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122837420367943:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.587551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122837420367944:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.587611Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.590380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:33.599106Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122837420367947:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:33.668844Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122837420367998:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:35.219757Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122824535463399:2141];send_to=[0:7307199536658146131:7762515]; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1902, MsgBus: 16005 2025-12-23T18:31:14.615479Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122754714160854:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:14.615773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00400d/r3tmp/tmpPtwuKb/pdisk_1.dat 2025-12-23T18:31:15.042663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:15.042782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:15.089522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:15.110839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:15.143361Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122754714160751:2081] 1766514674599085 != 1766514674599088 2025-12-23T18:31:15.158700Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1902, node 1 2025-12-23T18:31:15.254135Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:15.254161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:15.254168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:15.254247Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:15.352089Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16005 2025-12-23T18:31:15.630911Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:16005 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514675212 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:15.876606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:15.891451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:16.035904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:31:16.203673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:16.279549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:18.584685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122771894031813:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.584799Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.585235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122771894031823:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.585306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.938813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:18.975730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.019346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.056253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.088652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.120904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.155810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.199559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.278726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122776188999994:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.278851Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.279146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122776189000000:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.279234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122776188999999:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.279345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.282662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:19.292973Z node 1 : ... 4037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:22.801751Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:22.805791Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10546, node 2 2025-12-23T18:31:22.852658Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:22.852681Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:22.852688Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:22.852764Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:22.973048Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24423 TClient is connected to server localhost:24423 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514682849 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:23.285165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:31:23.294682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:31:23.301803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:23.414811Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:23.558479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:23.625621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:23.774762Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:25.901757Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122804218489788:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:25.901889Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:25.902296Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122804218489798:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:25.902396Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:25.997674Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.030050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.061987Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.096330Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.134501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.176283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.214480Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.264896Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.344818Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122808513457964:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.344903Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.344992Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122808513457969:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.345073Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122808513457971:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.345131Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.348306Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:26.357727Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122808513457973:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:31:26.458906Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122808513458024:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:27.671518Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122791333586049:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:27.671621Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5536, MsgBus: 14263 2025-12-23T18:31:31.873346Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122826905787326:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:31.873800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fec/r3tmp/tmpSEeRS6/pdisk_1.dat 2025-12-23T18:31:32.073837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:32.080005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:32.080110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:32.135903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:32.156894Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:32.157497Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122826905787223:2081] 1766514691859820 != 1766514691859823 TServer::EnableGrpc on GrpcPort 5536, node 1 2025-12-23T18:31:32.227696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:32.227736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:32.227748Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:32.227882Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:32.297376Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:14263 TClient is connected to server localhost:14263 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514692215 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:32.700680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:32.720156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.845362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.930843Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:32.975956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:33.036991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:34.634628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122839790690982:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.634710Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.634957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122839790690992:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.635012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.939485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.968148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.992043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.015107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.040179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.071333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.098998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.135191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.193376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122844085659162:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.193446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.193525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122844085659167:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.193583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122844085659168:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.193648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.196964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:35.204721Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122844085659171:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:31:35.278394Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122844085659222:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 26016, MsgBus: 25561 2025-12-23T18:31:16.992436Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122761423086693:2152];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:16.992749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:31:17.040916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004007/r3tmp/tmphjLmQI/pdisk_1.dat 2025-12-23T18:31:17.295645Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:17.300264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:17.300404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:17.336935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:17.403285Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26016, node 1 2025-12-23T18:31:17.496713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:17.568701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:17.568747Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:17.568758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:17.568831Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25561 2025-12-23T18:31:18.028972Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25561 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514677424 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:18.382248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:18.397767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:18.565188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:18.710650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:18.789404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:20.765148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122778602957625:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:20.765266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:20.765665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122778602957635:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:20.765717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.181154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.216934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.255766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.288634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.319743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.390897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.461342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.546560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:21.666703Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122782897925806:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.666839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.667122Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122782897925812:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.667412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122782897925811:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.667452Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:21.676259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025 ... 94037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:25.356663Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:25.379598Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7354, node 2 2025-12-23T18:31:25.519551Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:25.571321Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:25.571346Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:25.571354Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:25.571430Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14275 TClient is connected to server localhost:14275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514685446 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-12-23T18:31:26.043235Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:26.056054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:26.073243Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:26.182005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.183323Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:31:26.348135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:26.398486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:28.997334Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122813488193764:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.997438Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.999114Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122813488193773:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.999195Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:29.080420Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:29.108836Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:29.136479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:29.170061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:29.202147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:29.255690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:29.290892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:29.339594Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:29.423592Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122817783161942:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:29.423701Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:29.424091Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122817783161947:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:29.424209Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122817783161948:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:29.424315Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:29.427770Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:29.438903Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122817783161951:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:29.522250Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122817783162002:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:30.161766Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122800603290137:2163];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:30.162900Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> VectorIndexBuildTest::Shard_Build_Error [GOOD] |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] Test command err: 2025-12-23T18:30:47.660642Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122639176835420:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:47.660730Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b21/r3tmp/tmpbWCAQ3/pdisk_1.dat 2025-12-23T18:30:48.037078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:48.073345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:48.073442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:48.103882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:48.206954Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63748, node 1 2025-12-23T18:30:48.347747Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:30:48.429732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:48.429755Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:48.429762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:48.429841Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21033 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-12-23T18:30:48.686964Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514648227 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:30:48.809935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:48.828598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:30:48.896204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:21033 2025-12-23T18:30:49.101894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:30:49.425950Z node 1 :PERSQUEUE ERROR: partition_read.cpp:831: [72075186224037888][Partition][0][StateIdle] reading from too big offset - topic stream_TestGetRecordsStreamWithSingleShard partition 0 client $without_consumer EndOffset 30 offset 100000 2025-12-23T18:30:49.426025Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'stream_TestGetRecordsStreamWithSingleShard' partition: 0 messageNo: 0 requestId: error: trying to read from future. 
ReadOffset 100000, 0 EndOffset 30 2025-12-23T18:30:52.713972Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587122658465729372:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:52.714028Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b21/r3tmp/tmplvdxpG/pdisk_1.dat 2025-12-23T18:30:52.767858Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:30:52.897540Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:52.919883Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:52.920002Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:52.951152Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:52.958851Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 26294, node 4 2025-12-23T18:30:53.052362Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:53.052389Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:53.052405Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:53.052542Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14816 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514653029 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:30:53.321325Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:30:53.382090Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:14816 2025-12-23T18:30:53.616188Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:30:53.753123Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:57.714509Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587122658465729372:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:30:57.714623Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:31:07.855638Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:31:07.855669Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:20.544501Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7587122780164974363:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:20.544937Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b21/r3tmp/tmpr9NKNC/pdisk_1.dat 2025-12-23T18:31:20.579316Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:20.689268Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:20.710960Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:20.711084Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:20.743935Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17571, node 7 2025-12-23T18:31:20.852392Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:20.863752Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:20.863787Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:20.863808Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:20.863906Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19113 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514680819 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:21.162536Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:21.241332Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:19113 2025-12-23T18:31:21.528293Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:31:21.556855Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:26.164919Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587122806955180976:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:26.164988Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b21/r3tmp/tmpE6FuPa/pdisk_1.dat 2025-12-23T18:31:26.211439Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:26.332302Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:26.359250Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:26.359346Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:26.382887Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61829, node 10 2025-12-23T18:31:26.440880Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:26.440905Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:26.440919Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:26.441014Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:26.495814Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:62636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514686447 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:31:26.701940Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:26.761511Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient is connected to server localhost:62636 2025-12-23T18:31:27.021635Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:31:27.189979Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5659, MsgBus: 12920 2025-12-23T18:31:15.155761Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122758079099978:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:15.155905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00400b/r3tmp/tmp9BJ0G9/pdisk_1.dat 2025-12-23T18:31:15.490393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:15.490532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:15.540358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:15.550030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 5659, node 1 2025-12-23T18:31:15.621046Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:15.651359Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122758079099938:2081] 1766514675140585 != 1766514675140588 2025-12-23T18:31:15.790900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:15.790930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2025-12-23T18:31:15.790940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:15.791082Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:15.849231Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12920 2025-12-23T18:31:16.169478Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12920 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514675618 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:16.292439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:16.303288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:31:16.314878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:16.495649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:16.679540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:16.754338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.064898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122775258970999:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.065032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.065827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122775258971009:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.065924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.372194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.405429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.446572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.514073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.551156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.584994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.622787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.673032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.778288Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122775258971882:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.778421Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.778722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122775258971887:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.778816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122775258971888:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.778963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.783061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sche ... 594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:23.143543Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 25369, node 2 2025-12-23T18:31:23.150091Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:23.188929Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:23.188961Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:23.188972Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:23.189107Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:23.290116Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4344 TClient is connected to server localhost:4344 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514683199 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:23.665094Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:23.671133Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:31:23.680122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:23.765082Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:31:23.909156Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:23.973036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:24.062626Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:26.396346Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122808408062731:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.396478Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.396735Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122808408062740:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.396798Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.461292Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.489091Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.521220Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.548818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.579779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.641892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.678937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.726361Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.804820Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122808408063610:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.804966Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.804978Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122808408063615:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.805216Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122808408063617:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.805285Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.808740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:26.820708Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122808408063618:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:31:26.910168Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122808408063670:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:28.005126Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122795523159000:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:28.005204Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23010, MsgBus: 24723 2025-12-23T18:31:14.449583Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122756278468517:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:14.450708Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004011/r3tmp/tmp2ALe0x/pdisk_1.dat 2025-12-23T18:31:14.748379Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:14.767441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:14.767603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:14.792173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:14.886889Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122756278468482:2081] 1766514674443576 != 1766514674443579 2025-12-23T18:31:14.891836Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23010, node 1 2025-12-23T18:31:14.998948Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:15.151668Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:15.151694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:15.151705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:15.151835Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24723 2025-12-23T18:31:15.470890Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514674974 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:15.766985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:15.772946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:15.787756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:15.941429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:16.127817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:16.219872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:18.753379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122773458339549:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.753566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.754097Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122773458339558:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:18.754149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.115524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.147420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.178234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.217897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.258947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.291953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.325974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.371910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:19.447588Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122756278468517:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:19.447699Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:31:19.454309Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122777753307728:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.454416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.454483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122777753307733:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.454955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122777753307735:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:19.455023Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [W ... _CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14558 TClient is connected to server localhost:14558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514685138 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:25.744253Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:25.749496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:25.760789Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:25.826272Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:25.978227Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:25.989991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:26.067970Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:28.511265Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122813265196344:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.511388Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.511799Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122813265196353:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.511896Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:28.595586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.645566Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.693802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.744888Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.825951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.892250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.929960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:28.983875Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:29.077691Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122817560164525:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:29.077794Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:29.077892Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122817560164530:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:29.078294Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122817560164533:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:29.078391Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:29.082258Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:29.097006Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122817560164532:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:29.194318Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122817560164585:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:29.956202Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122796085325313:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:29.956271Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:31:30.761930Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:30.791315Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:30.821062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.6%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:89:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:92:2057] recipient: [13:91:2119] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:94:2057] recipient: [13:91:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:93:2120] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:209:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:78:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:81:2057] recipient: [17:80:2112] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:83:2057] recipient: [17:80:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:82:2113] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:198:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:79:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:81:2112] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:84:2057] recipient: [18:81:2112] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:83:2113] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:199:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! 
new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:82:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:84:2115] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:87:2057] recipient: [20:84:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:86:2116] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:202:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:83:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:85:2115] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:88:2057] recipient: [21:85:2115] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:87:2116] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:203:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:86:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:89:2057] recipient: [22:88:2118] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:91:2057] recipient: [22:88:2118] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! 
new actor is[22:90:2119] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:206:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:86:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:89:2057] recipient: [23:88:2118] Leader for TabletID 72057594037927937 is [23:90:2119] sender: [23:91:2057] recipient: [23:88:2118] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:90:2119] Leader for TabletID 72057594037927937 is [23:90:2119] sender: [23:206:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:87:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:90:2057] recipient: [24:89:2118] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:92:2057] recipient: [24:89:2118] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! 
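The TKeyValueTest trace above follows a reboot-injection pattern: the same write/read scenario is replayed many times, and on each pass the tablet is restarted when a different event type fires ("!Reboot ... on event NKikimr::TEvKeyValue::TEvRequest !"), after which the trace records the leader handoff, "rebooted!", "tablet resolver refreshed!" and the new actor id. Below is a minimal, self-contained sketch of that idea in plain Python; it is an illustration only, not the YDB actor test framework, and ToyKvTablet / run_with_reboot_at are hypothetical names.

# Toy analogue of the reboot-injection pattern seen in the trace: durable state
# ("disk") must survive a restart injected at every possible step of the scenario.
class ToyKvTablet:
    def __init__(self, disk):
        self.disk = disk      # survives reboots (the tablet's durable storage)
        self.cache = {}       # lost on reboot (in-memory state of the old actor)

    def write(self, key, value):
        self.disk[key] = value
        self.cache[key] = value

    def read(self, key):
        return self.cache.get(key, self.disk.get(key))

def run_with_reboot_at(ops, reboot_index):
    disk = {}
    tablet = ToyKvTablet(disk)                 # "Leader for TabletID ... is [N:58:...]"
    for step, (op, key, value) in enumerate(ops):
        if step == reboot_index:
            tablet = ToyKvTablet(disk)         # "!Reboot ... rebooted! ... new actor is[...]"
        if op == "write":
            tablet.write(key, value)
        else:
            assert tablet.read(key) == value, f"lost {key} after reboot at step {reboot_index}"
    return True

OPS = [("write", "a", 1), ("write", "b", 2), ("read", "a", 1), ("read", "b", 2)]
assert all(run_with_reboot_at(OPS, i) for i in range(len(OPS) + 1))  # last index: no reboot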
new actor is[24:91:2119] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:207:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10040, MsgBus: 29182 2025-12-23T18:31:19.412925Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122778345042736:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:19.413018Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ffe/r3tmp/tmpQ211Di/pdisk_1.dat 2025-12-23T18:31:19.653716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:19.653843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:19.697373Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:19.705438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:19.713922Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:19.716613Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122778345042711:2081] 1766514679411320 != 1766514679411323 TServer::EnableGrpc on GrpcPort 10040, node 1 2025-12-23T18:31:19.835108Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:19.835137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:19.835142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:19.835251Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:19.937482Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29182 TClient is connected to server localhost:29182 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514679811 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:20.372041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:20.378511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:20.393001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:20.422297Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:31:20.593180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:20.760509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:20.840069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:22.895732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122791229946472:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.895852Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.896159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122791229946482:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.896206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.321926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.370091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.402681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.461278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.491222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.522355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.557899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.605983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:23.710702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122795524914650:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.710796Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.710856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122795524914655:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.711205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122795524914657:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.711319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:23.715115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... : Notification cookie mismatch for subscription [2:7587122807678454343:2081] 1766514686818255 != 1766514686818258 2025-12-23T18:31:26.922595Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:26.922669Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:26.940750Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12354, node 2 2025-12-23T18:31:27.003976Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:27.004001Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:27.004007Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:27.004068Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:27.104490Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1717 TClient is connected to server localhost:1717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514687007 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:27.427203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:27.435227Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
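The recurring KQP_WORKLOAD_SERVICE "Resource pool default not found" warnings in this output, followed by the TPoolCreatorActor retry ("Transaction ... completed, doublechecking") and the TX_PROXY "path exist, request accepts it" message, appear to be the first-use bootstrap of the default resource pool rather than test failures; the KqpQueryPerf test whose output they appear in is still reported [GOOD]. When triaging a long run it can help to tally such records by component. A minimal sketch follows (plain Python, not part of the ya tooling), assuming only the record shape visible above.

import re
from collections import Counter

# Records in this output look like:
#   2025-12-23T18:31:28.511896Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: ...
RECORD = re.compile(
    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z "   # timestamp
    r"node (?P<node>\d+) "                          # node id
    r":(?P<component>\w+) (?P<level>[A-Z]+):"       # component and severity
)

def count_by_component(log_text, level="WARN"):
    """Count how many records of the given severity each component produced."""
    hits = Counter()
    for match in RECORD.finditer(log_text):
        if match.group("level") == level:
            hits[match.group("component")] += 1
    return hits

# e.g. count_by_component(captured_output)["KQP_WORKLOAD_SERVICE"]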
2025-12-23T18:31:27.483706Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:27.659508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:27.721805Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:27.842656Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:30.344969Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122824858325391:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:30.345058Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:30.345249Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122824858325400:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:30.345296Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:30.408353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:30.452688Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:30.482342Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:30.511573Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:30.541122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:30.576388Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:30.610480Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:30.655326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:30.735608Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122824858326268:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:30.735718Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:30.735866Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122824858326273:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:30.735940Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122824858326275:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:30.735995Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:30.739438Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:30.750413Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122824858326277:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:30.829037Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122824858326328:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:31.819776Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122807678454375:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:31.819862Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TBackupCollectionTests::DropCollectionVerifyLocalDatabaseCleanup [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveOperation |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:105:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:108:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] ... r refreshed! new actor is[22:86:2116] Leader for TabletID 72057594037927937 is [22:86:2116] sender: [22:202:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:82:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:85:2057] recipient: [23:84:2115] Leader for TabletID 72057594037927937 is [23:86:2116] sender: [23:87:2057] recipient: [23:84:2115] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:86:2116] Leader for TabletID 72057594037927937 is [23:86:2116] sender: [23:202:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:83:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:86:2057] recipient: [24:85:2115] Leader for TabletID 72057594037927937 is [24:87:2116] sender: [24:88:2057] recipient: [24:85:2115] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! new actor is[24:87:2116] Leader for TabletID 72057594037927937 is [24:87:2116] sender: [24:203:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:86:2057] recipient: [25:39:2086] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:89:2057] recipient: [25:88:2118] Leader for TabletID 72057594037927937 is [25:90:2119] sender: [25:91:2057] recipient: [25:88:2118] !Reboot 72057594037927937 (actor [25:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [25:58:2099]) tablet resolver refreshed! new actor is[25:90:2119] Leader for TabletID 72057594037927937 is [25:90:2119] sender: [25:206:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:56:2057] recipient: [26:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:56:2057] recipient: [26:53:2097] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:59:2057] recipient: [26:53:2097] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:76:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:86:2057] recipient: [26:39:2086] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:89:2057] recipient: [26:88:2118] Leader for TabletID 72057594037927937 is [26:90:2119] sender: [26:91:2057] recipient: [26:88:2118] !Reboot 72057594037927937 (actor [26:58:2099]) rebooted! !Reboot 72057594037927937 (actor [26:58:2099]) tablet resolver refreshed! new actor is[26:90:2119] Leader for TabletID 72057594037927937 is [26:90:2119] sender: [26:206:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:53:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:59:2057] recipient: [27:53:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:76:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:87:2057] recipient: [27:39:2086] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:90:2057] recipient: [27:89:2118] Leader for TabletID 72057594037927937 is [27:91:2119] sender: [27:92:2057] recipient: [27:89:2118] !Reboot 72057594037927937 (actor [27:58:2099]) rebooted! !Reboot 72057594037927937 (actor [27:58:2099]) tablet resolver refreshed! new actor is[27:91:2119] Leader for TabletID 72057594037927937 is [27:91:2119] sender: [27:207:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:59:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:76:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:90:2057] recipient: [28:39:2086] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:93:2057] recipient: [28:92:2121] Leader for TabletID 72057594037927937 is [28:94:2122] sender: [28:95:2057] recipient: [28:92:2121] !Reboot 72057594037927937 (actor [28:58:2099]) rebooted! !Reboot 72057594037927937 (actor [28:58:2099]) tablet resolver refreshed! 
new actor is[28:94:2122] Leader for TabletID 72057594037927937 is [28:94:2122] sender: [28:210:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:90:2057] recipient: [29:39:2086] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:93:2057] recipient: [29:92:2121] Leader for TabletID 72057594037927937 is [29:94:2122] sender: [29:95:2057] recipient: [29:92:2121] !Reboot 72057594037927937 (actor [29:58:2099]) rebooted! !Reboot 72057594037927937 (actor [29:58:2099]) tablet resolver refreshed! new actor is[29:94:2122] Leader for TabletID 72057594037927937 is [29:94:2122] sender: [29:210:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:91:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:94:2057] recipient: [30:93:2121] Leader for TabletID 72057594037927937 is [30:95:2122] sender: [30:96:2057] recipient: [30:93:2121] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:95:2122] Leader for TabletID 72057594037927937 is [30:95:2122] sender: [30:211:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:93:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:96:2057] recipient: [31:95:2123] Leader for TabletID 72057594037927937 is [31:97:2124] sender: [31:98:2057] recipient: [31:95:2123] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! 
new actor is[31:97:2124] Leader for TabletID 72057594037927937 is [31:97:2124] sender: [31:213:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:93:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:96:2057] recipient: [32:95:2123] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:98:2057] recipient: [32:95:2123] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:97:2124] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:213:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:94:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:97:2057] recipient: [33:96:2123] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:99:2057] recipient: [33:96:2123] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! 
new actor is[33:98:2124] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:214:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14900, MsgBus: 17640 2025-12-23T18:31:18.653734Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122771205706682:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:18.653794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004001/r3tmp/tmpbrfmqy/pdisk_1.dat 2025-12-23T18:31:18.869877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:18.888052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:18.888146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:18.923697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:18.991709Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:18.992940Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122771205706579:2081] 1766514678648048 != 1766514678648051 TServer::EnableGrpc on GrpcPort 14900, node 1 2025-12-23T18:31:19.046283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:19.046547Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:19.046574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:19.046595Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:19.046719Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17640 TClient is connected to server localhost:17640 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514679020 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:19.539783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:19.553794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.673953Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:19.686213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.887887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:19.963206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:22.011486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122788385577641:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.011639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.015645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122788385577651:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.015743Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.369875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.404829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.442196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.484602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.521853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.584230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.619240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.658969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:22.738215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122788385578524:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.738298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.738391Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122788385578529:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.738613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122788385578531:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.738665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:22.743082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:22.756017Z node 1 ... tence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26938 TClient is connected to server localhost:26938 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514688491 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:29.055316Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:29.062798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:29.078214Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:29.152180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:29.286231Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:29.319486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:29.443828Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:31.523749Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122826034074815:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:31.523848Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:31.524132Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122826034074824:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:31.524188Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:31.591804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:31.649556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:31.677285Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:31.705788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:31.731242Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:31.764156Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:31.798994Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:31.844817Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:31.917909Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122826034075692:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:31.918009Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122826034075697:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:31.918021Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:31.918368Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122826034075700:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:31.918467Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:31.922055Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:31.932485Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122826034075699:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:32.026432Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122830329043048:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:33.285304Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122813149171086:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:33.286275Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:31:33.367451Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.426662Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.457526Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> IndexBuildTest::NullsAreUniq [GOOD] |91.6%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexInsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert+QueryService+UseSink >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18964, MsgBus: 15755 2025-12-23T18:31:31.896218Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122827756915451:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:31.896302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fed/r3tmp/tmphWu5xJ/pdisk_1.dat 2025-12-23T18:31:32.100000Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:32.127118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:32.127218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:32.160027Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:32.213204Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:32.218909Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122827756915429:2081] 1766514691895402 != 1766514691895405 TServer::EnableGrpc on GrpcPort 18964, node 1 2025-12-23T18:31:32.303575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:32.326056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:32.326079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:32.326085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:32.326181Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15755 TClient is connected to server localhost:15755 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514692222 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:32.772321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:32.787486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.904267Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:32.910290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:33.051761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:33.114118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:34.656771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122840641819194:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.656886Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.657200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122840641819204:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.657253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.947748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.973275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.998222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.023383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.049612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.090717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.114862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.151725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.219084Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122844936787374:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.219163Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.219422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122844936787380:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.219430Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122844936787379:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.219464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.222826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:35.230616Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122844936787383:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:35.332940Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122844936787434:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:36.896158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122827756915451:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:36.896231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.6%| [TA] $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::NullsAreUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:30:23.662351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:30:23.662449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:23.662490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:30:23.662532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:30:23.662578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:30:23.662604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:30:23.662661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:23.662779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:30:23.663714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:30:23.664043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:30:23.742039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: 
Cannot subscribe to console configs 2025-12-23T18:30:23.742107Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:23.747631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:30:23.747889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:30:23.748066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:30:23.751884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:30:23.752147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:30:23.752966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:23.753211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:30:23.755345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:23.755505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:30:23.756564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:30:23.756641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:23.756755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:30:23.756808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:30:23.756847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:30:23.756961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:30:23.887713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.888351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.888429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.888482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.888528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.888637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.888685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.888723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.888774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.888830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.888886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.888928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.888972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.889032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:23.889083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 2/2 2025-12-23T18:31:38.449732Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 2/2, is published: true 2025-12-23T18:31:38.449798Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:1007:2949] message: TxId: 281474976725761 2025-12-23T18:31:38.449835Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 2/2 2025-12-23T18:31:38.449863Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:0 2025-12-23T18:31:38.449886Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976725761:0 2025-12-23T18:31:38.449946Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 4 2025-12-23T18:31:38.449979Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:1 2025-12-23T18:31:38.449994Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976725761:1 2025-12-23T18:31:38.450016Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 34] was 3 2025-12-23T18:31:38.453188Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-12-23T18:31:38.453254Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976725761 2025-12-23T18:31:38.453306Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 107, txId# 281474976725761 2025-12-23T18:31:38.453419Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1658:3529], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0}, txId# 281474976725761 
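The two records around this point show the schemeshard index builder advancing a TBuildInfo through its states (Unlocking, then Done) once the completion notification for the unlock transaction arrives, carrying the Processed/Billed row counters along. Below is a minimal, hypothetical C++ sketch of that kind of notification-driven progress state machine; the type and field names are illustrative only and are not YDB's actual implementation.

#include <cstdint>
#include <iostream>

// Hypothetical sketch of a notification-driven build-progress state machine,
// loosely modelled on the TBuildInfo transitions visible in the log above.
// Names and structure are illustrative, not YDB's real code.
enum class EBuildState { Locking, Filling, Applying, Unlocking, Done };

struct TCounters {
    uint64_t UploadRows = 0;
    uint64_t ReadRows = 0;
};

struct TBuildInfo {
    uint64_t Id = 0;
    EBuildState State = EBuildState::Locking;
    uint64_t UnlockTxId = 0;
    bool UnlockTxDone = false;
    TCounters Processed;
    TCounters Billed;
};

// Called when a TEvNotifyTxCompletionResult-style event arrives for txId.
// Marks the matching transaction done and, if it was the unlock step,
// settles billing against the processed counters and moves the build to Done.
void OnTxCompleted(TBuildInfo& info, uint64_t txId) {
    if (txId == info.UnlockTxId && info.State == EBuildState::Unlocking) {
        info.UnlockTxDone = true;
        info.Billed = info.Processed;   // bill exactly what was processed
        info.State = EBuildState::Done;
    }
}

int main() {
    TBuildInfo build{107, EBuildState::Unlocking, 281474976725761ULL, false, {108, 216}, {}};
    OnTxCompleted(build, 281474976725761ULL);
    std::cout << "build " << build.Id << " done: "
              << (build.State == EBuildState::Done) << "\n";
}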
2025-12-23T18:31:38.454728Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Unlocking 2025-12-23T18:31:38.454885Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Unlocking TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1658:3529], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0} 2025-12-23T18:31:38.454937Z node 5 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-12-23T18:31:38.456141Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Done 2025-12-23T18:31:38.456223Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Done TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Done, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1658:3529], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0} 2025-12-23T18:31:38.456252Z node 5 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2025-12-23T18:31:38.456353Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-12-23T18:31:38.456390Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [5:1675:3546] TestWaitNotification: OK eventTxId 107 2025-12-23T18:31:38.456895Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2025-12-23T18:31:38.457136Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 107 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 107 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2025-12-23T18:31:38.457695Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:31:38.457923Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 248us result status StatusSuccess 2025-12-23T18:31:38.458341Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index1" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "index2" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "test_index" LocalPathId: 33 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "index1" KeyColumnNames: "index2" SchemaVersion: 2 PathOwnerId: 72075186233409549 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 
Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 33 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |91.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink >> KqpQueryPerf::DeleteOn+QueryService-UseSink >> KqpQueryPerf::AggregateToScalar+QueryService >> KqpQueryPerf::IndexInsert-QueryService-UseSink >> KqpQueryPerf::DeleteOn-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 32726, MsgBus: 26860 2025-12-23T18:31:22.120049Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122790620938180:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:22.120105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ffc/r3tmp/tmpXpNNYF/pdisk_1.dat 2025-12-23T18:31:22.385447Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:22.455095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:22.455207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:22.458841Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122790620938052:2081] 1766514682104560 != 1766514682104563 2025-12-23T18:31:22.463413Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:22.473991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32726, node 1 2025-12-23T18:31:22.617004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:22.617027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:22.617047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:22.617309Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:22.676151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26860 TClient is connected to server localhost:26860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:31:23.150817Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514682534 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:23.161821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:23.183544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:23.366249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
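The NET_CLASSIFIER warnings just above walk a fallback chain: the distributable config is empty or broken, the service then tries a local file, and when that also fails it settles for a "bad distributable configuration" default. The snippet below is a small, hypothetical C++ sketch of such a layered fallback; the function names and paths are illustrative assumptions, not YDB's actual configuration code.

#include <fstream>
#include <iostream>
#include <optional>
#include <sstream>
#include <string>

// Hypothetical layered-config loader: prefer the distributed config,
// fall back to a local file, and finally to built-in defaults —
// mirroring the fallback order reported in the NET_CLASSIFIER warnings.
std::optional<std::string> LoadDistributedConfig() {
    return std::nullopt;  // pretend the distributable config is empty/broken
}

std::optional<std::string> LoadFileConfig(const std::string& path) {
    std::ifstream in(path);
    if (!in) {
        return std::nullopt;  // "failed to initialize from file"
    }
    std::ostringstream buf;
    buf << in.rdbuf();
    return buf.str();
}

std::string ResolveConfig(const std::string& filePath) {
    if (auto cfg = LoadDistributedConfig()) {
        return *cfg;
    }
    std::cerr << "distributable config is empty, will use file: " << filePath << "\n";
    if (auto cfg = LoadFileConfig(filePath)) {
        return *cfg;
    }
    std::cerr << "failed to initialize from file, using defaults\n";
    return "{}";  // built-in default configuration
}

int main() {
    std::cout << ResolveConfig("/nonexistent/net_classifier.conf") << "\n";
}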
2025-12-23T18:31:23.528572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:23.593135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:25.667698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122803505841824:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:25.667840Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:25.668289Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122803505841834:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:25.668372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.021888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.057466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.101680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.134362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.167212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.209779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.240557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.284712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.354629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122807800810005:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.354713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.354771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122807800810010:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.354923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122807800810012:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.354974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.358372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:26.366662Z node 1 ... guration 2025-12-23T18:31:31.765317Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23854 TClient is connected to server localhost:23854 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514691690 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:32.150025Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:32.170203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.230967Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.392367Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:32.466268Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.592187Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:34.532119Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122839254278125:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.532210Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.532472Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122839254278135:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.532533Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.589649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.616255Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.641959Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.668951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.695793Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.725101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.759054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.800002Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.864544Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122839254279007:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.864632Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122839254279012:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.864644Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.864890Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122839254279015:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.864967Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.868052Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:34.877519Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122839254279014:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:34.950259Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122839254279067:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:36.185702Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:36.219851Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:36.258953Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:36.526660Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122826369374393:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:36.526709Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6274, MsgBus: 15146 2025-12-23T18:31:22.653298Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122788171698442:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:22.653357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ffa/r3tmp/tmp50sSDB/pdisk_1.dat 2025-12-23T18:31:22.942479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:22.942601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:22.997998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:23.010903Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:23.011757Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-12-23T18:31:23.012030Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122788171698414:2081] 1766514682651765 != 1766514682651768 TServer::EnableGrpc on GrpcPort 6274, node 1 2025-12-23T18:31:23.119828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:23.119852Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:23.119858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:23.119965Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15146 2025-12-23T18:31:23.316681Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15146 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514683052 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:23.636532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:23.642590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:31:23.658685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
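The KQP_WORKLOAD_SERVICE and TX_PROXY records above ("Resource pool default not found", "Scheduled retry for error: Transaction ... completed, doublechecking", then "path exist, request accepts it") trace a create-if-missing flow that tolerates a concurrent creation. The sketch below shows that generic pattern with hypothetical names and a toy in-memory catalog standing in for the scheme board; it is not the YDB implementation.

#include <iostream>
#include <set>
#include <string>

// Hypothetical sketch of "fetch, create if missing, then doublecheck" —
// the generic pattern suggested by the workload-service log lines above.
struct TCatalog {
    std::set<std::string> Paths;
    bool Exists(const std::string& path) const { return Paths.count(path) > 0; }
    // Returns false if the path already exists (e.g. created concurrently);
    // the caller treats that as success after re-checking.
    bool Create(const std::string& path) { return Paths.insert(path).second; }
};

bool EnsureDefaultPool(TCatalog& catalog, const std::string& path) {
    if (catalog.Exists(path)) {
        return true;                       // fast path: already there
    }
    if (catalog.Create(path)) {
        return true;                       // we created it ourselves
    }
    // Create failed because another actor won the race: doublecheck and
    // accept the existing object instead of reporting an error.
    return catalog.Exists(path);
}

int main() {
    TCatalog catalog;
    const std::string pool = "/Root/.metadata/workload_manager/pools/default";
    std::cout << EnsureDefaultPool(catalog, pool) << " "   // creates it
              << EnsureDefaultPool(catalog, pool) << "\n"; // already exists, accepted
}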
2025-12-23T18:31:23.663619Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:23.800498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:23.932546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:24.005650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:25.979456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122801056602180:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:25.979587Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:25.985671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122801056602190:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:25.985762Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.329765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.359224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.384439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.411306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.439588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.468149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.502107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.541774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:26.612523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122805351570359:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.612610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.612623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122805351570364:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.612863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122805351570366:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.612909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:26.615813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sche ... tence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27442 TClient is connected to server localhost:27442 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514691844 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:32.247124Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:32.254931Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:31:32.269794Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.327831Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.456201Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:31:32.518809Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:32.716233Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:34.698187Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122840303513578:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.698287Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.698519Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122840303513588:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.698559Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.758725Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.785291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.808742Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.830314Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.856422Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.884995Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.913043Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.956379Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:35.027248Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122844598481753:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.027320Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.027555Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122844598481758:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.027643Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122844598481759:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.027785Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:35.047203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:35.057198Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122844598481762:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:31:35.133528Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122844598481813:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:36.351355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:36.393827Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:36.424249Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:36.693905Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122827418609861:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:36.693976Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpQueryPerf::RangeRead-QueryService >> KqpQueryPerf::MultiRead+QueryService >> KqpQueryPerf::Replace-QueryService-UseSink >> TBackupCollectionTests::DropCollectionDuringActiveOperation [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TBackupCollectionTests::ConcurrentDropProtectionTest >> TKeyValueTest::TestGetStatusWorks |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |91.6%| [TA] {RESULT} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 >> KqpQueryPerf::Replace+QueryService-UseSink >> GenericFederatedQuery::IcebergHadoopSaSelectAll >> GenericFederatedQuery::IcebergHiveSaSelectAll >> GenericFederatedQuery::IcebergHiveTokenSelectAll >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId1 >> GenericFederatedQuery::PostgreSQLOnPremSelectAll >> GenericFederatedQuery::IcebergHiveBasicSelectAll |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |91.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> TBackupCollectionTests::RestorePathStatePersistenceAcrossRestart [GOOD] >> TBackupCollectionTests::IncrementalBackupOperation >> GenericFederatedQuery::IcebergHadoopBasicSelectAll >> KqpQueryPerf::KvRead-QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete >> GenericFederatedQuery::ClickHouseManagedSelectAll >> TBackupCollectionTests::ConcurrentDropProtectionTest [GOOD] >> TBackupCollectionTests::BackupServiceDirectoryValidation >> GenericFederatedQuery::YdbManagedSelectAll >> GenericFederatedQuery::IcebergHadoopTokenSelectAll |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.6%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6643, MsgBus: 7319 2025-12-23T18:31:30.083324Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122825203197418:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:30.083939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ff7/r3tmp/tmp8PVPy4/pdisk_1.dat 2025-12-23T18:31:30.288540Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:30.318070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:30.318169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:30.357957Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:30.359009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:30.360602Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122825203197315:2081] 1766514690079726 != 1766514690079729 TServer::EnableGrpc on GrpcPort 6643, node 1 2025-12-23T18:31:30.458535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-12-23T18:31:30.458571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:30.458580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:30.458702Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:30.505302Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7319 TClient is connected to server localhost:7319 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514690423 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:30.913538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:30.923615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:31.042299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:31.094762Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:31.165815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:31.244286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:33.030947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122838088101074:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.031100Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.031439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122838088101084:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.031512Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.365631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.394079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.420635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.448006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.475979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.506963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.553074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.589327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.657268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122838088101954:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.657361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.657448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122838088101959:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.657486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122838088101961:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.657514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.660795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:33.669796Z node 1 :KQP ... : Notification cookie mismatch for subscription [2:7587122850928546451:2081] 1766514696009286 != 1766514696009289 2025-12-23T18:31:36.097213Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:36.097288Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:36.100423Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27876, node 2 2025-12-23T18:31:36.140757Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:36.140790Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:36.140799Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:36.140877Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:36.201698Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5729 TClient is connected to server localhost:5729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514696149 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:36.462320Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:31:36.470044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:36.514451Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:36.640100Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:36.686595Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:37.015926Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:38.702684Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122859518482906:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.702786Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.703095Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122859518482916:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.703156Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.762279Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.794638Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.822626Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.854140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.887907Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.918230Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.988678Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:39.030126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:39.096474Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122863813451080:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:39.096557Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:39.096690Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122863813451085:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:39.096754Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122863813451087:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:39.096803Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:39.099432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:39.107286Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122863813451089:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:39.170385Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122863813451140:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:41.014863Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122850928546477:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:41.014961Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19737, MsgBus: 20640 2025-12-23T18:31:29.481309Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122817409899365:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:29.481406Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ff8/r3tmp/tmpZ1Xpjl/pdisk_1.dat 2025-12-23T18:31:29.695916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:29.723458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:29.723654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:29.762389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:29.796655Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:29.796846Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122817409899342:2081] 1766514689480463 != 1766514689480466 TServer::EnableGrpc on GrpcPort 19737, node 1 2025-12-23T18:31:29.850139Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:29.850172Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:29.850183Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:29.850326Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:29.852525Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20640 TClient is connected to 
server localhost:20640 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514689814 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:30.300970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:30.317049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:30.460693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:30.540608Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:30.605068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:30.667076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.523530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122830294803105:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:32.523658Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:32.523988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122830294803115:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:32.524029Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:32.807507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:32.835476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:32.858513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:32.881927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:32.904672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:32.936198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:32.992590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.030217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:33.086717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122834589771282:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.086795Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.086797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122834589771287:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.086968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122834589771289:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.087004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.090183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:33.099218Z node 1 ... Notification cookie mismatch for subscription [2:7587122845523098259:2081] 1766514695645035 != 1766514695645038 TServer::EnableGrpc on GrpcPort 30063, node 2 2025-12-23T18:31:35.761987Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:35.762079Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:35.770605Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:35.770641Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:35.770660Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:35.770785Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:35.778158Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:35.843979Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18734 TClient is connected to server localhost:18734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514695785 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:36.118085Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:31:36.136211Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:36.189510Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:36.302874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:36.379422Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:36.652452Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:38.374785Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122858408002012:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.374894Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.375254Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122858408002022:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.375306Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.421569Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.445856Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.470623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.499619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.524886Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.554874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.584467Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.626274Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:38.700573Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122858408002891:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.700661Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122858408002896:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.700661Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.700873Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122858408002898:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.700934Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:38.704420Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:38.714349Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122858408002899:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:31:38.808271Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122858408002951:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:40.646068Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122845523098286:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:40.646143Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TTxDataShardReshuffleKMeansScan::BuildToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuild |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |91.6%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> test_select.py::TestDML::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNotEnoughRetriesUniq |91.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TBackupCollectionTests::BackupServiceDirectoryValidation [GOOD] >> TBackupCollectionTests::BackupWithIndexes >> test_select.py::TestDML::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId1 [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId2 |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> Secret::Validation >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> VDiskBalancing::TestRandom_Block42 [GOOD] >> TBackupCollectionTests::IncrementalBackupOperation [GOOD] >> TBackupCollectionTests::EmptyIncrementalBackupRace >> TBackupCollectionTests::BackupWithIndexes [GOOD] >> TBackupCollectionTests::BackupWithIndexesOmit >> KqpQueryPerf::DeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn+QueryService+UseSink >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService+UseSink >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] >> KqpQueryPerf::RangeRead-QueryService [GOOD] >> KqpQueryPerf::MultiRead+QueryService [GOOD] >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] >> KqpQueryPerf::AggregateToScalar-QueryService >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink [GOOD] >> 
KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] |91.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 8236, MsgBus: 25653 2025-12-23T18:31:40.859423Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122866501576988:2135];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:40.859479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fe4/r3tmp/tmptWU63t/pdisk_1.dat 2025-12-23T18:31:41.167755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.318950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.319100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.328111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.397998Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:41.399647Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122866501576888:2081] 1766514700845632 != 1766514700845635 TServer::EnableGrpc on GrpcPort 8236, node 1 2025-12-23T18:31:41.413971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:41.503454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:41.503482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:41.503489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:41.503604Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25653 2025-12-23T18:31:41.870887Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25653 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514701406 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:42.156384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:42.176002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.381833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.580035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.666394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:44.682561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122883681447947:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.682721Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.683105Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122883681447957:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.683156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.107655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.142443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.200943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.235940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.279300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.318814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.354313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.399180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.495453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122887976416123:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.495525Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.495864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122887976416128:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.495958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122887976416129:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.496002Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.500445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:45.512530Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122887976416132:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:45.576672Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122887976416183:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:45.858953Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122866501576988:2135];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:45.859036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |91.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9692, MsgBus: 22585 2025-12-23T18:31:40.908863Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122868676484942:2248];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:40.908957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fe7/r3tmp/tmpMKNw1R/pdisk_1.dat 2025-12-23T18:31:41.154143Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.159166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.159249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.184526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.251518Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122868676484732:2081] 1766514700888647 != 1766514700888650 2025-12-23T18:31:41.254223Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9692, node 1 2025-12-23T18:31:41.322839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:41.350968Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:41.350993Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:41.351001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2025-12-23T18:31:41.351130Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22585 TClient is connected to server localhost:22585 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:31:41.910002Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514701308 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:41.993429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:42.014132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.225640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.406606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.496207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:44.453721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122885856355784:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.453904Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.456582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122885856355794:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.456672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.828266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.879008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.912280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.957931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.997684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.046239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.087811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.137737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.214799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122890151323962:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.214900Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.214985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122890151323967:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.215090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122890151323969:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.215123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.218789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:45.230144Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122890151323971:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:31:45.319803Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122890151324022:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:45.908695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122868676484942:2248];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:45.908771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3231, MsgBus: 16861 2025-12-23T18:31:31.135605Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122826113400043:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:31.135687Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ff2/r3tmp/tmpeoxulS/pdisk_1.dat 2025-12-23T18:31:31.354853Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:31.384999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:31.385100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:31.430161Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:31.442536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3231, node 1 2025-12-23T18:31:31.512829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:31.512861Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:31.512884Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:31.512974Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:31.571454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16861 TClient is connected to server localhost:16861 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514691494 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:31.992328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:32.005057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.113000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.222430Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:32.274793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.338243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:33.935461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122834703336457:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.935588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.935824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122834703336467:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:33.935876Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.249253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.275664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.297723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.323633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.346664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.374516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.426819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.466702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.531559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122838998304632:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.531637Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.531646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122838998304637:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.531836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122838998304639:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.531879Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.534997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:34.544772Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122838998304640:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Trans ... guration 2025-12-23T18:31:38.893007Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30692 TClient is connected to server localhost:30692 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514698823 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:39.235509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:39.257425Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:39.342175Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:39.496268Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:39.593011Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:39.714931Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:42.182905Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122873087873119:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:42.183023Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:42.185683Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122873087873129:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:42.185793Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:42.257558Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:42.299560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:42.334763Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:42.378839Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:42.433534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:42.498044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:42.542990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:42.639365Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:42.740188Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122873087873999:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:42.740319Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:42.740917Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122873087874004:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:42.740967Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122873087874005:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:42.741033Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:42.746491Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:42.760743Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122873087874008:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:42.844872Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122873087874059:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:43.656649Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122855908002093:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:43.656755Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:31:44.590710Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.637854Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.681470Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 28982, MsgBus: 12083 2025-12-23T18:31:40.775630Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122868576446266:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:40.783100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fe9/r3tmp/tmpDicrvE/pdisk_1.dat 2025-12-23T18:31:41.074830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.085302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.085401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.094347Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.264856Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification 
cookie mismatch for subscription [1:7587122868576446231:2081] 1766514700769752 != 1766514700769755 2025-12-23T18:31:41.285073Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28982, node 1 2025-12-23T18:31:41.327123Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:41.367105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:41.367131Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:41.367138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:41.367241Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12083 2025-12-23T18:31:41.789154Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12083 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514701217 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:42.217180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:42.225896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:42.242039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:42.476275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.651445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.715680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:44.569661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122885756317296:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.569810Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.570193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122885756317306:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.570260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.914827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.967455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.012798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.054883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.087149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.126427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.166036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.212663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.306350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122890051285474:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.306454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.306947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122890051285479:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.307003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122890051285480:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.307282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.311790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:45.323878Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122890051285483:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:45.417280Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122890051285534:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:45.774907Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122868576446266:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:45.774983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TBackupCollectionTests::BackupWithIndexesOmit [GOOD] >> TBackupCollectionTests::BackupWithIndexesDefault ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29971, MsgBus: 24169 2025-12-23T18:31:31.330552Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122829325845011:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:31.330638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fee/r3tmp/tmpOTaLZm/pdisk_1.dat 2025-12-23T18:31:31.550390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:31.550478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:31.574453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:31.628414Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:31.629748Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:31.631334Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122829325844984:2081] 1766514691328702 != 1766514691328705 TServer::EnableGrpc on GrpcPort 29971, node 1 2025-12-23T18:31:31.678468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:31.678496Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:31.678507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:31.678637Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24169 2025-12-23T18:31:31.900539Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24169 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514691662 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:32.164520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:32.183824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.328632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.340320Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:32.465584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:32.523574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:34.186517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122842210748744:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.186616Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.187131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122842210748754:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.187195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.470729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.495594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.521804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.545363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.570088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.620132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.645830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.679233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:34.743611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122842210749628:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.743687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.743859Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122842210749633:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.743898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122842210749634:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.743946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:34.747697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:34.758012Z node 1 ... guration 2025-12-23T18:31:39.414872Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13675 TClient is connected to server localhost:13675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514699404 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:39.804182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:39.812404Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:39.867459Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:39.998658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:40.061890Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:40.224812Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:42.928937Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122875837745302:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:42.929034Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:42.929622Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122875837745312:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:42.929696Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:43.009803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:43.052309Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:43.085071Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:43.114259Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:43.171687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:43.212348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:43.250441Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:43.305369Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:43.385572Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122880132713480:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:43.385699Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:43.385961Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122880132713485:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:43.385973Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122880132713486:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:43.386031Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:43.389864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:43.401553Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122880132713489:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:43.496145Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122880132713540:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:44.196925Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122862952841567:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:44.196974Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:31:45.224293Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.262289Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.310287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26329, MsgBus: 21372 2025-12-23T18:31:41.077764Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122870891099556:2100];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:41.081672Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fe2/r3tmp/tmpBadpYg/pdisk_1.dat 2025-12-23T18:31:41.374835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.383849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.383946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.481810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.512301Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [1:7587122870891099479:2081] 1766514701061395 != 1766514701061398 2025-12-23T18:31:41.525521Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:41.574930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 26329, node 1 2025-12-23T18:31:41.839471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:41.839502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:41.839507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:41.839589Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:42.085111Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21372 TClient is connected to server localhost:21372 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514701574 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:42.565061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:42.581577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.778067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:42.946063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:43.037338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:45.191823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122888070970544:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.191944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.192326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122888070970554:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.192432Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.604740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.653087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.684783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.714186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.746564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.783667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.822618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.874837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.958371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122888070971423:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.958495Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.958900Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122888070971428:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.958970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122888070971429:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.959021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.962910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:45.973019Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122888070971432:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:46.028028Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122892365938779:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:46.077545Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122870891099556:2100];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:46.077623Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId2 [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert-QueryService+UseSink >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Block42 [GOOD] Test command err: RandomSeed# 5880375791701116229 Step = 0 SEND TEvPut with key [1:1:0:0:0:585447:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:585447:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:619381:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:619381:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:725585:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:725585:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:2934723:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:2934723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2025-12-23T18:28:26.142127Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Step = 6 SEND TEvPut with key [1:1:6:0:0:3044947:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:3044947:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Stop node 7 2025-12-23T18:28:26.329959Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 7 SEND TEvPut with key [1:1:7:0:0:582354:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:582354:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 8 SEND TEvPut with key [1:1:8:0:0:1478820:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:1478820:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 9 SEND TEvPut with key [1:1:9:0:0:1360774:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:1360774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Start node 4 Step = 10 SEND TEvPut with key [1:1:10:0:0:1727870:0] TEvPutResult: 
TEvPutResult {Id# [1:1:10:0:0:1727870:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 11 SEND TEvPut with key [1:1:11:0:0:1883457:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:1883457:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 12 SEND TEvPut with key [1:1:12:0:0:568368:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:568368:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 13 SEND TEvPut with key [1:1:13:0:0:896600:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:896600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 15 SEND TEvPut with key [1:1:15:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 16 SEND TEvPut with key [1:1:16:0:0:670396:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:670396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 17 SEND TEvPut with key [1:1:17:0:0:1584741:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:1584741:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 18 SEND TEvPut with key [1:1:18:0:0:2384818:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:2384818:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 19 SEND TEvPut with key [1:1:19:0:0:2867010:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:2867010:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 20 SEND TEvPut with key [1:1:20:0:0:2911789:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:2911789:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 21 SEND TEvPut with key [1:1:21:0:0:2463622:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:2463622:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 23 SEND TEvPut with key [1:1:23:0:0:2119770:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:2119770:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 25 SEND TEvPut with key [1:1:25:0:0:2648607:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:2648607:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Stop node 0 2025-12-23T18:28:27.635476Z 3 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:296:63] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2025-12-23T18:28:27.635715Z 6 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:299:66] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2025-12-23T18:28:27.635842Z 5 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:7678:16] ServerId# [1:7686:1098] TabletId# 72057594037932033 PipeClientId# [5:7678:16] 2025-12-23T18:28:27.635955Z 4 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) 
ClientId# [4:201:17] ServerId# [1:297:64] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2025-12-23T18:28:27.636069Z 2 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:295:62] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2025-12-23T18:28:27.636277Z 7 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:300:67] TabletId# 72057594037932033 PipeClientId# [7:222:17] Step = 26 SEND TEvPut with key [1:1:26:0:0:539431:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:539431:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 28 SEND TEvPut with key [1:1:28:0:0:2673563:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:2673563:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 30 SEND TEvPut with key [1:1:30:0:0:2398732:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:2398732:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Compact vdisk 2 Step = 31 SEND TEvPut with key [1:1:31:0:0:2302132:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:2302132:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 32 SEND TEvPut with key [1:1:32:0:0:3112269:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:3112269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 33 SEND TEvPut with key [1:1:33:0:0:883758:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:883758:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 34 SEND TEvPut with key [1:1:34:0:0:1212958:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:1212958:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 35 SEND TEvPut with key [1:1:35:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 38 SEND TEvPut with key [1:1:38:0:0:1252178:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:1252178:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 39 SEND TEvPut with key [1:1:39:0:0:1897783:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:1897783:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 40 SEND TEvPut with key [1:1:40:0:0:1486678:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:1486678:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 41 SEND TEvPut with key [1:1:41:0:0:1285964:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:1285964:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 42 SEND TEvPut with key [1:1:42:0:0:1221731:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:1221731:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 43 SEND TEvPut with key 
[1:1:43:0:0:1613844:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:1613844:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 44 SEND TEvPut with key [1:1:44:0:0:2582908:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:2582908:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 45 SEND TEvPut with key [1:1:45:0:0:1703743:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:1703743:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 46 SEND TEvPut with key [1:1:46:0:0:1362981:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:1362981:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 47 SEND TEvPut with key [1:1:47:0:0:1469807:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:1469807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 48 SEND TEvPut with key [1:1:48:0:0:2832565:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:2832565:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 49 SEND TEvPut with key [1:1:49:0:0:1960611:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:1960611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 50 SEND TEvPut with key [1:1:50:0:0:1164230:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:1164230:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 51 SEND TEvPut with key [1:1:51:0:0:836900:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:836900:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 52 SEND TEvPut with key [1:1:52:0:0:838380:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:838380:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 53 SEND TEvPut with key [1:1:53:0:0:1975575:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:1975575:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Start node 0 Step = 54 SEND TEvPut with key [1:1:54:0:0:1888556:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:1888556:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 55 SEND TEvPut with key [1:1:55:0:0:715063:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:715063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 57 SEND TEvPut with key [1:1:57:0:0:1491407:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:1491407:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 58 SEND TEvPut with key [1:1:58:0:0:702845:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:702845:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 59 SEND TEvPut with key [1:1:59:0:0:2539948:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:2539948:0] Status ... 
36 SEND TEvPut with key [1:1:936:0:0:2748248:0] TEvPutResult: TEvPutResult {Id# [1:1:936:0:0:2748248:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 937 SEND TEvPut with key [1:1:937:0:0:112302:0] TEvPutResult: TEvPutResult {Id# [1:1:937:0:0:112302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 938 SEND TEvPut with key [1:1:938:0:0:800417:0] TEvPutResult: TEvPutResult {Id# [1:1:938:0:0:800417:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 939 SEND TEvPut with key [1:1:939:0:0:2336442:0] TEvPutResult: TEvPutResult {Id# [1:1:939:0:0:2336442:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 940 SEND TEvPut with key [1:1:940:0:0:982070:0] TEvPutResult: TEvPutResult {Id# [1:1:940:0:0:982070:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Start node 4 Step = 941 SEND TEvPut with key [1:1:941:0:0:713632:0] TEvPutResult: TEvPutResult {Id# [1:1:941:0:0:713632:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 942 SEND TEvPut with key [1:1:942:0:0:1644191:0] TEvPutResult: TEvPutResult {Id# [1:1:942:0:0:1644191:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 943 SEND TEvPut with key [1:1:943:0:0:254634:0] TEvPutResult: TEvPutResult {Id# [1:1:943:0:0:254634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 944 SEND TEvPut with key [1:1:944:0:0:1141270:0] TEvPutResult: TEvPutResult {Id# [1:1:944:0:0:1141270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 945 SEND TEvPut with key [1:1:945:0:0:610103:0] TEvPutResult: TEvPutResult {Id# [1:1:945:0:0:610103:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Compact vdisk 6 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 948 SEND TEvPut with key [1:1:948:0:0:645630:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:645630:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 949 SEND TEvPut with key [1:1:949:0:0:2125890:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:2125890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 950 SEND TEvPut with key [1:1:950:0:0:2544891:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:2544891:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 951 SEND TEvPut with key [1:1:951:0:0:647007:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:647007:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 952 SEND TEvPut with key [1:1:952:0:0:2031652:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:2031652:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Step = 953 SEND TEvPut with key [1:1:953:0:0:2109805:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:2109805:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999573} Stop node 3 2025-12-23T18:30:59.639346Z 1 00h25m30.705941s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:1353403:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:1353403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Stop node 4 2025-12-23T18:31:00.893726Z 1 00h25m40.715587s :PIPE_SERVER 
ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 955 SEND TEvPut with key [1:1:955:0:0:1286278:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:1286278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Start node 3 Step = 956 SEND TEvPut with key [1:1:956:0:0:1875483:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:1875483:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 957 SEND TEvPut with key [1:1:957:0:0:1021388:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:1021388:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Start node 4 Step = 958 SEND TEvPut with key [1:1:958:0:0:860806:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:860806:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 961 SEND TEvPut with key [1:1:961:0:0:1661659:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:1661659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 962 SEND TEvPut with key [1:1:962:0:0:771410:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:771410:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 963 SEND TEvPut with key [1:1:963:0:0:1414281:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:1414281:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 964 SEND TEvPut with key [1:1:964:0:0:2848837:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:2848837:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999768} Step = 965 SEND TEvPut with key [1:1:965:0:0:989600:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:989600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 966 SEND TEvPut with key [1:1:966:0:0:2761296:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:2761296:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 967 SEND TEvPut with key [1:1:967:0:0:981163:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:981163:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 969 SEND TEvPut with key [1:1:969:0:0:626285:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:626285:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999658} Stop node 7 2025-12-23T18:31:03.782820Z 1 00h26m10.741104s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 971 SEND TEvPut with key [1:1:971:0:0:972888:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:972888:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 972 SEND TEvPut with key [1:1:972:0:0:786055:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:786055:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Step = 973 SEND TEvPut with key [1:1:973:0:0:2707502:0] TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:2707502:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999622} Stop node 
1 2025-12-23T18:31:04.395476Z 1 00h26m20.741616s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:2660812:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:2660812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Start node 1 Step = 975 SEND TEvPut with key [1:1:975:0:0:3005283:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:3005283:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Stop node 1 2025-12-23T18:31:05.019570Z 1 00h26m40.742168s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:1542748:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:1542748:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 977 SEND TEvPut with key [1:1:977:0:0:2837300:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:2837300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 980 SEND TEvPut with key [1:1:980:0:0:1760402:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:1760402:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 981 SEND TEvPut with key [1:1:981:0:0:1711812:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:1711812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 982 SEND TEvPut with key [1:1:982:0:0:1422922:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:1422922:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 983 SEND TEvPut with key [1:1:983:0:0:2533122:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:2533122:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 985 SEND TEvPut with key [1:1:985:0:0:1862506:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:1862506:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 987 SEND TEvPut with key [1:1:987:0:0:672278:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:672278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 988 SEND TEvPut with key [1:1:988:0:0:2042425:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:2042425:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 989 SEND TEvPut with key [1:1:989:0:0:1201477:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:1201477:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 990 SEND TEvPut with key [1:1:990:0:0:1724337:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:1724337:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 991 SEND TEvPut with key [1:1:991:0:0:2174403:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:2174403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 993 SEND TEvPut with key [1:1:993:0:0:618508:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:618508:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 994 SEND TEvPut with key [1:1:994:0:0:2278246:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:2278246:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 995 SEND TEvPut with key [1:1:995:0:0:2001881:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:2001881:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 996 SEND TEvPut with key [1:1:996:0:0:1759634:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:1759634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 997 SEND TEvPut with key [1:1:997:0:0:2469234:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:2469234:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 998 SEND TEvPut with key [1:1:998:0:0:1329395:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:1329395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 999 SEND TEvPut with key [1:1:999:0:0:1243807:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:1243807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Starting nodes Start compaction 1 Start checking |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> TColumnShardTestReadWrite::WriteRead |91.7%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} >> TBackupCollectionTests::EmptyIncrementalBackupRace [GOOD] >> TBackupCollectionTests::SingleTableWithGlobalSyncIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::Shard_Build_Error [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:30:23.954693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:30:23.954797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:23.954850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:30:23.954887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:30:23.954927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:30:23.954953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:30:23.955007Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:23.955078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:30:23.955935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:30:23.956244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:30:24.051318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:24.051361Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:24.065367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:30:24.065615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:30:24.065777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:30:24.069402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:30:24.069618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:30:24.070389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:24.070593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:30:24.072702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:24.072912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:30:24.074095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:30:24.074169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:24.074326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:30:24.074380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:30:24.074455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:30:24.074632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:30:24.220169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.221216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.221338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.221427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.221535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.221640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.221722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.221791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.221862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.221925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.221990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.222098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.222186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.222298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.222387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
meshard 72057594046678944 2025-12-23T18:31:38.085039Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:31:38.085150Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552133, Sender [6:1299:3230], Recipient [6:858:2822]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72057594046678944 State: 4 2025-12-23T18:31:38.085175Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3158: StateWork, processing event TEvDataShard::TEvStateChangedResult 2025-12-23T18:31:38.085195Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186233409547 state Offline 2025-12-23T18:31:38.085703Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2025-12-23T18:31:38.085888Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-12-23T18:31:38.086054Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [6:1372:3291], Recipient [6:858:2822]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046678944 ClientId: [6:1372:3291] ServerId: [6:1374:3293] } 2025-12-23T18:31:38.086089Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-12-23T18:31:38.086420Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268829696, Sender [6:1027:2973], Recipient [6:1037:2981]: NKikimr::TEvTablet::TEvTabletDead 2025-12-23T18:31:38.086990Z node 6 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186233409549 2025-12-23T18:31:38.087162Z node 6 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186233409549 2025-12-23T18:31:38.088631Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:31:38.088984Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 Forgetting tablet 72075186233409549 2025-12-23T18:31:38.090035Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268829696, Sender [6:839:2808], Recipient [6:858:2822]: NKikimr::TEvTablet::TEvTabletDead 2025-12-23T18:31:38.090378Z node 6 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186233409547 2025-12-23T18:31:38.090464Z node 6 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186233409547 2025-12-23T18:31:38.091640Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:31:38.091864Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 Forgetting tablet 72075186233409547 2025-12-23T18:31:38.092526Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:31:38.092572Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2025-12-23T18:31:38.092659Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:31:38.092705Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2025-12-23T18:31:38.092733Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:31:38.093294Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2025-12-23T18:31:38.093327Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:31:38.093544Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552133, Sender [6:1299:3230], Recipient [6:863:2825]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72057594046678944 State: 4 2025-12-23T18:31:38.093570Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3158: StateWork, processing event TEvDataShard::TEvStateChangedResult 2025-12-23T18:31:38.093591Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186233409548 state Offline 2025-12-23T18:31:38.093731Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-12-23T18:31:38.093859Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [6:1373:3292], Recipient [6:863:2825]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046678944 ClientId: [6:1373:3292] ServerId: [6:1375:3294] } 2025-12-23T18:31:38.093883Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed Forgetting tablet 72075186233409548 2025-12-23T18:31:38.094011Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268829696, Sender [6:844:2811], Recipient [6:863:2825]: NKikimr::TEvTablet::TEvTabletDead 2025-12-23T18:31:38.094321Z node 6 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186233409548 2025-12-23T18:31:38.094411Z node 6 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186233409548 2025-12-23T18:31:38.095908Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at 
schemeshard: 72057594046678944 2025-12-23T18:31:38.096071Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:31:38.098193Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:31:38.098255Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-12-23T18:31:38.098947Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:31:38.098978Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409547 2025-12-23T18:31:38.099320Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-12-23T18:31:38.099418Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:31:38.099471Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2025-12-23T18:31:38.099554Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:31:38.099602Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:31:38.099639Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:31:38.099776Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:31:38.099832Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409548 2025-12-23T18:31:38.100825Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:31:38.131938Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-12-23T18:31:38.132286Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 Issues { message: "One of the shards report BUILD_ERROR
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n at Filling stage, process has to be canceled, shardId: 72075186233409546, shardIdx: 72057594046678944:1" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: BUILD_ERROR UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 228 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 Issues { message: "One of the shards report BUILD_ERROR
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n at Filling stage, process has to be canceled, shardId: 72075186233409546, shardIdx: 72057594046678944:1" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: BUILD_ERROR UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 228 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuildWithOverlap >> TBackupCollectionTests::BackupWithIndexesDefault [GOOD] >> TBackupCollectionTests::CdcStreamRotationDuringIncrementalBackups >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring [GOOD] >> TKeyValueTest::TestGetStatusWorks [GOOD] >> TColumnShardTestSchema::HotTiers >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] >> GenericFederatedQuery::TestConnectorNotConfigured |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |91.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |91.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |91.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... 1:2057] recipient: [28:90:2119] Leader for TabletID 72057594037927937 is [28:92:2120] sender: [28:93:2057] recipient: [28:90:2119] !Reboot 72057594037927937 (actor [28:58:2099]) rebooted! !Reboot 72057594037927937 (actor [28:58:2099]) tablet resolver refreshed! new actor is[28:92:2120] Leader for TabletID 72057594037927937 is [28:92:2120] sender: [28:208:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:88:2057] recipient: [29:39:2086] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:91:2057] recipient: [29:90:2119] Leader for TabletID 72057594037927937 is [29:92:2120] sender: [29:93:2057] recipient: [29:90:2119] !Reboot 72057594037927937 (actor [29:58:2099]) rebooted! !Reboot 72057594037927937 (actor [29:58:2099]) tablet resolver refreshed! new actor is[29:92:2120] Leader for TabletID 72057594037927937 is [29:92:2120] sender: [29:208:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:89:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:92:2057] recipient: [30:91:2119] Leader for TabletID 72057594037927937 is [30:93:2120] sender: [30:94:2057] recipient: [30:91:2119] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:93:2120] Leader for TabletID 72057594037927937 is [30:93:2120] sender: [30:209:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:92:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:95:2057] recipient: [31:94:2122] Leader for TabletID 72057594037927937 is [31:96:2123] sender: [31:97:2057] recipient: [31:94:2122] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! new actor is[31:96:2123] Leader for TabletID 72057594037927937 is [31:96:2123] sender: [31:212:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:92:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:95:2057] recipient: [32:94:2122] Leader for TabletID 72057594037927937 is [32:96:2123] sender: [32:97:2057] recipient: [32:94:2122] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! 
new actor is[32:96:2123] Leader for TabletID 72057594037927937 is [32:96:2123] sender: [32:212:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:78:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:81:2057] recipient: [35:80:2112] Leader for TabletID 72057594037927937 is [35:82:2113] sender: [35:83:2057] recipient: [35:80:2112] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:82:2113] Leader for TabletID 72057594037927937 is [35:82:2113] sender: [35:198:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:78:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:81:2057] recipient: [36:80:2112] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:83:2057] recipient: [36:80:2112] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:82:2113] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:198:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:79:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:82:2057] recipient: [37:81:2112] Leader for TabletID 72057594037927937 is [37:83:2113] sender: [37:84:2057] recipient: [37:81:2112] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:83:2113] Leader for TabletID 72057594037927937 is [37:83:2113] sender: [37:199:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:81:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:84:2057] recipient: [38:83:2114] Leader for TabletID 72057594037927937 is [38:85:2115] sender: [38:86:2057] recipient: [38:83:2114] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:85:2115] Leader for TabletID 72057594037927937 is [38:85:2115] sender: [38:201:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:84:2057] recipient: [39:83:2114] Leader for TabletID 72057594037927937 is [39:85:2115] sender: [39:86:2057] recipient: [39:83:2114] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:85:2115] Leader for TabletID 72057594037927937 is [39:85:2115] sender: [39:201:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:82:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:85:2057] recipient: [40:84:2114] Leader for TabletID 72057594037927937 is [40:86:2115] sender: [40:87:2057] recipient: [40:84:2114] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! 
new actor is[40:86:2115] Leader for TabletID 72057594037927937 is [40:86:2115] sender: [40:202:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TBackupCollectionTests::SingleTableWithGlobalSyncIndex [GOOD] >> TBackupCollectionTests::SingleTableWithMultipleGlobalSyncIndexes >> GenericFederatedQuery::IcebergHiveTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectConstant |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> GenericFederatedQuery::IcebergHiveBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectConstant >> GenericFederatedQuery::IcebergHadoopSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectConstant >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18057, MsgBus: 5232 2025-12-23T18:31:40.824975Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122868566668390:2163];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:40.825040Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fea/r3tmp/tmpwZHaDM/pdisk_1.dat 2025-12-23T18:31:41.107720Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.151522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.151622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.153477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.230481Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:41.236617Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122868566668265:2081] 1766514700817024 != 1766514700817027 TServer::EnableGrpc on GrpcPort 18057, node 1 2025-12-23T18:31:41.327785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:41.327805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:41.327812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2025-12-23T18:31:41.327928Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:41.368600Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5232 2025-12-23T18:31:41.830652Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5232 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514701238 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:42.142142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:42.175472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.404627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.582710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.669425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:44.481937Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122885746539325:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.482113Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.484675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122885746539335:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.484757Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.817622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.857401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.891816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.922863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.969860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.014051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.053167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.103020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.185107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122890041507503:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.185210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.185449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122890041507508:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.185592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122890041507509:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.185654Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.189165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:45.202398Z node 1 :K ... : Notification cookie mismatch for subscription [2:7587122901820318510:2081] 1766514708192394 != 1766514708192397 2025-12-23T18:31:48.336686Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:48.336782Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:48.340652Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30030, node 2 2025-12-23T18:31:48.450351Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:48.450374Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:48.450396Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:48.450491Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:48.477002Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7849 TClient is connected to server localhost:7849 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514708385 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:48.948577Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:31:48.964178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:49.038124Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:49.204880Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:49.209440Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:49.277338Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:51.746654Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122914705222264:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:51.746763Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:51.747072Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122914705222274:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:51.747123Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:51.823767Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:51.864538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:51.912975Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:51.950933Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:51.990806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.062519Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.114352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.215251Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.310950Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122919000190444:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.311067Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.311424Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122919000190450:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.311439Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122919000190449:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.311503Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.316988Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:52.330388Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122919000190453:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:31:52.419144Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122919000190504:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:53.193999Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122901820318542:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:53.194126Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TestMalformedRequest::CompressedDeflateContentLengthHigher [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] >> TColumnShardTestReadWrite::ReadWithProgramNoProjection |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |91.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag >> GenericFederatedQuery::IcebergHiveSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectConstant |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant >> Normalizers::SchemaVersionsNormalizer |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |91.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9623, MsgBus: 2832 2025-12-23T18:31:40.730826Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122866733843831:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:40.730883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fe5/r3tmp/tmp4ga7oU/pdisk_1.dat 2025-12-23T18:31:40.978298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:40.978436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:40.978830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.005985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.092975Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:41.093542Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122866733843794:2081] 1766514700730001 != 1766514700730004 TServer::EnableGrpc on GrpcPort 9623, node 1 2025-12-23T18:31:41.183951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:41.219423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:41.219453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:41.219460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:41.219546Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2832 TClient is connected to server localhost:2832 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:31:41.744660Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514701091 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:41.837950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:41.878456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:31:42.054466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.239162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:42.317261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:44.403854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122883913714847:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.404034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.404543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122883913714857:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.404599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.804472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.845644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.913853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.950800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.994879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.054341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.092849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.139282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.217946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122888208683032:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.218090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.218360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122888208683038:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.218429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122888208683037:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.218505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.222218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:45.233323Z node 1 :KQP ... 8.173348Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:48.173437Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:48.179450Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5971, node 2 2025-12-23T18:31:48.358862Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:48.391281Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:48.391304Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:48.391313Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:48.391404Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24976 TClient is connected to server localhost:24976 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514708224 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:48.982668Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:31:49.001393Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:49.018402Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.011612s 2025-12-23T18:31:49.057658Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:49.058577Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:49.234222Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:49.314228Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:52.337292Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122917686519067:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.337424Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.337747Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122917686519077:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.337791Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.424894Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.471628Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.548391Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.594490Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.637304Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.734250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.792824Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.895120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.992082Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122917686519949:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.992218Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.992519Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122917686519955:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.992555Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122917686519954:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.992584Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.996372Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:53.012087Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122917686519958:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:53.050378Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122900506648062:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:53.050479Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:31:53.079883Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122921981487308:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TColumnShardTestReadWrite::WriteRead [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 |91.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 62412, MsgBus: 28913 2025-12-23T18:31:40.792874Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122868177314765:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:40.792957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fe8/r3tmp/tmpDYBxUB/pdisk_1.dat 2025-12-23T18:31:41.062936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.066094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.066182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.125003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.168448Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:41.169834Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122868177314729:2081] 1766514700790879 != 1766514700790882 TServer::EnableGrpc on GrpcPort 62412, node 1 2025-12-23T18:31:41.294467Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:41.330889Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:41.330909Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:41.330918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:41.331200Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28913 TClient is connected to server localhost:28913 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:31:41.807746Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514701203 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:41.912636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:41.935319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:41.958099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.155121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:31:42.318850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:42.408071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.497465Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122885357185785:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.497579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.498044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122885357185795:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.498114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.864899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.904342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.945055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.011511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.052465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.092747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.137499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.185575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.273491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122889652153962:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.273624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.273945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122889652153967:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.273991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122889652153968:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.274037Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.278536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... 4037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:48.735891Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:48.764558Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23040, node 2 2025-12-23T18:31:48.805065Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:48.830955Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:48.830977Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:48.830985Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:48.831067Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29647 TClient is connected to server localhost:29647 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514708812 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:49.218304Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:49.227956Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:31:49.239511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:49.296044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:49.440734Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:49.521082Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:49.528297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:52.432604Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122916591665031:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.432731Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.433235Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122916591665041:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.433284Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.506582Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.549450Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.588847Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.632441Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.663799Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.711682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.801400Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.863470Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:52.993293Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122916591665917:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.993409Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.993709Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122916591665922:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.993743Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122916591665923:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.993996Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.998326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:53.025576Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122916591665926:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:31:53.112615Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122920886633273:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:53.512889Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122899411793984:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:53.512953Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |91.7%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TColumnShardTestSchema::HotTiersAfterTtl >> TColumnShardTestReadWrite::CompactionGCFailingBs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12252, MsgBus: 12013 2025-12-23T18:31:40.640079Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122865198593274:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:40.640229Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003feb/r3tmp/tmpmQgqkA/pdisk_1.dat 2025-12-23T18:31:40.922719Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:40.923040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:40.923167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:40.953318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.043104Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122865198593245:2081] 1766514700638017 != 1766514700638020 2025-12-23T18:31:41.046348Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12252, node 1 2025-12-23T18:31:41.142296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:41.142315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:41.142323Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:41.142417Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:41.181173Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12013 TClient is connected to server localhost:12013 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:31:41.656445Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514701042 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:41.733755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:41.758087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:41.958765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.172271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
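Note for readers triaging combined output like the blocks above: the actual verdicts are the ">> Suite::Test [STATUS]" lines, which are easy to lose among the per-test dumps and progress markers. The following minimal Python sketch extracts only those bracketed verdict lines. The regular expression is inferred from the lines visible in this log rather than from any documented ya make format, the script and file names are illustrative assumptions, and queued tests listed without a bracketed status are simply skipped.

import re
import sys

# Matches verdict lines such as:
#   >> TColumnShardTestReadWrite::WriteRead [GOOD]
# The pattern is inferred from this log, not from a documented format.
VERDICT_RE = re.compile(r">>\s+(\S+)\s+\[([A-Z]+)\]")

def extract_verdicts(text):
    # Yield (test_name, status) pairs found anywhere in the log text.
    for match in VERDICT_RE.finditer(text):
        yield match.group(1), match.group(2)

if __name__ == "__main__":
    # Hypothetical usage: python extract_verdicts.py combined_test_output.log
    with open(sys.argv[1], encoding="utf-8", errors="replace") as fh:
        for name, status in extract_verdicts(fh.read()):
            print(f"{status:8} {name}")

Applied to this section, it prints lines such as "GOOD     KqpQueryPerf::AggregateToScalar-QueryService" while ignoring the surrounding scheme and workload-manager noise.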
2025-12-23T18:31:42.279574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:44.255220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122882378464299:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.255320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.255685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122882378464309:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.255724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.657957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.700353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.755856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.785979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.814691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.854815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.890254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:44.950880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.064459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122886673432478:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.064568Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.064607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122886673432483:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.064725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122886673432485:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.064807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.068551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:45.081674Z node 1 ... 94037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:48.911117Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:48.915244Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8232, node 2 2025-12-23T18:31:49.027292Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:49.027315Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:49.027322Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:49.027391Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:49.034104Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27220 TClient is connected to server localhost:27220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514708987 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:49.412924Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:31:49.418458Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:49.444591Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:49.497728Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:49.710811Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:49.783590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:49.793521Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:52.916776Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122918941682672:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.916912Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.917599Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122918941682682:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.917678Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:52.996403Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:53.043638Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:53.084590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:53.123720Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:53.162194Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:53.219009Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:53.271683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:53.336473Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:53.439724Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122923236650853:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:53.439820Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:53.440301Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122923236650859:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:53.440384Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:53.440390Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122923236650858:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:53.445016Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:53.456935Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122923236650862:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:31:53.513556Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122923236650913:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:53.798823Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587122901761811662:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:53.798882Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant >> TVPatchTests::FindingPartsWhenError [GOOD] >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> GenericFederatedQuery::YdbManagedSelectConstant >> TVPatchTests::PatchPartOk >> TestMalformedRequest::ContentLengthHigher [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query >> TestMalformedRequest::CompressedGzipContentLengthHigher [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 >> TVPatchTests::PatchPartOk [GOOD] >> TBackupCollectionTests::SingleTableWithMultipleGlobalSyncIndexes [GOOD] >> TBackupCollectionTests::TableWithMixedIndexTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedDeflateContentLengthHigher [GOOD] Test command err: 2025-12-23T18:29:41.857996Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122356985689191:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:41.860062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c52/r3tmp/tmpeF6b1i/pdisk_1.dat 2025-12-23T18:29:42.322928Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:29:42.391997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:42.392113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:42.457359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:42.550614Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:42.553677Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122356985689152:2081] 1766514581841656 != 1766514581841659 2025-12-23T18:29:42.582519Z node 1 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 21979, node 1 2025-12-23T18:29:42.872701Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:29:42.910450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:29:42.910472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:29:42.910487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:29:42.910549Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27010 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514582581 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:29:43.391817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:29:43.398465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:27010 2025-12-23T18:29:43.690512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:29:43.703621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:29:43.705800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 
2025-12-23T18:29:43.747497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:43.933076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:29:43.996858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:29:44.066804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:44.108559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:44.166612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:44.234781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:44.311014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:44.373404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
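Note on the repeated warnings above: most of the volume in these otherwise green dumps comes from the same few KQP_WORKLOAD_SERVICE and FLAT_TX_SCHEMESHARD messages repeated for every node. The sketch below splits the raw text into individual log entries (several entries share a physical line in this rendering) and filters out the most repetitive families so the rarer entries stand out. Which patterns count as noise is an assumption based on how often they appear here in tests that still finish [GOOD]; it is not a statement about YDB behaviour in general.

import re

# Split the raw text into individual log entries at each leading ISO
# timestamp, since several entries can share one physical line here.
# (Splitting on a zero-width lookahead requires Python 3.7+.)
ENTRY_SPLIT_RE = re.compile(r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)")

# Assumed-benign patterns, copied verbatim from messages that recur many
# times per node in this log while the owning test still ends [GOOD].
# The classification is an illustrative guess, not authoritative.
BENIGN_PATTERNS = [
    re.compile(r"Resource pool default not found or you don't have access permissions"),
    re.compile(r"Failed to fetch pool default, DatabaseId: /Root"),
    re.compile(r"Scheduled retry for error"),
    re.compile(r"propose itself is undo unsafe"),
]

def iter_entries(text):
    # Yield stripped, non-empty log entries.
    for entry in ENTRY_SPLIT_RE.split(text):
        entry = entry.strip()
        if entry:
            yield entry

def interesting_entries(text):
    # Keep WARN/ERROR entries that are not covered by the benign list.
    for entry in iter_entries(text):
        if (" ERROR: " in entry or " WARN: " in entry) and not any(
            p.search(entry) for p in BENIGN_PATTERNS
        ):
            yield entry

Applied to one of the dumps above, this drops the most repetitive message families and leaves a short list of metadata, proxy, classifier, and hive entries to scan by hand.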
2025-12-23T18:29:44.436981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.331166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122378460527300:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:46.331166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122378460527288:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:46.331285Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:46.331594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122378460527303:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:46.331667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:46.335291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:46.349242Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122378460527302:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-12-23T18:29:46.405690Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122378460527355:3102] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:46.860398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1: ... 56.243608Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:31:56.243650Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 9ms 2025-12-23T18:31:56.244053Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: 
Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:31:56.244091Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-12-23T18:31:56.244241Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 11ms 2025-12-23T18:31:56.244794Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:31:56.334638Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122934199091239:2843], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:56.334777Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587122934199091240:2844], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:31:56.334842Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:56.335483Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122934199091243:2845], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:56.335587Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:56.339726Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122934199091237:2842]: Pool not found 2025-12-23T18:31:56.340368Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-12-23T18:31:56.970155Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122934199091266:2851], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:56.970249Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587122934199091267:2852], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:31:56.970303Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:56.971723Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122934199091270:2853], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:56.971786Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:56.978521Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122934199091264:2850]: Pool not found 2025-12-23T18:31:56.979170Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-12-23T18:31:56.982335Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122934199091290:2858], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:56.982430Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587122934199091291:2859], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:31:56.982477Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:56.983370Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122934199091294:2860], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:56.983422Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:56.987622Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122934199091288:2857]: Pool not found 2025-12-23T18:31:56.988245Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2025-12-23T18:31:57.122364Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:219: (#37,[::1]:55356) connection closed by inactivity timeout |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenError [GOOD] Test command err: Recv 65537 2025-12-23T18:31:58.927242Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-12-23T18:31:58.928087Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# ERROR ResultSize# 1 2025-12-23T18:31:58.928191Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-12-23T18:31:58.928288Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob >> GenericFederatedQuery::IcebergHadoopBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant >> TVPatchTests::FindingPartsWhenPartsAreDontExist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD] Test command err: Recv 65537 2025-12-23T18:31:59.396261Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-12-23T18:31:59.397077Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-12-23T18:31:59.397135Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-12-23T18:31:59.397308Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-12-23T18:31:59.397361Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 
2025-12-23T18:31:59.397495Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-12-23T18:31:59.397540Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-12-23T18:31:59.397600Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-12-23T18:31:59.397810Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:628} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK 2025-12-23T18:31:59.397860Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-12-23T18:31:59.397908Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] |91.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> TVPatchTests::PatchPartPutError >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2025-12-23T18:31:52.830671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:31:52.883898Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:31:52.884154Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:31:52.894069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:31:52.894371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2025-12-23T18:31:52.894631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:31:52.894834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:31:52.894990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:31:52.895114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:31:52.895243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:31:52.895363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:31:52.895491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:31:52.895620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:31:52.895753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:31:52.895870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:31:52.896009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:31:52.939267Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:31:52.939482Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:31:52.939543Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:31:52.939722Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:52.939928Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:31:52.940021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:31:52.940073Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:31:52.940167Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:31:52.940239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:31:52.940282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:31:52.940355Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:31:52.940608Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:52.940681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:31:52.940723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:31:52.940753Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:31:52.940873Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:31:52.940937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:31:52.940983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:31:52.941018Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:31:52.941098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:31:52.941139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:31:52.941190Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:31:52.941257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:31:52.941301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:31:52.941340Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:31:52.941575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:31:52.941636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:31:52.941698Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:31:52.941851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:31:52.941907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:31:52.941940Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:31:52.941989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:31:52.942028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:31:52.942054Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:31:52.942102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:31:52.942136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:31:52.942164Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:31:52.942287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:31:52.942331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... _names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-12-23T18:31:57.506507Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-12-23T18:31:57.506888Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-12-23T18:31:57.507120Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:57.507331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:57.507557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:57.507845Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:31:57.508056Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:57.508298Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:57.508720Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:477:2488] finished for tablet 9437184 2025-12-23T18:31:57.509311Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:476:2487];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":5437015,"name":"_full_task","f":5437015,"d_finished":0,"c":0,"l":5454484,"d":17469},"events":[{"name":"bootstrap","f":5437388,"d_finished":1936,"c":1,"l":5439324,"d":1936},{"a":5453508,"name":"ack","f":5451410,"d_finished":1882,"c":1,"l":5453292,"d":2858},{"a":5453492,"name":"processing","f":5439495,"d_finished":5179,"c":3,"l":5453296,"d":6171},{"name":"ProduceResults","f":5438765,"d_finished":3164,"c":6,"l":5454024,"d":3164},{"a":5454031,"name":"Finish","f":5454031,"d_finished":0,"c":0,"l":5454484,"d":453},{"name":"task_result","f":5439519,"d_finished":3224,"c":2,"l":5451157,"d":3224}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:57.509402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:31:57.509937Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:476:2487];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":5437015,"name":"_full_task","f":5437015,"d_finished":0,"c":0,"l":5455144,"d":18129},"events":[{"name":"bootstrap","f":5437388,"d_finished":1936,"c":1,"l":5439324,"d":1936},{"a":5453508,"name":"ack","f":5451410,"d_finished":1882,"c":1,"l":5453292,"d":3518},{"a":5453492,"name":"processing","f":5439495,"d_finished":5179,"c":3,"l":5453296,"d":6831},{"name":"ProduceResults","f":5438765,"d_finished":3164,"c":6,"l":5454024,"d":3164},{"a":5454031,"name":"Finish","f":5454031,"d_finished":0,"c":0,"l":5455144,"d":1113},{"name":"task_result","f":5439519,"d_finished":3224,"c":2,"l":5451157,"d":3224}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:57.510040Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:31:57.487118Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-12-23T18:31:57.510091Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:31:57.510393Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] >> TVPatchTests::PatchPartPutError [GOOD] >> TVPatchTests::PatchPartGetError ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] Test command err: Recv 65537 2025-12-23T18:32:00.317333Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-12-23T18:32:00.319527Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-12-23T18:32:00.319605Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-12-23T18:32:00.319831Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-12-23T18:32:00.319942Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-12-23T18:32:00.320112Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyound the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |91.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2025-12-23T18:32:00.484444Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-12-23T18:32:00.485818Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-12-23T18:32:00.485914Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-12-23T18:32:00.486028Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv 
NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-12-23T18:32:00.806339Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-12-23T18:32:00.806948Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-12-23T18:32:00.807017Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-12-23T18:32:00.807201Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-12-23T18:32:00.807296Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-12-23T18:32:00.807390Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TVPatchTests::PatchPartGetError [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthHigher [GOOD] Test command err: 2025-12-23T18:29:44.681691Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122367077424426:2236];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:44.681735Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c4b/r3tmp/tmp9hDlVA/pdisk_1.dat 2025-12-23T18:29:45.061262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:45.061407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:45.161535Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:29:45.184621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:45.311255Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:45.354903Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122367077424220:2081] 1766514584670616 != 1766514584670619 
2025-12-23T18:29:45.368626Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 4705, node 1 2025-12-23T18:29:45.571518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:29:45.571555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:29:45.571563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:29:45.571636Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:29:45.686868Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514585297 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:29:46.054468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:29:46.067831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:28819 2025-12-23T18:29:46.394616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:29:46.407597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:29:46.411729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:29:46.473249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.725949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:29:46.796626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-12-23T18:29:46.801224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:29:46.856080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.897805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.940857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.990267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:29:47.030224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.074829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.115592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:49.243295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122388552262363:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.243424Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.243623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122388552262374:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.243835Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122388552262377:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.243883Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.248095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:49.260340Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122388552262379:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:29:49.323750Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122388552262430:3106] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, stat ... :58.571720Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:31:58.571759Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 5ms 2025-12-23T18:31:58.572114Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: 
"MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:31:58.572146Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-12-23T18:31:58.572255Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 6ms 2025-12-23T18:31:58.572723Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:31:58.632390Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122943770416569:2900], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:58.632477Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587122943770416570:2901], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:31:58.632544Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:58.634034Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122943770416573:2902], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:58.634106Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:58.638853Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122943770416567:2899]: Pool not found 2025-12-23T18:31:58.639493Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-12-23T18:31:59.260158Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122948065383889:2907], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.260178Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587122948065383890:2908], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:31:59.260235Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.261246Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122948065383893:2909], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.261317Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.271565Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122948065383887:2906]: Pool not found 2025-12-23T18:31:59.272260Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-12-23T18:31:59.276111Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122948065383913:2914], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.276194Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587122948065383914:2915], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:31:59.276238Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.277314Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122948065383917:2916], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.277359Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.284359Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122948065383911:2913]: Pool not found 2025-12-23T18:31:59.285113Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2025-12-23T18:31:59.384344Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:219: (#37,[::1]:56652) connection closed by inactivity timeout |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] Test command err: Recv 65537 2025-12-23T18:32:01.161466Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-12-23T18:32:01.163007Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-12-23T18:32:01.163082Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-12-23T18:32:01.163379Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-12-23T18:32:01.163565Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-12-23T18:32:01.163793Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-12-23T18:32:01.163867Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-12-23T18:32:01.164021Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-12-23T18:32:01.164188Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:628} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2025-12-23T18:32:01.164263Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# 
ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-12-23T18:32:01.164326Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::ContentLengthHigher [GOOD] Test command err: 2025-12-23T18:29:45.026252Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122374386204660:2262];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:29:45.026870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c53/r3tmp/tmpwPf9i0/pdisk_1.dat 2025-12-23T18:29:45.402992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:29:45.475559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:29:45.475683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:29:45.499529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:29:45.522635Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:45.525484Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122370091237129:2081] 1766514585003627 != 1766514585003630 TServer::EnableGrpc on GrpcPort 24434, node 1 2025-12-23T18:29:45.768361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:29:45.788906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:29:45.788932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:29:45.788939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:29:45.789017Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:29:46.022592Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12733 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514585556 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:29:46.105493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:29:46.115651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:12733 2025-12-23T18:29:46.413175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:29:46.424760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:29:46.430042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:29:46.453696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-12-23T18:29:46.461162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.738500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
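
The test stderr above repeats the same schemeshard and workload-service warnings many times, which makes the actual failure hard to spot. Below is a minimal, hypothetical Python sketch (not part of ya or YDB tooling; the script name and input source are assumptions) that collapses WARN/ERROR records of the shape "2025-12-23T18:29:46.820364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: ..." into per-component, per-source-location counts. The record layout is inferred from this log only.

# aggregate_warnings.py -- hypothetical helper, not part of ya or YDB.
# Collapses repeated WARN/ERROR records from a flattened test log into
# per-level, per-component, per-source-location counts.
import re
import sys
from collections import Counter

# Record shape inferred from the log above:
#   2025-12-23T18:29:46.820364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: <message>
RECORD = re.compile(
    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node \d+ "
    r":(\w+) (WARN|ERROR): ([\w./]+:\d+):"
)

def main(stream):
    counts = Counter()
    for line in stream:
        for component, level, location in RECORD.findall(line):
            counts[(level, component, location)] += 1
    for (level, component, location), n in counts.most_common():
        print(f"{n:6d}  {level:5s}  {component}  {location}")

if __name__ == "__main__":
    main(sys.stdin)  # e.g. python3 aggregate_warnings.py < test_stderr.txt
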
2025-12-23T18:29:46.820364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:29:46.875990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.937322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:46.991227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.047733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.091240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.138517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:47.224922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:29:49.206927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122391566075279:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.206992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122391566075270:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.207205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.209475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122391566075285:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.209565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:49.211397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:49.229934Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122391566075284:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:29:49.309015Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122391566075337:3103] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, sta ... :58.514177Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:31:58.514225Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 5ms 2025-12-23T18:31:58.514721Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: 
"MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:31:58.514783Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-12-23T18:31:58.514917Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 5ms 2025-12-23T18:31:58.515488Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:31:58.626435Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122944378175858:2900], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:58.626542Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587122944378175859:2901], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:31:58.626627Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:58.627819Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122944378175862:2902], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:58.627888Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:58.633274Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122944378175856:2899]: Pool not found 2025-12-23T18:31:58.633478Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-12-23T18:31:59.170800Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587122948673143182:2909], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:31:59.170904Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122948673143181:2908], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.171041Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.172866Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122948673143185:2910], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.172968Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.180955Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122948673143179:2907]: Pool not found 2025-12-23T18:31:59.181753Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-12-23T18:31:59.185670Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122948673143205:2915], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.185775Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.185860Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587122948673143206:2916], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:31:59.186140Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122948673143209:2917], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.186214Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:59.190724Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587122948673143203:2914]: Pool not found 2025-12-23T18:31:59.191200Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2025-12-23T18:31:59.342898Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:219: (#37,[::1]:43688) connection closed by inactivity timeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] Test command err: 2025-12-23T18:31:57.587111Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:31:57.626619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:31:57.626963Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:31:57.635723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:31:57.636033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:31:57.636301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:31:57.636452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:31:57.636611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:31:57.636749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:31:57.636905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:31:57.637047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:31:57.637224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:31:57.637345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:31:57.637473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:31:57.637629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:31:57.637782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:31:57.672523Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:31:57.672759Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:31:57.672820Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:31:57.673069Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:57.673265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:31:57.673361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:31:57.673416Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:31:57.673558Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:31:57.673640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:31:57.673695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:31:57.673742Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:31:57.673985Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:57.674078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:31:57.674131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:31:57.674166Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:31:57.674267Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:31:57.674329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:31:57.674443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:31:57.674477Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:31:57.674547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:31:57.674606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:31:57.674655Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:31:57.674716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:31:57.674804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:31:57.674855Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:31:57.675099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:31:57.675174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:31:57.675223Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:31:57.675380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:31:57.675445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:31:57.675485Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:31:57.675553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:31:57.675603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:31:57.675642Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:31:57.675695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:31:57.675747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:31:57.675796Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:31:57.676040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:31:57.676096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=100; 2025-12-23T18:31:58.722654Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=100;batch_columns=level,timestamp; 2025-12-23T18:31:58.723040Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:327:2339];bytes=1200;rows=100;faults=0;finished=0;fault=0;schema=level: int32 timestamp: timestamp[us]; 2025-12-23T18:31:58.723308Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:58.723483Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:58.723729Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:58.723943Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:31:58.724163Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:58.724361Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:58.724704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:335:2347] finished for tablet 9437184 2025-12-23T18:31:58.725298Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:327:2339];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.018},{"events":["l_ProduceResults","f_Finish"],"t":0.02},{"events":["l_ack","l_processing","l_Finish"],"t":0.021}],"full":{"a":1804656,"name":"_full_task","f":1804656,"d_finished":0,"c":0,"l":1825862,"d":21206},"events":[{"name":"bootstrap","f":1805066,"d_finished":3268,"c":1,"l":1808334,"d":3268},{"a":1825008,"name":"ack","f":1822998,"d_finished":1854,"c":1,"l":1824852,"d":2708},{"a":1824976,"name":"processing","f":1808733,"d_finished":6096,"c":3,"l":1824856,"d":6982},{"name":"ProduceResults","f":1807380,"d_finished":3107,"c":6,"l":1825493,"d":3107},{"a":1825499,"name":"Finish","f":1825499,"d_finished":0,"c":0,"l":1825862,"d":363},{"name":"task_result","f":1808760,"d_finished":4174,"c":2,"l":1822870,"d":4174}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:58.725439Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:327:2339];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:31:58.726252Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:327:2339];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.018},{"events":["l_ProduceResults","f_Finish"],"t":0.02},{"events":["l_ack","l_processing","l_Finish"],"t":0.021}],"full":{"a":1804656,"name":"_full_task","f":1804656,"d_finished":0,"c":0,"l":1826582,"d":21926},"events":[{"name":"bootstrap","f":1805066,"d_finished":3268,"c":1,"l":1808334,"d":3268},{"a":1825008,"name":"ack","f":1822998,"d_finished":1854,"c":1,"l":1824852,"d":3428},{"a":1824976,"name":"processing","f":1808733,"d_finished":6096,"c":3,"l":1824856,"d":7702},{"name":"ProduceResults","f":1807380,"d_finished":3107,"c":6,"l":1825493,"d":3107},{"a":1825499,"name":"Finish","f":1825499,"d_finished":0,"c":0,"l":1826582,"d":1083},{"name":"task_result","f":1808760,"d_finished":4174,"c":2,"l":1822870,"d":4174}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:31:58.726359Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:31:58.638402Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-12-23T18:31:58.726444Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:31:58.726630Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; 2025-12-23T18:31:58.727457Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2025-12-23T18:31:58.727884Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 100 scanId: 0 version: {1766514718547:100} readable: {1766514718547:max} at tablet 9437184 
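
Each TX_COLUMNSHARD_SCAN scan_finish record above embeds a stats={...} JSON blob with a "full" summary and an "events" list (bootstrap, ack, processing, ProduceResults, Finish, task_result). The following sketch, assuming the blob is always terminated by ";iterator=" as it is in this log, extracts those blobs and prints the per-event counters. Field names (f, l, d, d_finished, c) are reproduced verbatim; no units or semantics beyond what the log shows are assumed, and the script name is hypothetical.

# scan_stats.py -- hypothetical helper for the scan_finish records above.
# Extracts each stats={...} JSON blob and prints its "full" summary and
# "events" entries; field names are reproduced verbatim, no units assumed.
import json
import re
import sys

# In this log the blob is always followed by ";iterator=", which is used
# here as the terminator (an assumption about the format).
STATS = re.compile(r"stats=(\{.*?\});iterator=", re.DOTALL)

def main(stream):
    text = stream.read()
    for match in STATS.finditer(text):
        stats = json.loads(match.group(1))
        full = stats.get("full", {})
        print(f'{full.get("name", "<unnamed>")}: d={full.get("d")}')
        for event in stats.get("events", []):
            print(f'  {event.get("name", "?"):>15}  c={event.get("c")}  '
                  f'd_finished={event.get("d_finished")}  d={event.get("d")}')

if __name__ == "__main__":
    main(sys.stdin)
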
2025-12-23T18:31:58.728045Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2025-12-23T18:31:58.728372Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1766514718547:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2025-12-23T18:31:58.728495Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1766514718547:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2025-12-23T18:31:58.728658Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1766514718547:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: program has no projections; >> Normalizers::SchemaVersionsNormalizer [GOOD] >> Normalizers::RemoveWriteIdNormalizer |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view >> TVPatchTests::PatchPartFastXorDiffDisorder |91.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2025-12-23T18:32:01.449651Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-12-23T18:32:01.450563Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-12-23T18:32:01.450647Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-12-23T18:32:01.450906Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-12-23T18:32:01.450982Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send 
NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-12-23T18:32:01.451190Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-12-23T18:32:01.451274Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] >> TVPatchTests::FindingPartsWhenSeveralPartsExist |91.7%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24919, MsgBus: 3182 2025-12-23T18:31:40.819419Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122867358782590:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:40.819499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:31:40.873035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fe6/r3tmp/tmp0W5GFx/pdisk_1.dat 2025-12-23T18:31:41.157447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.157519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.171106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.207872Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.295358Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24919, node 1 2025-12-23T18:31:41.385882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:41.385914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:41.385925Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:41.386035Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2025-12-23T18:31:41.403624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3182 2025-12-23T18:31:41.835007Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514701322 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:31:42.011694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:42.025070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.190856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.344852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:42.414125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:44.495428Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122884538653614:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.495574Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.496242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122884538653624:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.496299Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:44.929240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.007353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.077052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.105175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.134512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.170862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.203827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.249748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:45.334593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122888833621796:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.334672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.334834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122888833621802:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.334911Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.334970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122888833621801:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:45.338635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12 ... e 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:51.043498Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:51.043507Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:51.043590Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28604 TClient is connected to server localhost:28604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514710905 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:51.673050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:31:51.683336Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:31:51.691053Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:31:51.694367Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:51.780107Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:31:51.998183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:52.099460Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:31:54.828082Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122926608692624:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:54.828209Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:54.828674Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122926608692634:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:54.828748Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:54.925253Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:54.975713Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:55.020104Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:55.058498Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:55.098062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:55.142029Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:55.186601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:55.284339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:55.377938Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122930903660796:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:55.378083Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:55.378147Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122930903660801:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:55.378798Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587122930903660804:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:55.378895Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:31:55.391296Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:31:55.406510Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587122930903660803:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:31:55.502798Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587122930903660856:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:31:57.377776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:57.446605Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:31:57.489653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] Test command err: Recv 65537 2025-12-23T18:32:02.508915Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-12-23T18:32:02.509909Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-12-23T18:32:02.509992Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-12-23T18:32:02.510229Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-12-23T18:32:02.510428Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no 2025-12-23T18:32:02.510485Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5 Send NKikimr::TEvBlobStorage::TEvVGet >> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD] >> 
TVPatchTests::FindingPartsWithTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2025-12-23T18:32:02.322120Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-12-23T18:32:02.323095Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-12-23T18:32:02.323168Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-12-23T18:32:02.323378Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-12-23T18:32:02.323481Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-12-23T18:32:02.323681Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] >> TBackupCollectionTests::TableWithMixedIndexTypes [GOOD] >> TBackupCollectionTests::MultipleTablesWithIndexes >> TColumnShardTestSchema::RebootForgetWithLostAnswer >> TTxDataShardReshuffleKMeansScan::BuildToBuildWithOverlap [GOOD] >> TTxDataShardSampleKScan::BadRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] Test command err: Recv 65537 2025-12-23T18:32:03.176070Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-12-23T18:32:03.177891Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-12-23T18:32:03.177991Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-12-23T18:32:03.178268Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} 
[0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-12-23T18:32:03.178371Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-12-23T18:32:03.178504Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-12-23T18:32:03.486945Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NActors::TEvents::TEvWakeup 2025-12-23T18:32:03.498330Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:735} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff; 2025-12-23T18:32:03.498440Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-12-23T18:32:03.498543Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TColumnShardTestSchema::CreateTable-Reboots+GenerateInternalPathId >> TKeyValueTest::TestVacuumWithMockDisk [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> test_select.py::TestDML::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> TraverseDatashard::TraverseOneTable >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve |91.7%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |91.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpRboPg::Select |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestVacuumWithMockDisk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] 2025-12-23T18:31:16.282287Z node 3 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:58:2057] recipient: [3:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:58:2057] recipient: [3:55:2099] Leader for TabletID 72057594037927937 is [3:60:2101] sender: [3:61:2057] recipient: [3:55:2099] Leader for TabletID 72057594037927937 is [3:60:2101] sender: [3:78:2057] recipient: [3:17:2064] 2025-12-23T18:31:16.624877Z node 4 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:58:2057] recipient: [4:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:58:2057] recipient: [4:55:2099] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:61:2057] recipient: [4:55:2099] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:78:2057] recipient: [4:17:2064] !Reboot 72057594037927937 (actor [4:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:80:2057] recipient: [4:42:2089] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:83:2057] recipient: [4:82:2114] Leader for TabletID 72057594037927937 is [4:84:2115] sender: [4:85:2057] recipient: [4:82:2114] !Reboot 72057594037927937 (actor [4:60:2101]) rebooted! !Reboot 72057594037927937 (actor [4:60:2101]) tablet resolver refreshed! 
new actor is[4:84:2115] Leader for TabletID 72057594037927937 is [4:84:2115] sender: [4:200:2057] recipient: [4:17:2064] 2025-12-23T18:31:18.742122Z node 5 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:58:2057] recipient: [5:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:58:2057] recipient: [5:55:2099] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:61:2057] recipient: [5:55:2099] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:78:2057] recipient: [5:17:2064] !Reboot 72057594037927937 (actor [5:60:2101]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:80:2057] recipient: [5:42:2089] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:83:2057] recipient: [5:82:2114] Leader for TabletID 72057594037927937 is [5:84:2115] sender: [5:85:2057] recipient: [5:82:2114] !Reboot 72057594037927937 (actor [5:60:2101]) rebooted! !Reboot 72057594037927937 (actor [5:60:2101]) tablet resolver refreshed! new actor is[5:84:2115] Leader for TabletID 72057594037927937 is [5:84:2115] sender: [5:200:2057] recipient: [5:17:2064] 2025-12-23T18:31:20.927234Z node 6 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:58:2057] recipient: [6:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:58:2057] recipient: [6:55:2099] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:61:2057] recipient: [6:55:2099] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:78:2057] recipient: [6:17:2064] !Reboot 72057594037927937 (actor [6:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:81:2057] recipient: [6:42:2089] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:84:2057] recipient: [6:83:2114] Leader for TabletID 72057594037927937 is [6:85:2115] sender: [6:86:2057] recipient: [6:83:2114] !Reboot 72057594037927937 (actor [6:60:2101]) rebooted! !Reboot 72057594037927937 (actor [6:60:2101]) tablet resolver refreshed! new actor is[6:85:2115] Leader for TabletID 72057594037927937 is [6:85:2115] sender: [6:201:2057] recipient: [6:17:2064] 2025-12-23T18:31:23.030698Z node 7 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:58:2057] recipient: [7:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:58:2057] recipient: [7:55:2099] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:61:2057] recipient: [7:55:2099] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:78:2057] recipient: [7:17:2064] !Reboot 72057594037927937 (actor [7:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:84:2057] recipient: [7:42:2089] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:87:2057] recipient: [7:86:2117] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:89:2057] recipient: [7:86:2117] !Reboot 72057594037927937 (actor [7:60:2101]) rebooted! !Reboot 72057594037927937 (actor [7:60:2101]) tablet resolver refreshed! 
new actor is[7:88:2118] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:204:2057] recipient: [7:17:2064] 2025-12-23T18:31:25.159931Z node 8 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:58:2057] recipient: [8:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:58:2057] recipient: [8:55:2099] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:61:2057] recipient: [8:55:2099] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:78:2057] recipient: [8:17:2064] !Reboot 72057594037927937 (actor [8:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:84:2057] recipient: [8:42:2089] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:87:2057] recipient: [8:86:2117] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:89:2057] recipient: [8:86:2117] !Reboot 72057594037927937 (actor [8:60:2101]) rebooted! !Reboot 72057594037927937 (actor [8:60:2101]) tablet resolver refreshed! new actor is[8:88:2118] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:204:2057] recipient: [8:17:2064] 2025-12-23T18:31:27.283943Z node 9 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:58:2057] recipient: [9:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:58:2057] recipient: [9:55:2099] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:61:2057] recipient: [9:55:2099] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:78:2057] recipient: [9:17:2064] !Reboot 72057594037927937 (actor [9:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:85:2057] recipient: [9:42:2089] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:60:2101]) rebooted! !Reboot 72057594037927937 (actor [9:60:2101]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:17:2064] 2025-12-23T18:31:29.428457Z node 10 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:58:2057] recipient: [10:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:58:2057] recipient: [10:55:2099] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:61:2057] recipient: [10:55:2099] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:78:2057] recipient: [10:17:2064] !Reboot 72057594037927937 (actor [10:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:88:2057] recipient: [10:42:2089] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:91:2057] recipient: [10:90:2120] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:93:2057] recipient: [10:90:2120] !Reboot 72057594037927937 (actor [10:60:2101]) rebooted! 
!Reboot 72057594037927937 (actor [10:60:2101]) tablet resolver refreshed! new actor is[10:92:2121] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:208:2057] recipient: [10:17:2064] 2025-12-23T18:31:31.319626Z node 11 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:58:2057] recipient: [11:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:58:2057] recipient: [11:55:2099] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:61:2057] recipient: [11:55:2099] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:78:2057] recipient: [11:17:2064] !Reboot 72057594037927937 (actor [11:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:88:2057] recipient: [11:42:2089] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:91:2057] recipient: [11:90:2120] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:93:2057] recipient: [11:90:2120] !Reboot 72057594037927937 (actor [11:60:2101]) rebooted! !Reboot 72057594037927937 (actor [11:60:2101]) tablet resolver refreshed! new actor is[11:92:2121] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:208:2057] recipient: [11:17:2064] 2025-12-23T18:31:33.426376Z node 12 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:58:2057] recipient: [12:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:58:2057] recipient: [12:55:2099] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:61:2057] recipient: [12:55:2099] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:78:2057] recipient: [12:17:2064] !Reboot 72057594037927937 (actor [12:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:89:2057] recipient: [12:42:2089] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:60:2101]) rebooted! !Reboot 72057594037927937 (actor [12:60:2101]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:212 ... pient: [18:42:2089] Leader for TabletID 72057594037927937 is [18:60:2101] sender: [18:100:2057] recipient: [18:99:2126] Leader for TabletID 72057594037927937 is [18:101:2127] sender: [18:102:2057] recipient: [18:99:2126] !Reboot 72057594037927937 (actor [18:60:2101]) rebooted! !Reboot 72057594037927937 (actor [18:60:2101]) tablet resolver refreshed! 
new actor is[18:101:2127] Leader for TabletID 72057594037927937 is [18:101:2127] sender: [18:217:2057] recipient: [18:17:2064] 2025-12-23T18:31:48.042508Z node 19 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:58:2057] recipient: [19:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:58:2057] recipient: [19:55:2099] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:61:2057] recipient: [19:55:2099] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:78:2057] recipient: [19:17:2064] !Reboot 72057594037927937 (actor [19:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:100:2057] recipient: [19:42:2089] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:103:2057] recipient: [19:102:2129] Leader for TabletID 72057594037927937 is [19:104:2130] sender: [19:105:2057] recipient: [19:102:2129] !Reboot 72057594037927937 (actor [19:60:2101]) rebooted! !Reboot 72057594037927937 (actor [19:60:2101]) tablet resolver refreshed! new actor is[19:104:2130] Leader for TabletID 72057594037927937 is [19:104:2130] sender: [19:220:2057] recipient: [19:17:2064] 2025-12-23T18:31:50.147034Z node 20 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:58:2057] recipient: [20:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:58:2057] recipient: [20:55:2099] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:61:2057] recipient: [20:55:2099] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:78:2057] recipient: [20:17:2064] !Reboot 72057594037927937 (actor [20:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:100:2057] recipient: [20:42:2089] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:103:2057] recipient: [20:102:2129] Leader for TabletID 72057594037927937 is [20:104:2130] sender: [20:105:2057] recipient: [20:102:2129] !Reboot 72057594037927937 (actor [20:60:2101]) rebooted! !Reboot 72057594037927937 (actor [20:60:2101]) tablet resolver refreshed! new actor is[20:104:2130] Leader for TabletID 72057594037927937 is [20:104:2130] sender: [20:220:2057] recipient: [20:17:2064] 2025-12-23T18:31:52.472280Z node 21 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:58:2057] recipient: [21:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:58:2057] recipient: [21:55:2099] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:61:2057] recipient: [21:55:2099] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:78:2057] recipient: [21:17:2064] !Reboot 72057594037927937 (actor [21:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:101:2057] recipient: [21:42:2089] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:104:2057] recipient: [21:103:2129] Leader for TabletID 72057594037927937 is [21:105:2130] sender: [21:106:2057] recipient: [21:103:2129] !Reboot 72057594037927937 (actor [21:60:2101]) rebooted! !Reboot 72057594037927937 (actor [21:60:2101]) tablet resolver refreshed! new actor is[21:105:2130] Leader for TabletID 72057594037927937 is [21:105:2130] sender: [21:221:2057] recipient: [21:17:2064] 2025-12-23T18:31:54.682345Z node 22 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:58:2057] recipient: [22:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:58:2057] recipient: [22:55:2099] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:61:2057] recipient: [22:55:2099] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:78:2057] recipient: [22:17:2064] !Reboot 72057594037927937 (actor [22:60:2101]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:102:2057] recipient: [22:42:2089] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:105:2057] recipient: [22:104:2130] Leader for TabletID 72057594037927937 is [22:106:2131] sender: [22:107:2057] recipient: [22:104:2130] !Reboot 72057594037927937 (actor [22:60:2101]) rebooted! !Reboot 72057594037927937 (actor [22:60:2101]) tablet resolver refreshed! new actor is[22:106:2131] Leader for TabletID 72057594037927937 is [22:106:2131] sender: [22:126:2057] recipient: [22:17:2064] 2025-12-23T18:31:55.032555Z node 23 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:58:2057] recipient: [23:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:58:2057] recipient: [23:55:2099] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:61:2057] recipient: [23:55:2099] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:78:2057] recipient: [23:17:2064] !Reboot 72057594037927937 (actor [23:60:2101]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:103:2057] recipient: [23:42:2089] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:106:2057] recipient: [23:105:2131] Leader for TabletID 72057594037927937 is [23:107:2132] sender: [23:108:2057] recipient: [23:105:2131] !Reboot 72057594037927937 (actor [23:60:2101]) rebooted! !Reboot 72057594037927937 (actor [23:60:2101]) tablet resolver refreshed! 
new actor is[23:107:2132] Leader for TabletID 72057594037927937 is [23:107:2132] sender: [23:127:2057] recipient: [23:17:2064] 2025-12-23T18:31:55.346149Z node 24 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:58:2057] recipient: [24:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:58:2057] recipient: [24:55:2099] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:61:2057] recipient: [24:55:2099] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:78:2057] recipient: [24:17:2064] !Reboot 72057594037927937 (actor [24:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:106:2057] recipient: [24:42:2089] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:109:2057] recipient: [24:108:2134] Leader for TabletID 72057594037927937 is [24:110:2135] sender: [24:111:2057] recipient: [24:108:2134] !Reboot 72057594037927937 (actor [24:60:2101]) rebooted! !Reboot 72057594037927937 (actor [24:60:2101]) tablet resolver refreshed! new actor is[24:110:2135] Leader for TabletID 72057594037927937 is [24:110:2135] sender: [24:226:2057] recipient: [24:17:2064] 2025-12-23T18:31:57.537631Z node 25 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:58:2057] recipient: [25:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:58:2057] recipient: [25:55:2099] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:61:2057] recipient: [25:55:2099] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:78:2057] recipient: [25:17:2064] !Reboot 72057594037927937 (actor [25:60:2101]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:106:2057] recipient: [25:42:2089] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:109:2057] recipient: [25:108:2134] Leader for TabletID 72057594037927937 is [25:110:2135] sender: [25:111:2057] recipient: [25:108:2134] !Reboot 72057594037927937 (actor [25:60:2101]) rebooted! !Reboot 72057594037927937 (actor [25:60:2101]) tablet resolver refreshed! new actor is[25:110:2135] Leader for TabletID 72057594037927937 is [25:110:2135] sender: [25:226:2057] recipient: [25:17:2064] 2025-12-23T18:31:59.689670Z node 26 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:58:2057] recipient: [26:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:58:2057] recipient: [26:55:2099] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:61:2057] recipient: [26:55:2099] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:78:2057] recipient: [26:17:2064] !Reboot 72057594037927937 (actor [26:60:2101]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! 
Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:106:2057] recipient: [26:42:2089] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:109:2057] recipient: [26:108:2134] Leader for TabletID 72057594037927937 is [26:110:2135] sender: [26:111:2057] recipient: [26:108:2134] !Reboot 72057594037927937 (actor [26:60:2101]) rebooted! !Reboot 72057594037927937 (actor [26:60:2101]) tablet resolver refreshed! new actor is[26:110:2135] Leader for TabletID 72057594037927937 is [26:110:2135] sender: [26:226:2057] recipient: [26:17:2064] 2025-12-23T18:32:02.012354Z node 27 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:58:2057] recipient: [27:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:58:2057] recipient: [27:55:2099] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:61:2057] recipient: [27:55:2099] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:78:2057] recipient: [27:17:2064] !Reboot 72057594037927937 (actor [27:60:2101]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:111:2057] recipient: [27:42:2089] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:114:2057] recipient: [27:113:2138] Leader for TabletID 72057594037927937 is [27:115:2139] sender: [27:116:2057] recipient: [27:113:2138] !Reboot 72057594037927937 (actor [27:60:2101]) rebooted! !Reboot 72057594037927937 (actor [27:60:2101]) tablet resolver refreshed! new actor is[27:115:2139] Leader for TabletID 72057594037927937 is [27:115:2139] sender: [27:231:2057] recipient: [27:17:2064] 2025-12-23T18:32:04.342108Z node 28 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:58:2057] recipient: [28:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:58:2057] recipient: [28:55:2099] Leader for TabletID 72057594037927937 is [28:60:2101] sender: [28:61:2057] recipient: [28:55:2099] Leader for TabletID 72057594037927937 is [28:60:2101] sender: [28:78:2057] recipient: [28:17:2064] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] |91.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |91.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |91.7%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt >> KqpRboPg::OrderBy >> TColumnShardTestSchema::CreateTable-Reboots+GenerateInternalPathId [GOOD] >> TBackupCollectionTests::CdcStreamRotationDuringIncrementalBackups [GOOD] >> Normalizers::RemoveWriteIdNormalizer [GOOD] |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |91.8%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable-Reboots+GenerateInternalPathId [GOOD] Test command err: 2025-12-23T18:32:05.044044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:32:05.080636Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:32:05.080897Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:32:05.088240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:32:05.088492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:32:05.088729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:32:05.088837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:32:05.088936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:32:05.089026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:32:05.089181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:32:05.089281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:32:05.089391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:32:05.089523Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:32:05.089647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:32:05.089779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:32:05.089892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:32:05.130567Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:32:05.131039Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:32:05.131126Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:32:05.131321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:32:05.131519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:32:05.131630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:32:05.131678Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:32:05.131793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:32:05.131859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:32:05.131903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:32:05.131934Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:32:05.132143Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:32:05.132221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:32:05.132266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:32:05.132297Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:32:05.132401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:32:05.132457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:32:05.132534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:32:05.132575Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:32:05.132648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:32:05.132693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:32:05.132742Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:32:05.132807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:32:05.132860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:32:05.132919Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:32:05.133152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:32:05.133205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:32:05.133235Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:32:05.133389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:32:05.133442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:32:05.133476Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:32:05.133534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:32:05.133573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:32:05.133608Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:32:05.133653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:32:05.133688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:32:05.133726Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:32:05.133893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:32:05.133947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
:TX_KIND_SCHEMA;min=1766514726062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2025-12-23T18:32:06.648186Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1766514726062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136756453991616;op_tx=119:TX_KIND_SCHEMA;min=1766514726062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1766514726062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136962615260736;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-12-23T18:32:06.648294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1766514726062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136756453991616;op_tx=119:TX_KIND_SCHEMA;min=1766514726062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1766514726062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136962615260736;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:32:06.648378Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1766514726062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136756453991616;op_tx=119:TX_KIND_SCHEMA;min=1766514726062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1766514726062;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136962615260736;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2025-12-23T18:32:06.648833Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:32:06.649020Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766514726062 at tablet 9437184, mediator 0 2025-12-23T18:32:06.649082Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] execute at tablet 9437184 2025-12-23T18:32:06.649423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2025-12-23T18:32:06.649475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2025-12-23T18:32:06.649559Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000202, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2025-12-23T18:32:06.649655Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000202; 2025-12-23T18:32:06.649724Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:145;event=RegisterTable;path_id=1000000202; 2025-12-23T18:32:06.650021Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2025-12-23T18:32:06.655049Z node 1 :TX_COLUMNSHARD 
DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:532;event=OnTieringModified;path_id=1000000202; 2025-12-23T18:32:06.668161Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-12-23T18:32:06.670296Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=136756453994528;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1766514726065;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-12-23T18:32:06.683396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1766514726065;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136756453994528;op_tx=120:TX_KIND_SCHEMA;min=1766514726065;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:32:06.683503Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1766514726065;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136756453994528;op_tx=120:TX_KIND_SCHEMA;min=1766514726065;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } 
Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-12-23T18:32:06.685323Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=136756453996320;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1766514726067;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-12-23T18:32:06.698371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1766514726067;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136756453996320;op_tx=121:TX_KIND_SCHEMA;min=1766514726067;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:32:06.698466Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1766514726067;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136756453996320;op_tx=121:TX_KIND_SCHEMA;min=1766514726067;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-12-23T18:32:06.700085Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=136756453998112;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1766514726068;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-12-23T18:32:06.713262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1766514726068;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136756453998112;op_tx=122:TX_KIND_SCHEMA;min=1766514726068;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:32:06.713338Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1766514726068;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136756453998112;op_tx=122:TX_KIND_SCHEMA;min=1766514726068;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> DataShardTxOrder::RandomPointsAndRanges [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectCount >> TBackupCollectionTests::MultipleTablesWithIndexes [GOOD] >> TBackupCollectionTests::IncrementalBackupWithIndexes >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> GenericFederatedQuery::IcebergHadoopSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::CdcStreamRotationDuringIncrementalBackups [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:31:10.280566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:10.280654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:10.280708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:10.280743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:10.280782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:10.280809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:10.280879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:10.280969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:10.281801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:10.282090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:10.374052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:10.374122Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:10.388922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:10.389161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:10.389392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:10.399770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:31:10.400027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:10.400854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:10.401093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:10.403585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:10.403808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:10.405054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:10.405121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:10.405278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:10.405368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:31:10.405471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2025-12-23T18:31:10.405629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:31:10.572472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.573590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.573720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.573808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.573908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.573996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.574072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.574176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.574275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.574363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.574435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.574503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.574621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.574756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:10.574857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ], Recipient [23:139:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:32:04.026004Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:32:04.650051Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [23:139:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:32:04.650136Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:32:04.651230Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [23:139:2160], Recipient [23:139:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:32:04.651286Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:32:05.311336Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [23:139:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:32:05.311421Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:32:05.312413Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [23:139:2160], Recipient [23:139:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:32:05.312472Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:32:05.955455Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [23:139:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:32:05.955527Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:32:05.956522Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [23:139:2160], Recipient [23:139:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:32:05.956570Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:32:06.649463Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [23:139:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:32:06.649570Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:32:06.650734Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [23:139:2160], Recipient [23:139:2160]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:32:06.650832Z node 23 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:32:06.783988Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [23:2441:4089], Recipient [23:139:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TestTable/19700101000009Z_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2025-12-23T18:32:06.784130Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:32:06.784364Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TestTable/19700101000009Z_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:32:06.784841Z node 23 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TestTable/19700101000009Z_continuousBackupImpl" took 464us result status StatusPathDoesNotExist 2025-12-23T18:32:06.785095Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/TestTable/19700101000009Z_continuousBackupImpl\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 51], type: EPathTypeCdcStream, state: EPathStateNotExist), drop stepId: 5000047, drop txId: 108" Path: "/MyRoot/TestTable/19700101000009Z_continuousBackupImpl" PathId: 51 LastExistedPrefixPath: "/MyRoot/TestTable" LastExistedPrefixPathId: 40 LastExistedPrefixDescription { Self { Name: "TestTable" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:32:06.786160Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [23:2442:4090], Recipient [23:139:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TestTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-12-23T18:32:06.786268Z node 23 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:32:06.786466Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TestTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:32:06.803179Z node 23 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TestTable" took 16.7ms result status StatusSuccess 2025-12-23T18:32:06.804560Z node 23 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TestTable" PathDescription { Self { Name: 
"TestTable" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 7 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TestTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 7 IsBackup: false CdcStreams { Name: "19700101000014Z_continuousBackupImpl" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 55 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 
TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 51 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> GenericFederatedQuery::IcebergHiveBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectCount |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup_collection/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring [GOOD] |91.8%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |91.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup >> GenericFederatedQuery::IcebergHiveSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectCount >> TSchemeShardViewTest::ReadOnlyMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::RemoveWriteIdNormalizer [GOOD] Test command err: 2025-12-23T18:31:58.207591Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:31:58.246184Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:31:58.246494Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:31:58.256206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2025-12-23T18:31:58.256489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-12-23T18:31:58.256759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:31:58.256946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:31:58.257090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:31:58.257203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:31:58.257320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:31:58.257440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:31:58.257564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:31:58.257683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:31:58.257813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:31:58.258213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:31:58.258355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:31:58.258644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:31:58.299296Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:31:58.299689Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2025-12-23T18:31:58.299742Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-12-23T18:31:58.300045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2025-12-23T18:31:58.300129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-12-23T18:31:58.300181Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-12-23T18:31:58.300332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:58.300426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:31:58.300469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:31:58.300505Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-12-23T18:31:58.300600Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:31:58.300656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:31:58.300697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:31:58.300759Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-12-23T18:31:58.300963Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:58.301025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:31:58.301073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:31:58.301108Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-12-23T18:31:58.301236Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:31:58.301324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:31:58.301381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:31:58.301426Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:31:58.301506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:31:58.301564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:31:58.301649Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:31:58.301725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:31:58.301806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:31:58.301850Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:31:58.302140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:31:58.302235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:31:58.302286Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:31:58.302523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:31:58.302621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:31:58.302681Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 
2025-12-23T18:31:58.302764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:31:58.302812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:31:58.302853Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=n ... ;}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-12-23T18:32:07.050508Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2025-12-23T18:32:07.050573Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:32:07.050616Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:32:07.051328Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:32:07.051541Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:07.051587Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:32:07.051748Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2025-12-23T18:32:07.051819Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2025-12-23T18:32:07.052038Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:326:2327];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-12-23T18:32:07.052215Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:07.052384Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:07.052625Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:07.052801Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:32:07.052921Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:07.053049Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:07.053509Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:328:2328] finished for tablet 9437184 2025-12-23T18:32:07.054214Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:326:2327];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.117},{"events":["l_task_result"],"t":1.555},{"events":["l_ProduceResults","f_Finish"],"t":1.557},{"events":["l_ack","l_processing","l_Finish"],"t":1.558}],"full":{"a":7962983,"name":"_full_task","f":7962983,"d_finished":0,"c":0,"l":9521434,"d":1558451},"events":[{"name":"bootstrap","f":7963433,"d_finished":2030,"c":1,"l":7965463,"d":2030},{"a":9520613,"name":"ack","f":8080081,"d_finished":671738,"c":421,"l":9520508,"d":672559},{"a":9520600,"name":"processing","f":7965751,"d_finished":1335476,"c":843,"l":9520512,"d":1336310},{"name":"ProduceResults","f":7964782,"d_finished":1078397,"c":1266,"l":9520905,"d":1078397},{"a":9520912,"name":"Finish","f":9520912,"d_finished":0,"c":0,"l":9521434,"d":522},{"name":"task_result","f":7965774,"d_finished":648450,"c":422,"l":9518469,"d":648450}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:07.054336Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:326:2327];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:32:07.055059Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:326:2327];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.117},{"events":["l_task_result"],"t":1.555},{"events":["l_ProduceResults","f_Finish"],"t":1.557},{"events":["l_ack","l_processing","l_Finish"],"t":1.559}],"full":{"a":7962983,"name":"_full_task","f":7962983,"d_finished":0,"c":0,"l":9522229,"d":1559246},"events":[{"name":"bootstrap","f":7963433,"d_finished":2030,"c":1,"l":7965463,"d":2030},{"a":9520613,"name":"ack","f":8080081,"d_finished":671738,"c":421,"l":9520508,"d":673354},{"a":9520600,"name":"processing","f":7965751,"d_finished":1335476,"c":843,"l":9520512,"d":1337105},{"name":"ProduceResults","f":7964782,"d_finished":1078397,"c":1266,"l":9520905,"d":1078397},{"a":9520912,"name":"Finish","f":9520912,"d_finished":0,"c":0,"l":9522229,"d":1317},{"name":"task_result","f":7965774,"d_finished":648450,"c":422,"l":9518469,"d":648450}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:07.055175Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:32:05.488957Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2025-12-23T18:32:07.055238Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:32:07.055454Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> GenericFederatedQuery::PostgreSQLSelectCount |91.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is 
[0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:32:06.670972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:32:06.671095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:06.671200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:32:06.671267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:32:06.671328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:32:06.671359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:32:06.671447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:06.671543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:32:06.672568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:06.672952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:32:06.773117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:06.773193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:06.780416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:32:06.780659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:32:06.780840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:32:06.784596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:32:06.784852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:32:06.785638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:06.785981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2025-12-23T18:32:06.788390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:06.788598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:32:06.789861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:06.789926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:06.790092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:32:06.790166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:06.790229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:32:06.790443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:32:06.956715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.957969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.958144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.958229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.958370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.958511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.958603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.958684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.958786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.958868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.958999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.959095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.959218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.959322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:06.959394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... rd: 72057594046678944 2025-12-23T18:32:08.227026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2025-12-23T18:32:08.227192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2025-12-23T18:32:08.227399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:32:08.247582Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:791: [Export] [s3] Bootstrap: self# [1:816:2785], attempt# 0 2025-12-23T18:32:08.351523Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:445: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:816:2785], sender# [1:815:2784] FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2025-12-23T18:32:08.367422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:08.367518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:28374 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 30ACAF45-23AE-4A14-ADB4-A2455726F114 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD 2025-12-23T18:32:08.367893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 S3_MOCK::HttpServeWrite: /metadata.json / / 107 2025-12-23T18:32:08.367961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to 
populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:32:08.368425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:32:08.368507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:32:08.369403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:08.369551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:08.369607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:32:08.369669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:32:08.369724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:32:08.369836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:32:08.376258Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:405: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:816:2785], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:28374 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8B6F5F9F-955C-4794-A420-93B928AE26BB amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-12-23T18:32:08.395622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:32:08.395770Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:310: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:816:2785], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-12-23T18:32:08.395930Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:815:2784] 2025-12-23T18:32:08.396055Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:463: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:816:2785], sender# [1:815:2784], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:28374 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: 
AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 98B40F20-EA78-483F-8607-DBEADE781880 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-12-23T18:32:08.407360Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:505: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:816:2785], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-12-23T18:32:08.407448Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:708: [Export] [s3] Finish: self# [1:816:2785], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-12-23T18:32:08.407649Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:815:2784], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-12-23T18:32:08.448137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:32:08.448240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:32:08.448459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:32:08.448615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:32:08.448766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:08.448810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:32:08.448869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:32:08.449010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:32:08.449233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: 
[OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:08.454636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:32:08.454951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:32:08.455042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:32:08.455194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:32:08.455238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:32:08.455302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:32:08.455362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:32:08.455432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:32:08.455544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2715] message: TxId: 102 2025-12-23T18:32:08.455609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:32:08.455655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:32:08.455693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:32:08.455962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:32:08.464023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:32:08.464125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:800:2770] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:32:07.112937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:32:07.113029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2025-12-23T18:32:07.113084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:32:07.113144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:32:07.113201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:32:07.113234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:32:07.113313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:07.113402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:32:07.114372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:07.114724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:32:07.223842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:07.223896Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:07.237863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:32:07.238071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:32:07.238264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:32:07.252203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:32:07.252461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:32:07.253265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:07.253528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:32:07.258866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:07.259110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:32:07.260852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:07.260922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:07.261109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-12-23T18:32:07.261258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:07.261308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:32:07.261482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:32:07.442689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.443904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.444079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.444175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.444256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.444369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.444465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.444552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.444633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.444723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.444788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.444890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.444970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.445057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:07.445172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 2057594046678944 2025-12-23T18:32:08.664766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2025-12-23T18:32:08.664898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2025-12-23T18:32:08.665078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:32:08.688004Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:791: [Export] [s3] Bootstrap: self# [1:822:2790], attempt# 0 2025-12-23T18:32:08.848126Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:445: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:822:2790], sender# [1:821:2789] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:3474 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4125863B-BC44-4B21-B2D8-F045283CDEA3 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2025-12-23T18:32:08.860763Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:405: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:822:2790], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2025-12-23T18:32:08.864345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:08.864420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:32:08.864755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:08.864804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:32:08.865352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:32:08.865433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT 
/scheme.pb HTTP/1.1 HEADERS: Host: localhost:3474 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9878C890-B2DC-46AD-B4E5-3D575A9C387C amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-12-23T18:32:08.866333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:08.866450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:08.866491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:32:08.866530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:32:08.866570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:32:08.866682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:32:08.866989Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:310: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:822:2790], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-12-23T18:32:08.867181Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:821:2789] 2025-12-23T18:32:08.867417Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:463: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:822:2790], sender# [1:821:2789], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:3474 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D580C6D0-56E5-4F6D-843A-C6FC27C2AC5D amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-12-23T18:32:08.879126Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:505: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:822:2790], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-12-23T18:32:08.879217Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:708: [Export] [s3] Finish: self# [1:822:2790], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-12-23T18:32:08.879816Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:821:2789], msg# 
NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-12-23T18:32:08.899772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:32:08.916148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 718 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:32:08.916225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:32:08.916412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 718 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:32:08.916528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 718 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:32:08.916613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:08.916681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:32:08.916788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:32:08.916847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:32:08.917078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:08.924203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:32:08.924806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:32:08.924873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:32:08.925004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done 
id#102:0 progress is 1/1 2025-12-23T18:32:08.925040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:32:08.925086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:32:08.925137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:32:08.925188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:32:08.925274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:745:2722] message: TxId: 102 2025-12-23T18:32:08.925328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:32:08.925369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:32:08.925430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:32:08.925566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:32:08.932674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:32:08.932755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:808:2777] TestWaitNotification: OK eventTxId 102 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |91.8%| [TA] $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} |91.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} >> GenericFederatedQuery::TestConnectorNotConfigured [GOOD] >> KqpRboPg::Select [GOOD] >> KqpRboPg::ScalarSubquery >> TSchemeShardViewTest::DropView >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy >> TSchemeShardViewTest::CreateView >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] >> TSchemeShardViewTest::EmptyQueryText |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/test/tool/surg/surg >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] >> GenericFederatedQuery::YdbSelectCount >> TSchemeShardViewTest::ReadOnlyMode [GOOD] |91.8%| [TA] {RESULT} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... 
results_accumulator.log} |91.8%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/test/tool/surg/surg >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectCount >> KqpRboPg::OrderBy [GOOD] >> KqpRboPg::LeftJoinToKqpOpJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestConnectorNotConfigured [GOOD] Test command err: Trying to start YDB, gRPC: 8837, MsgBus: 27489 2025-12-23T18:31:41.494107Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122871907545636:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:41.494244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d54/r3tmp/tmpGDUojL/pdisk_1.dat 2025-12-23T18:31:41.889266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.889350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.942301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.949189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 8837, node 1 2025-12-23T18:31:42.066829Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:42.115624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:42.115645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:42.115651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:42.115746Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:42.206768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27489 2025-12-23T18:31:42.499211Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:27489 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514702015 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:42.698536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:31:42.698972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:31:42.701754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:42.702551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:31:42.705024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766514702750, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:31:42.706182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2025-12-23T18:31:42.706258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-12-23T18:31:42.706470Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7587122871907546019:2250] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2025-12-23T18:31:42.706897Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122871907545458:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.707038Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122871907545461:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.707309Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122871907545958:2212][/Root] Path was updated to new version: owner# [1:7587122871907545775:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, 
LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.707675Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7587122871907546019:2250] Ack update: ack to# [1:7587122871907545854:2153], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2025-12-23T18:31:42.707850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-12-23T18:31:42.710279Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122871907545455:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.711047Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122876202513435:2287][/Root] Path was updated to new version: owner# [1:7587122871907546125:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.711333Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122876202513436:2288][/Root] Path was updated to new version: owner# [1:7587122871907546126:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } waiting... 
2025-12-23T18:31:42.715808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14129, MsgBus: 2066 2025-12-23T18:31:45.669400Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587122889181583877:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:45.669459Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:31:45.678323Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d54/r3tmp/tmp4QZrlH/pdisk_1.dat 2025-12-23T18:31:45.763842Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:45.774575Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587122889181583851:2081] 1766514705668190 != 1766514705668193 2025-12-23T18:31:45.799153Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:45.799262Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:45.799338Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:45.822667Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14129, node 2 2025-12-23T18:31:45.882407Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:45.882430Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:45.882437Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:45.882521Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:45.978300Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2066 TClient is connected to server localhost:2066 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: ... 
node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587122954010429795:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:00.116371Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:00.116742Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587122954010429805:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:00.116794Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:00.521189Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587122932535591537:2135];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:00.521300Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:00.678129Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:00.730175Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:00.770989Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:00.813258Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:00.855039Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:00.909339Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:00.996519Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:01.137899Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:01.247546Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587122958305397981:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:01.247667Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:01.247737Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587122958305397986:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:01.248014Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587122958305397988:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:01.248043Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:01.252045Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:01.277744Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587122958305397989:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:32:01.359273Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587122958305398041:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:32:03.860410Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:04.683586Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:32:05.224466Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:05.885179Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:06.440006Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:32:07.056599Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:32:07.649506Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:07.734274Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-12-23T18:32:10.388933Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710713:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-12-23T18:32:10.523739Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7587122996960105402:2898], status: GENERIC_ERROR, issues:
: Error: Table metadata loading, code: 1050
:2:17: Error: Unsupported. Failed to load metadata for table: /Root/external_data_source.[example_1] data source generic doesn't exist, please contact internal support 2025-12-23T18:32:10.531574Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=4&id=NWM3MzMzNmEtZWE0NTdjNmUtZTE2MDM4YTktNTM5ZmFkZmE=, ActorId: [4:7587122996960105400:2897], ActorState: ExecuteState, TraceId: 01kd67q9ev4x5d0ssngtd7bsd3, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 17 } message: "Unsupported. Failed to load metadata for table: /Root/external_data_source.[example_1] data source generic doesn\'t exist, please contact internal support" end_position { row: 2 column: 17 } severity: 1 } }, remove tx with tx_id: 2025-12-23T18:32:10.738847Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:32:10.738886Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-12-23T18:30:34.710286Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:36:2066] 2025-12-23T18:30:34.710350Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-12-23T18:30:34.710521Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:36:2066] 2025-12-23T18:30:34.710558Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-12-23T18:30:34.710597Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:37:2067] 2025-12-23T18:30:34.710623Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:6:2053] Successful handshake: owner# 800, generation# 1 2025-12-23T18:30:34.710847Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:37:2067] 2025-12-23T18:30:34.710885Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-12-23T18:30:34.711057Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:39:2069][/root/tenant] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:34.711542Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:43:2069] 2025-12-23T18:30:34.711585Z node 1 
:SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:3:2050] Upsert description: path# /root/tenant 2025-12-23T18:30:34.711681Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:3:2050] Subscribe: subscriber# [1:43:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:30:34.711797Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:44:2069] 2025-12-23T18:30:34.711814Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:6:2053] Upsert description: path# /root/tenant 2025-12-23T18:30:34.711867Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:6:2053] Subscribe: subscriber# [1:44:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:30:34.711998Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:45:2069] 2025-12-23T18:30:34.712050Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:9:2056] Upsert description: path# /root/tenant 2025-12-23T18:30:34.712086Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:9:2056] Subscribe: subscriber# [1:45:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:30:34.712155Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2025-12-23T18:30:34.712212Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2069] 2025-12-23T18:30:34.712260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:44:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2025-12-23T18:30:34.712294Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:44:2069] 2025-12-23T18:30:34.712338Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:45:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2025-12-23T18:30:34.712373Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:45:2069] 2025-12-23T18:30:34.712454Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:40:2069] 2025-12-23T18:30:34.712551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:41:2069] 2025-12-23T18:30:34.712605Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:39:2069][/root/tenant] Set up state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:30:34.712662Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:42:2069] 2025-12-23T18:30:34.712696Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:39:2069][/root/tenant] Ignore empty state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 
Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-12-23T18:30:34.712865Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:36:2066], cookie# 0, event size# 103 2025-12-23T18:30:34.712900Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-12-23T18:30:34.712960Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-12-23T18:30:34.713094Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2025-12-23T18:30:34.713153Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:43:2069] 2025-12-23T18:30:34.713207Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:40:2069] 2025-12-23T18:30:34.713248Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:39:2069][/root/tenant] Update to strong state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-12-23T18:30:35.188102Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:36:2066] 2025-12-23T18:30:35.188164Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:3:2050] Successful handshake: owner# 800, generation# 1 2025-12-23T18:30:35.188321Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:36:2066] 2025-12-23T18:30:35.188358Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:3:2050] Commit generation: owner# 800, generation# 1 2025-12-23T18:30:35.188414Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:37:2067] 2025-12-23T18:30:35.188445Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:6:2053] Successful handshake: owner# 900, generation# 1 2025-12-23T18:30:35.188624Z node 3 :SCHEME_BOARD_REPLICA DEBUG: 
replica.cpp:967: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:37:2067] 2025-12-23T18:30:35.188658Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:6:2053] Commit generation: owner# 900, generation# 1 2025-12-23T18:30:35.188828Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:39:2069][/root/tenant] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:30:35.189282Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:43:2069] 2025-12-23T18:30:35.189337Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:3:2050] Upsert description: path# /root/tenant 2025-12-23T18:30:35.189445Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:3:2050] Subscribe: subscriber# [3:43:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:30:35.189589Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:44:2069] 2025-12-23T18:30:35.189615Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:6:2053] Upsert description: path# /root/tenant 2025-12-23T18:30:35.189655Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:6:2053] Subscribe: subscriber# [3:44:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:30:35.189788Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:45:2069] 2025-12-23T18:30:35.189816Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:9:2056] Upsert description: path# /root/tenant 2025-12-23T18:30:35.189855Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:9:2056] Subscribe: subscriber# [3:45:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:30:35.190008Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2025-12-23T18:30:35.190084Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:43:2069] 2025-12-23T18:30:35.190143Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:44:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2025-12-23 ... 
94: [399:3:2050] Commit generation: owner# 910, generation# 1 2025-12-23T18:32:11.572897Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [399:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:37:2067] 2025-12-23T18:32:11.572931Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [399:6:2053] Successful handshake: owner# 910, generation# 1 2025-12-23T18:32:11.573079Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:37:2067] 2025-12-23T18:32:11.573123Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [399:6:2053] Commit generation: owner# 910, generation# 1 2025-12-23T18:32:11.573327Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][399:39:2069][/Root/Tenant/table_inside] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[399:24339059:0], [399:1099535966835:0], [399:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:32:11.573817Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:43:2069] 2025-12-23T18:32:11.573853Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-12-23T18:32:11.573930Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:3:2050] Subscribe: subscriber# [399:43:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:32:11.574197Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:44:2069] 2025-12-23T18:32:11.574234Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-12-23T18:32:11.574291Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:6:2053] Subscribe: subscriber# [399:44:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:32:11.574478Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:45:2069] 2025-12-23T18:32:11.574507Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-12-23T18:32:11.574557Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:9:2056] Subscribe: subscriber# [399:45:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:32:11.574637Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:43:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2025-12-23T18:32:11.574696Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:43:2069] 2025-12-23T18:32:11.581526Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:44:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2025-12-23T18:32:11.581643Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { 
Version: 0 }: sender# [399:44:2069] 2025-12-23T18:32:11.581732Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:45:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2025-12-23T18:32:11.581780Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:45:2069] 2025-12-23T18:32:11.581874Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:40:2069] 2025-12-23T18:32:11.581963Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:41:2069] 2025-12-23T18:32:11.582022Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][399:39:2069][/Root/Tenant/table_inside] Set up state: owner# [399:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:32:11.582108Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:42:2069] 2025-12-23T18:32:11.582158Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][399:39:2069][/Root/Tenant/table_inside] Ignore empty state: owner# [399:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2025-12-23T18:32:12.096156Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [401:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [401:36:2066] 2025-12-23T18:32:12.096219Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [401:3:2050] Successful handshake: owner# 910, generation# 1 2025-12-23T18:32:12.096350Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [401:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [401:36:2066] 2025-12-23T18:32:12.096382Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [401:3:2050] Commit generation: owner# 910, generation# 1 2025-12-23T18:32:12.096427Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [401:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [401:37:2067] 2025-12-23T18:32:12.096456Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [401:6:2053] Successful handshake: owner# 910, generation# 1 2025-12-23T18:32:12.096643Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [401:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [401:37:2067] 2025-12-23T18:32:12.096672Z node 401 :SCHEME_BOARD_REPLICA NOTICE: 
replica.cpp:994: [401:6:2053] Commit generation: owner# 910, generation# 1 2025-12-23T18:32:12.096787Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][401:39:2069][/Root/Tenant/table_inside] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[401:24339059:0], [401:1099535966835:0], [401:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:32:12.097535Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:43:2069] 2025-12-23T18:32:12.097573Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-12-23T18:32:12.097643Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:3:2050] Subscribe: subscriber# [401:43:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:32:12.097806Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:44:2069] 2025-12-23T18:32:12.097834Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-12-23T18:32:12.097874Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:6:2053] Subscribe: subscriber# [401:44:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:32:12.098011Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:45:2069] 2025-12-23T18:32:12.098045Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-12-23T18:32:12.098086Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:9:2056] Subscribe: subscriber# [401:45:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-12-23T18:32:12.098154Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:43:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:3:2050] 2025-12-23T18:32:12.098207Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:43:2069] 2025-12-23T18:32:12.098254Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:44:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:6:2053] 2025-12-23T18:32:12.098295Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:44:2069] 2025-12-23T18:32:12.098337Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:45:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:9:2056] 2025-12-23T18:32:12.098378Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:45:2069] 2025-12-23T18:32:12.098465Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/Root/Tenant/table_inside Version: 0 }: sender# [401:40:2069] 2025-12-23T18:32:12.098527Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:41:2069] 2025-12-23T18:32:12.098578Z node 401 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][401:39:2069][/Root/Tenant/table_inside] Set up state: owner# [401:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:32:12.098638Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:42:2069] 2025-12-23T18:32:12.098678Z node 401 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][401:39:2069][/Root/Tenant/table_inside] Ignore empty state: owner# [401:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSchemeShardViewTest::EmptyName >> TSchemeShardViewTest::AsyncCreateDifferentViews |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |91.8%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> TSchemeShardViewTest::DropView [GOOD] >> TSchemeShardViewTest::CreateView [GOOD] |91.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/test/tool/surg/surg >> TTxDataShardSampleKScan::BadRequest [GOOD] >> TTxDataShardSampleKScan::RunScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:32:11.556586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:32:11.556677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:11.556714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:32:11.556749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:32:11.556786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 
10000 2025-12-23T18:32:11.556833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:32:11.556888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:11.556961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:32:11.557750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:11.558029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:32:11.710090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:11.710167Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:11.716659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:32:11.716900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:32:11.717048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:32:11.720233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:32:11.720405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:32:11.721096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:11.721359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:32:11.723240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:11.723431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:32:11.724613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:11.724676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:11.724842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:32:11.724889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:11.724946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:32:11.725075Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:32:11.945101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.946164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.946306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.946395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.946481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.946536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.946592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.946673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.946769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.946831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.946886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.946956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.947052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.947128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:11.947222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... less_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:32:13.015192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:780:2749] sender: [1:838:2058] recipient: [1:15:2062] 2025-12-23T18:32:13.059356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:32:13.059633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2025-12-23T18:32:13.059700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2025-12-23T18:32:13.059845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:32:13.059911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2025-12-23T18:32:13.059964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 103:0 type: TxCreateView target path: [OwnerId: 72057594046678944, LocalPathId: 39] source path: 2025-12-23T18:32:13.060026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:32:13.062697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 39, at schemeshard: 72057594046678944 2025-12-23T18:32:13.062911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2025-12-23T18:32:13.063114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:32:13.063184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2025-12-23T18:32:13.063242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-12-23T18:32:13.063349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:32:13.065813Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-12-23T18:32:13.065977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000039 2025-12-23T18:32:13.066703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:13.066850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:32:13.066902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000039 2025-12-23T18:32:13.067040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2025-12-23T18:32:13.067203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:32:13.067279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 FAKE_COORDINATOR: Erasing txId 103 2025-12-23T18:32:13.069699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:13.069746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:13.069923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:32:13.070037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:13.070089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:830:2788], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-12-23T18:32:13.070133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:830:2788], at schemeshard: 72057594046678944, txId: 103, path id: 39 2025-12-23T18:32:13.070466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:32:13.070508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:32:13.070605Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:32:13.070641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:32:13.070678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:32:13.070711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:32:13.070781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-12-23T18:32:13.070836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:32:13.070870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:32:13.070913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:32:13.070987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:32:13.071022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-12-23T18:32:13.071063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:32:13.071099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2025-12-23T18:32:13.071731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:32:13.071830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:32:13.071874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:32:13.071909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:32:13.071962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:32:13.072604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:32:13.072676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:32:13.072705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:32:13.072740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-12-23T18:32:13.072776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:32:13.072852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-12-23T18:32:13.076599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:32:13.077659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2025-12-23T18:29:58.708436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:29:58.708502Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:29:58.711371Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:29:58.736299Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:29:58.736707Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:29:58.737015Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:29:58.791099Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:29:58.798385Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:29:58.798488Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:29:58.800193Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:29:58.800292Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:29:58.800358Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:29:58.800717Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:29:58.800786Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:29:58.800863Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id 
[1:206:2156] in generation 2 2025-12-23T18:29:58.895905Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:29:58.926352Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:29:58.926547Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:29:58.926634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:29:58.926666Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:29:58.926706Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:29:58.926735Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:29:58.926895Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:58.926956Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:58.927311Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:29:58.927395Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:29:58.927522Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:29:58.927577Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:29:58.927610Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:29:58.927681Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:29:58.927708Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:29:58.927730Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:29:58.927758Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:29:58.927828Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:58.927853Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:58.927894Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:29:58.934689Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n 
\000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:29:58.934802Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:29:58.934950Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:29:58.935167Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:29:58.935214Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:29:58.935290Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:29:58.935346Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:29:58.935384Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:29:58.935423Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:29:58.935458Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:29:58.935818Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:29:58.935918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:29:58.935953Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:29:58.935981Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:29:58.936017Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:29:58.936039Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:29:58.936067Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:29:58.936093Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:29:58.936111Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:29:58.950904Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:29:58.951015Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:29:58.951059Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:29:58.951117Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:29:58.951203Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 
2025-12-23T18:29:58.951824Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:58.951896Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:29:58.951959Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:29:58.952093Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:29:58.952127Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:29:58.952315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:29:58.952366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:29:58.952437Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:29:58.952479Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:29:58.965194Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:29:58.965296Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:29:58.965607Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:58.965655Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:29:58.965744Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:29:58.965790Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:29:58.965823Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:29:58.965861Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:29:58.965913Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
00004:403] at 9437184 is Executed 2025-12-23T18:32:05.086965Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437184 executing on unit CompletedOperations 2025-12-23T18:32:05.086995Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000004:403] at 9437184 has finished 2025-12-23T18:32:05.087026Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:32:05.087053Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:32:05.087081Z node 4 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:32:05.087110Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:32:05.110572Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:32:05.110670Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:402] at 9437184 on unit CompleteOperation 2025-12-23T18:32:05.110767Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 402] from 9437184 at tablet 9437184 send result to client [4:103:2137], exec latency: 4 ms, propose latency: 6 ms 2025-12-23T18:32:05.110850Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 399} 2025-12-23T18:32:05.110902Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:32:05.111189Z node 4 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2025-12-23T18:32:05.111239Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:32:05.111273Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437184 on unit StoreAndSendOutRS 2025-12-23T18:32:05.111313Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 400 at 9437184 from 9437184 to 9437185 txId 403 2025-12-23T18:32:05.111379Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:32:05.111408Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation 2025-12-23T18:32:05.111458Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:32:05.111518Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-12-23T18:32:05.111557Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:32:05.111908Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [4:240:2232], Recipient [4:458:2400]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 399} 2025-12-23T18:32:05.111965Z node 4 :TX_DATASHARD TRACE: 
datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:32:05.112002Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 402 2025-12-23T18:32:05.112503Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [4:240:2232], Recipient [4:458:2400]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-12-23T18:32:05.112559Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:32:05.112596Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403 2025-12-23T18:32:05.113311Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [4:240:2232], Recipient [4:350:2318]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-12-23T18:32:05.113367Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-12-23T18:32:05.113410Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437185 source 9437184 dest 9437185 producer 9437184 txId 403 2025-12-23T18:32:05.113492Z node 4 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437185 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-12-23T18:32:05.113544Z node 4 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000004:403] from=9437184 to=9437185origin=9437184 2025-12-23T18:32:05.113627Z node 4 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437185 2025-12-23T18:32:05.113990Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [4:350:2318], Recipient [4:350:2318]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:32:05.114041Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:32:05.114098Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2025-12-23T18:32:05.114141Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:32:05.114186Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000004:403] at 9437185 for LoadAndWaitInRS 2025-12-23T18:32:05.114223Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit LoadAndWaitInRS 2025-12-23T18:32:05.114263Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is Executed 2025-12-23T18:32:05.114298Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit LoadAndWaitInRS 2025-12-23T18:32:05.114328Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit BlockFailPoint 2025-12-23T18:32:05.114361Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit BlockFailPoint 
2025-12-23T18:32:05.114389Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is Executed 2025-12-23T18:32:05.114415Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit BlockFailPoint 2025-12-23T18:32:05.114458Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit ExecuteDataTx 2025-12-23T18:32:05.114487Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit ExecuteDataTx 2025-12-23T18:32:05.117735Z node 4 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000004:403] at tablet 9437185 with status COMPLETE 2025-12-23T18:32:05.117832Z node 4 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000004:403] at 9437185: {NSelectRow: 1, NSelectRange: 3, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 75, SelectRangeBytes: 600, UpdateRowBytes: 13, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:32:05.117900Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is ExecutedNoMoreRestarts 2025-12-23T18:32:05.117937Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit ExecuteDataTx 2025-12-23T18:32:05.117972Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit CompleteOperation 2025-12-23T18:32:05.118007Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit CompleteOperation 2025-12-23T18:32:05.118293Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is DelayComplete 2025-12-23T18:32:05.118333Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit CompleteOperation 2025-12-23T18:32:05.118367Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000004:403] at 9437185 to execution unit CompletedOperations 2025-12-23T18:32:05.118400Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000004:403] at 9437185 on unit CompletedOperations 2025-12-23T18:32:05.118456Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000004:403] at 9437185 is Executed 2025-12-23T18:32:05.118487Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000004:403] at 9437185 executing on unit CompletedOperations 2025-12-23T18:32:05.118518Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000004:403] at 9437185 has finished 2025-12-23T18:32:05.118549Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:32:05.118584Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2025-12-23T18:32:05.118614Z node 4 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2025-12-23T18:32:05.118644Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2025-12-23T18:32:05.152506Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
9437185 2025-12-23T18:32:05.152596Z node 4 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:403] at 9437185 on unit CompleteOperation 2025-12-23T18:32:05.152680Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 403] from 9437185 at tablet 9437185 send result to client [4:103:2137], exec latency: 4 ms, propose latency: 6 ms 2025-12-23T18:32:05.152770Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2025-12-23T18:32:05.152822Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-12-23T18:32:05.153677Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [4:350:2318], Recipient [4:240:2232]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437184 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 400} 2025-12-23T18:32:05.153744Z node 4 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:32:05.153791Z node 4 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437185 consumer 9437185 txId 403 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for Tab ... t: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:85:2057] recipient: [35:84:2115] Leader for TabletID 72057594037927937 is [35:86:2116] sender: [35:87:2057] recipient: [35:84:2115] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:86:2116] Leader for TabletID 72057594037927937 is [35:86:2116] sender: [35:202:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:83:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:85:2115] Leader for TabletID 72057594037927937 is [36:87:2116] sender: [36:88:2057] recipient: [36:85:2115] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:78:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:80:2112] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:83:2057] recipient: [39:80:2112] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! 
new actor is[39:82:2113] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:198:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:78:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:81:2057] recipient: [40:80:2112] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:83:2057] recipient: [40:80:2112] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:82:2113] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:198:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:79:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:82:2057] recipient: [41:81:2112] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:84:2057] recipient: [41:81:2112] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:83:2113] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:199:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:82:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:85:2057] recipient: [42:84:2115] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:87:2057] recipient: [42:84:2115] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! 
new actor is[42:86:2116] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:202:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:82:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:85:2057] recipient: [43:84:2115] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:87:2057] recipient: [43:84:2115] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:86:2116] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:202:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:83:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:86:2057] recipient: [44:85:2115] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:88:2057] recipient: [44:85:2115] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! new actor is[44:87:2116] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:203:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:86:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:89:2057] recipient: [45:88:2118] Leader for TabletID 72057594037927937 is [45:90:2119] sender: [45:91:2057] recipient: [45:88:2118] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! 
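Editor's note: the repeating cycles above are the KeyValue tablet reboot suite. For each event type of interest (TEvIntermediate, TEvExecuteTransaction, TEvServerConnected, TEvAcquireLock, ...), the same scenario is replayed, the leader of tablet 72057594037927937 is rebooted when that event fires, and the test only proceeds once the tablet resolver has been refreshed and reports the new leader actor ("tablet resolver refreshed!" / "new actor is[...]"). As an illustration only, here is a minimal C++ sketch of that loop; every type and helper below is hypothetical and is not claimed to match the actual ydb/core/keyvalue/ut harness.

// A minimal sketch, under assumed names and interfaces, of the reboot-at-event
// pattern the log cycles through: boot the tablet, drive the scenario until a
// chosen event type is seen, reboot the leader at that point, wait for the
// tablet resolver to report the new leader, then finish the scenario.
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

struct TActorId { uint32_t Node = 0; uint64_t Local = 0; };   // simplified stand-in

class TFakeTabletHarness {                                    // hypothetical harness
public:
    explicit TFakeTabletHarness(uint64_t tabletId) : TabletId(tabletId) {}
    TActorId BootLeader() { return Leader = {1, ++Gen}; }
    void RunUntilEvent(const std::string& ev) {               // drive the scenario up to 'ev'
        std::cout << "!Reboot " << TabletId << " on event " << ev << " !\n";
    }
    TActorId RebootLeader() {                                 // kill the leader, start a new one
        std::cout << "!Reboot " << TabletId << " rebooted!\n";
        return Leader = {1, ++Gen};
    }
    TActorId WaitResolverRefresh() const { return Leader; }   // resolver now returns the new leader
    void FinishScenario() const {}                            // replay the rest of the scenario
private:
    uint64_t TabletId;
    uint64_t Gen = 100;
    TActorId Leader;
};

int main() {
    const std::vector<std::string> events = {                 // event types seen in the log
        "NKikimr::TEvKeyValue::TEvIntermediate",
        "NKikimr::TEvKeyValue::TEvExecuteTransaction",
        "NKikimr::TEvTabletPipe::TEvServerConnected",
    };
    for (const auto& ev : events) {
        TFakeTabletHarness tablet(72057594037927937ULL);
        tablet.BootLeader();
        tablet.RunUntilEvent(ev);
        tablet.RebootLeader();
        const TActorId resolved = tablet.WaitResolverRefresh();
        std::cout << "new actor is[" << resolved.Node << ":" << resolved.Local << "]\n";
        tablet.FinishScenario();
    }
}

The actual suite also verifies that the rebooted tablet still serves the in-flight request correctly; the sketch only reproduces the boot/reboot/resolve skeleton visible in the trace.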
new actor is[45:90:2119] Leader for TabletID 72057594037927937 is [45:90:2119] sender: [45:206:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:86:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:89:2057] recipient: [46:88:2118] Leader for TabletID 72057594037927937 is [46:90:2119] sender: [46:91:2057] recipient: [46:88:2118] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:90:2119] Leader for TabletID 72057594037927937 is [46:90:2119] sender: [46:206:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:87:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:90:2057] recipient: [47:89:2118] Leader for TabletID 72057594037927937 is [47:91:2119] sender: [47:92:2057] recipient: [47:89:2118] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! new actor is[47:91:2119] Leader for TabletID 72057594037927937 is [47:91:2119] sender: [47:207:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] >> TSchemeShardViewTest::EmptyQueryText [GOOD] >> TBackupCollectionTests::IncrementalBackupWithIndexes [GOOD] >> TBackupCollectionTests::OmitIndexesFlag |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount |91.8%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |91.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:32:12.802113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:32:12.802196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:12.802233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:32:12.802265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:32:12.802301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:32:12.802346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:32:12.802406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:12.802522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:32:12.803346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:12.803606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:32:12.891526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:12.891591Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:12.897132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:32:12.897377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:32:12.897528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:32:12.900774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:32:12.900945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:32:12.901636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-12-23T18:32:12.901855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:32:12.903696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:12.903892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:32:12.905062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:12.905121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:12.905261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:32:12.905308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:12.905346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:32:12.905464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:32:13.097427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.098392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.098582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.098734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.098841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.098901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.098995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.099082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.099159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.099224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.099284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.099366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.099465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.099532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.099624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... tion.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:32:14.033711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-12-23T18:32:14.033854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2025-12-23T18:32:14.034146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:14.034277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:32:14.034343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_view.cpp:43: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000039 2025-12-23T18:32:14.034486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2025-12-23T18:32:14.034630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-12-23T18:32:14.034682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:32:14.036489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:14.036538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:14.036719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:32:14.037278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:14.037334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-12-23T18:32:14.037400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:32:14.037628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:32:14.037686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:32:14.037776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:32:14.037811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:32:14.037848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:32:14.037883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:32:14.037951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:32:14.037985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:32:14.038019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:32:14.038048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:32:14.038159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:32:14.038205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:32:14.038239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:32:14.038280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2025-12-23T18:32:14.039048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:14.039156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:14.039210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:32:14.039255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:32:14.039298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:32:14.039945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:14.040054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:14.040084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:32:14.040110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2025-12-23T18:32:14.040142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:32:14.040222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:32:14.040635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:32:14.040680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:32:14.040743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
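Editor's note: the TEvUpdateAck / "Publication in-flight, count: N" / "Publication complete, notify & remove" lines around this point trace a simple countdown. The drop-view transaction (txId 102) published two path versions to the scheme board (the root path and the dropped view's path) and is only considered fully published once both updates are acknowledged; only then does TTxCleanDroppedPaths remove the path record. A rough sketch of that bookkeeping, with invented type names (this is not the schemeshard code itself), might look like:

// A sketch (assumed types and names, for illustration only) of the publication
// bookkeeping visible in the trace: each transaction records how many scheme
// board path updates it still has in flight; every TEvUpdateAck decrements the
// count, and at zero the waiting subscribers are notified and the record removed.
#include <cstdint>
#include <functional>
#include <unordered_map>
#include <vector>

struct TPublicationState {
    uint32_t InFlight = 0;                          // "Publication in-flight, count: N"
    std::vector<std::function<void()>> Subscribers; // notified on completion
};

class TPublicationTracker {
public:
    void StartPublication(uint64_t txId, uint32_t pathCount) {
        Publications[txId].InFlight = pathCount;    // "publications: 2, subscribers: 0"
    }
    void OnUpdateAck(uint64_t txId) {               // one ack per published path version
        auto it = Publications.find(txId);
        if (it == Publications.end() || it->second.InFlight == 0)
            return;
        if (--it->second.InFlight == 0) {           // "Publication complete, notify & remove"
            for (auto& cb : it->second.Subscribers)
                cb();
            Publications.erase(it);
        }
    }
private:
    std::unordered_map<uint64_t, TPublicationState> Publications;
};

In the trace, txId 102 starts with two publications, the two acknowledgements bring the in-flight count to zero, and the subsequent DescribePath for /MyRoot/MyView returns StatusPathDoesNotExist, confirming the view is gone.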
2025-12-23T18:32:14.044020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:32:14.044397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:32:14.045393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:32:14.045659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:32:14.045785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:32:14.046309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:32:14.046385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:32:14.046417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:725:2715] TestWaitNotification: OK eventTxId 102 2025-12-23T18:32:14.046897Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:32:14.047082Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 209us result status StatusPathDoesNotExist 2025-12-23T18:32:14.047247Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:32:13.061920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:32:13.062012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:13.062048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:32:13.062084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:32:13.062120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:32:13.062174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:32:13.062239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:13.062304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:32:13.063132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:13.063380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:32:13.159819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:13.159885Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:13.165221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:32:13.165448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:32:13.165586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:32:13.168570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:32:13.168733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:32:13.169415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:13.169621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:32:13.171601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:13.171771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:32:13.173138Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:13.173200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:13.173362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:32:13.173407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:13.173449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:32:13.173576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:32:13.453134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.464405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.464615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.464711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.464785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.464844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.464908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.465002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.465088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.465154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.465220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.465297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.465410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.465476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.465578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :2661] Become StateWork (SchemeCache [1:676:2666]) 2025-12-23T18:32:14.550699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:32:14.550975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2025-12-23T18:32:14.551042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2025-12-23T18:32:14.551156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:32:14.551234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2025-12-23T18:32:14.551294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 101:0 type: TxCreateView target path: [OwnerId: 72057594046678944, LocalPathId: 38] source path: 2025-12-23T18:32:14.551360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:32:14.552428Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:671:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:32:14.555669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 38, at schemeshard: 72057594046678944 2025-12-23T18:32:14.555891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-12-23T18:32:14.556208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 
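Editor's note: in the lines that follow (and in the DropView trace above), the accepted operation is proposed to coordinator 72057594046316545; the test's FAKE_COORDINATOR assigns plan step 5000038, one past the schemeshard's FrontStep 5000037, and sends the plan back to the tablet, which runs TTxOperationPlanStep and moves the operation from state 128 to 240. The sketch below is a simplified guess at that handshake; the types and the exact step-assignment rule are assumptions for illustration, not the schemeshard or coordinator implementation.

// A simplified sketch of the propose/plan handshake traced in this test output.
#include <algorithm>
#include <cstdint>
#include <iostream>

struct TProposedTx {
    uint64_t TxId = 0;
    uint64_t MinStep = 0;     // from the propose message ("MinStep: 0 ...")
};

class TFakeCoordinator {      // plays the role of the FAKE_COORDINATOR lines
public:
    explicit TFakeCoordinator(uint64_t frontStep) : FrontStep(frontStep) {}
    uint64_t Plan(const TProposedTx& tx) {
        // Assign the next free plan step: strictly after the current front
        // step and not before the transaction's minimum step.
        FrontStep = std::max(FrontStep + 1, tx.MinStep);
        std::cout << "Add transaction: " << tx.TxId << " at step: " << FrontStep << "\n";
        return FrontStep;     // delivered back to the tablet as the operation plan
    }
private:
    uint64_t FrontStep;
};

int main() {
    TFakeCoordinator coordinator(5000037);              // FrontStep seen in the trace
    const uint64_t step = coordinator.Plan({101, 0});   // txId 101, MinStep 0
    std::cout << "Send Plan to tablet 72057594046678944 for txId: 101 at step: " << step << "\n";
}
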
2025-12-23T18:32:14.556261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2025-12-23T18:32:14.556342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-12-23T18:32:14.556480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:32:14.557025Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-12-23T18:32:14.558432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-12-23T18:32:14.558591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2025-12-23T18:32:14.559001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:14.559152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:32:14.559211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2025-12-23T18:32:14.559338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2025-12-23T18:32:14.559502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:32:14.559567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:32:14.561468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:14.561517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:14.561687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:32:14.561840Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:14.561897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:32:14.561951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 38 2025-12-23T18:32:14.562180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:32:14.562229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:32:14.562325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:32:14.562360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:32:14.562399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:32:14.562429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:32:14.562486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:32:14.562529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:32:14.562562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:32:14.562597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:32:14.562681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:32:14.562756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-12-23T18:32:14.562795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-12-23T18:32:14.562823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-12-23T18:32:14.564032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:32:14.564137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:32:14.564194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: 
Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:32:14.564232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-12-23T18:32:14.564291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:32:14.565410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:32:14.565502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:32:14.565535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:32:14.565563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:32:14.565615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:32:14.565688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:32:14.568213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:32:14.569315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:32:12.957851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:32:12.957931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:12.957986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:32:12.958024Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:32:12.958058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:32:12.958092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:32:12.958145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:12.958255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:32:12.960435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:12.960727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:32:13.095925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:13.095978Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:13.120814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:32:13.121008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:32:13.121195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:32:13.141807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:32:13.142022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:32:13.145068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:13.145345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:32:13.152766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:13.155273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:32:13.156758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:13.156833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:13.157001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:32:13.157049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:13.157151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:32:13.158314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:32:13.367736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.368720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.368829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.368943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.369017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.369100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.369186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.369274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.369355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.369422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.369499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.369574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.369645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.369735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:13.369807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-12-23T18:32:14.297829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-12-23T18:32:14.297970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2025-12-23T18:32:14.298282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:14.298378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:32:14.298457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2025-12-23T18:32:14.298601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2025-12-23T18:32:14.298766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:32:14.298836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:32:14.300574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:14.300629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:14.300817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:32:14.300947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:14.300986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:32:14.301028Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 101, path id: 38 2025-12-23T18:32:14.301095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:32:14.301137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:32:14.301240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:32:14.301284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:32:14.301320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:32:14.301350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:32:14.301388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:32:14.301449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:32:14.301512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:32:14.301548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:32:14.301612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:32:14.301651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-12-23T18:32:14.301681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-12-23T18:32:14.301710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-12-23T18:32:14.302792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:32:14.302888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:32:14.302929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:32:14.302981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-12-23T18:32:14.303081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:32:14.304296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:32:14.304379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:32:14.304408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:32:14.304452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:32:14.304483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:32:14.304545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:32:14.306578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:32:14.307824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:32:14.308055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:32:14.308099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:32:14.308467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:32:14.308566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:32:14.308601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:707:2696] TestWaitNotification: OK eventTxId 101 2025-12-23T18:32:14.309038Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:32:14.309222Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 208us result status StatusSuccess 2025-12-23T18:32:14.309611Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: 
"MyView" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] >> test_select.py::TestDML::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> TSchemeShardViewTest::AsyncCreateSameView >> YdbTableSplit::MergeByNoLoadAfterSplit >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query [GOOD] >> TSchemeShardViewTest::AsyncDropSameView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:32:15.128881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:32:15.128977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:15.129016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:32:15.129053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 
2025-12-23T18:32:15.129087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:32:15.129141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:32:15.129203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:15.129273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:32:15.130259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:15.130600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:32:15.224978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:15.225109Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:15.246160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:32:15.246475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:32:15.246648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:32:15.250108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:32:15.250336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:32:15.251282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:15.251640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:32:15.254178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:15.254491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:32:15.256188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:15.256274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:15.256469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:32:15.256535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:15.256598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:32:15.256801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:32:15.423586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.424571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.424730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.424810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.424880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.424963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.425026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.425114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.425193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.425256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.425337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.425403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.425502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.425587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.425699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:16.746887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:32:16.746914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-12-23T18:32:16.746942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:32:16.747003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:32:16.750137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:32:16.751181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-12-23T18:32:16.751512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:32:16.752690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-12-23T18:32:16.752823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:32:16.752860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-12-23T18:32:16.752953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:32:16.752977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:32:16.753511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:32:16.753685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:32:16.754853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:736:2726] 2025-12-23T18:32:16.755027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at 
schemeshard: 72057594046678944 2025-12-23T18:32:16.755135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:32:16.755172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:32:16.755194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:736:2726] 2025-12-23T18:32:16.755300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:32:16.755330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:736:2726] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-12-23T18:32:16.755781Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:32:16.756022Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 255us result status StatusSuccess 2025-12-23T18:32:16.756482Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "SecondView" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" 
MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:32:16.757026Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:32:16.757208Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 162us result status StatusSuccess 2025-12-23T18:32:16.757506Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:32:16.758020Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:32:16.758177Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 147us result status StatusSuccess 2025-12-23T18:32:16.758496Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 
5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:32:14.889438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:32:14.889533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:14.889568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:32:14.889604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:32:14.889641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:32:14.889782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:32:14.889838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:14.889906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:32:14.890880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:14.891155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:32:15.043272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:15.043384Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:15.053060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:32:15.053453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:32:15.053664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:32:15.064068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:32:15.064415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:32:15.065410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:15.065811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:32:15.068915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:15.069180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:32:15.070833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:15.070915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:15.071141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:32:15.071206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:15.071260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:32:15.071444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:32:15.356323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.357273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.357407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.357504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.357574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.357631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.357690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.357781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.357850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.357936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.357997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.358073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.358171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.358250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:15.358355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
irst GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:16.493888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:16.494106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:32:16.494413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:32:16.494564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:32:16.494614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:32:16.494654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:32:16.497361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:32:16.497455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:32:16.497515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:32:16.499969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:32:16.500030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:32:16.500101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:32:16.500160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:32:16.500346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:32:16.502292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:32:16.502463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:32:16.502881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:16.503035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:32:16.503092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:32:16.503441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:32:16.503502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:32:16.503688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:32:16.503772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:32:16.506117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:16.506166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:16.506412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:16.506474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:32:16.506818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:32:16.506891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:32:16.507005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:32:16.507052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:32:16.507099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:32:16.507145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:32:16.507191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:32:16.507247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-12-23T18:32:16.507296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:32:16.507338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:32:16.507414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:32:16.507455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:32:16.507490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:32:16.508217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:32:16.508330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:32:16.508389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:32:16.508431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:32:16.508475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:32:16.508570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:32:16.517885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:32:16.518494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:32:16.519257Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:671:2661] Bootstrap 2025-12-23T18:32:16.520489Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:671:2661] Become StateWork (SchemeCache [1:676:2666]) 2025-12-23T18:32:16.523792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:32:16.524006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2025-12-23T18:32:16.524072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, 
viewDescription: Name: "" QueryText: "Some query" 2025-12-23T18:32:16.524176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-12-23T18:32:16.525367Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:671:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:32:16.528753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:32:16.529001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE VIEW, path: /MyRoot/ 2025-12-23T18:32:16.529452Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> YdbTableSplit::RenameTablesAndSplit |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> TBackupCollectionTests::OmitIndexesFlag [GOOD] >> YdbTableSplit::SplitByLoadWithUpdates |91.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> KqpRboPg::ScalarSubquery [GOOD] >> KqpRboPg::PredicatePushdownLeftJoin >> IncrementalBackup::MultiBackup |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs >> TSchemeShardViewTest::AsyncDropSameView [GOOD] |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |91.8%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |91.8%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher >> KqpRboPg::LeftJoinToKqpOpJoin [GOOD] >> KqpRboPg::FiveJoinsCBO ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:32:17.967192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:32:17.967312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:17.967357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:32:17.967400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:32:17.967441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:32:17.967492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:32:17.967560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:17.967631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:32:17.968699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:17.969072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:32:18.089141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:18.089256Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:18.100735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:32:18.101053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:32:18.101221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:32:18.107299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:32:18.107511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:32:18.108210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:18.108513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:32:18.110914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:18.111138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:32:18.112542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:18.112622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:18.112797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:32:18.112853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:18.112904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:32:18.113060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:32:18.279284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.280539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.280734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:32:18.280825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.280975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.281043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.281115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.281226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.281317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.281388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.281483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.281568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.281738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.281832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.281949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
EBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:32:19.084212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:19.084274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:19.084527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:32:19.084673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:19.084734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:32:19.084787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 38 2025-12-23T18:32:19.085076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:32:19.085128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:32:19.085233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:32:19.085292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:32:19.085337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:32:19.085371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:32:19.085422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:32:19.085470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:32:19.085530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:32:19.085564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:32:19.085652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:32:19.085710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-12-23T18:32:19.085750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 
72057594046678944, LocalPathId: 1], 7 2025-12-23T18:32:19.085779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-12-23T18:32:19.086834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:32:19.086995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:32:19.087049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:32:19.087098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-12-23T18:32:19.087138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:32:19.087998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:32:19.088075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:32:19.088108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:32:19.088133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:32:19.088154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:32:19.088211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:32:19.091881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:32:19.093046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-12-23T18:32:19.093423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- 
TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:32:19.093472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-12-23T18:32:19.093622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:32:19.093648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-12-23T18:32:19.093716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:32:19.093741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:32:19.094375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:32:19.094517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:32:19.094593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:32:19.094686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:705:2695] 2025-12-23T18:32:19.094893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:32:19.095000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:32:19.095029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:705:2695] 2025-12-23T18:32:19.095143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:32:19.095165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:705:2695] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-12-23T18:32:19.095590Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:32:19.095756Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 186us result status StatusSuccess 2025-12-23T18:32:19.096064Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:32:18.384864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:32:18.384948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:18.385003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:32:18.385045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:32:18.385086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:32:18.385128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:32:18.385195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:18.385292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:32:18.386138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:18.386399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:32:18.479010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:18.479071Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:18.490396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:32:18.490643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:32:18.490881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:32:18.502255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:32:18.502493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:32:18.503205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:18.503431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:32:18.507490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:18.507679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:32:18.508995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:18.509064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:18.509236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:32:18.509290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:18.509393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:32:18.509520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:32:18.660705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.661637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.661749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.661839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.661920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.661984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.662042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.662117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.662189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.662251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.662316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.662382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.662477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.662559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:18.662655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:32:19.406792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:32:19.409792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:19.409839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:19.410012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:32:19.410141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:19.410194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-12-23T18:32:19.410318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:32:19.410687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:32:19.410766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:32:19.410877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:32:19.410911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:32:19.410949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:32:19.410977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:32:19.411016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:32:19.411053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:32:19.411085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:32:19.411113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:32:19.411191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:32:19.411233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:32:19.411282Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:32:19.411330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2025-12-23T18:32:19.412121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:19.412240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:19.412286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:32:19.412321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:32:19.412358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:32:19.412961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:19.413037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:32:19.413061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:32:19.413086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2025-12-23T18:32:19.413111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:32:19.413206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:32:19.413687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:32:19.413747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:32:19.413823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:32:19.421865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:32:19.422176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:32:19.426129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 102 2025-12-23T18:32:19.426436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:32:19.426508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-12-23T18:32:19.426591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:32:19.426628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-12-23T18:32:19.426701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:32:19.426730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:32:19.427385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:32:19.427517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:32:19.427593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:32:19.427628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:735:2724] 2025-12-23T18:32:19.427772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:32:19.427814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:32:19.427833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:735:2724] 2025-12-23T18:32:19.427928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:32:19.427971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:735:2724] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 
2025-12-23T18:32:19.428432Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:32:19.428654Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 219us result status StatusPathDoesNotExist 2025-12-23T18:32:19.428873Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::OmitIndexesFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:31:08.457654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:31:08.457770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:08.457816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:31:08.457876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:31:08.457917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:31:08.457951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:31:08.458029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:31:08.458129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:31:08.459100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:31:08.459446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:31:08.541475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:31:08.541536Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:08.549793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:31:08.550045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:31:08.550194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:31:08.554261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:31:08.554469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:31:08.555233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:31:08.555495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:31:08.557926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:08.558132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:31:08.559499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:31:08.559573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:31:08.559780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:31:08.559842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:31:08.559951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:31:08.560125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:31:08.742363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.745601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.745798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.745899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.745983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.746058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.746128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.746224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.746362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.746449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.746550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.746672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.746812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.746922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:31:08.747020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
true } 2025-12-23T18:32:17.894171Z node 22 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:32:17.894382Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndex/ValueIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:32:17.895372Z node 22 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndex/ValueIndex" took 1ms result status StatusSuccess 2025-12-23T18:32:17.896634Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndex/ValueIndex" PathDescription { Self { Name: "ValueIndex" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 
ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:32:17.897965Z node 22 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [22:1235:3126], Recipient [22:127:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-12-23T18:32:17.898074Z node 22 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:32:17.898280Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:32:17.898863Z node 22 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" took 582us result status StatusSuccess 2025-12-23T18:32:17.900074Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" 
PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 
LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 SUCCESS: OmitIndexes flag works correctly - main table has CDC, index does not |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup_collection/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails |91.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |91.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |91.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |91.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> TCertificateCheckerTest::CheckSubjectDns >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration >> TraverseDatashard::TraverseOneTable [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk >> GenericFederatedQuery::IcebergHadoopSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:32:08.858129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:09.049158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:09.055460Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:32:09.055949Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:32:09.056036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:32:09.695731Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:09.829736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:09.829866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:09.918411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:10.105917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004a60/r3tmp/tmpHUn3KJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30371, node 1 2025-12-23T18:32:11.000425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:11.001387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:11.001425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:11.001458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:11.001630Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:11.070736Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24342 2025-12-23T18:32:11.856182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:32:16.342565Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:16.350802Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:32:16.355592Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:16.393288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:16.393417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:16.438055Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:32:16.440396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:16.709269Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:16.709380Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:16.711151Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:16.712116Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:16.712879Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:16.713730Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:16.714090Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:16.714261Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:16.714357Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:16.714580Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:16.718997Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:16.737607Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:17.001435Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:17.068144Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:32:17.068255Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:32:17.125356Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:32:17.126360Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:32:17.126690Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:32:17.126802Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:32:17.126853Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:32:17.126915Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: 
table count# 0 2025-12-23T18:32:17.126972Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:32:17.127028Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:32:17.127613Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:32:17.133341Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:32:17.133820Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2118:2584], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:32:17.161036Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2156:2618] 2025-12-23T18:32:17.161942Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2156:2618], schemeshard id = 72075186224037897 2025-12-23T18:32:17.296689Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2246:2663] 2025-12-23T18:32:17.305044Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:32:17.323822Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Describe result: PathErrorUnknown 2025-12-23T18:32:17.323894Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Creating table 2025-12-23T18:32:17.323995Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:32:17.330246Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2305:2692], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:32:17.336391Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:17.344932Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:32:17.345106Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Subscribe on create table tx: 281474976720657 2025-12-23T18:32:17.363260Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:32:17.666785Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:32:17.746940Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:32:18.244878Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:32:18.379209Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:32:18.379313Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Column diff is empty, finishing 2025-12-23T18:32:19.084503Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:32:19.454019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2740:3369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:19.454236Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:19.454688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2758:3374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:19.455177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:19.477484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:20.068764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3044:3424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:20.068930Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:20.163375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3048:3427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:20.163491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:20.164366Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3051:3430]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:32:20.164531Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:32:20.164592Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3053:3432] 2025-12-23T18:32:20.164645Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3053:3432] 2025-12-23T18:32:20.165286Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3054:3182] 2025-12-23T18:32:20.165664Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3053:3432], server id = [2:3054:3182], tablet id = 72075186224037894, status = OK 2025-12-23T18:32:20.165866Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3054:3182], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:32:20.165940Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-12-23T18:32:20.166255Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-12-23T18:32:20.166355Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3051:3430], StatRequests.size() = 1 2025-12-23T18:32:20.195766Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:32:20.196412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3058:3436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:20.196662Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:20.197319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3062:3440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:20.197472Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:20.197553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3065:3443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:20.218971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:20.411548Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:32:20.411636Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:32:20.519486Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3053:3432], schemeshard count = 1 2025-12-23T18:32:20.831413Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3067:3445], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-12-23T18:32:21.011206Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3170:3510] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:32:21.027887Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3193:3526]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:32:21.028165Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:32:21.028213Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3193:3526], StatRequests.size() = 1 2025-12-23T18:32:21.182612Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3242:3225]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T18:32:21.186038Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:32:21.186114Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:32:21.186505Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:32:21.186567Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:32:21.186620Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:32:21.220851Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-12-23T18:32:21.233160Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-12-23T18:32:21.233554Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-12-23T18:32:21.233760Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2025-12-23T18:32:21.233864Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> GenericFederatedQuery::IcebergHiveBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown >> TTxDataShardSampleKScan::RunScan [GOOD] >> TTxDataShardSampleKScan::SkipForeign >> KqpRboPg::PredicatePushdownLeftJoin [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> 
TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query >> GenericFederatedQuery::IcebergHiveTokenSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> TConfigsDispatcherObservabilityTests::TestGetStateRequestResponse >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact >> TConfigsDispatcherObservabilityTests::TestSeedNodesInitialization >> GenericFederatedQuery::IcebergHiveSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveSaFilterPushdown >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown |91.9%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist >> KqpRboPg::FiveJoinsCBO [GOOD] |91.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TConfigsDispatcherTests::TestYamlEndToEnd >> TConfigsDispatcherObservabilityTests::TestGetStateRequestResponse [GOOD] >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> TConfigsDispatcherObservabilityTests::TestSeedNodesInitialization [GOOD] >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> TConfigsDispatcherTests::TestYamlConfigAndIcb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::PredicatePushdownLeftJoin [GOOD] Test command err: Trying to start YDB, gRPC: 17824, MsgBus: 31244 2025-12-23T18:32:06.358679Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122978370006739:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:06.358887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:32:06.386012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d54/r3tmp/tmp5b88E6/pdisk_1.dat 2025-12-23T18:32:06.984697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:06.984861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:07.018495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:07.048574Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:07.117708Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17824, node 1 
2025-12-23T18:32:07.281329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:07.341147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:07.341166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:07.341175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:07.341248Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:07.370159Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:31244 TClient is connected to server localhost:31244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514727117 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:08.102578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:08.128632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:32:11.007888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122999844843984:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:11.008003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122999844843994:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:11.008084Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:11.008918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122999844843999:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:11.008987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:11.013578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:11.034053Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122999844843998:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:32:11.135544Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122999844844051:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:32:11.359574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122978370006739:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:11.359644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61278, MsgBus: 21986 2025-12-23T18:32:12.371286Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123005657813829:2146];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:12.371427Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:32:12.420739Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d54/r3tmp/tmpPZEVzP/pdisk_1.dat 2025-12-23T18:32:12.569699Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:12.574915Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587123005657813721:2081] 1766514732362331 != 1766514732362334 2025-12-23T18:32:12.597894Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:12.598000Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:12.598861Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:12.644903Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61278, node 2 2025-12-23T18:32:12.843070Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:12.930366Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:12.930387Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:12.930394Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:12.930785Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration TClient is connected to server localhost:21986 2025-12-23T18:32:13.383252Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21986 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514732710 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:13.684159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:13.697448Z node 2 : ... adService] [TPoolFetcherActor] ActorId: [2:7587123027132651278:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:17.758200Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:17.758794Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123027132651283:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:17.758832Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123027132651284:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:17.758887Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:17.763044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:17.776368Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123027132651287:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:32:17.841606Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123027132651338:2660] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13930, MsgBus: 22847 2025-12-23T18:32:19.193089Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587123034192062639:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:19.193380Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d54/r3tmp/tmpe6dElP/pdisk_1.dat 2025-12-23T18:32:19.298990Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:19.392352Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:19.398963Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587123034192062607:2081] 1766514739171728 != 1766514739171731 2025-12-23T18:32:19.406560Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:19.406629Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:19.427807Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13930, node 3 2025-12-23T18:32:19.567118Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:19.663408Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:19.663435Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:19.663444Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:19.663523Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22847 2025-12-23T18:32:20.211296Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22847 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514739486 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:20.441262Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:20.451853Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:32:24.195152Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587123034192062639:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:24.195227Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:24.202945Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123055666899975:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.203065Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.203533Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123055666899985:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.203591Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.251283Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:24.301512Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:24.425538Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123055666900167:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.425646Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.425939Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123055666900172:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.425972Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123055666900173:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.426241Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.430613Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:24.460189Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587123055666900176:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-12-23T18:32:24.517732Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587123055666900228:2658] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse [GOOD] >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRenameWorksNewApi |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> TConfigsDispatcherTests::TestSubscriptionNotification >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact >> TConfigsDispatcherTests::TestYamlConfigAndIcb [GOOD] >> GenericFederatedQuery::YdbSelectCount [GOOD] >> GenericFederatedQuery::YdbFilterPushdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::FiveJoinsCBO [GOOD] Test command err: Trying to start YDB, gRPC: 27532, MsgBus: 29926 2025-12-23T18:32:06.984561Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122978597580184:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:06.984594Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d47/r3tmp/tmpMbSsbJ/pdisk_1.dat 2025-12-23T18:32:07.397390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:07.418906Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:07.419079Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122978597580150:2081] 1766514726954366 != 1766514726954369 2025-12-23T18:32:07.434329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:07.434457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:07.481767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27532, node 1 2025-12-23T18:32:07.654826Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:07.723275Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:07.723302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:07.723308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:07.723390Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29926 2025-12-23T18:32:08.047817Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:29926 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514727558 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:08.697370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:08.712071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:32:11.513092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123000072417523:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:11.513214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:11.513583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123000072417533:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:11.513672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:11.916125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:11.986954Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122978597580184:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:11.987153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:12.059039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:12.182872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123004367385014:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:12.182978Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:12.183410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123004367385019:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:12.183492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123004367385020:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:12.183823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:12.187027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:12.204752Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123004367385023:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:32:12.268607Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123004367385074:2659] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 14433, MsgBus: 7185 2025-12-23T18:32:14.189418Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123014205420017:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:14.189481Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d47/r3tmp/tmpY2Fu42/pdisk_1.dat 2025-12-23T18:32:14.277478Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:14.404180Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:14.404278Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:14.415035Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587123014205419996:2081] 1766514734185962 != 1766514734185965 2025-12-23T18:32:14.429747Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:14.430650Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14433, node 2 2025-12-23T18:32:14.477914Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:32:14.477943Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:32:14.559166Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:14.583715Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:14.583737Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:14.583743Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initial ... 644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:18.756039Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123031385290417:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:32:18.846215Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123031385290468:2754] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:32:19.189744Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123014205420017:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:19.189824Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8240, MsgBus: 21112 2025-12-23T18:32:20.752260Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587123036566648447:2265];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:20.752395Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d47/r3tmp/tmpjS9lUy/pdisk_1.dat 2025-12-23T18:32:20.811024Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:20.937817Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:20.939529Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587123036566648206:2081] 1766514740716357 != 1766514740716360 2025-12-23T18:32:20.971718Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:20.971811Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:20.985441Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8240, node 3 2025-12-23T18:32:21.067385Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:21.159565Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:21.159588Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:21.159597Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:21.159697Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21112 2025-12-23T18:32:21.738993Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is 
connected to server localhost:21112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514741033 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:21.882058Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:25.186446Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123058041485570:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.186572Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.187099Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123058041485580:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.187232Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.270495Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:25.344052Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:25.386087Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:25.443390Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:25.484102Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:25.526427Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:25.624643Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123058041486024:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.624821Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.625302Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123058041486029:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.625357Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123058041486030:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.625718Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.631837Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:25.649395Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587123058041486033:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-12-23T18:32:25.742411Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587123058041486085:2825] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:32:25.747852Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587123036566648447:2265];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:25.747920Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached |91.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/vector_index/large/ydb-tests-datashard-vector_index-large |91.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/vector_index/large/ydb-tests-datashard-vector_index-large >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess >> GenericFederatedQuery::IcebergHadoopBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestYamlConfigAndIcb [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount [GOOD] >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown >> TConfigsCacheTests::TestFullConfigurationRestore |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |91.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization [GOOD] Test command err: 2025-12-23T18:32:28.189678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:28.189749Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:28.437176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |91.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |91.9%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [LD] {RESULT} $(B)/ydb/tests/datashard/vector_index/large/ydb-tests-datashard-vector_index-large |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> ExternalBlobsMultipleChannels::WithCompaction >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] Test command err: 2025-12-23T18:32:29.540082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:29.540148Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:29.772754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:31.129904Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:31.129959Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:31.226622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |91.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ChangeExternalCount >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple >> IncrementalBackup::MultiBackup [GOOD] >> IncrementalBackup::MultiShardIncrementalRestore >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns >> ExternalBlobsMultipleChannels::SingleChannel >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] Test command err: 2025-12-23T18:32:31.021984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:31.022057Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:31.254902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:32.560630Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:32:32.560708Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:32.663684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TA] $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:105:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099 ... 57] recipient: [56:39:2086] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:142:2057] recipient: [56:141:2158] Leader for TabletID 72057594037927937 is [56:143:2159] sender: [56:144:2057] recipient: [56:141:2158] !Reboot 72057594037927937 (actor [56:58:2099]) rebooted! !Reboot 72057594037927937 (actor [56:58:2099]) tablet resolver refreshed! new actor is[56:143:2159] Leader for TabletID 72057594037927937 is [56:143:2159] sender: [56:259:2057] recipient: [56:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:59:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:76:2057] recipient: [57:14:2061] !Reboot 72057594037927937 (actor [57:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:140:2057] recipient: [57:39:2086] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:143:2057] recipient: [57:142:2158] Leader for TabletID 72057594037927937 is [57:144:2159] sender: [57:145:2057] recipient: [57:142:2158] !Reboot 72057594037927937 (actor [57:58:2099]) rebooted! !Reboot 72057594037927937 (actor [57:58:2099]) tablet resolver refreshed! 
new actor is[57:144:2159] Leader for TabletID 72057594037927937 is [0:0:0] sender: [58:56:2057] recipient: [58:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [58:56:2057] recipient: [58:53:2097] Leader for TabletID 72057594037927937 is [58:58:2099] sender: [58:59:2057] recipient: [58:53:2097] Leader for TabletID 72057594037927937 is [58:58:2099] sender: [58:76:2057] recipient: [58:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [59:56:2057] recipient: [59:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [59:56:2057] recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:59:2057] recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:76:2057] recipient: [59:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:53:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:59:2057] recipient: [60:53:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:76:2057] recipient: [60:14:2061] !Reboot 72057594037927937 (actor [60:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:78:2057] recipient: [60:39:2086] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:81:2057] recipient: [60:80:2112] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:83:2057] recipient: [60:80:2112] !Reboot 72057594037927937 (actor [60:58:2099]) rebooted! !Reboot 72057594037927937 (actor [60:58:2099]) tablet resolver refreshed! new actor is[60:82:2113] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:198:2057] recipient: [60:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:59:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:76:2057] recipient: [61:14:2061] !Reboot 72057594037927937 (actor [61:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:78:2057] recipient: [61:39:2086] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:81:2057] recipient: [61:80:2112] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:83:2057] recipient: [61:80:2112] !Reboot 72057594037927937 (actor [61:58:2099]) rebooted! !Reboot 72057594037927937 (actor [61:58:2099]) tablet resolver refreshed! new actor is[61:82:2113] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:198:2057] recipient: [61:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:53:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:59:2057] recipient: [62:53:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:76:2057] recipient: [62:14:2061] !Reboot 72057594037927937 (actor [62:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:79:2057] recipient: [62:39:2086] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:82:2057] recipient: [62:81:2112] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:84:2057] recipient: [62:81:2112] !Reboot 72057594037927937 (actor [62:58:2099]) rebooted! !Reboot 72057594037927937 (actor [62:58:2099]) tablet resolver refreshed! new actor is[62:83:2113] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:199:2057] recipient: [62:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:53:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:59:2057] recipient: [63:53:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:76:2057] recipient: [63:14:2061] !Reboot 72057594037927937 (actor [63:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:81:2057] recipient: [63:39:2086] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:84:2057] recipient: [63:83:2114] Leader for TabletID 72057594037927937 is [63:85:2115] sender: [63:86:2057] recipient: [63:83:2114] !Reboot 72057594037927937 (actor [63:58:2099]) rebooted! !Reboot 72057594037927937 (actor [63:58:2099]) tablet resolver refreshed! new actor is[63:85:2115] Leader for TabletID 72057594037927937 is [63:85:2115] sender: [63:201:2057] recipient: [63:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:59:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:76:2057] recipient: [64:14:2061] !Reboot 72057594037927937 (actor [64:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:81:2057] recipient: [64:39:2086] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:84:2057] recipient: [64:83:2114] Leader for TabletID 72057594037927937 is [64:85:2115] sender: [64:86:2057] recipient: [64:83:2114] !Reboot 72057594037927937 (actor [64:58:2099]) rebooted! !Reboot 72057594037927937 (actor [64:58:2099]) tablet resolver refreshed! new actor is[64:85:2115] Leader for TabletID 72057594037927937 is [64:85:2115] sender: [64:201:2057] recipient: [64:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:53:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:59:2057] recipient: [65:53:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:76:2057] recipient: [65:14:2061] !Reboot 72057594037927937 (actor [65:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:82:2057] recipient: [65:39:2086] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:85:2057] recipient: [65:84:2114] Leader for TabletID 72057594037927937 is [65:86:2115] sender: [65:87:2057] recipient: [65:84:2114] !Reboot 72057594037927937 (actor [65:58:2099]) rebooted! !Reboot 72057594037927937 (actor [65:58:2099]) tablet resolver refreshed! 
new actor is[65:86:2115] Leader for TabletID 72057594037927937 is [65:86:2115] sender: [65:202:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:53:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:59:2057] recipient: [66:53:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:76:2057] recipient: [66:14:2061] !Reboot 72057594037927937 (actor [66:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:85:2057] recipient: [66:39:2086] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:88:2057] recipient: [66:87:2117] Leader for TabletID 72057594037927937 is [66:89:2118] sender: [66:90:2057] recipient: [66:87:2117] !Reboot 72057594037927937 (actor [66:58:2099]) rebooted! !Reboot 72057594037927937 (actor [66:58:2099]) tablet resolver refreshed! new actor is[66:89:2118] Leader for TabletID 72057594037927937 is [66:89:2118] sender: [66:205:2057] recipient: [66:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:59:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:76:2057] recipient: [67:14:2061] !Reboot 72057594037927937 (actor [67:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:85:2057] recipient: [67:39:2086] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:88:2057] recipient: [67:87:2117] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:90:2057] recipient: [67:87:2117] !Reboot 72057594037927937 (actor [67:58:2099]) rebooted! !Reboot 72057594037927937 (actor [67:58:2099]) tablet resolver refreshed! new actor is[67:89:2118] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:205:2057] recipient: [67:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:53:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:59:2057] recipient: [68:53:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:76:2057] recipient: [68:14:2061] !Reboot 72057594037927937 (actor [68:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:86:2057] recipient: [68:39:2086] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:89:2057] recipient: [68:88:2117] Leader for TabletID 72057594037927937 is [68:90:2118] sender: [68:91:2057] recipient: [68:88:2117] !Reboot 72057594037927937 (actor [68:58:2099]) rebooted! !Reboot 72057594037927937 (actor [68:58:2099]) tablet resolver refreshed! 
new actor is[68:90:2118] Leader for TabletID 72057594037927937 is [68:90:2118] sender: [68:206:2057] recipient: [68:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:59:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:76:2057] recipient: [69:14:2061] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TTxDataShardSampleKScan::SkipForeign [GOOD] >> TTxDataShardValidateUniqueIndexScan::BadRequest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> IncrementalBackup::BackupRestore >> ExternalBlobsMultipleChannels::ChangeExternalCount [GOOD] >> IncrementalBackup::SimpleBackup >> IncrementalBackup::E2EBackupCollection >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] Test command err: 2025-12-23T18:32:24.461538Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123054023113573:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:24.462175Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00327b/r3tmp/tmpwaf6GE/pdisk_1.dat 2025-12-23T18:32:24.752743Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:32:24.772343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:24.772511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:24.814903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:24.858902Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123054023113466:2081] 1766514744454637 != 1766514744454640 2025-12-23T18:32:24.860560Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:24.955026Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:29523 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514744953 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:32:25.285980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:25.291618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:32:25.467136Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:32:26.328522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:28.959921Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7587123071202983677:2600], ActorId: [1:7587123071202983678:2600], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=Yzc3MjViNy0zYzI1NzE2Mi1lZmZkZTk3Yi1hMzE2YzQyMQ==, TxId: 01kd67qvgk26hr7nkb3qyd3efm 2025-12-23T18:32:28.962613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123071202983699:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:28.962697Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:28.963261Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123071202983712:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:28.963336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:29.134120Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123077251676164:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:29.135257Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00327b/r3tmp/tmpAhHMpR/pdisk_1.dat 2025-12-23T18:32:29.203683Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:32:29.376951Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:29.377054Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:29.378506Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587123077251676029:2081] 1766514749087939 != 1766514749087942 2025-12-23T18:32:29.381123Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:29.406881Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:32:29.409429Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:29.461085Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123054023113573:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:29.461294Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:29.744427Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:32:30.148685Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:29053 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514749461 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:32:30.255858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:30.261335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:32:31.270209Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] Test command err: 2025-12-23T18:32:23.994413Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123049552667982:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:23.995737Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00327c/r3tmp/tmpn2LCEZ/pdisk_1.dat 2025-12-23T18:32:24.397323Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:32:24.403298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:24.403445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:24.438572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:24.540643Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:24.647813Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:12528 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514744568 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:32:24.879564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:25.011352Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:32:25.913746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:28.890415Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123073378104995:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:28.890553Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00327c/r3tmp/tmploT6uX/pdisk_1.dat 2025-12-23T18:32:28.906489Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:28.995150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123049552667982:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:28.995259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:29.032726Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2025-12-23T18:32:29.035290Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587123073378104957:2081] 1766514748885650 != 1766514748885653 2025-12-23T18:32:29.035597Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:32:29.044367Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:29.044472Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:29.077947Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:29.297719Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:32:29.905609Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3951 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514749188 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:32:30.134576Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:32:30.140074Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:32:31.149220Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:31.308211Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> YdbTableSplit::SplitByLoadWithDeletes |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |91.9%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-true [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-false >> YdbTableSplit::SplitByLoadWithReads ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ChangeExternalCount [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:32:36.208313Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:36.330606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:36.343103Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:32:36.343534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:32:36.343614Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:32:36.681969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:36.682169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:36.783268Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514753031603 != 1766514753031607 2025-12-23T18:32:36.798531Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:36.853168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:36.945585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b60/r3tmp/tmpkgVoUw/pdisk_1.dat 2025-12-23T18:32:37.275778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:37.290116Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:32:37.405796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:37.799513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:32:38.031302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 101:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> KqpRboYql::ConstantFolding+ColumnStore >> KqpRboPg::Bench_Select >> KqpRboPg::UnionAll >> 
TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query [GOOD] >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] >> TTxDataShardValidateUniqueIndexScan::BadRequest [GOOD] >> TTxDataShardValidateUniqueIndexScan::RunScan >> TKeyValueTest::TestRenameWorksNewApi [GOOD] |91.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |91.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |91.9%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} |91.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |92.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> IncrementalBackup::MultiShardIncrementalRestore [GOOD] >> IncrementalBackup::ResetOperationIncrementalBackup >> KqpRboPg::Bench_Select [GOOD] >> KqpRboPg::Bench_Filter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:32:35.213105Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:35.329043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:35.342415Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:32:35.343733Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:32:35.343857Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:32:35.726248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:35.726401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:35.817098Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514751966119 != 1766514751966123 2025-12-23T18:32:35.830712Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:35.878896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:35.981440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b65/r3tmp/tmpvrbXhZ/pdisk_1.dat 2025-12-23T18:32:36.329162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:36.345517Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:32:36.453063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:36.820876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:32:37.104744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1013:2859], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:37.104897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1023:2864], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:37.104993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:37.106191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1029:2869], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:37.106402Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:37.111408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:37.308937Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1027:2867], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:32:37.375792Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1086:2907] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13 ... recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:59:2057] recipient: [59:53:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:76:2057] recipient: [59:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:53:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:59:2057] recipient: [60:53:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:76:2057] recipient: [60:14:2061] !Reboot 72057594037927937 (actor [60:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:78:2057] recipient: [60:39:2086] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:81:2057] recipient: [60:80:2112] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:83:2057] recipient: [60:80:2112] !Reboot 72057594037927937 (actor [60:58:2099]) rebooted! !Reboot 72057594037927937 (actor [60:58:2099]) tablet resolver refreshed! new actor is[60:82:2113] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:198:2057] recipient: [60:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:59:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:76:2057] recipient: [61:14:2061] !Reboot 72057594037927937 (actor [61:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:78:2057] recipient: [61:39:2086] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:81:2057] recipient: [61:80:2112] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:83:2057] recipient: [61:80:2112] !Reboot 72057594037927937 (actor [61:58:2099]) rebooted! !Reboot 72057594037927937 (actor [61:58:2099]) tablet resolver refreshed! new actor is[61:82:2113] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:198:2057] recipient: [61:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:53:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:59:2057] recipient: [62:53:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:76:2057] recipient: [62:14:2061] !Reboot 72057594037927937 (actor [62:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:79:2057] recipient: [62:39:2086] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:82:2057] recipient: [62:81:2112] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:84:2057] recipient: [62:81:2112] !Reboot 72057594037927937 (actor [62:58:2099]) rebooted! !Reboot 72057594037927937 (actor [62:58:2099]) tablet resolver refreshed! new actor is[62:83:2113] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:199:2057] recipient: [62:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:53:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:59:2057] recipient: [63:53:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:76:2057] recipient: [63:14:2061] !Reboot 72057594037927937 (actor [63:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:82:2057] recipient: [63:39:2086] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:85:2057] recipient: [63:84:2115] Leader for TabletID 72057594037927937 is [63:86:2116] sender: [63:87:2057] recipient: [63:84:2115] !Reboot 72057594037927937 (actor [63:58:2099]) rebooted! !Reboot 72057594037927937 (actor [63:58:2099]) tablet resolver refreshed! new actor is[63:86:2116] Leader for TabletID 72057594037927937 is [63:86:2116] sender: [63:202:2057] recipient: [63:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:59:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:76:2057] recipient: [64:14:2061] !Reboot 72057594037927937 (actor [64:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:82:2057] recipient: [64:39:2086] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:85:2057] recipient: [64:84:2115] Leader for TabletID 72057594037927937 is [64:86:2116] sender: [64:87:2057] recipient: [64:84:2115] !Reboot 72057594037927937 (actor [64:58:2099]) rebooted! !Reboot 72057594037927937 (actor [64:58:2099]) tablet resolver refreshed! new actor is[64:86:2116] Leader for TabletID 72057594037927937 is [64:86:2116] sender: [64:202:2057] recipient: [64:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:53:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:59:2057] recipient: [65:53:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:76:2057] recipient: [65:14:2061] !Reboot 72057594037927937 (actor [65:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:83:2057] recipient: [65:39:2086] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:86:2057] recipient: [65:85:2115] Leader for TabletID 72057594037927937 is [65:87:2116] sender: [65:88:2057] recipient: [65:85:2115] !Reboot 72057594037927937 (actor [65:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [65:58:2099]) tablet resolver refreshed! new actor is[65:87:2116] Leader for TabletID 72057594037927937 is [65:87:2116] sender: [65:203:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:53:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:59:2057] recipient: [66:53:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:76:2057] recipient: [66:14:2061] !Reboot 72057594037927937 (actor [66:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:84:2057] recipient: [66:39:2086] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:87:2057] recipient: [66:86:2116] Leader for TabletID 72057594037927937 is [66:88:2117] sender: [66:89:2057] recipient: [66:86:2116] !Reboot 72057594037927937 (actor [66:58:2099]) rebooted! !Reboot 72057594037927937 (actor [66:58:2099]) tablet resolver refreshed! new actor is[66:88:2117] Leader for TabletID 72057594037927937 is [66:88:2117] sender: [66:108:2057] recipient: [66:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:59:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:76:2057] recipient: [67:14:2061] !Reboot 72057594037927937 (actor [67:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:85:2057] recipient: [67:39:2086] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:88:2057] recipient: [67:87:2117] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:90:2057] recipient: [67:87:2117] !Reboot 72057594037927937 (actor [67:58:2099]) rebooted! !Reboot 72057594037927937 (actor [67:58:2099]) tablet resolver refreshed! new actor is[67:89:2118] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:109:2057] recipient: [67:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:53:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:59:2057] recipient: [68:53:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:76:2057] recipient: [68:14:2061] !Reboot 72057594037927937 (actor [68:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:88:2057] recipient: [68:39:2086] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:91:2057] recipient: [68:90:2120] Leader for TabletID 72057594037927937 is [68:92:2121] sender: [68:93:2057] recipient: [68:90:2120] !Reboot 72057594037927937 (actor [68:58:2099]) rebooted! !Reboot 72057594037927937 (actor [68:58:2099]) tablet resolver refreshed! 
new actor is[68:92:2121] Leader for TabletID 72057594037927937 is [68:92:2121] sender: [68:208:2057] recipient: [68:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:59:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:76:2057] recipient: [69:14:2061] !Reboot 72057594037927937 (actor [69:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:88:2057] recipient: [69:39:2086] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:91:2057] recipient: [69:90:2120] Leader for TabletID 72057594037927937 is [69:92:2121] sender: [69:93:2057] recipient: [69:90:2120] !Reboot 72057594037927937 (actor [69:58:2099]) rebooted! !Reboot 72057594037927937 (actor [69:58:2099]) tablet resolver refreshed! new actor is[69:92:2121] Leader for TabletID 72057594037927937 is [69:92:2121] sender: [69:208:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [70:56:2057] recipient: [70:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [70:56:2057] recipient: [70:53:2097] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:59:2057] recipient: [70:53:2097] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:76:2057] recipient: [70:14:2061] !Reboot 72057594037927937 (actor [70:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:89:2057] recipient: [70:39:2086] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:92:2057] recipient: [70:91:2120] Leader for TabletID 72057594037927937 is [70:93:2121] sender: [70:94:2057] recipient: [70:91:2120] !Reboot 72057594037927937 (actor [70:58:2099]) rebooted! !Reboot 72057594037927937 (actor [70:58:2099]) tablet resolver refreshed! new actor is[70:93:2121] Leader for TabletID 72057594037927937 is [0:0:0] sender: [71:56:2057] recipient: [71:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [71:56:2057] recipient: [71:53:2097] Leader for TabletID 72057594037927937 is [71:58:2099] sender: [71:59:2057] recipient: [71:53:2097] Leader for TabletID 72057594037927937 is [71:58:2099] sender: [71:76:2057] recipient: [71:14:2061] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> IncrementalBackup::SimpleBackup [GOOD] >> IncrementalBackup::SimpleRestore >> KqpRboPg::UnionAll [GOOD] >> KqpRboYql::Aggregation+ColumnStore >> IncrementalBackup::BackupRestore [GOOD] >> IncrementalBackup::ComplexBackupBackupCollection >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:32:35.371028Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:35.506005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:35.520086Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:32:35.520478Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:32:35.520568Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:32:35.844034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:35.844201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:35.953562Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514752010278 != 1766514752010282 2025-12-23T18:32:35.967759Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:36.017035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:36.132046Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b62/r3tmp/tmpU91xgo/pdisk_1.dat 2025-12-23T18:32:36.453362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:36.468002Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:32:36.589818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:36.991321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:959:2815], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:36.991496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:970:2820], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:36.991586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:36.992963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:975:2825], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:36.993256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:36.998233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:37.140583Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:973:2823], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:32:37.213674Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1031:2862] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:32:46.702352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-false [FAIL] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:32:37.235399Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:37.345017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:37.356361Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:32:37.356729Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:32:37.356816Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:32:37.648925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:37.649063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:37.729547Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514754196018 != 1766514754196022 2025-12-23T18:32:37.742593Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:37.792163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:37.897207Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b5e/r3tmp/tmpASO7gB/pdisk_1.dat 2025-12-23T18:32:38.223458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:38.238990Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:32:38.361030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:38.737959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:959:2815], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:38.738095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:970:2820], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:38.738186Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:38.739526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:975:2825], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:38.739809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:38.744458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:38.900557Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:973:2823], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:32:38.998160Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1031:2862] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] |92.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} >> IncrementalBackup::E2EBackupCollection [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore+WithIncremental |92.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRboPg::Aggregation+ColumnStore >> KqpRboPg::Filter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:32:37.806353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:37.929343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:37.939198Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:32:37.939513Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:32:37.939576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:32:38.280378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:38.280524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:38.362938Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514754340883 != 1766514754340887 2025-12-23T18:32:38.376286Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:38.431114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:38.522904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b5b/r3tmp/tmpljoUw4/pdisk_1.dat 2025-12-23T18:32:38.873958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:38.887956Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:32:39.006381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:39.338081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:957:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:39.338203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:968:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:39.338269Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:39.339269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:973:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:39.339478Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:39.343795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:39.475710Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:971:2822], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:32:39.562483Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1029:2861] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] Test command err: 2025-12-23T18:32:00.056467Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:32:00.128859Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:32:00.129218Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:32:00.138076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:32:00.138464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:32:00.138840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:32:00.139038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:32:00.139194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:32:00.139355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:32:00.139484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:32:00.139611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:32:00.139768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:32:00.139892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:32:00.140003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:32:00.140159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:32:00.140284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:32:00.185233Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:32:00.185473Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:32:00.185535Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:32:00.185733Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:32:00.185908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:32:00.185985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:32:00.186053Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:32:00.186152Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:32:00.186217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:32:00.186264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:32:00.186296Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:32:00.186522Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:32:00.186618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:32:00.186665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:32:00.186717Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:32:00.186868Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:32:00.186929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:32:00.186975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:32:00.187007Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:32:00.187072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:32:00.187110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:32:00.187167Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:32:00.187237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:32:00.187280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:32:00.187323Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:32:00.187587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:32:00.187642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:32:00.187675Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:32:00.187803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:32:00.187850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:32:00.187878Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:32:00.187923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:32:00.187960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:32:00.187999Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:32:00.188043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:32:00.188079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:32:00.188108Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:32:00.188234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:32:00.188314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2025-12-23T18:32:46.802794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1071; 2025-12-23T18:32:46.802860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=84779; 2025-12-23T18:32:46.802911Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=84909; 2025-12-23T18:32:46.802982Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2025-12-23T18:32:46.803412Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=374; 2025-12-23T18:32:46.803466Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=86003; 2025-12-23T18:32:46.803653Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=122; 2025-12-23T18:32:46.803794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=86; 2025-12-23T18:32:46.804198Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=347; 2025-12-23T18:32:46.804569Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=313; 2025-12-23T18:32:46.819581Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14922; 2025-12-23T18:32:46.834111Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=14386; 2025-12-23T18:32:46.834240Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=17; 2025-12-23T18:32:46.834305Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=15; 2025-12-23T18:32:46.834346Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:32:46.834431Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2025-12-23T18:32:46.834477Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:32:46.834594Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=59; 2025-12-23T18:32:46.834642Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-12-23T18:32:46.834711Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2025-12-23T18:32:46.834824Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=58; 2025-12-23T18:32:46.834915Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=48; 2025-12-23T18:32:46.834953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=127609; 2025-12-23T18:32:46.835103Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:32:46.835226Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:32:46.835289Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:32:46.835361Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:32:46.835411Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:32:46.835627Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:32:46.835698Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:32:46.835737Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2025-12-23T18:32:46.835787Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:32:46.835855Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766514422435;tx_id=18446744073709551615;;current_snapshot_ts=1766514721500; 2025-12-23T18:32:46.835902Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:32:46.835950Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:32:46.835988Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:32:46.836092Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:32:46.836352Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.137000s; 2025-12-23T18:32:46.841077Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:32:46.841236Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:32:46.841288Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:32:46.841374Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:32:46.841427Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:32:46.841497Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766514422435;tx_id=18446744073709551615;;current_snapshot_ts=1766514721500; 2025-12-23T18:32:46.841576Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:32:46.841632Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:32:46.841677Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:32:46.841760Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:32:46.841815Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:32:46.842890Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.113000s; 2025-12-23T18:32:46.842946Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:32:37.544544Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:37.669575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:37.681006Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:32:37.681492Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:32:37.681580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:32:38.003063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:38.003204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:38.084589Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514754338646 != 1766514754338650 2025-12-23T18:32:38.097615Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:38.152056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:38.258142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b5c/r3tmp/tmp1naiiH/pdisk_1.dat 2025-12-23T18:32:38.600579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:38.615309Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:32:38.730676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:39.054589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:959:2815], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:39.054701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:970:2820], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:39.054783Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:39.055752Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:975:2825], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:39.056081Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:39.059665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:39.191636Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:973:2823], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:32:39.279762Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1031:2862] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> KqpRboYql::ConstantFolding+ColumnStore [GOOD] >> KqpRboYql::ConstantFolding-ColumnStore >> KqpRboYql::Select |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration >> KqpRboPg::TestCrossInnerJoin+ColumnStore >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 |92.0%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact >> KqpRboYql::Filter+ColumnStore |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |92.0%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> KqpRboPg::Bench_Filter [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 >> KqpRboPg::Bench_CrossFilter |92.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... 
results_accumulator.log} >> IncrementalBackup::ResetOperationIncrementalBackup [GOOD] >> IncrementalBackup::ReplaceIntoIncrementalBackup >> IncrementalBackup::SimpleRestore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2025-12-23T18:32:19.135527Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123034785969184:2088];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:19.137520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00521c/r3tmp/tmpnRHrke/pdisk_1.dat 2025-12-23T18:32:19.542908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:19.572856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:19.572982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:19.601162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:19.724237Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:19.742689Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 1377, node 1 2025-12-23T18:32:19.976028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:19.976062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:19.976076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:19.976203Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:20.171503Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:2691 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514739787 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:20.511535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:2691 2025-12-23T18:32:24.138874Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123034785969184:2088];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:24.138997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:24.421499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123056260807037:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.421642Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.422139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123056260807047:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.422216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.742349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:24.979127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123056260807216:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.979234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.979519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123056260807219:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.979564Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.028443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766514744883 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766514744883 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) 2025-12-23T18:32:25.271963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123060555774626:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.272096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.272753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123060555774648:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.272802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123060555774649:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.272832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123060555774650:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.272913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123060555774651:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:25.272977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123060555774652:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool ... ADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7587123060555774619:2381]. 2025-12-23T18:32:46.768859Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=Zjk3NjZkNWItNzczODkzZmUtMjRhNzkzODItNjcyZGFlYmI=, ActorId: [1:7587123060555774619:2381], ActorState: ExecuteState, TraceId: 01kd67rcx8ezgq0n81kfnfdrff, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7587123150750157215:2381] from: [1:7587123150750157214:2381] 2025-12-23T18:32:46.768932Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587123150750157215:2381] TxId: 281474976721314. Ctx: { TraceId: 01kd67rcx8ezgq0n81kfnfdrff, Database: , SessionId: ydb://session/3?node_id=1&id=Zjk3NjZkNWItNzczODkzZmUtMjRhNzkzODItNjcyZGFlYmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. OVERLOADED: {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } 2025-12-23T18:32:46.769184Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=Zjk3NjZkNWItNzczODkzZmUtMjRhNzkzODItNjcyZGFlYmI=, ActorId: [1:7587123060555774619:2381], ActorState: ExecuteState, TraceId: 01kd67rcx8ezgq0n81kfnfdrff, Create QueryResponse for error on request, msg: , status: OVERLOADED, issues: { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } 2025-12-23T18:32:46.781787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-12-23T18:32:46.781887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2025-12-23T18:32:46.781913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2025-12-23T18:32:46.781960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2025-12-23T18:32:46.784008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715693:0 2025-12-23T18:32:46.854633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2025-12-23T18:32:46.854819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, next wakeup# 14.999779s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2025-12-23T18:32:46.855231Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123034785969513:2205] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2025-12-23T18:32:46.855453Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123034785969513:2205] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037890 2025-12-23T18:32:46.924733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 69, next wakeup# 14.929872s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2025-12-23T18:32:46.924775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 
72057594046644480:2 in# 70, next wakeup# 14.929818s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2025-12-23T18:32:46.927512Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037889 CompletedLoansChanged 2025-12-23T18:32:46.927514Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037890 CompletedLoansChanged 2025-12-23T18:32:46.939268Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2025-12-23T18:32:46.945088Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:32:46.945165Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:32:46.950317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:32:46.950587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:32:46.967029Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-12-23T18:32:46.967984Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-12-23T18:32:46.968913Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2025-12-23T18:32:46.969035Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-12-23T18:32:47.031240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, next wakeup# 14.823368s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2025-12-23T18:32:47.031412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 14.823187s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2025-12-23T18:32:47.033269Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123034785969513:2205] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037890 2025-12-23T18:32:47.033365Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123034785969513:2205] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2025-12-23T18:32:47.033649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 2, next wakeup# 14.820953s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2025-12-23T18:32:47.033689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 
72075186224037889, shardIdx# 72057594046644480:2 in# 2, next wakeup# 14.820906s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 TClient::Ls request: /Root/Foo 2025-12-23T18:32:50.262806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, compactionInfo# {72057594046644480:3, SH# 3, Rows# 3556, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 580.628022s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766514744883 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) 2025-12-23T18:32:50.265890Z node 1 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 2 of 72075186224037890 tableId# 38 localTid# 1001, requested from [1:7587123034785969513:2205], partsCount# 2, memtableSize# 2304, memtableWaste# 2304, memtableRows# 16 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766514744883 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) 2025-12-23T18:32:50.428568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 165 ms, with status# 0, next wakeup in# 580.462268s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 Table has 2 shards |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.0%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> IncrementalBackup::SimpleBackupRestoreWithIndex |92.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... 
results_accumulator.log} >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TColumnShardTestSchema::HotTiers [GOOD] >> KqpRboYql::Select [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] >> KqpRboYql::ConstantFolding-ColumnStore [GOOD] >> KqpRboPg::Filter [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists >> TKeyValueTest::TestInlineCopyRangeWorks >> KqpRboPg::Bench_CrossFilter [GOOD] >> KqpRboYql::TPCH_YDB_PERF >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-true [FAIL] >> TTxDataShardValidateUniqueIndexScan::RunScan [GOOD] >> IncrementalBackup::ComplexBackupBackupCollection [GOOD] >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] >> KqpRboYql::Filter+ColumnStore [GOOD] >> IncrementalBackup::ReplaceIntoIncrementalBackup [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] >> IncrementalBackup::SimpleBackupRestoreWithIndex [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore+WithIncremental [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false >> KqpRboPg::Bench_JoinFilter >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental >> KqpRboPg::ConstantFolding >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration >> KqpRboYql::BasicJoins >> KqpRboYql::Filter-ColumnStore |92.0%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> IncrementalBackup::ResetVsUpsertColumnStateSerialization |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |92.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental >> IncrementalBackup::ForgedMultiShardIncrementalRestore-WithIncremental >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> KqpRboPg::Bench_JoinFilter [GOOD] >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] Test command err: 2025-12-23T18:31:53.837715Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:31:53.875828Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:31:53.876076Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:31:53.884236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:31:53.884574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:31:53.884802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:31:53.884935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:31:53.885075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:31:53.885226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:31:53.885390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:31:53.885535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:31:53.885663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:31:53.885786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:31:53.885913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:31:53.886035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:31:53.886166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:31:53.928129Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2025-12-23T18:31:53.928326Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:31:53.928396Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:31:53.928642Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:53.928796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:31:53.928873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:31:53.928922Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:31:53.929031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:31:53.929110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:31:53.929161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:31:53.929202Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:31:53.929400Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:53.929466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:31:53.929507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:31:53.929537Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:31:53.929629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:31:53.929680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:31:53.929744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:31:53.929783Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:31:53.929869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:31:53.929919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:31:53.929955Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:31:53.930024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:31:53.930074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:31:53.930119Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:31:53.930388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:31:53.930470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:31:53.930506Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:31:53.930672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:31:53.930723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:31:53.930900Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:31:53.930961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:31:53.931040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:31:53.931076Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:31:53.931134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:31:53.931193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:31:53.931230Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:31:53.931369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:31:53.931411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... _name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2025-12-23T18:33:01.781165Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1166; 2025-12-23T18:33:01.781227Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=82605; 2025-12-23T18:33:01.781283Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=82731; 2025-12-23T18:33:01.781355Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=16; 2025-12-23T18:33:01.781903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=471; 2025-12-23T18:33:01.781963Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=83888; 2025-12-23T18:33:01.782147Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=122; 2025-12-23T18:33:01.782287Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=76; 2025-12-23T18:33:01.782790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=447; 2025-12-23T18:33:01.783203Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=354; 2025-12-23T18:33:01.806378Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=23076; 2025-12-23T18:33:01.827761Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=21220; 2025-12-23T18:33:01.827907Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=16; 2025-12-23T18:33:01.827995Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=17; 2025-12-23T18:33:01.828049Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-12-23T18:33:01.828141Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=54; 2025-12-23T18:33:01.828192Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-12-23T18:33:01.828344Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=111; 2025-12-23T18:33:01.828392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-12-23T18:33:01.828472Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=40; 2025-12-23T18:33:01.828570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2025-12-23T18:33:01.828659Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=52; 2025-12-23T18:33:01.828726Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=142322; 2025-12-23T18:33:01.828882Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 
2025-12-23T18:33:01.829028Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:33:01.829101Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:33:01.829195Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:33:01.829252Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:33:01.829525Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:33:01.829599Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:33:01.829644Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:33:01.829696Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:33:01.829770Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766514417964;tx_id=18446744073709551615;;current_snapshot_ts=1766514715353; 2025-12-23T18:33:01.829820Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:33:01.829870Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:01.829913Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:01.830018Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:33:01.830229Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.016000s; 2025-12-23T18:33:01.835279Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:33:01.835700Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:33:01.835765Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:33:01.835854Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:33:01.835931Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:33:01.836010Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766514417964;tx_id=18446744073709551615;;current_snapshot_ts=1766514715353; 2025-12-23T18:33:01.836060Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:33:01.836109Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:01.836154Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:01.836240Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2025-12-23T18:33:01.836295Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:33:01.836984Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.071000s; 2025-12-23T18:33:01.837033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> KqpRboPg::ConstantFolding [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] >> KqpRboPg::FallbackToYqlEnabled >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=166515322.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166515322.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166515322.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=166515322.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515322.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166515322.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=166515322.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146514122.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146515322.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=146515322.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146514122.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146514122.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=146514122.000000s;Name=;Codec=}; 2025-12-23T18:32:03.072516Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:32:03.104177Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:32:03.104430Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:32:03.112087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:32:03.112367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:32:03.112588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:32:03.112708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:32:03.112819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:32:03.112941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:32:03.113043Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:32:03.113145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:32:03.113255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:32:03.113362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:32:03.113462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:32:03.113557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:32:03.113655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:32:03.155543Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:32:03.155725Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:32:03.155782Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:32:03.155961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:32:03.156125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:32:03.156213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:32:03.156259Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:32:03.156357Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:32:03.156420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:32:03.156459Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:32:03.156488Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:32:03.156717Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:32:03.156792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:32:03.156833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:32:03.156864Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:32:03.156980Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:32:03.157034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:32:03.157079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:32:03.157125Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:32:03.157201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:32:03.157242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:32:03.157272Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:32:03.157314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:32:03.157349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:32:03.157382Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:32:03.157572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:32:03.157615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:32:03.157645Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:32:03.157755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:32:03.157797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:3 ... ground=cleanup;skip_reason=no_changes; 2025-12-23T18:33:01.517921Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:33:01.518099Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766514771388:max} readable: {1766514771388:max} at tablet 9437184 2025-12-23T18:33:01.518215Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:33:01.520152Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766514771388:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:33:01.520237Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766514771388:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:33:01.520827Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766514771388:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:33:01.522684Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766514771388:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:33:01.523701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766514771388:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1451:3396];trace_detailed=; 2025-12-23T18:33:01.524395Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:33:01.524624Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:33:01.524878Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:33:01.525053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:33:01.525452Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:33:01.525592Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:33:01.525709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:33:01.526094Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1451:3396] finished for tablet 9437184 2025-12-23T18:33:01.526663Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1450:3395];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":63094148,"name":"_full_task","f":63094148,"d_finished":0,"c":0,"l":63096701,"d":2553},"events":[{"name":"bootstrap","f":63094621,"d_finished":1006,"c":1,"l":63095627,"d":1006},{"a":63095961,"name":"ack","f":63095961,"d_finished":0,"c":0,"l":63096701,"d":740},{"a":63095942,"name":"processing","f":63095942,"d_finished":0,"c":0,"l":63096701,"d":759},{"name":"ProduceResults","f":63095296,"d_finished":591,"c":2,"l":63096281,"d":591},{"a":63096287,"name":"Finish","f":63096287,"d_finished":0,"c":0,"l":63096701,"d":414}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:33:01.526768Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1450:3395];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:33:01.527251Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1450:3395];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":63094148,"name":"_full_task","f":63094148,"d_finished":0,"c":0,"l":63097358,"d":3210},"events":[{"name":"bootstrap","f":63094621,"d_finished":1006,"c":1,"l":63095627,"d":1006},{"a":63095961,"name":"ack","f":63095961,"d_finished":0,"c":0,"l":63097358,"d":1397},{"a":63095942,"name":"processing","f":63095942,"d_finished":0,"c":0,"l":63097358,"d":1416},{"name":"ProduceResults","f":63095296,"d_finished":591,"c":2,"l":63096281,"d":591},{"a":63096287,"name":"Finish","f":63096287,"d_finished":0,"c":0,"l":63097358,"d":1071}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1451:3396]->[1:1450:3395] 2025-12-23T18:33:01.527364Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:33:01.522654Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:33:01.527413Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:33:01.527549Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] Test command err: 
2025-12-23T18:32:53.783769Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123178082019409:2143];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:53.783924Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00326e/r3tmp/tmpY94YeA/pdisk_1.dat 2025-12-23T18:32:53.859921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:54.320025Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:32:54.323921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:54.324044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:54.326075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:54.339936Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:13679 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: 2025-12-23T18:32:54.621515Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514774416 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:32:54.703908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:32:54.793653Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:32:55.723989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:58.551219Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7587123199556856807:2599], ActorId: [1:7587123199556856808:2599], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=NzIwNDEyZWEtY2M3NTFhNzItYTAzZmEzZGYtODM4ODRkYw==, TxId: 01kd67rrdgbjghvyeq2bzg69xe 2025-12-23T18:32:58.553772Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123199556856832:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:58.554009Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:58.557768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123199556856844:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:58.557941Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:58.677811Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123200939934382:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:58.678326Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00326e/r3tmp/tmpjriXVQ/pdisk_1.dat 2025-12-23T18:32:58.738935Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:32:58.778135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123178082019409:2143];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:58.778238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:58.842076Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:58.845336Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:58.846008Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:58.898331Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:58.954872Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:20799 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514778952 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:32:59.631941Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:32:59.636691Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:32:59.707235Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:33:00.645198Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:02.801854Z node 2 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [2:7587123218119804452:2599], ActorId: [2:7587123218119804453:2599], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=2&id=MTM3N2JhNDMtZmFlMWZhMTAtY2U5Y2Q0NTYtYTcxMmJhYjM=, TxId: 01kd67rwjdcfm91yfc6wcrhz6a 2025-12-23T18:33:02.803552Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123218119804475:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:02.803625Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:02.803942Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123218119804488:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:02.803978Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> IncrementalBackup::ResetVsUpsertColumnStateSerialization [GOOD] >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore >> KqpRboPg::FallbackToYqlEnabled [GOOD] >> KqpRboPg::FallbackToYqlDisabled |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] Test command err: 2025-12-23T18:32:53.563287Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123182201839603:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:53.563428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00326c/r3tmp/tmpKgz5K1/pdisk_1.dat 2025-12-23T18:32:53.857071Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:32:53.858475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:53.858593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:53.989029Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:53.995981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:54.149121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:7974 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514774052 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:32:54.356319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:32:54.369952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:32:54.586905Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:32:55.399560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:58.022152Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7587123203676677056:2598], ActorId: [1:7587123203676677057:2598], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=YjI4NzE4NGQtZTg2OWZjMGMtNzM4NjdhYTItOTA1NGRhOTc=, TxId: 01kd67rqwra21q5p6v5zy4anj4 2025-12-23T18:32:58.024501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123203676677079:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:58.024599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:58.030083Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123203676677091:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:58.030196Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } >> TExternalDataSourceTest::DropTableTwice >> KqpRboYql::Filter-ColumnStore [GOOD] >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false [FAIL] >> KqpRboPg::TestCrossInnerJoin+ColumnStore [GOOD] >> KqpRboYql::OlapPredicatePushdown |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> KqpRboPg::TestCrossInnerJoin-ColumnStore |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview >> TExternalDataSourceTest::ReadOnlyMode >> KqpRboYql::Aggregation+ColumnStore [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties >> TExternalDataSourceTest::DropTableTwice [GOOD] >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] >> KqpRboYql::Aggregation-ColumnStore >> TExternalDataSourceTest::ParallelCreateExternalDataSource |92.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/build_index/ut/unittest >> TTxDataShardValidateUniqueIndexScan::RunScan [GOOD] Test command err: 2025-12-23T18:25:10.763802Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587121189494342654:2147];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:25:10.764167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0059a0/r3tmp/tmp1cr9Ue/pdisk_1.dat 2025-12-23T18:25:11.318121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:25:11.335256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:25:11.335334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:25:11.490883Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587121189494342532:2081] 1766514310680323 != 1766514310680326 2025-12-23T18:25:11.504603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:25:11.506241Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:25:11.566808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:25:11.614050Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:25:11.626945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046644480 2025-12-23T18:25:11.629504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:25:11.679809Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7587121193789310633:2282] 2025-12-23T18:25:11.680090Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:25:11.723333Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:25:11.723403Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:25:11.724772Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:25:11.724809Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:25:11.724834Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:25:11.725180Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:25:11.725219Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:25:11.725249Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7587121193789310648:2282] in generation 1 2025-12-23T18:25:11.750812Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:25:11.778678Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:25:11.778939Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:25:11.778980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7587121193789310650:2283] 2025-12-23T18:25:11.778994Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:25:11.779006Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:25:11.779015Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:25:11.779371Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:25:11.779439Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:25:11.779454Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:25:11.779467Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:25:11.779485Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:25:11.779499Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:25:11.779559Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:25:11.786843Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7587121193789310631:2498], serverId# [1:7587121193789310635:2499], sessionId# [0:0:0] 2025-12-23T18:25:11.786947Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:25:11.787177Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:25:11.787237Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-12-23T18:25:11.791438Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:25:11.791515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:25:11.798818Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:25:11.806164Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7587121193789310673:2517], serverId# [1:7587121193789310674:2518], sessionId# [0:0:0] 2025-12-23T18:25:11.811086Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1766514311842 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1766514311842 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:25:11.811114Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:25:11.811208Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:25:11.811287Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:25:11.811303Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:25:11.811328Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1766514311842:281474976710657] in PlanQueue unit at 72075186224037888 2025-12-23T18:25:11.811583Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1766514311842:281474976710657 keys extracted: 0 
2025-12-23T18:25:11.811710Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:25:11.811793Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:25:11.811824Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:25:11.814151Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:25:11.831971Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:25:11.833266Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1766514311841 2025-12-23T18:25:11.833280Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:25:11.833319Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1766514311849 2025-12-23T18:25:11.833889Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1766514311842} 2025-12-23T18:25:11.833945Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:25:11.834000Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:25:11.834016Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:25:11.834035Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:25:11.834079Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1766514311842 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7587121193789310181:2148], exec latency: 19 ms, propose latency: 22 ms 2025-12-23T18:25:11.834109Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-12-23T18:25:11.834136Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:25:11.840551Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-12-23T18:25:11.840601Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075 ... 
_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:32:58.172955Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:32:58.173037Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:32:58.173167Z node 50 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2025-12-23T18:32:58.173315Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3000 : 281474976715662] from 72075186224037889 at tablet 72075186224037889 send result to client [50:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:32:58.175054Z node 50 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-12-23T18:32:58.175400Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:32:58.177121Z node 50 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:32:58.177870Z node 50 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:32:58.179995Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 3000 2025-12-23T18:32:58.181700Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2025-12-23T18:32:58.181863Z node 50 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-12-23T18:32:58.203325Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1173:2966], serverId# [50:1174:2967], sessionId# [0:0:0] 2025-12-23T18:32:58.203795Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:246: Starting TValidateUniqueIndexScan TabletId: 72075186224037889 Id: 1 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 43 IndexColumns: "key_part1" IndexColumns: "key_part2" SeqNoGeneration: 42 SeqNoRound: 45 2025-12-23T18:32:58.204013Z node 50 :BUILD_INDEX INFO: unique_index.cpp:51: Create TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:32:58.205230Z node 50 :BUILD_INDEX INFO: unique_index.cpp:58: Prepare TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:32:58.205374Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:63: Seek 0 TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:32:58.205547Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:134: Exhausted TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:32:58.205848Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:124: Done TValidateUniqueIndexScan Id: 1 Status: DONE Issues: Id: 1 TabletId: 72075186224037889 Status: DONE RequestSeqNoGeneration: 42 RequestSeqNoRound: 45 MeteringStats { ReadRows: 0 ReadBytes: 0 } 2025-12-23T18:32:58.206885Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 
2025-12-23T18:32:58.206998Z node 50 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:32:58.207081Z node 50 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:32:58.207199Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:32:58.501187Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1195:2981], serverId# [50:1196:2982], sessionId# [0:0:0] 2025-12-23T18:32:58.502040Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2025-12-23T18:32:58.502343Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=2 2025-12-23T18:32:58.525959Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:32:58.545258Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1202:2987], serverId# [50:1203:2988], sessionId# [0:0:0] 2025-12-23T18:32:58.545777Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:246: Starting TValidateUniqueIndexScan TabletId: 72075186224037889 Id: 1 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 43 IndexColumns: "key_part1" IndexColumns: "key_part2" SeqNoGeneration: 42 SeqNoRound: 46 2025-12-23T18:32:58.545968Z node 50 :BUILD_INDEX INFO: unique_index.cpp:51: Create TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:32:58.551314Z node 50 :BUILD_INDEX INFO: unique_index.cpp:58: Prepare TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:32:58.551486Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:63: Seek 0 TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:32:58.559518Z node 50 :BUILD_INDEX ERROR: unique_index.cpp:126: Failed TValidateUniqueIndexScan Id: 1 Status: BUILD_ERROR Issues: {
: Error: Duplicate key found: (key_part1=1, key_part2=1) } Id: 1 TabletId: 72075186224037889 Status: BUILD_ERROR Issues { message: "Duplicate key found: (key_part1=1, key_part2=1)" severity: 1 } RequestSeqNoGeneration: 42 RequestSeqNoRound: 46 MeteringStats { ReadRows: 2 ReadBytes: 42 } 2025-12-23T18:32:58.560395Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:32:58.560508Z node 50 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:32:58.560614Z node 50 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:32:58.560742Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:32:59.661315Z node 50 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037889 Acquired lock# 281474976715664, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 43] 2025-12-23T18:32:59.782990Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037889 2025-12-23T18:32:59.799037Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037889, row count=2 2025-12-23T18:32:59.814889Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:33:00.307067Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:5] at 72075186224037889 2025-12-23T18:33:00.307334Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:5] at 72075186224037889, row count=2 2025-12-23T18:33:00.319115Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:33:00.331230Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1252:3019], serverId# [50:1253:3020], sessionId# [0:0:0] 2025-12-23T18:33:00.331688Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:246: Starting TValidateUniqueIndexScan TabletId: 72075186224037889 Id: 1 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 43 IndexColumns: "key_part1" IndexColumns: "key_part2" SeqNoGeneration: 42 SeqNoRound: 47 2025-12-23T18:33:00.331882Z node 50 :BUILD_INDEX INFO: unique_index.cpp:51: Create TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:33:00.332818Z node 50 :BUILD_INDEX INFO: unique_index.cpp:58: Prepare TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:33:00.332958Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:63: Seek 0 TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:33:00.333220Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:134: Exhausted TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:33:00.333532Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:124: Done TValidateUniqueIndexScan Id: 1 Status: DONE Issues: Id: 1 TabletId: 72075186224037889 Status: DONE RequestSeqNoGeneration: 42 RequestSeqNoRound: 47 MeteringStats { ReadRows: 2 ReadBytes: 37 } 2025-12-23T18:33:00.334537Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:33:00.334654Z node 50 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: 
GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:33:00.334898Z node 50 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:33:00.335037Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:33:00.573008Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037889 2025-12-23T18:33:00.573309Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:6] at 72075186224037889, row count=3 2025-12-23T18:33:00.585277Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:33:00.595479Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1278:3037], serverId# [50:1279:3038], sessionId# [0:0:0] 2025-12-23T18:33:00.595971Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:246: Starting TValidateUniqueIndexScan TabletId: 72075186224037889 Id: 1 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 43 IndexColumns: "key_part1" IndexColumns: "key_part2" SeqNoGeneration: 42 SeqNoRound: 48 2025-12-23T18:33:00.596166Z node 50 :BUILD_INDEX INFO: unique_index.cpp:51: Create TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:33:00.597122Z node 50 :BUILD_INDEX INFO: unique_index.cpp:58: Prepare TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:33:00.597295Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:63: Seek 0 TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:33:00.597592Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:134: Exhausted TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2025-12-23T18:33:00.597923Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:124: Done TValidateUniqueIndexScan Id: 1 Status: DONE Issues: Id: 1 TabletId: 72075186224037889 Status: DONE RequestSeqNoGeneration: 42 RequestSeqNoRound: 48 MeteringStats { ReadRows: 5 ReadBytes: 45 } 2025-12-23T18:33:00.598699Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:33:00.598823Z node 50 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:33:00.598931Z node 50 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:33:00.599048Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 |92.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166515324.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=166515324.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515324.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=146515324.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146514124.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=146514124.000000s;Name=;Codec=}; 2025-12-23T18:32:06.523818Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:32:06.561264Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:32:06.561572Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:32:06.569666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:32:06.569981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:32:06.570284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:32:06.570458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:32:06.570623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:32:06.570789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:32:06.570918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:32:06.571036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:32:06.571167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:32:06.571292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:32:06.571415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:32:06.571556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:32:06.571678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:32:06.619445Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:32:06.619642Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:32:06.619707Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:32:06.619896Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:32:06.620065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:32:06.620161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:32:06.620214Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:32:06.620320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:32:06.620389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:32:06.620450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:32:06.620490Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:32:06.620658Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:32:06.620725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:32:06.620768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:32:06.620800Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:32:06.620894Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:32:06.620950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:32:06.621034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:32:06.621078Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:32:06.621154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:32:06.621198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:32:06.621228Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:32:06.621270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:32:06.621318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:32:06.621347Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:32:06.621567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:32:06.621654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:32:06.621694Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:32:06.621820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:32:06.621876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:32:06.621909Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:32:06.621964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:32:06.622011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:32:06.622046Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:32:06.622100Z node 1 :TX_COLUM ... d;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=7; 2025-12-23T18:32:55.833169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=64; 2025-12-23T18:32:55.833192Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2573; 2025-12-23T18:32:55.833232Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2659; 2025-12-23T18:32:55.833279Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2025-12-23T18:32:55.833340Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=24; 2025-12-23T18:32:55.833372Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=3195; 2025-12-23T18:32:55.833498Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=82; 2025-12-23T18:32:55.833596Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=56; 2025-12-23T18:32:55.833703Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=69; 2025-12-23T18:32:55.833799Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=62; 2025-12-23T18:32:55.839031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5150; 2025-12-23T18:32:55.841699Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2519; 2025-12-23T18:32:55.841801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-12-23T18:32:55.841851Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-12-23T18:32:55.841888Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:32:55.841958Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-12-23T18:32:55.841997Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:32:55.842074Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2025-12-23T18:32:55.842149Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-12-23T18:32:55.842234Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2025-12-23T18:32:55.842322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=46; 
2025-12-23T18:32:55.842578Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=218; 2025-12-23T18:32:55.842629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=23013; 2025-12-23T18:32:55.842775Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:32:55.842883Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:32:55.842943Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:32:55.843009Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:32:55.853048Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=1; 2025-12-23T18:32:55.853233Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:32:55.853336Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2025-12-23T18:32:55.853400Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766514464050;tx_id=18446744073709551615;;current_snapshot_ts=1766514751937; 2025-12-23T18:32:55.853458Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:32:55.853501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:32:55.853528Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:32:55.853591Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:32:55.853757Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.137000s; 2025-12-23T18:32:55.854805Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 
2025-12-23T18:32:55.854926Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:32:55.854954Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:32:55.855001Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2025-12-23T18:32:55.855039Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766514464050;tx_id=18446744073709551615;;current_snapshot_ts=1766514751937; 2025-12-23T18:32:55.855081Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:32:55.855113Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:32:55.855150Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:32:55.855228Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:32:55.856759Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.106000s; 2025-12-23T18:32:55.856820Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] 
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:114:2144] 2025-12-23T18:32:59.773262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:32:59.773360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:59.773418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:32:59.773463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:32:59.773517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:32:59.773545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:32:59.773627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:59.773703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:32:59.774646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:59.774979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:32:59.935678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:32:59.935773Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:59.936610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:59.965718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:32:59.965976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:32:59.966166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:32:59.998325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:32:59.998671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:00.002633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:00.003731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:00.020190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:00.020426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:00.021967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:00.022041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:00.022194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:00.022252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:00.024758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:00.025073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:00.260896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.261950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.262081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.262142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.262215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.262278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.262364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.262459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.262538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.262615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.262691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.262799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.262868Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.263024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:00.263115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModif ... eshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:01.218855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_data_source.cpp:34: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000039 2025-12-23T18:33:01.218984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2025-12-23T18:33:01.219155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:33:01.219228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:01.220317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:33:01.220855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:33:01.222485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:01.222549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:01.222731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:33:01.222871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:33:01.223031Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:01.223076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-12-23T18:33:01.223116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:33:01.223141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:33:01.223595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:33:01.223660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:33:01.223769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:33:01.223809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:01.223857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:33:01.223894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:01.223937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:33:01.223982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:01.224018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:33:01.224059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:33:01.224150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:33:01.224191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:33:01.224226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-12-23T18:33:01.224295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2025-12-23T18:33:01.228895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:01.229008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:01.229051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:01.229102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-12-23T18:33:01.229159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:33:01.229994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:01.230095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:01.230202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:01.230242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:33:01.230282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:01.230352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:33:01.237006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:33:01.237405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:33:01.237608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:33:01.237647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:33:01.238104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:33:01.238217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.238253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:745:2734] TestWaitNotification: OK eventTxId 102 2025-12-23T18:33:01.238718Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:01.239022Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 289us result status StatusSuccess 2025-12-23T18:33:01.239376Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> IncrementalBackup::ForgedMultiShardIncrementalRestore-WithIncremental [GOOD] >> IncrementalBackup::E2EMultipleBackupRestoreCycles |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] Test command err: 2025-12-23T18:32:54.035710Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123185921299398:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:54.035756Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:32:54.091589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00326a/r3tmp/tmptirCn2/pdisk_1.dat 2025-12-23T18:32:54.572410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:54.572538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:54.622450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:32:54.646780Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123181626332077:2081] 1766514774032985 != 1766514774032988 2025-12-23T18:32:54.659311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:54.660012Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:54.871698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:11437 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514774731 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-12-23T18:32:55.051364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:55.060753Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:32:56.081924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:56.408220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:58.993118Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123200598475945:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:58.993221Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00326a/r3tmp/tmpgpUYHp/pdisk_1.dat 2025-12-23T18:32:59.036968Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:32:59.038828Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123185921299398:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:59.038898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:59.182949Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:59.184331Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587123200598475919:2081] 1766514778991772 != 1766514778991775 2025-12-23T18:32:59.201023Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:59.201101Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:59.236080Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:59.309468Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:33:00.018609Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21480 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514779288 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:33:00.346480Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:33:01.359366Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:03.076113Z node 2 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [2:7587123222073313415:2595], ActorId: [2:7587123222073313416:2595], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=2&id=MTczNGY2YmQtYmNkMjhmMzUtOWIyNWZiZTgtOWY4MWFmNjM=, TxId: 01kd67rwtye6yf427qgr1d26g9 2025-12-23T18:33:03.078323Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123222073313437:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:03.078402Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:03.078804Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123222073313450:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:03.078864Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:33:11.493274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:11.493377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:11.493419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:11.493460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:11.493513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:11.493546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:11.493639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:11.493748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:11.494769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:11.495126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:11.622946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:33:11.623048Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:11.623968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:11.640349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:11.640531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:11.640738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:11.649095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:11.649380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:11.650233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:11.650635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:11.653532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:11.653746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:11.655070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:11.655142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:11.655282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:11.655333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:11.655378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:11.655573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:11.823536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.824361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.824507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.824574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.824624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.824688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.824744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.824785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.824825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.824869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.824923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 
281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.824982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.825026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.825085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.825177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... rRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2025-12-23T18:33:12.619529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:12.619649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:12.619712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000041 2025-12-23T18:33:12.619827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:12.619950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2025-12-23T18:33:12.620134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:33:12.620214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:33:12.620903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:33:12.622833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:33:12.624034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:12.624098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:12.624279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:33:12.624435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:12.624489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-12-23T18:33:12.624547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 FAKE_COORDINATOR: Erasing txId 104 2025-12-23T18:33:12.624826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:33:12.624872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:33:12.624987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:33:12.625028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:33:12.625071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:33:12.625122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:33:12.625178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-12-23T18:33:12.625226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:33:12.625273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:33:12.625314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:33:12.625392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:12.625452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, 
publications: 2, subscribers: 0 2025-12-23T18:33:12.625490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-12-23T18:33:12.625524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2025-12-23T18:33:12.625973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:33:12.626099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:33:12.626149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:33:12.626202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2025-12-23T18:33:12.626248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:33:12.626557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:33:12.626601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:33:12.626674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:33:12.626932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:33:12.627036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:33:12.627073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:33:12.627148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-12-23T18:33:12.627191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:33:12.627263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: 
Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:33:12.631436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:33:12.631550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:33:12.631609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:33:12.631910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:33:12.631966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:33:12.632477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:33:12.632568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:33:12.632627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:792:2782] TestWaitNotification: OK eventTxId 104 2025-12-23T18:33:12.633299Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:12.633549Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 257us result status StatusPathDoesNotExist 2025-12-23T18:33:12.633714Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] 
recipient: [1:113:2144] 2025-12-23T18:32:59.182351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:32:59.182436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:59.182471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:32:59.182506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:32:59.182569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:32:59.182616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:32:59.182674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:32:59.182797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:32:59.183804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:59.184103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:32:59.327180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:32:59.327269Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:59.328125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:32:59.347501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:32:59.347687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:32:59.347924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:32:59.360593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:32:59.360878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:32:59.361650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:32:59.362034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:32:59.364900Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:59.365107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:32:59.366275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:32:59.366349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:32:59.366473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:32:59.366536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:32:59.366595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:32:59.366827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:32:59.540986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.541760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.541916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.541963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.542037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables 
} } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.542097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.542153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.542196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.542241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.542282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.542342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.542392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.542453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.542558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:32:59.542613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... onResult 2025-12-23T18:33:01.914239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.914315Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 117, at schemeshard: 72057594046678944 2025-12-23T18:33:01.914412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 118, at schemeshard: 72057594046678944 2025-12-23T18:33:01.914480Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 119, at schemeshard: 72057594046678944 2025-12-23T18:33:01.914540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.914562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.914676Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2025-12-23T18:33:01.914782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.914816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.914890Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2025-12-23T18:33:01.914998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.915022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.915136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2025-12-23T18:33:01.915189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2025-12-23T18:33:01.915255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- 
TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.915276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.915334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-12-23T18:33:01.915412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.915445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.915555Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-12-23T18:33:01.915601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.915623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.915702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.915722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.915797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.915819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.915884Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-12-23T18:33:01.915957Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-12-23T18:33:01.915998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.916021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.916149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.916176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.916245Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2025-12-23T18:33:01.916356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.916385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.916473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.916493Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.916580Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-12-23T18:33:01.916663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.916686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.916823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.916848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.916996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.917022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.917066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.917089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.917245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.917270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [2:805:2795] 2025-12-23T18:33:01.917405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-12-23T18:33:01.917428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [2:805:2795] TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 2025-12-23T18:33:01.921203Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:01.921436Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 282us result status StatusSuccess 2025-12-23T18:33:01.921812Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/other_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] >> TExternalDataSourceTest::ReadOnlyMode [GOOD] |92.0%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::DropExternalDataSource |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/build_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::Bench_JoinFilter [GOOD] Test command err: Trying to start YDB, gRPC: 9777, MsgBus: 12462 2025-12-23T18:32:42.608909Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123133862917635:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:42.610834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:32:42.649844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d27/r3tmp/tmpuK6jRs/pdisk_1.dat 2025-12-23T18:32:42.958787Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:42.978008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:42.978110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:43.008107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:43.102534Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9777, node 1 2025-12-23T18:32:43.235645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:43.235677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:43.235687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:43.235833Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:43.244142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12462 2025-12-23T18:32:43.618173Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514763118 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:32:43.764498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:43.779268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:32:46.451176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123151042787644:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:46.451297Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:46.451618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123151042787652:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:46.451699Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123151042787658:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:46.451997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:46.456296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:46.471711Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123151042787660:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:32:46.539558Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123151042787711:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:32:47.611377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123133862917635:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:47.611469Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7163, MsgBus: 1541 2025-12-23T18:32:48.274934Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123159006652879:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:48.275006Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:32:48.332509Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d27/r3tmp/tmph6D8bn/pdisk_1.dat 2025-12-23T18:32:48.428724Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:48.429215Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587123159006652841:2081] 1766514768265029 != 1766514768265032 2025-12-23T18:32:48.460477Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:48.460565Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:48.475097Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7163, node 2 2025-12-23T18:32:48.617261Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:48.715404Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:48.715425Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:48.715433Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:48.715501Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1541 TClient is connected to server localhost:1541 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514768522 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:49.213844Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:49.223546Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:32:49.287124Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:32:52.311935Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [ ... LOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:58.716282Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123199927565719:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:58.716318Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123199927565720:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:58.716421Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:58.720267Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:58.732712Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587123199927565723:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:32:58.809980Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587123199927565774:2637] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:33:00.011240Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587123187042662804:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:00.011322Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21361, MsgBus: 9921 2025-12-23T18:33:01.134300Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587123215148174010:2146];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:01.134363Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d27/r3tmp/tmpB7zyd7/pdisk_1.dat 2025-12-23T18:33:01.152702Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:01.325345Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:01.351716Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:01.365933Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:01.366032Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:01.373717Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21361, node 4 2025-12-23T18:33:01.543347Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:01.543375Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:01.543384Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:01.543497Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:33:01.720062Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9921 TClient is connected to server localhost:9921 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514781416 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:02.061238Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:33:02.140378Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:05.340322Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123232328043965:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:05.340437Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:05.343286Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123232328043975:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:05.343406Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:05.379273Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:05.483998Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:05.664758Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123232328044139:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:05.664861Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:05.665087Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123232328044144:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:05.665451Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123232328044146:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:05.665526Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:05.670919Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:33:05.683137Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587123232328044147:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:33:05.751497Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587123232328044199:2639] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:33:06.134034Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587123215148174010:2146];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:06.134126Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:32:09.129181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:09.366283Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:09.394415Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:32:09.394858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:32:09.394981Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:32:10.122550Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:10.304988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:10.305161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:10.344966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:10.376107Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004a4a/r3tmp/tmphTXrO9/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8553, node 1 2025-12-23T18:32:11.434840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:11.436208Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:11.436273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:11.436315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:11.437082Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:11.524079Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:61583 2025-12-23T18:32:12.170123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:32:16.736663Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:16.745102Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:32:16.754190Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:16.832332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:16.832456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:16.892865Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:32:16.899286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:17.194918Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:17.195696Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:17.196433Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:17.197443Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:17.197731Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:17.197934Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:17.198139Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:17.198353Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:17.198538Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:32:17.388259Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:32:17.456723Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:17.456851Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:17.472567Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:17.666500Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:17.749367Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:32:17.749497Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:32:17.789144Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:32:17.790815Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:32:17.791153Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:32:17.791228Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:32:17.791305Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:32:17.791368Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:32:17.791431Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:32:17.791493Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:32:17.795762Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:32:17.958085Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:32:17.958389Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:32:17.986954Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:32:17.988334Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:32:18.187177Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:32:18.187706Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:32:18.190282Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:32:18.190346Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:32:18.190461Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:32:18.197192Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:32:18.220563Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:18.230858Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:32:18.231010Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:32:18.252529Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:32:18.571884Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:32:19.139286Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:32:19.266717Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:32:19.266851Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:26 ... ner id: 72075186224037897 2025-12-23T18:32:57.067460Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2025-12-23T18:32:57.067513Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1766514776912797 2025-12-23T18:32:57.067561Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-12-23T18:32:57.067604Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2025-12-23T18:32:57.067710Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-12-23T18:32:57.067799Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:32:57.067921Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-12-23T18:32:57.067999Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:32:57.068069Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:32:57.068159Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:32:57.068374Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:32:57.069781Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:32:57.070314Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5651:4800] Owner: [2:5650:4799]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:32:57.070391Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5651:4800] Owner: [2:5650:4799]. Column diff is empty, finishing 2025-12-23T18:32:57.070975Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-12-23T18:32:57.071099Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete ... 
blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR cookie 0 2025-12-23T18:32:57.094124Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5660:4807] 2025-12-23T18:32:57.094283Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5616:4779], server id = [2:5660:4807], tablet id = 72075186224037894, status = OK 2025-12-23T18:32:57.094518Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5660:4807], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-12-23T18:32:57.094897Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5661:4808] 2025-12-23T18:32:57.095024Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5661:4808], schemeshard id = 72075186224037897 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2025-12-23T18:32:57.240186Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2025-12-23T18:32:57.240319Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2025-12-23T18:32:57.241792Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-12-23T18:32:57.266113Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:32:57.266441Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-12-23T18:32:57.273265Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5668:4815], server id = [2:5672:4819], tablet id = 72075186224037899, status = OK 2025-12-23T18:32:57.273716Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5668:4815], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:32:57.274241Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5669:4816], server id = [2:5673:4820], tablet id = 72075186224037900, status = OK 2025-12-23T18:32:57.274357Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5669:4816], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:32:57.275756Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5670:4817], server id = [2:5674:4821], tablet id = 72075186224037901, status = OK 2025-12-23T18:32:57.275821Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5670:4817], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:32:57.276427Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5671:4818], server id = [2:5675:4822], tablet id = 72075186224037902, status = OK 2025-12-23T18:32:57.276488Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5671:4818], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:32:57.290487Z node 2 
:STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-12-23T18:32:57.296043Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5668:4815], server id = [2:5672:4819], tablet id = 72075186224037899 2025-12-23T18:32:57.296131Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:32:57.297256Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:32:57.297757Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5669:4816], server id = [2:5673:4820], tablet id = 72075186224037900 2025-12-23T18:32:57.297794Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:32:57.298169Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-12-23T18:32:57.303219Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5671:4818], server id = [2:5675:4822], tablet id = 72075186224037902 2025-12-23T18:32:57.303277Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:32:57.303631Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-12-23T18:32:57.303695Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:32:57.303995Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-12-23T18:32:57.304177Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:32:57.304480Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5688:4831], ActorId: [2:5689:4832], Starting query actor #1 [2:5690:4833] 2025-12-23T18:32:57.304551Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5689:4832], ActorId: [2:5690:4833], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:32:57.308778Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5670:4817], server id = [2:5674:4821], tablet id = 72075186224037901 2025-12-23T18:32:57.308839Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:32:57.310101Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5689:4832], ActorId: [2:5690:4833], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YzliNTliYTItZjU3Zjc0NTgtOGZkZDU5ODEtZjUzZjU1YWQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:32:57.490812Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5699:4842]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:32:57.491202Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:32:57.491267Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5699:4842], StatRequests.size() = 1 2025-12-23T18:32:57.872882Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5689:4832], ActorId: [2:5690:4833], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzliNTliYTItZjU3Zjc0NTgtOGZkZDU5ODEtZjUzZjU1YWQ=, TxId: 2025-12-23T18:32:57.873007Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5689:4832], ActorId: [2:5690:4833], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzliNTliYTItZjU3Zjc0NTgtOGZkZDU5ODEtZjUzZjU1YWQ=, TxId: 2025-12-23T18:32:57.873523Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5688:4831], ActorId: [2:5689:4832], Got response [2:5690:4833] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-12-23T18:32:57.874068Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5713:4848]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:32:57.874426Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:32:57.885409Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:32:57.885508Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:32:57.886758Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:32:57.886838Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:32:57.886908Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:32:57.891418Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166515318.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166515318.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=166515318.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515318.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166515318.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=166515318.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146514118.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146515318.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=146515318.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146514118.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146514118.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=146514118.000000s;Name=;Codec=}; 2025-12-23T18:31:58.827871Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:31:58.857997Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:31:58.858272Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:31:58.865669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:31:58.865948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:31:58.866155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:31:58.866282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:31:58.866402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:31:58.866533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:31:58.866642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:31:58.866804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:31:58.866943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:31:58.867065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:31:58.867160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:31:58.867246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:31:58.867338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:31:58.908245Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:31:58.908439Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:31:58.908520Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:31:58.908713Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:58.908872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:31:58.908962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:31:58.909009Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:31:58.909103Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:31:58.909173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:31:58.909238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:31:58.909279Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:31:58.909460Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:58.909525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:31:58.909568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:31:58.909600Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:31:58.909696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:31:58.909750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:31:58.909798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:31:58.909853Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:31:58.909931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:31:58.909975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:31:58.910009Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:31:58.910077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:31:58.910119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:31:58.910150Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:31:58.910356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:31:58.910453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:31:58.910494Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:31:58.910619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:31:58.910691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:31:58.910 ... 
ground=cleanup;skip_reason=no_changes; 2025-12-23T18:32:56.807904Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:32:56.808100Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766514767137:max} readable: {1766514767137:max} at tablet 9437184 2025-12-23T18:32:56.808221Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:32:56.808392Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766514767137:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:32:56.808454Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766514767137:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:32:56.808973Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766514767137:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:32:56.810825Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766514767137:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:32:56.811889Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766514767137:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1457:3401];trace_detailed=; 2025-12-23T18:32:56.812417Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:32:56.812662Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:32:56.812931Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:56.813121Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:56.813565Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:32:56.813759Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:56.813911Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:56.814137Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1457:3401] finished for tablet 9437184 2025-12-23T18:32:56.814670Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1456:3400];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":62442693,"name":"_full_task","f":62442693,"d_finished":0,"c":0,"l":62445116,"d":2423},"events":[{"name":"bootstrap","f":62442996,"d_finished":1066,"c":1,"l":62444062,"d":1066},{"a":62444442,"name":"ack","f":62444442,"d_finished":0,"c":0,"l":62445116,"d":674},{"a":62444421,"name":"processing","f":62444421,"d_finished":0,"c":0,"l":62445116,"d":695},{"name":"ProduceResults","f":62443716,"d_finished":676,"c":2,"l":62444839,"d":676},{"a":62444844,"name":"Finish","f":62444844,"d_finished":0,"c":0,"l":62445116,"d":272}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:32:56.814809Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1456:3400];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:32:56.815319Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1456:3400];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":62442693,"name":"_full_task","f":62442693,"d_finished":0,"c":0,"l":62445774,"d":3081},"events":[{"name":"bootstrap","f":62442996,"d_finished":1066,"c":1,"l":62444062,"d":1066},{"a":62444442,"name":"ack","f":62444442,"d_finished":0,"c":0,"l":62445774,"d":1332},{"a":62444421,"name":"processing","f":62444421,"d_finished":0,"c":0,"l":62445774,"d":1353},{"name":"ProduceResults","f":62443716,"d_finished":676,"c":2,"l":62444839,"d":676},{"a":62444844,"name":"Finish","f":62444844,"d_finished":0,"c":0,"l":62445774,"d":930}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1457:3401]->[1:1456:3400] 2025-12-23T18:32:56.815435Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:32:56.810736Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:32:56.815485Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:32:56.815697Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query [GOOD] Test command err: 2025-12-23T18:28:54.696075Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122153142686915:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:54.696138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005e5e/r3tmp/tmpFpY147/pdisk_1.dat 2025-12-23T18:28:54.818046Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:28:55.322864Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:55.391767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:28:55.391893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:28:55.570849Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122153142686860:2081] 1766514534667049 != 1766514534667052 2025-12-23T18:28:55.572549Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:28:55.582814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:28:55.612093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10049, node 1 2025-12-23T18:28:55.779982Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:28:55.871548Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005e5e/r3tmp/yandexUyiDpx.tmp 2025-12-23T18:28:55.871572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005e5e/r3tmp/yandexUyiDpx.tmp 2025-12-23T18:28:55.871776Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005e5e/r3tmp/yandexUyiDpx.tmp 2025-12-23T18:28:55.871886Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:28:55.966727Z INFO: TTestServer started on Port 23041 GrpcPort 10049 2025-12-23T18:28:56.097492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23041 PQClient connected to localhost:10049 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514535765 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:28:56.826499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:28:56.855894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:28:56.866543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:28:56.872918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:28:59.699464Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587122153142686915:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:28:59.699546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:29:01.987270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122183207458966:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:01.987464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:01.987957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122183207459003:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:01.995339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122183207459004:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:01.995464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:02.024817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:29:02.039141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122187502426335:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:02.039252Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:02.041672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587122187502426339:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:02.044154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:29:02.071702Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587122183207459007:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:29:02.144735Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587122187502426362:2663] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:29:02.895573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:29:02.908577Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587122187502426371:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:29:02.915605Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MjQ0YTFlYWMtZWE4OTY5NjUtODVkNjMyMWItZmUyMjZjMDU=, ActorId: [1:7587122183207458962:2333], ActorState: ExecuteState, TraceId: 01kd67hhbx27r4a9bdcrbw7czm, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:29:02.924821Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\ ... Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:32:45.289091Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.289108Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:32:45.289132Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.289152Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:32:45.300406Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:32:45.300448Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.300467Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:32:45.300496Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.300516Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:32:45.370105Z :INFO: [/Root] [/Root] [7219aaec-ca156891-1388e7c6-cf732f8a] Closing read session. 
Close timeout: 0.000000s 2025-12-23T18:32:45.370169Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:11:12 2025-12-23T18:32:45.370231Z :INFO: [/Root] [/Root] [7219aaec-ca156891-1388e7c6-cf732f8a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2091 BytesRead: 15000000 MessagesRead: 12 BytesReadCompressed: 15000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:32:45.370329Z :NOTICE: [/Root] [/Root] [7219aaec-ca156891-1388e7c6-cf732f8a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T18:32:45.370380Z :DEBUG: [/Root] [/Root] [7219aaec-ca156891-1388e7c6-cf732f8a] [] Abort session to cluster 2025-12-23T18:32:45.370937Z :DEBUG: [/Root] 0x00007D1299B2B590 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_11902992963975888442_v1 Close 2025-12-23T18:32:45.371333Z :DEBUG: [/Root] 0x00007D1299B2B590 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_11902992963975888442_v1 Close 2025-12-23T18:32:45.371463Z :NOTICE: [/Root] [/Root] [7219aaec-ca156891-1388e7c6-cf732f8a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:32:45.372087Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|75a1e0d2-cb64730c-e4875076-1b05e133_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2025-12-23T18:32:45.372136Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|75a1e0d2-cb64730c-e4875076-1b05e133_0] PartitionId [0] Generation [2] Write session will now close 2025-12-23T18:32:45.372184Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|75a1e0d2-cb64730c-e4875076-1b05e133_0] PartitionId [0] Generation [2] Write session: aborting 2025-12-23T18:32:45.372594Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|75a1e0d2-cb64730c-e4875076-1b05e133_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2025-12-23T18:32:45.372635Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|75a1e0d2-cb64730c-e4875076-1b05e133_0] PartitionId [0] Generation [2] Write session: destroy 2025-12-23T18:32:45.374205Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_13_1_11902992963975888442_v1 grpc read done: success# 0, data# { } 2025-12-23T18:32:45.374235Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_13_1_11902992963975888442_v1 grpc read failed 2025-12-23T18:32:45.374231Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [13:7587123136683350412:2541]: session cookie 2 consumer test-consumer session test-consumer_13_1_11902992963975888442_v1 grpc read done: success# 0, data# { } 2025-12-23T18:32:45.374257Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [13:7587123136683350412:2541]: session cookie 2 consumer test-consumer session test-consumer_13_1_11902992963975888442_v1grpc read failed 2025-12-23T18:32:45.374270Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_13_1_11902992963975888442_v1 grpc closed 2025-12-23T18:32:45.374304Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [13:7587123136683350412:2541]: session cookie 2 consumer test-consumer session test-consumer_13_1_11902992963975888442_v1 grpc closed 2025-12-23T18:32:45.374304Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_13_1_11902992963975888442_v1 is DEAD 2025-12-23T18:32:45.374324Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [13:7587123136683350412:2541]: session cookie 2 consumer test-consumer session test-consumer_13_1_11902992963975888442_v1 proxy is DEAD 2025-12-23T18:32:45.374937Z node 13 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|75a1e0d2-cb64730c-e4875076-1b05e133_0 grpc read done: success: 0 data: 2025-12-23T18:32:45.374959Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|75a1e0d2-cb64730c-e4875076-1b05e133_0 grpc read failed 2025-12-23T18:32:45.375000Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|75a1e0d2-cb64730c-e4875076-1b05e133_0 grpc closed 2025-12-23T18:32:45.375044Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|75a1e0d2-cb64730c-e4875076-1b05e133_0 is DEAD 2025-12-23T18:32:45.375339Z node 
13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [13:7587123136683350404:2536] disconnected. 2025-12-23T18:32:45.375373Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [13:7587123136683350404:2536] disconnected; active server actors: 1 2025-12-23T18:32:45.375392Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [13:7587123136683350404:2536] client test-consumer disconnected session test-consumer_13_1_11902992963975888442_v1 2025-12-23T18:32:45.375518Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_11902992963975888442_v1 2025-12-23T18:32:45.375531Z node 13 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:32:45.375554Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037894] server disconnected, pipe [13:7587123136683350407:2539] destroyed 2025-12-23T18:32:45.375589Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_13_1_11902992963975888442_v1 2025-12-23T18:32:45.376338Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037894] server disconnected, pipe [13:7587123128093415744:2516] destroyed 2025-12-23T18:32:45.376381Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 2025-12-23T18:32:45.376413Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:32:45.376433Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.376448Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:32:45.376469Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.376481Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:32:45.389457Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:32:45.389490Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.389503Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:32:45.389520Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.389534Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:32:45.400672Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:32:45.400711Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.400728Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:32:45.400753Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.400774Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:32:45.489842Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:32:45.489882Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.489900Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:32:45.489927Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.489947Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:32:45.501063Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:32:45.501103Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.501122Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:32:45.501147Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:32:45.501167Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist |92.0%| [TM] {RESULT} ydb/core/tx/datashard/build_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2025-12-23T18:32:40.064649Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123125467224083:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:40.064862Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005212/r3tmp/tmp8SoQxk/pdisk_1.dat 2025-12-23T18:32:40.394243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:40.513541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:40.513662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:40.561991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:40.599228Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2025-12-23T18:32:40.620986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 17349, node 1 2025-12-23T18:32:40.714940Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.009286s 2025-12-23T18:32:40.715065Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007162s 2025-12-23T18:32:40.827701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:40.827742Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:40.827756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:40.827890Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26167 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-12-23T18:32:41.083661Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514760668 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:41.262441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:26167 2025-12-23T18:32:43.929074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123138352127259:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:43.929235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:43.929548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123138352127269:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:43.929598Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.309510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:44.526867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123142647094731:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.526984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.527287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123142647094734:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.527325Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.545013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766514764462 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766514764462 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) 2025-12-23T18:32:44.749677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123142647094841:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.749864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.750363Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123142647094862:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.750419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123142647094863:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.750438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123142647094860:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.750459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123142647094865:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.750510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123142647094867:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.750581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService ... ode: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } 2025-12-23T18:33:05.800593Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:876: SelfId: [1:7587123232841476754:2365], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7587123142647094818:2365]Got OVERLOADED for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7587123232841476754:2365]. Ignored this error.{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } 2025-12-23T18:33:05.800673Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587123232841476717:2365], SessionActorId: [1:7587123142647094818:2365], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7587123142647094818:2365]. 2025-12-23T18:33:05.800802Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=ZDIyN2MwYTMtNzA3ODU1YzgtY2MxZTMyYjQtZjljNjE0OWM=, ActorId: [1:7587123142647094818:2365], ActorState: ExecuteState, TraceId: 01kd67rzfefqm4745re1az7f40, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7587123232841476718:2365] from: [1:7587123232841476717:2365] 2025-12-23T18:33:05.800885Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587123232841476718:2365] TxId: 281474976721268. Ctx: { TraceId: 01kd67rzfefqm4745re1az7f40, Database: , SessionId: ydb://session/3?node_id=1&id=ZDIyN2MwYTMtNzA3ODU1YzgtY2MxZTMyYjQtZjljNjE0OWM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. OVERLOADED: {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } 2025-12-23T18:33:05.801101Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=ZDIyN2MwYTMtNzA3ODU1YzgtY2MxZTMyYjQtZjljNjE0OWM=, ActorId: [1:7587123142647094818:2365], ActorState: ExecuteState, TraceId: 01kd67rzfefqm4745re1az7f40, Create QueryResponse for error on request, msg: , status: OVERLOADED, issues: { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } 2025-12-23T18:33:05.801623Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:876: SelfId: [1:7587123232841476758:2380], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7587123142647094833:2380]Got OVERLOADED for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7587123232841476758:2380]. Ignored this error.{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } 2025-12-23T18:33:05.801690Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587123232841476719:2380], SessionActorId: [1:7587123142647094833:2380], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7587123142647094833:2380]. 2025-12-23T18:33:05.801810Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=ZjY4MGQ4NjEtYmZjZTE3Y2MtZjVmNzBmOTMtODlkZDA0Njc=, ActorId: [1:7587123142647094833:2380], ActorState: ExecuteState, TraceId: 01kd67rzfe6p10zn0hnr7fhkdk, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7587123232841476720:2380] from: [1:7587123232841476719:2380] 2025-12-23T18:33:05.801864Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587123232841476720:2380] TxId: 281474976721269. Ctx: { TraceId: 01kd67rzfe6p10zn0hnr7fhkdk, Database: , SessionId: ydb://session/3?node_id=1&id=ZjY4MGQ4NjEtYmZjZTE3Y2MtZjVmNzBmOTMtODlkZDA0Njc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. OVERLOADED: {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } 2025-12-23T18:33:05.802056Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=ZjY4MGQ4NjEtYmZjZTE3Y2MtZjVmNzBmOTMtODlkZDA0Njc=, ActorId: [1:7587123142647094833:2380], ActorState: ExecuteState, TraceId: 01kd67rzfe6p10zn0hnr7fhkdk, Create QueryResponse for error on request, msg: , status: OVERLOADED, issues: { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } 2025-12-23T18:33:05.804270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710693:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710693 TabletId: 72075186224037888 2025-12-23T18:33:05.804609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710693:0 131 -> 132 2025-12-23T18:33:05.806572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:33:05.806825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:33:05.806897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710693:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:33:05.807749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976710693 2025-12-23T18:33:05.807792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710693 2025-12-23T18:33:05.807804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2025-12-23T18:33:05.817206Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2025-12-23T18:33:05.819085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-12-23T18:33:05.819185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2025-12-23T18:33:05.819211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2025-12-23T18:33:05.819262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710693:0 
2025-12-23T18:33:05.821793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710693:0 2025-12-23T18:33:05.822061Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:33:05.822510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:33:05.827737Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-12-23T18:33:05.828474Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2025-12-23T18:33:05.828593Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-12-23T18:33:05.829506Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766514764462 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766514764462 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... 
(TRUNCATED) Table has 2 shards >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:33:11.041577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:11.041652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:11.041687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:11.041713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:11.041746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:11.041784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:11.041843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:11.041893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:11.042635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:11.042985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:11.161195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:33:11.161971Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:11.164880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:11.188271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:11.188495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:11.188712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:11.199684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:11.200097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:11.201046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:11.201523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:11.205407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:11.205676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:11.207203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:11.207277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:11.207449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:11.207505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:11.207594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:11.207847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:11.390239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.391252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.391402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.391493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.391579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.391666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.391751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.391813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.391875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.391933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.392013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.392098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.392175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.392248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:11.392374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
ingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:13.723344Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:13.723580Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 254us result status StatusSuccess 2025-12-23T18:33:13.723940Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 
MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:13.724856Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:13.725037Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 203us result status StatusSuccess 2025-12-23T18:33:13.725487Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:13.726043Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-12-23T18:33:13.726241Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 221us result status StatusSuccess 2025-12-23T18:33:13.726561Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:13.727143Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:13.727309Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 191us result status StatusSuccess 2025-12-23T18:33:13.727664Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |92.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |92.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |92.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |92.0%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] 
sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:33:13.492291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:13.492377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:13.492427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:13.492475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:13.492530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:13.492560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:13.492616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:13.492721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:13.493545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:13.493821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:13.644548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:33:13.644629Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:13.645545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:13.680934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:13.686967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:13.687178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:13.696040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:13.696264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:13.696986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:13.697674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2025-12-23T18:33:13.701093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:13.701289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:13.702468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:13.702530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:13.702654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:13.702701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:13.702764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:13.702950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:13.871512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.872528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.872680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.872748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.872837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.872904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.872982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.873061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.873133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.873232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.873329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.873395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.873497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.873571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:13.873699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... : 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:33:14.605029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:33:14.605483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:14.605692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:14.605749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:33:14.606073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:33:14.606146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:33:14.606346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:33:14.606507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:33:14.608684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:14.608760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-12-23T18:33:14.609009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:14.609100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:33:14.609454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:33:14.609507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:33:14.609617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:33:14.609654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:33:14.609695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:33:14.609759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:33:14.609812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:33:14.609876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:33:14.609930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:33:14.609976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:33:14.610054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:33:14.610109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:33:14.610150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:33:14.610900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:33:14.611004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:33:14.611047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:33:14.611125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:33:14.611180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:33:14.611282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:33:14.614804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:33:14.615355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:33:14.616773Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:678:2668] Bootstrap 2025-12-23T18:33:14.618053Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:678:2668] Become StateWork (SchemeCache [1:683:2673]) 2025-12-23T18:33:14.621609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:33:14.621950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-12-23T18:33:14.622041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-12-23T18:33:14.622100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-12-23T18:33:14.623824Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:678:2668] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:33:14.626533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:14.626876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2025-12-23T18:33:14.627434Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle 
TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:33:14.627692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:33:14.627743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:33:14.628170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:33:14.628277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:33:14.628328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:693:2683] TestWaitNotification: OK eventTxId 101 2025-12-23T18:33:14.628847Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:14.629092Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 265us result status StatusPathDoesNotExist 2025-12-23T18:33:14.629334Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.0%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest |92.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |92.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> KqpRboPg::FallbackToYqlDisabled [GOOD] |92.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |92.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |92.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:33:12.479706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:12.479818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:12.479860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:12.479897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:12.479944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:12.479990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:12.480057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:12.480143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:12.481282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:12.481633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:12.612722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:33:12.612829Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2025-12-23T18:33:12.613619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:12.628738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:12.628888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:12.629078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:12.636859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:12.637116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:12.637820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:12.638170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:12.640988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:12.641212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:12.644504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:12.644602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:12.644745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:12.644796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:12.644861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:12.645075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:12.828328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.829449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.829654Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.829736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.829832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.829929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.830020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.830097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.830173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.830248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.830350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.830444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.830587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.830662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.831000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
nByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2025-12-23T18:33:15.299240Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:15.299395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936753 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:15.299461Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000039 2025-12-23T18:33:15.299593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:15.299700Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2025-12-23T18:33:15.299910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:33:15.299989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:33:15.301385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:33:15.301714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:33:15.303303Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:15.303348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:15.303497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:33:15.303646Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:15.303685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-12-23T18:33:15.303757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:33:15.303991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:33:15.304044Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:33:15.304161Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:33:15.304207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:15.304275Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:33:15.304316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:15.304360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:33:15.304410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:15.304451Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:33:15.304488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:33:15.304587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:15.304634Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:33:15.304678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-12-23T18:33:15.304716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2025-12-23T18:33:15.305159Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:15.305278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:15.305319Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:15.305367Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2025-12-23T18:33:15.305416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:33:15.305833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:33:15.305886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:33:15.305965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:33:15.306332Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:15.306431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:15.306462Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:15.306493Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-12-23T18:33:15.306524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:33:15.306604Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:33:15.317769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:33:15.318595Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:33:15.318688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:33:15.318947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:33:15.318996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:33:15.319489Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:33:15.319620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:33:15.319665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:737:2727] TestWaitNotification: OK eventTxId 102 2025-12-23T18:33:15.320199Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:15.320424Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 271us result status StatusPathDoesNotExist 2025-12-23T18:33:15.320620Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.1%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:33:12.289831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:12.289902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:12.289977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:12.290016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:12.290090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:12.290142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:12.290250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:12.290343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:12.291244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:12.291528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:12.428553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:33:12.428630Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:12.429671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:12.442289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:12.447518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:12.447778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:12.455477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:12.455803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:12.456660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:12.457384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:12.459924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:12.460104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:12.461030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:12.461094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:12.461219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:12.461276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:12.461366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:12.461565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2025-12-23T18:33:12.605383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.606506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.606663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.606756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.606897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.606996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.607088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.607163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.607234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.607317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.607416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.607498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.607623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.607703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:12.607847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
fCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:33:15.548648Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:33:15.548732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:33:15.548765Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:33:15.548801Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-12-23T18:33:15.548835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:33:15.549780Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:33:15.549860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:33:15.549892Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:33:15.549921Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:33:15.549952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:15.550030Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:33:15.551581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:33:15.553279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:33:15.553371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:33:15.553580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:33:15.553626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:33:15.554038Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:33:15.554172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:33:15.554215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:739:2729] TestWaitNotification: OK eventTxId 101 2025-12-23T18:33:15.554678Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:15.554934Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 293us result status StatusSuccess 2025-12-23T18:33:15.555299Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-12-23T18:33:15.558923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at 
schemeshard: 72057594046678944 2025-12-23T18:33:15.559111Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_external_data_source.cpp:116: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2025-12-23T18:33:15.559235Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2025-12-23T18:33:15.561961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:15.562240Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:33:15.562576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:33:15.562619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:33:15.563075Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:33:15.563180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:33:15.563222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:747:2737] TestWaitNotification: OK eventTxId 103 2025-12-23T18:33:15.563777Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:15.564073Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 371us result status StatusSuccess 2025-12-23T18:33:15.564458Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { References { Path: "/MyRoot/ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.1%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpRboPg::TestCrossInnerJoin-ColumnStore [GOOD] >> KqpRboPg::TPCH |92.1%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |92.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |92.1%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |92.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |92.1%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |92.1%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |92.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay/ydb_query_replay >> YdbTableSplit::SplitByLoadWithReads [GOOD] |92.1%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |92.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews |92.1%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysViewTest::CreateExistingSysView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false [FAIL] Test command err: Leader for TabletID 72057594046678944 
is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:30:17.775001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:30:17.775093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:17.775134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:30:17.775170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:30:17.775207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:30:17.775242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:30:17.775319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:17.775397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:30:17.776321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:30:17.776638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:30:17.875315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:17.875383Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:17.890546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:30:17.890727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:30:17.890886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:30:17.894943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:30:17.895111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:30:17.895722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:17.895893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2025-12-23T18:30:17.899352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:17.899507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:30:17.900410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:30:17.900475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:17.900566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:30:17.900617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:30:17.900650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:30:17.900755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:30:18.053263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.054366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.054505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.054612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.054680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.054860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.054947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.055039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.055125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.055192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.055304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.055382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.055452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.055533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:18.055621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... /index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 2025-12-23T18:33:05.544892Z node 5 :BUILD_INDEX DEBUG: reshuffle_kmeans.cpp:255: Handle TEvUploadRowsResponse TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 ev->Sender: [5:2567:4317] 2025-12-23T18:33:05.544955Z node 5 :BUILD_INDEX DEBUG: common_helper.h:205: TBatchRowsUploader StartUploadRowsInternal UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 2025-12-23T18:33:05.562080Z node 5 :BUILD_INDEX DEBUG: reshuffle_kmeans.cpp:255: Handle TEvUploadRowsResponse TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 ev->Sender: [5:2575:4325] 2025-12-23T18:33:05.562173Z node 5 :BUILD_INDEX DEBUG: common_helper.h:205: TBatchRowsUploader StartUploadRowsInternal UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 2025-12-23T18:33:05.577096Z node 5 :BUILD_INDEX DEBUG: reshuffle_kmeans.cpp:255: Handle TEvUploadRowsResponse TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 ev->Sender: [5:2583:4333] 2025-12-23T18:33:05.577178Z node 5 :BUILD_INDEX DEBUG: common_helper.h:205: TBatchRowsUploader StartUploadRowsInternal UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 2025-12-23T18:33:05.600974Z node 5 :BUILD_INDEX DEBUG: reshuffle_kmeans.cpp:255: Handle TEvUploadRowsResponse TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 ev->Sender: [5:2589:4339] 2025-12-23T18:33:05.601051Z node 5 :BUILD_INDEX DEBUG: common_helper.h:205: TBatchRowsUploader StartUploadRowsInternal UploadTable: 
/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 2025-12-23T18:33:05.616050Z node 5 :BUILD_INDEX DEBUG: reshuffle_kmeans.cpp:255: Handle TEvUploadRowsResponse TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 ev->Sender: [5:2595:4345] 2025-12-23T18:33:05.616134Z node 5 :BUILD_INDEX DEBUG: common_helper.h:205: TBatchRowsUploader StartUploadRowsInternal UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 2025-12-23T18:33:05.647591Z node 5 :BUILD_INDEX DEBUG: reshuffle_kmeans.cpp:255: Handle TEvUploadRowsResponse TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 ev->Sender: [5:2601:4351] 2025-12-23T18:33:05.647674Z node 5 :BUILD_INDEX DEBUG: common_helper.h:205: TBatchRowsUploader StartUploadRowsInternal UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 2025-12-23T18:33:05.665746Z node 5 :BUILD_INDEX DEBUG: reshuffle_kmeans.cpp:255: Handle TEvUploadRowsResponse TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 ev->Sender: [5:2609:4359] 2025-12-23T18:33:05.665815Z node 5 :BUILD_INDEX DEBUG: common_helper.h:205: TBatchRowsUploader StartUploadRowsInternal UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 2025-12-23T18:33:05.683331Z node 5 :BUILD_INDEX DEBUG: reshuffle_kmeans.cpp:255: Handle TEvUploadRowsResponse TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 ev->Sender: [5:2615:4365] 2025-12-23T18:33:05.683423Z node 5 :BUILD_INDEX DEBUG: common_helper.h:205: TBatchRowsUploader StartUploadRowsInternal UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 2025-12-23T18:33:05.701385Z node 5 :BUILD_INDEX DEBUG: reshuffle_kmeans.cpp:255: Handle TEvUploadRowsResponse TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 ev->Sender: [5:2623:4373] 2025-12-23T18:33:05.701477Z node 5 :BUILD_INDEX DEBUG: common_helper.h:205: TBatchRowsUploader StartUploadRowsInternal UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 2025-12-23T18:33:05.702187Z node 5 :BUILD_INDEX TRACE: reshuffle_kmeans.cpp:225: Exhausted TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 2025-12-23T18:33:05.702264Z node 5 :BUILD_INDEX TRACE: reshuffle_kmeans.cpp:198: Seek 1 TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 2025-12-23T18:33:05.719051Z node 5 :BUILD_INDEX DEBUG: reshuffle_kmeans.cpp:255: Handle TEvUploadRowsResponse TReshuffleKMeansScan TabletId: 
72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 UploadTable: /MyRoot/ServerLessDB/Table/index1/indexImplPostingTable0build UploadBuf size: 66 RetryCount: 0 ev->Sender: [5:2631:4381] 2025-12-23T18:33:05.719147Z node 5 :BUILD_INDEX TRACE: reshuffle_kmeans.cpp:198: Seek 1 TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 2025-12-23T18:33:05.719354Z node 5 :BUILD_INDEX NOTICE: reshuffle_kmeans.cpp:165: Done TReshuffleKMeansScan TabletId: 72075186233409552 Id: 109 Parent: 0 Child: 1 K: 4 Id: 109 TabletId: 72075186233409552 RequestSeqNoGeneration: 2 RequestSeqNoRound: 5 Status: DONE MeteringStats { UploadRows: 50 UploadBytes: 850 ReadRows: 50 ReadBytes: 450 CpuTimeUs: 2543 } ... blocking NKikimr::TEvDataShard::TEvReshuffleKMeansResponse from to FLAT_SCHEMESHARD_ACTOR cookie 0 ... waiting for reshuffle (done) 2025-12-23T18:33:05.721333Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:1703: Handle TEvRemoteHttpInfo: BuildIndexId=109&Page=BuildIndexInfo 2025-12-23T18:33:05.723037Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:441: TTxMonitoring.Execute: BuildIndexId=109&Page=BuildIndexInfo ... waiting for metering 2025-12-23T18:33:10.539324Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2408: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 109 2025-12-23T18:33:10.540425Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index_tx_base.cpp:170: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: ApplyBill: make a bill, id#109, billRecord: {"usage":{"start":0,"quantity":130,"finish":10,"unit":"request_unit","type":"delta"},"tags":{},"id":"109-72075186233409549-32-0-0-0-0-4-800-84-7200","cloud_id":"CLOUD_ID_VAL","source_wt":10,"source_id":"sless-docapi-ydb-ss","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.requests.v1","folder_id":"FOLDER_ID_VAL","version":"1.0.0"} , toBill: UploadRows: 4 UploadBytes: 84 ReadRows: 800 ReadBytes: 7200 CpuTimeUs: 120600, explain: ReadTable: 128, BulkUpsert: 2, CPU: 80, buildInfo: TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Filling, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [5:1777:3626], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 2, DoneShards: 0, ShardsInProgress: 72075186233409549:4, Processed: UploadRows: 4 UploadBytes: 84 ReadRows: 800 ReadBytes: 7200 CpuTimeUs: 120600, Billed: UploadRows: 4 UploadBytes: 84 ReadRows: 800 ReadBytes: 7200 CpuTimeUs: 120600} TEvWriteMeteringJson {"usage":{"start":0,"quantity":130,"finish":10,"unit":"request_unit","type":"delta"},"tags":{},"id":"109-72075186233409549-32-0-0-0-0-4-800-84-7200","cloud_id":"CLOUD_ID_VAL","source_wt":10,"source_id":"sless-docapi-ydb-ss","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.requests.v1","folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ... 
blocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering cookie 0 ... waiting for metering (done) assertion failed at ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:954, void NTestSuiteVectorIndexBuildTest::Metering_ServerLessDB_Restarts(NUnitTest::TTestContext &) [doRestarts = false]: (meteringBlocker[0]->Get()->MeteringJson == expectedBill.ToString()) failed: ("{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-32-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n" != "{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-2-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n") , with diff: "{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-(3|)2-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n" TBackTrace::Capture()+28 (0x179B207C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x17E7E86C) void NTestSuiteVectorIndexBuildTest::Metering_ServerLessDB_Restarts(NUnitTest::TTestContext&)+25533 (0x1759C05D) std::__y1::__function::__func, void ()>::operator()()+280 (0x1750BC18) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x17EADCAA) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x17E85548) NTestSuiteVectorIndexBuildTest::TCurrentTest::Execute()+1229 (0x1750AF6D) NUnitTest::TTestFactory::Execute()+2176 (0x17E86D00) NUnitTest::RunMain(int, char**)+5805 (0x17EA7B0D) ??+0 (0x7F665A398D90) __libc_start_main+128 (0x7F665A398E40) _start+41 (0x14AFB029) >> IndexBuildTest::CancellationNotEnoughRetriesUniq [GOOD] >> IndexBuildTest::CancellationNoTable >> TSchemeShardSysViewTest::AsyncCreateSameSysView |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest |92.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |92.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |92.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots >> TSchemeShardSysViewTest::EmptyName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::FallbackToYqlDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 63935, MsgBus: 63497 2025-12-23T18:32:51.359472Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123173220529469:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:51.359811Z node 1 :METADATA_PROVIDER 
ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d11/r3tmp/tmpzjSmnA/pdisk_1.dat 2025-12-23T18:32:51.694917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:51.764418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:51.764546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:51.816111Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:51.830914Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123173220529361:2081] 1766514771328893 != 1766514771328896 2025-12-23T18:32:51.831281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63935, node 1 2025-12-23T18:32:51.964234Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:51.974754Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:51.974778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:51.974791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:51.974938Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63497 2025-12-23T18:32:52.375063Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:63497 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514771882 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:32:52.709261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:55.699316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123190400399439:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:55.699418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:55.699817Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123190400399449:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:55.699903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:56.032945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:56.251157Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123194695366849:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:56.251282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:56.255075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123194695366854:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:56.255164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123194695366855:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:56.255500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:56.260284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:56.316081Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123194695366858:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:32:56.345365Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123173220529469:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:56.345478Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:56.382097Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123194695366910:2606] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20543, MsgBus: 3472 2025-12-23T18:32:57.958654Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123196275166939:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:57.959724Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d11/r3tmp/tmpp0OCON/pdisk_1.dat 2025-12-23T18:32:57.991940Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:58.079468Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:58.081744Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:58.081845Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:58.103868Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20543, node 2 2025-12-23T18:32:58.235297Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:58.235317Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:58.235323Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:58.235434Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:58.287767Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3472 TClient is connected to server localhost:3472 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: ... T18:33:07.069681Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587123238107659284:2592] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:33:07.158039Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with new RBO failed, retrying with YQL optimizer, self: [3:7587123238107659293:2345], database: /Root, text: "\n --!syntax_pg\n SET TablePathPrefix = \"/Root/\";\n select sum(distinct t1.c), sum(distinct t1.a) from t1 group by t1.b order by t1.b;\n " 2025-12-23T18:33:08.306856Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587123220927789114:2090];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:08.307583Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:33:09.261152Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with new RBO failed, retrying with YQL optimizer, self: [3:7587123246697593957:2369], database: /Root, text: "\n INSERT INTO `/Root/t1` (a, b, c) VALUES (1, 2, 3);\n " Trying to start YDB, gRPC: 24394, MsgBus: 17222 2025-12-23T18:33:10.436692Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587123253787193467:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:10.436756Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d11/r3tmp/tmp17jjB8/pdisk_1.dat 2025-12-23T18:33:10.451691Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:10.524272Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:10.558054Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:10.558135Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:10.566029Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24394, node 4 2025-12-23T18:33:10.653234Z node 4 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:10.667221Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:10.667247Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:10.667254Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:10.667328Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17222 TClient is connected to server localhost:17222 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514790614 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:11.161430Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:33:11.444345Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:14.474947Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123270967063493:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:14.475157Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:14.480455Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123270967063503:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:14.480597Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:14.555160Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:15.041702Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123275262030897:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:15.041859Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:15.042282Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123275262030902:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:15.042336Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123275262030903:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:15.042720Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:15.048758Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:33:15.063640Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587123275262030906:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:33:15.138198Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587123275262030957:2597] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:33:15.210575Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7587123275262030966:2346], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
:4:1: Fatal: ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:1079: Multiple distinct is not supported, code: 1 2025-12-23T18:33:15.211087Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=4&id=MTcxMzZlYi1kNmI5MWNjNS03M2JjNjZiOS01NTQ2NGE5OA==, ActorId: [4:7587123270967063597:2335], ActorState: ExecuteState, TraceId: 01kd67s8h0fgcsy2zmc1yq2mtc, ReplyQueryCompileError, status: INTERNAL_ERROR, issues: { message: "Execution" issue_code: 1060 issues { position { row: 4 column: 1 } message: "ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:1079: Multiple distinct is not supported" end_position { row: 4 column: 1 } issue_code: 1 } }, remove tx with tx_id: 2025-12-23T18:33:15.292884Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7587123275262030991:2356], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/core/expr_nodes_gen/yql_expr_nodes_gen.h:294 TCallable(): requirement Match(node.Get()) failed, code: 1 2025-12-23T18:33:15.294390Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=4&id=YTJjYjI2NzgtODNiMWQxODctNTIyMDA2YzEtMmRlYWUxMzM=, ActorId: [4:7587123275262030983:2351], ActorState: ExecuteState, TraceId: 01kd67s8qq4ryxzh1v8zf9pvyt, ReplyQueryCompileError, status: INTERNAL_ERROR, issues: { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/core/expr_nodes_gen/yql_expr_nodes_gen.h:294 TCallable(): requirement Match(node.Get()) failed" issue_code: 1 } }, remove tx with tx_id: 2025-12-23T18:33:15.442849Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587123253787193467:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:15.442939Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews >> TSchemeShardSysViewTest::ReadOnlyMode >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews [GOOD] >> TSchemeShardSysViewTest::CreateExistingSysView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] Test command err: 2025-12-23T18:32:40.334253Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123122354464978:2180];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:40.334428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005210/r3tmp/tmp4cjGQG/pdisk_1.dat 2025-12-23T18:32:40.706215Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:40.722985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:40.723116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:40.750257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:40.901487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:40.924375Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 16324, node 1 2025-12-23T18:32:41.147102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:41.147125Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:41.147132Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:41.147233Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:41.359325Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1639 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514760962 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:41.882811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:1639 2025-12-23T18:32:44.484852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123139534335382:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.484965Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.485454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123139534335392:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.485506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.761284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:44.952058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123139534335552:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.952183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.952767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123139534335555:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.952828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.977578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766514764875 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766514764875 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) 2025-12-23T18:32:45.257760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123143829302971:2380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:45.257879Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:45.258048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123143829302992:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:45.258087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123143829302993:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:45.258125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123143829302997:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:45.258147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123143829302995:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:45.258542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123143829302998:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:45.258543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123143829302994:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:45.258926Z node ... ctor.cpp:1107: TxId: 281474976726034, task: 1, CA Id [1:7587123251203600585:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-12-23T18:33:10.124393Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726032, task: 1, CA Id [1:7587123251203600568:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-12-23T18:33:10.124449Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726029, task: 1, CA Id [1:7587123251203600565:2376]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-12-23T18:33:10.125381Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2025-12-23T18:33:10.125519Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-12-23T18:33:10.166925Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726034, task: 1, CA Id [1:7587123251203600585:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.166970Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726030, task: 1, CA Id [1:7587123251203600559:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.166993Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726031, task: 1, CA Id [1:7587123251203600562:2363]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.167016Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726032, task: 1, CA Id [1:7587123251203600568:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.167039Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726029, task: 1, CA Id [1:7587123251203600565:2376]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.167061Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726028, task: 1, CA Id [1:7587123251203600556:2369]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.167083Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726033, task: 1, CA Id [1:7587123251203600582:2366]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766514764875 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) 2025-12-23T18:33:10.433648Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726034, task: 1, CA Id [1:7587123251203600585:2371]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.433659Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726030, task: 1, CA Id [1:7587123251203600559:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.433708Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726031, task: 1, CA Id [1:7587123251203600562:2363]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.433719Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726032, task: 1, CA Id [1:7587123251203600568:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.433735Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726028, task: 1, CA Id [1:7587123251203600556:2369]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.636225Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726029, task: 1, CA Id [1:7587123251203600565:2376]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.636313Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726033, task: 1, CA Id [1:7587123251203600582:2366]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.918442Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726034, task: 1, CA Id [1:7587123251203600585:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.918442Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726032, task: 1, CA Id [1:7587123251203600568:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:10.918505Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726030, task: 1, CA Id [1:7587123251203600559:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:11.119566Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726031, task: 1, CA Id [1:7587123251203600562:2363]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:11.119586Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726029, task: 1, CA Id [1:7587123251203600565:2376]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:11.119625Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726028, task: 1, CA Id [1:7587123251203600556:2369]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:11.346815Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726033, task: 1, CA Id [1:7587123251203600582:2366]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:11.826580Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726034, task: 1, CA Id [1:7587123251203600585:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:11.826644Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726031, task: 1, CA Id [1:7587123251203600562:2363]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:11.826672Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726032, task: 1, CA Id [1:7587123251203600568:2377]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:11.826706Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726028, task: 1, CA Id [1:7587123251203600556:2369]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:12.031317Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726030, task: 1, CA Id [1:7587123251203600559:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:12.031334Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726029, task: 1, CA Id [1:7587123251203600565:2376]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:12.334713Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726033, task: 1, CA Id [1:7587123251203600582:2366]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:12.556907Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726034, task: 1, CA Id [1:7587123251203600585:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:12.556960Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726028, task: 1, CA Id [1:7587123251203600556:2369]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:12.811215Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726030, task: 1, CA Id [1:7587123251203600559:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:12.811264Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726031, task: 1, CA Id [1:7587123251203600562:2363]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:12.811292Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726032, task: 1, CA Id [1:7587123251203600568:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:13.132785Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726029, task: 1, CA Id [1:7587123251203600565:2376]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:13.132849Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726033, task: 1, CA Id [1:7587123251203600582:2366]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:13.132893Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726034, task: 1, CA Id [1:7587123251203600585:2371]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:13.344407Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726028, task: 1, CA Id [1:7587123251203600556:2369]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:13.546248Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726031, task: 1, CA Id [1:7587123251203600562:2363]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:13.746728Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726030, task: 1, CA Id [1:7587123251203600559:2367]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:13.746846Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726032, task: 1, CA Id [1:7587123251203600568:2377]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:13.746904Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726029, task: 1, CA Id [1:7587123251203600565:2376]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-12-23T18:33:13.958856Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976726033, task: 1, CA Id [1:7587123251203600582:2366]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766514764875 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) Table has 2 shards |92.1%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> IndexBuildTest::CancellationNoTable [GOOD] >> IndexBuildTest::CancellationNoTableUniq >> TSchemeShardSysViewTest::DropSysView >> TSchemeShardSysViewTest::AsyncCreateSameSysView [GOOD] >> TSchemeShardSysViewTest::AsyncDropSameSysView >> TSchemeShardSysViewTest::CreateSysView >> TSchemeShardTTLTestsWithReboots::CopyTable >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:17.778238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:17.778372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:17.778419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:17.778459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:17.778504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:17.778537Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:17.778600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:17.778673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:17.779630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:17.779973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:17.898139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:17.898214Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:17.908704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:17.908927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:17.909069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:17.915820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:17.916047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:17.916860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:17.917115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:17.921180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:17.921407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:17.922654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:17.922734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:17.922934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:17.922984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:17.923035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:17.923175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:18.095190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.096189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.096320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.096392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.096460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.096523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.096588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.096673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.096761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.096828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.096907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.096971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.097063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.097142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.097230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR 
... ished: true CreateTxId: 281474976710675 CreateStep: 5000031 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_partitions_one_hour" PathId: 36 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710691 CreateStep: 5000018 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_partitions_one_minute" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710664 CreateStep: 5000007 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_cpu_time_one_hour" PathId: 14 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710669 CreateStep: 5000006 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_cpu_time_one_minute" PathId: 28 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710683 CreateStep: 5000025 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_duration_one_hour" PathId: 37 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710692 CreateStep: 5000016 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_duration_one_minute" PathId: 25 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710680 CreateStep: 5000030 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_read_bytes_one_hour" PathId: 15 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710670 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_read_bytes_one_minute" PathId: 16 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710671 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_request_units_one_hour" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710665 CreateStep: 5000005 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_request_units_one_minute" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710684 CreateStep: 5000023 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 
DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:18.794152Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:671:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:33:18.796651Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-12-23T18:33:18.797410Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:18.797642Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 251us result status StatusSuccess 2025-12-23T18:33:18.798009Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:18.798547Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:18.798779Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/ds_pdisks" took 209us result status StatusSuccess 2025-12-23T18:33:18.799112Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/ds_pdisks" PathDescription { Self { Name: "ds_pdisks" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710662 CreateStep: 5000011 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "ds_pdisks" Type: EPDisks SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:18.799654Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/query_metrics_one_minute" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:18.799916Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/query_metrics_one_minute" took 240us result status StatusSuccess 2025-12-23T18:33:18.800239Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/query_metrics_one_minute" PathDescription { Self { Name: "query_metrics_one_minute" PathId: 34 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710689 CreateStep: 5000022 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "query_metrics_one_minute" Type: EQueryMetricsOneMinute SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 34 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews >> TSchemeShardSysViewTest::EmptyName [GOOD] |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateExistingSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:17.904474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:17.904557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:17.904585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:17.904610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:17.904643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:17.904661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:17.904694Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:17.904742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:17.905356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:17.905566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:17.993292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:17.993362Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:18.001752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:18.001988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:18.002145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:18.009232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:18.009441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:18.010217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:18.010479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:18.017294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:18.017498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:18.018825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:18.018910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:18.019066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:18.019131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:18.019183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:18.019351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:18.180491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.181453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.181579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.181657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.181722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.181795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.181876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.181961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.182028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.182094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.182173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.182238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.182345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.182438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.182517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
55672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-12-23T18:33:18.857007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:33:18.857154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:33:18.857198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:33:18.857247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2025-12-23T18:33:18.857302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2025-12-23T18:33:18.858622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:33:18.858719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:33:18.858772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:33:18.858811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:33:18.858845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:33:18.858898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:33:18.860848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:33:18.861689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:33:18.861864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:33:18.861896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:33:18.862302Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:33:18.862430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:33:18.862464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:701:2691] TestWaitNotification: OK eventTxId 101 2025-12-23T18:33:18.862968Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:18.863206Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 258us result status StatusSuccess 2025-12-23T18:33:18.863618Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-12-23T18:33:18.867280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView CreateSysView { Name: "new_sys_view" Type: ENodes } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:33:18.867475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_sysview.cpp:117: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/new_sys_view, opId: 102:0 2025-12-23T18:33:18.867547Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_sysview.cpp:123: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/new_sys_view, opId: 102:0, sysViewDescription: Name: "new_sys_view" Type: ENodes 2025-12-23T18:33:18.867733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.sys/new_sys_view', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:33:18.869896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-12-23T18:33:18.870179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.sys/new_sys_view', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNoChanges), operation: CREATE SYSTEM VIEW, path: /MyRoot/.sys/new_sys_view TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:33:18.870502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:33:18.870546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:33:18.870958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:33:18.871064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:33:18.871106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:709:2699] TestWaitNotification: OK eventTxId 102 2025-12-23T18:33:18.871635Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:18.871904Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 294us result status StatusSuccess 2025-12-23T18:33:18.872299Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> KqpRboYql::Aggregation-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:18.497409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:18.497493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:18.497533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:18.497572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:18.497616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:18.497645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:18.497701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-12-23T18:33:18.497763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:18.498597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:18.498913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:18.576152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:18.576235Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:18.582285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:18.582528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:18.582664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:18.585835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:18.586006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:18.586811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:18.587025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:18.588961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:18.589166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:18.590386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:18.590473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:18.590670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:18.590756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:18.590807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:18.590939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:18.750997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.752039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.752208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.752300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.752381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.752437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.752501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.752592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.752658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.752719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.752792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.752869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.752983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.753079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.753166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
678944 2025-12-23T18:33:19.428172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:33:19.428208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:33:19.430431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:33:19.430495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:33:19.430537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:33:19.432597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:33:19.432653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:33:19.432715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:33:19.432781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:33:19.432925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:33:19.434703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:33:19.434860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:33:19.435194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:19.435341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:19.435385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:33:19.435675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:33:19.435744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:33:19.435915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:33:19.435981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:33:19.438136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:19.438181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:19.438421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:19.438462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:33:19.438807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:33:19.438883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:33:19.438990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:33:19.439022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:33:19.439077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:33:19.439111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:33:19.439145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:33:19.439186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:33:19.439220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:33:19.439255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:33:19.439341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:33:19.439380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:33:19.439411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:33:19.440076Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:33:19.440182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:33:19.440219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:33:19.440259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:33:19.440295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:33:19.440368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:33:19.443535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:33:19.444011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:33:19.444617Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:671:2661] Bootstrap 2025-12-23T18:33:19.445644Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:671:2661] Become StateWork (SchemeCache [1:676:2666]) 2025-12-23T18:33:19.448253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView CreateSysView { Name: "" Type: EPartitionStats } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:33:19.448410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_sysview.cpp:117: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/, opId: 101:0 2025-12-23T18:33:19.448464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_sysview.cpp:123: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/, opId: 101:0, sysViewDescription: Name: "" Type: EPartitionStats 2025-12-23T18:33:19.448580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-12-23T18:33:19.449753Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:671:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:33:19.452651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/.sys/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:19.452860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/', error: path part shouldn't be empty, operation: CREATE SYSTEM VIEW, path: /MyRoot/.sys/ 2025-12-23T18:33:19.453132Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:33:19.453354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:33:19.453405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:33:19.453702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:33:19.453781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:33:19.453820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:686:2676] TestWaitNotification: OK eventTxId 101 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] >> IndexBuildTest::CancellationNoTableUniq [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:33:17.731899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:17.732013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:17.732052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:17.732089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:17.732133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:17.732161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:17.732242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:17.732336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:17.733206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:17.733532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:17.843316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:17.843375Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:17.857440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:17.857657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:17.857868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:17.873973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:17.874198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:17.874953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:17.875191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:17.879623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:17.879815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:17.881254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:17.881325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:17.881500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:17.881586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:17.881635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:17.881757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:18.021374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:33:18.022396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.022538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.022645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.022718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.022915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.023023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.023111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.023204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.023271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.023335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.023414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.023497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.023592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.023687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:33:19.404791Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:33:19.404864Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:33:19.404893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:19.404924Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:33:19.404949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:19.404977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:33:19.405009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:19.405037Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:33:19.405062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:33:19.405118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-12-23T18:33:19.405149Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:33:19.405174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-12-23T18:33:19.405197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-12-23T18:33:19.405827Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:19.405904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:19.405934Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:19.405962Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-12-23T18:33:19.405994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:33:19.407174Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:19.407255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:19.407283Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:19.407309Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-12-23T18:33:19.407337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-12-23T18:33:19.407397Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:33:19.409263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:33:19.410217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-12-23T18:33:19.410476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:33:19.410524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-12-23T18:33:19.410624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:33:19.410646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:33:19.411056Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:33:19.411182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:33:19.411221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:327:2317] 2025-12-23T18:33:19.411379Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:33:19.411441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:33:19.411464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:327:2317] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-12-23T18:33:19.411824Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:19.411998Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys" took 209us result status StatusSuccess 2025-12-23T18:33:19.412399Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys" PathDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:19.412864Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:19.413038Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 232us result status StatusSuccess 2025-12-23T18:33:19.413312Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] >> TSchemeShardSysViewTest::DropSysView [GOOD] >> TSchemeShardSysViewTest::CreateSysView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:18.047138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:18.047267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:18.047306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:18.047340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:18.047377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:18.047406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:18.047456Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:18.047514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:18.048331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:18.048642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:18.120120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:18.120168Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:18.126293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:18.126518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:18.126669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:18.129607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:18.129792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:18.130571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:18.130833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:18.134247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:18.134436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:18.135667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:18.135726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:18.135868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:18.135930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:18.135971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:18.136112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:18.288592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.289594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.289743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.289822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.289896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.289963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.290051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.290140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.290228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.290322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.290406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.290512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.290609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.290692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:18.290830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
nStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:20.250564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:20.250620Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_sysview.cpp:43: [72057594046678944] TDropSysView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000039 2025-12-23T18:33:20.250773Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2025-12-23T18:33:20.250966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2025-12-23T18:33:20.251032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:33:20.252946Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:20.252995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:33:20.253329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:33:20.253513Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:20.253557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-12-23T18:33:20.253604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:33:20.254081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:33:20.254147Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:33:20.254258Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:33:20.254296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:20.254385Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:33:20.254422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:20.254466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready 
parts: 1/1, is published: false 2025-12-23T18:33:20.254507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:20.254545Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:33:20.254580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:33:20.254655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:20.254697Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:33:20.254730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 40 2025-12-23T18:33:20.254785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2025-12-23T18:33:20.255395Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:20.255508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:20.255547Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:20.255589Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 40 2025-12-23T18:33:20.255635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2025-12-23T18:33:20.256759Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:20.256829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:20.256848Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:20.256869Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2025-12-23T18:33:20.256890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:33:20.256956Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:33:20.257773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:33:20.257828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:33:20.257926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2025-12-23T18:33:20.260347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:33:20.262340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:33:20.262476Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 2025-12-23T18:33:20.262796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:33:20.262856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-12-23T18:33:20.262948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:33:20.262973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:33:20.263479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:33:20.263605Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:33:20.263660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:33:20.263699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:733:2723] 2025-12-23T18:33:20.263888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:33:20.263919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:733:2723] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-12-23T18:33:20.264401Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:20.264636Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 261us result status StatusPathDoesNotExist 2025-12-23T18:33:20.264844Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:19.264601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:19.264684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:19.264720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:19.264750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:19.264788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:19.264817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:19.264876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:19.264948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:19.265763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:19.266059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:19.360584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:19.360650Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:19.375894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:19.376198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:19.376329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:19.387670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:19.387874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:19.388534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:19.388756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:19.401714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:19.401914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:19.403203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:19.403281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:19.403437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:19.403490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:19.403535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:19.403658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:19.551944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.552634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.552715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.552757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.552811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.552849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.552891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.552985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.553034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.553078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.553123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.553158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.553215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.553274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.553338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
8944, txId: 281474976720657, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:33:20.437868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:20.437917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:765:2741], at schemeshard: 72057594046678944, txId: 281474976720657, path id: 2 2025-12-23T18:33:20.437963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:765:2741], at schemeshard: 72057594046678944, txId: 281474976720657, path id: 38 FAKE_COORDINATOR: Erasing txId 281474976720657 2025-12-23T18:33:20.438165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046678944 2025-12-23T18:33:20.438216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976720657:0 ProgressState 2025-12-23T18:33:20.438355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976720657:0 progress is 1/1 2025-12-23T18:33:20.438404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-12-23T18:33:20.438444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976720657:0 progress is 1/1 2025-12-23T18:33:20.438488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-12-23T18:33:20.438531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2025-12-23T18:33:20.438573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-12-23T18:33:20.438607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976720657:0 2025-12-23T18:33:20.438637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976720657:0 2025-12-23T18:33:20.438701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:20.438757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976720657, publications: 2, subscribers: 1 2025-12-23T18:33:20.438821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976720657, [OwnerId: 72057594046678944, LocalPathId: 2], 40 2025-12-23T18:33:20.438872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976720657, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-12-23T18:33:20.440891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 281474976720657 2025-12-23T18:33:20.440996Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 281474976720657 2025-12-23T18:33:20.441110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720657 2025-12-23T18:33:20.441187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720657, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 40 2025-12-23T18:33:20.441232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2025-12-23T18:33:20.442086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976720657 2025-12-23T18:33:20.442159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976720657 2025-12-23T18:33:20.442194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720657 2025-12-23T18:33:20.442226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720657, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:33:20.442267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:33:20.442375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720657, subscribers: 1 2025-12-23T18:33:20.442481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:769:2745] 2025-12-23T18:33:20.444954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720657 2025-12-23T18:33:20.447171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720657 2025-12-23T18:33:20.447331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:769:2745] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/ds_pdisks' 2025-12-23T18:33:20.447395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:769:2745] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished Leader for TabletID 72057594046678944 is [1:713:2700] sender: [1:797:2058] recipient: [1:15:2062] 
2025-12-23T18:33:20.519701Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:20.519987Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 263us result status StatusSuccess 2025-12-23T18:33:20.520390Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:20.521086Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:20.521303Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/ds_pdisks" took 206us result status StatusSuccess 2025-12-23T18:33:20.521600Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/ds_pdisks" PathDescription { Self { Name: "ds_pdisks" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976720657 CreateStep: 5000039 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "ds_pdisks" Type: EPDisks SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:19.343173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:19.343264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:19.343306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:19.343345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:19.343396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:19.343429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:19.343483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:19.343550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:19.344428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:19.345005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:19.451165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:19.451256Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:19.483502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:19.483831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:19.483985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:19.507541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:19.507759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:19.508566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:19.508821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:19.515474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:19.515695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:19.517018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:19.517116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:19.517307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:19.517370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:19.517418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:19.517551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:19.703700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.704821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.705004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.705102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.705207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.705281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.705353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.705473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.705550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.705624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.705719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.705811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.705914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.706004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:19.706114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
el: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:33:20.959713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-12-23T18:33:20.959960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000038 2025-12-23T18:33:20.960949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:20.961101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:20.961162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:45: [72057594046678944] TCreateSysView::TPropose, opId: 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2025-12-23T18:33:20.961337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2025-12-23T18:33:20.961524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2025-12-23T18:33:20.961613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:33:20.964115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:20.964199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:33:20.964603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:33:20.964741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:20.964810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:813:2771], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-12-23T18:33:20.964867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:813:2771], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:33:20.965308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 
2025-12-23T18:33:20.965364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:33:20.965484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:33:20.965525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:20.965579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:33:20.965629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:20.965688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:33:20.965739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:20.965792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:33:20.965829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:33:20.965918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:20.965976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:33:20.966038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 39 2025-12-23T18:33:20.966071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-12-23T18:33:20.967204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:20.967349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:20.967402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:20.967468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2025-12-23T18:33:20.967514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2025-12-23T18:33:20.968290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, 
cookie: 102 2025-12-23T18:33:20.968375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:20.968424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:20.968501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:33:20.968562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:33:20.968642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:33:20.971741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:33:20.972868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:33:20.973151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:33:20.973197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:33:20.973658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:33:20.973776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:33:20.973825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:842:2798] TestWaitNotification: OK eventTxId 102 2025-12-23T18:33:20.974474Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:20.974775Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 298us result status StatusSuccess 2025-12-23T18:33:20.975272Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::Aggregation-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 3699, MsgBus: 32587 2025-12-23T18:32:42.986086Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123134194434324:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:42.986154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d26/r3tmp/tmp2QhpEP/pdisk_1.dat 2025-12-23T18:32:43.291499Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:43.317723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:43.317816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:43.358473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:43.439908Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123134194434220:2081] 1766514762959923 != 1766514762959926 2025-12-23T18:32:43.443583Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3699, node 1 2025-12-23T18:32:43.495163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:43.528476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-12-23T18:32:43.528510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:43.528518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:43.528594Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32587 2025-12-23T18:32:44.012776Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:32587 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514763433 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:44.270463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:44.279971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:32:46.668931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123151374304293:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:46.669053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:46.669386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123151374304303:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:46.669446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:47.025276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:47.223134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:47.274085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:47.326171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:47.493616Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123155669271934:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:47.493718Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:47.494058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123155669271939:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:47.494137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123155669271940:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:47.494187Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:47.499320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:47.514100Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123155669271943:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:32:47.612492Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123155669271994:2759] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:32:47.986491Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123134194434324:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:47.986591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8859, MsgBus: 15596 2025-12-23T18:32:49.251559Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123162544452306:2258];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:49.251739Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d26/r3tmp/tmpiXwB1D/pdisk_1.dat 2025-12-23T18:32:49.278851Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:49.346232Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:49.349402Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587123162544452075:2081] 1766514769225227 != 1766514769225230 2025-12-23T18:32:49.389252Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:49.389325Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:49.406966Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8859, node 2 2025-12-23T18:32:49.535310Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated ... or you don't have access permissions } 2025-12-23T18:33:00.031848Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123209789098494:3148], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:00.031878Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:00.036615Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:33:00.051430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-12-23T18:33:00.057925Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123209789098497:3149], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-12-23T18:33:00.145739Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123209789098550:5264] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:33:04.320426Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:33:04.320453Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 22459, MsgBus: 23825 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d26/r3tmp/tmpSdN1Tg/pdisk_1.dat 2025-12-23T18:33:12.139025Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:12.139134Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:12.242544Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:12.253933Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587123263071580108:2081] 1766514792077761 != 1766514792077764 2025-12-23T18:33:12.257343Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:12.257451Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:12.281298Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22459, node 3 2025-12-23T18:33:12.335900Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:12.335928Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:12.335939Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:12.336038Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:33:12.376121Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23825 TClient is connected to server localhost:23825 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514792343 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:12.776235Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:33:13.088528Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:16.532703Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123280251450176:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.532833Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.535276Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123280251450185:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.535396Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.572196Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:16.647204Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123280251450279:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.647293Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.647641Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123280251450282:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.647711Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.675529Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:16.859781Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123280251450372:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.859961Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.861300Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123280251450378:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.861332Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123280251450377:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.861388Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:16.865875Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:33:16.888095Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587123280251450381:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:33:16.948649Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587123280251450432:2660] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews [GOOD] |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:19.757617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:19.757706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:19.757746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:19.757782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:19.757838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:19.757870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:19.757921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:19.758045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:19.759267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:19.759615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:19.874724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:19.874864Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:19.880211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:19.880432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:19.880574Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:19.883609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:19.883800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:19.884539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:19.884724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:19.886701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:19.886877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:19.888016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:19.888080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:19.888252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:19.888298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:19.888346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:19.888462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:20.059409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.063517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.063689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:33:20.063796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.063870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.063932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.064026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.064148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.064219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.064287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.064349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.064442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.064544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.065643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.065781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
-12-23T18:33:21.028858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2025-12-23T18:33:21.028884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2025-12-23T18:33:21.028925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2025-12-23T18:33:21.028977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2025-12-23T18:33:21.029005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2025-12-23T18:33:21.029034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2025-12-23T18:33:21.029059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2025-12-23T18:33:21.029090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2025-12-23T18:33:21.029115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2025-12-23T18:33:21.029160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2025-12-23T18:33:21.029187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2025-12-23T18:33:21.029212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2025-12-23T18:33:21.029237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2025-12-23T18:33:21.029272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2025-12-23T18:33:21.029324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2025-12-23T18:33:21.029359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2025-12-23T18:33:21.029402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2025-12-23T18:33:21.029432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2025-12-23T18:33:21.029454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2025-12-23T18:33:21.029479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2025-12-23T18:33:21.029503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2025-12-23T18:33:21.029524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2025-12-23T18:33:21.029547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2025-12-23T18:33:21.029570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2025-12-23T18:33:21.029789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.029910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.029993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.030118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.030249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.030444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.031074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.031212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.031700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.031799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.032051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.032237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.032301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.032402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.032622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.032714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at 
schemeshard: 72057594046678944 2025-12-23T18:33:21.033169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.033476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.033563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.033626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.033893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.033968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.034033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.049728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:21.055069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:21.055164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:21.055319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:21.055422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:21.055481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:21.055821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:710:2697] sender: [1:770:2058] recipient: [1:15:2062] 2025-12-23T18:33:21.140086Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:21.140421Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 364us result status StatusSuccess 2025-12-23T18:33:21.140814Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardUserAttrsTest::Boot |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::DropSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:19.732115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:19.732212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:19.732261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:19.732300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:19.732343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:19.732377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:19.732435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:19.732504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-12-23T18:33:19.733399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:19.733719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:19.827039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:19.827110Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:19.835121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:19.835435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:19.835604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:19.839394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:19.839543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:19.840220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:19.840396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:19.842294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:19.842510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:19.843854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:19.843923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:19.844116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:19.844180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:19.844229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:19.844365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:20.010342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.011632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.011784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.011893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.011973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.012039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.012141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.012252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.012330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.012403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.012472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.012573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.012680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.012775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.012867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
n for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2025-12-23T18:33:21.024859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2025-12-23T18:33:21.024900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2025-12-23T18:33:21.024929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2025-12-23T18:33:21.024946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2025-12-23T18:33:21.024961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2025-12-23T18:33:21.024978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2025-12-23T18:33:21.024995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2025-12-23T18:33:21.025023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2025-12-23T18:33:21.025047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2025-12-23T18:33:21.025064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2025-12-23T18:33:21.025081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2025-12-23T18:33:21.025098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2025-12-23T18:33:21.025156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2025-12-23T18:33:21.025192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2025-12-23T18:33:21.025223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2025-12-23T18:33:21.025253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2025-12-23T18:33:21.025303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2025-12-23T18:33:21.025344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2025-12-23T18:33:21.025380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 
2025-12-23T18:33:21.025410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2025-12-23T18:33:21.025440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2025-12-23T18:33:21.025475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2025-12-23T18:33:21.025514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2025-12-23T18:33:21.025568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2025-12-23T18:33:21.025595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2025-12-23T18:33:21.025647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2025-12-23T18:33:21.025675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2025-12-23T18:33:21.025698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2025-12-23T18:33:21.025736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2025-12-23T18:33:21.025947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.026044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.026133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.026263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.026387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.026571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.026980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.027126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.027656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.027752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 
2025-12-23T18:33:21.028033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.028216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.028294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.028406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.028659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.028803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.029387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.029748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.029865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.029943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.030181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.030242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.030295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:33:21.038429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:21.042775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:21.042875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:21.043684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:21.043750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:21.043799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:21.052048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:734:2721] sender: [1:796:2058] recipient: [1:15:2062] 2025-12-23T18:33:21.128090Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:21.128435Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 367us result status StatusPathDoesNotExist 2025-12-23T18:33:21.128637Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> KqpRboYql::OlapPredicatePushdown [GOOD] |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore [GOOD] >> IncrementalBackup::OmitIndexesIncrementalBackup >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:20.224479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:20.224566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:20.224612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:20.224649Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:20.224693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:20.224721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:20.224776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:20.224839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:20.225650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:20.225943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:20.375683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:20.375738Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:20.381209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:20.381403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:20.381536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:20.389519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:20.389694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:20.390407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:20.390615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:20.399442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:20.399617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:20.400701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:20.400771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:20.400930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:20.400975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:20.401013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:20.401122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:20.570927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.572990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.573076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... views_update.cpp:213: SysViewsRosterUpdate# [1:813:2789] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, drop sys view '/MyRoot/.sys/new_ds_pdisks' 2025-12-23T18:33:21.905316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 43 PathOwnerId: 72057594046678944, cookie: 281474976720658 2025-12-23T18:33:21.905452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 43 PathOwnerId: 72057594046678944, cookie: 281474976720658 2025-12-23T18:33:21.905495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720658 2025-12-23T18:33:21.905538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720658, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 43 2025-12-23T18:33:21.905605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 39 2025-12-23T18:33:21.906713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720658 2025-12-23T18:33:21.907003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720658 2025-12-23T18:33:21.907041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720658 2025-12-23T18:33:21.907090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720658, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2025-12-23T18:33:21.907137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:21.907242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720658, subscribers: 1 2025-12-23T18:33:21.907295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:813:2789] 2025-12-23T18:33:21.911042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720658 2025-12-23T18:33:21.913615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720658 2025-12-23T18:33:21.913848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:813:2789] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, drop sys view '/MyRoot/.sys/new_sys_view' 2025-12-23T18:33:21.913911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:813:2789] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished Leader for TabletID 72057594046678944 is [1:755:2742] sender: [1:855:2058] recipient: [1:15:2062] 2025-12-23T18:33:21.986983Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:21.987326Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 369us result status StatusSuccess 2025-12-23T18:33:21.987823Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:21.988761Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:21.989099Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 295us result status StatusPathDoesNotExist 2025-12-23T18:33:21.989297Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNotExist), drop stepId: 5000042, drop txId: 281474976720658" Path: "/MyRoot/.sys/new_sys_view" PathId: 38 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:21.989982Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:21.990215Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_ds_pdisks" took 237us result status StatusPathDoesNotExist 2025-12-23T18:33:21.990391Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_ds_pdisks\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSysView, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976720657" Path: "/MyRoot/.sys/new_ds_pdisks" PathId: 39 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:21.991028Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:21.991276Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_partition_stats" took 253us result status StatusSuccess 2025-12-23T18:33:21.991653Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/.sys/new_partition_stats" PathDescription { Self { Name: "new_partition_stats" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] >> TSchemeShardUserAttrsTest::Boot [GOOD] |92.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |92.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |92.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] |92.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] |92.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::OlapPredicatePushdown [GOOD] Test command err: Trying to start YDB, gRPC: 3958, MsgBus: 24563 2025-12-23T18:32:54.425641Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123185976790768:2251];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:54.425671Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d09/r3tmp/tmpZatZVn/pdisk_1.dat 2025-12-23T18:32:55.055360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:55.055460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:55.064054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:55.126270Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 3958, node 1 2025-12-23T18:32:55.253714Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123185976790555:2081] 1766514774376449 != 1766514774376452 2025-12-23T18:32:55.362900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:55.378292Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:55.424465Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:32:55.442680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:55.442711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:55.442718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:55.442809Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24563 TClient is connected to server localhost:24563 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514775249 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:56.188054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:59.198777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123207451627925:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:59.198893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:59.202857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123207451627935:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:59.202962Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:59.428666Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123185976790768:2251];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:59.428974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:59.538080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:33:00.323495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587123207451628352:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:33:00.323495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:33:00.323841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:33:00.324122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:33:00.324250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:33:00.324367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:33:00.324545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:33:00.324651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:33:00.324717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:33:00.324843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:33:00.324923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:33:00.324993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:33:00.325065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:33:00.325129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;self_id=[1:7587123207451628497:2332];tablet_id=72075186224037946;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:33:00.325469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587123207451628352:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:33:00.325658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587123207451628352:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:33:00.325755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587123207451628352:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:33:00.325867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587123207451628352:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:33:00.325971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587123207451628352:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:33:00.326089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587123207451628352:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:33:00.326201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587123207451628352:2330];ta ... 
reV0ChunksMeta; 2025-12-23T18:33:18.733984Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:33:18.734008Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:33:18.734162Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:33:18.734206Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-12-23T18:33:18.734261Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:33:18.734296Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:33:18.734338Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-12-23T18:33:18.734370Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-12-23T18:33:18.734415Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:33:18.734447Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:33:18.734662Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:33:18.734689Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:33:18.735287Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:33:18.735333Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:33:18.735359Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:33:18.735388Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:33:18.735407Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:33:18.735449Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:33:18.735487Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:33:18.735492Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:33:18.735512Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:33:18.735513Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:33:18.735756Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:33:18.735792Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:33:18.735954Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:33:18.735993Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:33:18.736035Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:33:18.736065Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:33:18.736068Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:33:18.736105Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:33:18.736125Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:33:18.736139Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:33:18.736252Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:33:18.736301Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:33:18.736323Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:33:18.736351Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-12-23T18:33:18.736358Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:33:18.736380Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:33:18.736443Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-12-23T18:33:18.736469Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-12-23T18:33:18.736478Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:33:18.736520Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:33:18.736911Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:33:18.736932Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:33:18.736966Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:33:18.736969Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:33:18.737079Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:33:18.737118Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:33:18.737189Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:33:18.737254Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:33:18.737338Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:33:18.737364Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> IndexBuildTest::CancelBuildUniq |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:33:22.630977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:22.631081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:22.631156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:22.631200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:22.631236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:22.631264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:22.631343Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:22.631411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:22.632204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:22.632478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:22.736621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:22.736669Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:22.759093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:22.759297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:22.759490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:22.782589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:22.782847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:22.783562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:22.783795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:22.797395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:22.797599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:22.799424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:22.799494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:22.799667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:22.799721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:22.799824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:22.799958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:22.985956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.987074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.987208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.987332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.987421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.987493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.987582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.987656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.987741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.987807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.987869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.987932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.988032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.988140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:22.988211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
-23T18:33:23.905034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:23.905118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:23.905146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:23.905175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:33:23.905209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:23.905897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:33:23.908627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-12-23T18:33:23.908772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000038 2025-12-23T18:33:23.909409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:23.909533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:23.909588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038, at schemeshard: 72057594046678944 2025-12-23T18:33:23.909710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2025-12-23T18:33:23.909885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:33:23.909956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:33:23.911016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:33:23.911511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:33:23.913216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:23.913265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:23.913448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:33:23.913572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:23.913626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-12-23T18:33:23.913665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:33:23.914072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:33:23.914118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:33:23.914224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:33:23.914292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:23.914340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:33:23.914369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:23.914418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:33:23.914456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:33:23.914495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:33:23.914527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:33:23.914590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:23.914627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:33:23.914660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 
72057594046678944, LocalPathId: 1], 8 2025-12-23T18:33:23.914689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2025-12-23T18:33:23.915381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:23.915474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:23.915566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:23.915620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:33:23.915659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:33:23.916367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:23.917049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:33:23.917108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:33:23.917139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:33:23.917183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:33:23.917272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:33:23.920650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:33:23.920735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-12-23T18:33:23.923836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "DirD" } AlterUserAttributes { UserAttributes { Key: 
"__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:33:23.924081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:33:23.924219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2025-12-23T18:33:23.926491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:23.926787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:33:22.984726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:22.984844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:22.984912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:22.984966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:22.985007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:22.985051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:22.985119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-12-23T18:33:22.985197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:22.986185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:22.986595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:23.093867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:23.093936Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:23.107338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:23.107593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:23.107846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:23.121519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:23.121793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:23.122643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:23.122953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:23.127912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:23.128136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:23.129664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:23.129742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:23.129937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:23.129996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:23.130129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:23.130303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:23.308321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir 
MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.309412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.309566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.309676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.309750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.309837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.309919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.310015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.310102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.310177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.310252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.310345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.310427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.310523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.310608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
meshard: 72057594046678944, cookie: 281474976710672 2025-12-23T18:33:24.063727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710672 2025-12-23T18:33:24.063836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/streaming_queries' 2025-12-23T18:33:24.063885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished 2025-12-23T18:33:24.071272Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2025-12-23T18:33:24.082099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:33:24.082387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:33:24.082609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:33:24.082657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:33:24.082944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:33:24.083026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:33:24.085389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:24.085641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:33:24.085848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:33:24.085939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:33:24.085981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:33:24.086040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 
2025-12-23T18:33:24.088178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:33:24.088258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:33:24.088307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:33:24.090519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:33:24.090582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:33:24.090629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:33:24.090703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:33:24.090873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:33:24.092924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:33:24.093101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:33:24.093487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:24.093624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:24.093670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:33:24.093966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:33:24.094028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:33:24.094186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-12-23T18:33:24.094264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:33:24.096647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:24.096702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:24.096944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:24.096989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:33:24.097343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:33:24.097394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:33:24.097492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:33:24.097536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:33:24.097577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:33:24.097609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:33:24.097668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:33:24.097710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:33:24.097743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:33:24.097789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:33:24.097862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:33:24.097900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:33:24.097938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:33:24.098694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:33:24.098831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:33:24.098874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:33:24.098936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:33:24.098992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:33:24.099089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:33:24.102669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:33:24.103351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:23.026488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:23.026606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:23.026647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:23.026692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:23.026732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:23.026809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:23.026869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:23.026974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:23.027871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:23.028202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:23.152100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:23.152189Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:23.159594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:23.159931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:23.160108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:23.167169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:23.167417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:23.168286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:23.168582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:23.171307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:23.171527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:23.173115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:23.173215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:23.173394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:23.173446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:23.173494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:23.173650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:23.357161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.358181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.358360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.358435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.358501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.358581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.358650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.358846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.358964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.359032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.359123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.359194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.359298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.359364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.359471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
alue: "ValA2" } } ApplyIf { PathId: 38 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:33:24.221206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_user_attrs.cpp:26: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:33:24.221340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2025-12-23T18:33:24.221384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 103:0 type: TxAlterUserAttributes target path: [OwnerId: 72057594046678944, LocalPathId: 38] source path: 2025-12-23T18:33:24.221525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:33:24.221578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2025-12-23T18:33:24.224451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:24.224691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2025-12-23T18:33:24.224934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:33:24.224987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:97: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:33:24.225041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-12-23T18:33:24.225185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:33:24.227139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-12-23T18:33:24.227252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2025-12-23T18:33:24.227597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-12-23T18:33:24.227720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:24.227769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:114: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000040, at schemeshard: 72057594046678944 2025-12-23T18:33:24.227970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:33:24.228020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:33:24.228061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:33:24.228093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:33:24.228155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:33:24.228236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-12-23T18:33:24.228300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:33:24.228332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:33:24.228382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:33:24.228410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:33:24.228453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:33:24.228490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-12-23T18:33:24.228513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2025-12-23T18:33:24.229973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:24.230014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:33:24.230145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:24.230172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 
72057594046678944, txId: 103, path id: 38 FAKE_COORDINATOR: Erasing txId 103 2025-12-23T18:33:24.230589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:33:24.230678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:33:24.230723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:33:24.230776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:33:24.230820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:33:24.230914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-12-23T18:33:24.232325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:33:24.232605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:33:24.232655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:33:24.233372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:33:24.233482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:33:24.233520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:751:2741] TestWaitNotification: OK eventTxId 103 2025-12-23T18:33:24.233931Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:24.234129Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 201us result status StatusSuccess 2025-12-23T18:33:24.234552Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::DecompressRaw >> TSchemeShardUserAttrsTest::VariousUse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-12-23T18:33:24.633424Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.633452Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.633537Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:24.636347Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:24.655006Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:24.672555Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.673010Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:24.674581Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.674605Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.674624Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:24.689075Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-12-23T18:33:24.702866Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:24.703158Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.709286Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:24.709812Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-12-23T18:33:24.715181Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.715222Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.715252Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:24.715600Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:24.723072Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:24.723305Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.727034Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:24.729022Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.735360Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:33:24.738855Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:24.738958Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-12-23T18:33:24.743586Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.743619Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.743643Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:24.760779Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:24.774500Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:24.774730Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.779016Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:24.779656Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.779805Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:33:24.779896Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:24.779947Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 0 bytes 2025-12-23T18:33:24.781061Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.781087Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.786526Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:24.795089Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:24.805435Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:24.805684Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.806024Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:24.806601Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.806774Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:33:24.806906Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:24.806959Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-12-23T18:33:24.811872Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.811910Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.811937Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:24.826122Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:24.829721Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:24.829938Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.830983Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:24.831745Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.831988Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:33:24.832096Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:24.832143Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-12-23T18:33:24.847672Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.847709Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.847735Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:24.848130Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-12-23T18:33:24.848862Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:24.848997Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.853472Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:24.863605Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.864124Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:33:24.864251Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:24.864306Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-12-23T18:33:24.865728Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.865762Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.865786Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:24.866393Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:24.873393Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:24.873649Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:24.879109Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:24.880368Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:33:24.881187Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:33:24.883137Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-12-23T18:33:24.883305Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-12-23T18:33:24.884084Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:24.884133Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:33:24.884158Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-12-23T18:33:24.884179Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-12-23T18:33:24.884224Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-12-23T18:33:24.884248Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-12-23T18:33:24.884437Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2025-12-23T18:33:24.884604Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } |92.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> TSchemeShardUserAttrsTest::MkDir >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:33:23.438099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: 
Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:23.438189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:23.438252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:23.438309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:23.438350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:23.438378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:23.438433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:23.438497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:23.439341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:23.439659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:23.541666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:23.541716Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:23.556357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:23.556546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:23.556778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:23.572006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:23.572227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:23.572944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:23.573168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:23.577450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:23.577634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:23.578955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:23.579022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:23.579227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:23.579290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:23.579396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:23.579548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:23.807074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.808016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.808140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.808245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.808307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.808369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.808458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.808515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.808593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.808654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.808709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.808785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.808859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:33:23.808958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:23.809023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Operation and all the parts is done, operation id: 105:0 2025-12-23T18:33:24.796627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:0 2025-12-23T18:33:24.796709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:33:24.796756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-12-23T18:33:24.796790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-12-23T18:33:24.796828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 40], 18446744073709551615 2025-12-23T18:33:24.798025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:33:24.799303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:33:24.801106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:24.801160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:24.801372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2025-12-23T18:33:24.801566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:24.801603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-12-23T18:33:24.801645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, 
txId: 105, path id: 40 FAKE_COORDINATOR: Erasing txId 105 2025-12-23T18:33:24.802394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:33:24.802525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:33:24.802578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:33:24.802627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-12-23T18:33:24.802684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-12-23T18:33:24.803600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:33:24.803704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:33:24.803758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:33:24.803800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2025-12-23T18:33:24.803847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:33:24.803949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-12-23T18:33:24.804188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:33:24.804236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2025-12-23T18:33:24.804315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:33:24.807286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:33:24.808112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:33:24.809326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-12-23T18:33:24.809787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:33:24.809859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:33:24.810460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:33:24.810609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:33:24.810652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:803:2792] TestWaitNotification: OK eventTxId 105 2025-12-23T18:33:24.811425Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:24.811687Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 263us result status StatusPathDoesNotExist 2025-12-23T18:33:24.811909Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:33:24.812624Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:24.812842Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 241us result status StatusSuccess 2025-12-23T18:33:24.813385Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> TSchemeShardUserAttrsTest::SetAttrs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:33:23.816250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:23.816350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:23.816405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:23.816461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:23.816510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:23.816538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:23.816604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:23.816663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:23.817497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:23.817785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:23.953208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:23.953261Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:23.970551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:23.970930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:23.971193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:23.984296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:23.984516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:23.985167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:23.985375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:23.994138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:23.994331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:23.995682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:23.995742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:23.995902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:23.995950Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:23.996006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:23.996109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:24.262541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.263599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.263720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.263813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.263903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.263967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.264027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.264127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.264209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.264290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.264636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.264697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.264787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.264873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:24.264958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... at schemeshard: 72057594046678944, cookie: 112 2025-12-23T18:33:25.513293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:25.513344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:25.513496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:33:25.513576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2025-12-23T18:33:25.513738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:25.513777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-12-23T18:33:25.513813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 112, path id: 39 2025-12-23T18:33:25.513845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 112, path id: 40 FAKE_COORDINATOR: Erasing txId 112 2025-12-23T18:33:25.514681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:33:25.514797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:33:25.514835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 112 2025-12-23T18:33:25.514874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-12-23T18:33:25.514965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:33:25.515425Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:33:25.515496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:33:25.515522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-12-23T18:33:25.515548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2025-12-23T18:33:25.515575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:33:25.516252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:33:25.516332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:33:25.516376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-12-23T18:33:25.516402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2025-12-23T18:33:25.516431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:33:25.516499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-12-23T18:33:25.516802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:33:25.516861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2025-12-23T18:33:25.516929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:33:25.524120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-12-23T18:33:25.524965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 112 2025-12-23T18:33:25.526247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-12-23T18:33:25.526465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-12-23T18:33:25.526892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-12-23T18:33:25.526938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-12-23T18:33:25.527523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-12-23T18:33:25.527634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-12-23T18:33:25.527670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:907:2896] TestWaitNotification: OK eventTxId 112 2025-12-23T18:33:25.528425Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:25.528611Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 215us result status StatusSuccess 2025-12-23T18:33:25.529013Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000044 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } 
UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2025-12-23T18:33:25.532499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 38 PathVersion: 8 } ApplyIf { PathId: 39 PathVersion: 7 } ApplyIf { PathId: 40 PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:33:25.532691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2025-12-23T18:33:25.532807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2025-12-23T18:33:25.537430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:25.537668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::DropBackupCollectionSqlNonExistent |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-12-23T18:33:25.375087Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.375117Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.375144Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.383038Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:25.383795Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-12-23T18:33:25.383880Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.392048Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.392076Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.392098Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.415442Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-12-23T18:33:25.418964Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-12-23T18:33:25.419044Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.423742Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.423776Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.423798Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.425335Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-12-23T18:33:25.425395Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.425422Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.425559Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-12-23T18:33:25.439688Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.439715Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.439734Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.451391Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-12-23T18:33:25.451460Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.451503Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.451575Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-12-23T18:33:25.464080Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-12-23T18:33:25.464108Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-12-23T18:33:25.464135Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.471120Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:25.478411Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:25.508859Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-12-23T18:33:25.511005Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:25.546597Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-12-23T18:33:25.552384Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-12-23T18:33:25.554602Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:25.554680Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:33:25.554709Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-12-23T18:33:25.554735Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-12-23T18:33:25.554803Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-12-23T18:33:25.554832Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-12-23T18:33:25.554854Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-12-23T18:33:25.554877Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-12-23T18:33:25.554945Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-12-23T18:33:25.554975Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-12-23T18:33:25.554999Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-12-23T18:33:25.555020Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-12-23T18:33:25.555041Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-12-23T18:33:25.555063Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-12-23T18:33:25.555089Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-12-23T18:33:25.555113Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-12-23T18:33:25.555182Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-12-23T18:33:25.555209Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-12-23T18:33:25.555242Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-12-23T18:33:25.555274Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-12-23T18:33:25.555293Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-12-23T18:33:25.555311Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-12-23T18:33:25.555334Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-12-23T18:33:25.555355Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-12-23T18:33:25.555397Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-12-23T18:33:25.555431Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-12-23T18:33:25.555451Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-12-23T18:33:25.555472Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-12-23T18:33:25.555506Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-12-23T18:33:25.555533Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-12-23T18:33:25.555564Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-12-23T18:33:25.555587Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-12-23T18:33:25.555679Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-12-23T18:33:25.555705Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-12-23T18:33:25.555727Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-12-23T18:33:25.555750Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-12-23T18:33:25.555772Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-12-23T18:33:25.555804Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-12-23T18:33:25.555832Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-12-23T18:33:25.555855Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-12-23T18:33:25.555894Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-12-23T18:33:25.555926Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-12-23T18:33:25.555951Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-12-23T18:33:25.555983Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-12-23T18:33:25.556010Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-12-23T18:33:25.556033Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-12-23T18:33:25.556054Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-12-23T18:33:25.556075Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-12-23T18:33:25.556096Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-12-23T18:33:25.556117Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-12-23T18:33:25.556205Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-12-23T18:33:25.563142Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-12-23T18:33:25.563400Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-12-23T18:33:25.563450Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-12-23T18:33:25.563476Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-12-23T18:33:25.563504Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-12-23T18:33:25.563543Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-12-23T18:33:25.563585Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-12-23T18:33:25.563617Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-12-23T18:33:25.563638Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-12-23T18:33:25.563725Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-12-23T18:33:25.563753Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-12-23T18:33:25.563773Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-12-23T18:33:25.563795Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-12-23T18:33:25.563814Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-12-23T18:33:25.563836Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-12-23T18:33:25.563855Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-12-23T18:33:25.563875Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-12-23T18:33:25.563931Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-12-23T18:33:25.563958Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-12-23T18:33:25.563977Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-12-23T18:33:25.564054Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-12-23T18:33:25.564078Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-12-23T18:33:25.564108Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-12-23T18:33:25.564134Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-12-23T18:33:25.564155Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-12-23T18:33:25.564184Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-12-23T18:33:25.564211Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-12-23T18:33:25.564230Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-12-23T18:33:25.564254Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-12-23T18:33:25.564299Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-12-23T18:33:25.564320Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-12-23T18:33:25.564341Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-12-23T18:33:25.564363Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-12-23T18:33:25.564456Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-12-23T18:33:25.564479Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-12-23T18:33:25.564509Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-12-23T18:33:25.564531Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-12-23T18:33:25.564550Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-12-23T18:33:25.564581Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-12-23T18:33:25.564606Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-12-23T18:33:25.564626Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-12-23T18:33:25.564658Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-12-23T18:33:25.564694Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-12-23T18:33:25.564716Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-12-23T18:33:25.564753Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-12-23T18:33:25.564772Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-12-23T18:33:25.564792Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-12-23T18:33:25.564811Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-12-23T18:33:25.564831Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-12-23T18:33:25.564856Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-12-23T18:33:25.564876Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-12-23T18:33:25.564940Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-12-23T18:33:25.565104Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-12-23T18:33:25.583951Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.583986Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.584011Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.589809Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-12-23T18:33:25.590477Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:25.590819Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.593229Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:25.696056Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.698996Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-12-23T18:33:25.699092Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:25.699149Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-12-23T18:33:25.699228Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-12-23T18:33:25.902857Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-12-23T18:33:26.003415Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-12-23T18:33:26.003631Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-12-23T18:33:26.003823Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-12-23T18:33:26.005156Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.005184Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.005209Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:26.005584Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:26.006097Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:26.006276Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.006969Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:26.109861Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.110124Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-12-23T18:33:26.110189Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:26.110232Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-12-23T18:33:26.110334Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-12-23T18:33:26.110458Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-12-23T18:33:26.110702Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-12-23T18:33:26.110817Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-12-23T18:33:26.110943Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |92.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-12-23T18:33:26.071310Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.071345Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.071371Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:26.087917Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-12-23T18:33:26.087980Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.088013Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.089235Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007905s 2025-12-23T18:33:26.098928Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:26.111248Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-12-23T18:33:26.111405Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.115873Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.115913Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.115936Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:26.121031Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-12-23T18:33:26.121085Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.121116Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.121184Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005911s 2025-12-23T18:33:26.123792Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:26.135061Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-12-23T18:33:26.135221Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.139833Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.139859Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.139892Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:26.151131Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-12-23T18:33:26.151182Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.151216Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.151310Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.281923s 2025-12-23T18:33:26.167559Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:26.179025Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-12-23T18:33:26.179162Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.183833Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.183860Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.183880Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:26.200618Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-12-23T18:33:26.200670Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.200740Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.200814Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.258153s 2025-12-23T18:33:26.211069Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:26.214069Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-12-23T18:33:26.214196Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.216704Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.216728Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.216750Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:26.227031Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:26.239045Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:26.261890Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.262983Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-12-23T18:33:26.263020Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.263043Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.263100Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.284773s 2025-12-23T18:33:26.263287Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-12-23T18:33:26.293146Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.293173Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.293190Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:26.303053Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:26.303570Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:26.303859Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.304245Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:26.305081Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:33:26.305614Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:33:26.307111Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-12-23T18:33:26.307217Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:33:26.307309Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:26.307355Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-12-23T18:33:26.307406Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-12-23T18:33:26.307429Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-12-23T18:33:26.308989Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.309019Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.309045Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:26.309429Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:26.309861Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:26.310043Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.315262Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:26.418963Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.419473Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-2) 2025-12-23T18:33:26.419549Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:26.419594Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-12-23T18:33:26.419655Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-12-23T18:33:26.523045Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-12-23T18:33:26.523229Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-12-23T18:33:26.524585Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.524624Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.524646Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:26.525899Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:26.526487Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:26.526714Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.527158Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:26.628314Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.629120Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-12-23T18:33:26.629197Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:26.629242Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-12-23T18:33:26.629335Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-12-23T18:33:26.630644Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-12-23T18:33:26.630945Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-12-23T18:33:26.631889Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-12-23T18:33:26.632816Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |92.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink >> DataShardSnapshots::VolatileSnapshotSplit >> TSchemeShardUserAttrsTest::MkDir [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink >> DataShardSnapshots::MvccSnapshotTailCleanup >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] >> DataShardSnapshots::MvccSnapshotAndSplit >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:33:26.727626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:26.727719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:26.727772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:26.727818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:26.727873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:26.727907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:26.727974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:26.728053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:26.728907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:26.729194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:26.906532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:26.906592Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:26.934946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:26.935154Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:26.935348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:26.947650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:26.947873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:26.948515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:26.948742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:26.952940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:26.953123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:26.954488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:26.954556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:26.954726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:26.954802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:26.954977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:26.955121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:27.124515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.125598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.125717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" 
Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.125819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.125888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.125998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.126067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.126151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.126237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.126324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.126399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.126474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.126545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.126639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.126727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
StateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:28.291379Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:28.291588Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 207us result status StatusSuccess 2025-12-23T18:33:28.292007Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } ChildrenExist: true } Children { Name: "SubDirA" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:28.292609Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:28.292764Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 153us result status StatusSuccess 2025-12-23T18:33:28.293091Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:28.293598Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:28.293834Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 190us result status StatusSuccess 2025-12-23T18:33:28.294216Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirB" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:28.294850Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:28.295033Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 170us result status StatusSuccess 2025-12-23T18:33:28.295398Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:33:27.570094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:27.570202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:27.570268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:27.570310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:27.570355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:27.570419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:27.570496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:27.570572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:27.571445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:27.571772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:27.740429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:27.740483Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:27.757387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:27.757572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:27.757747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:27.768857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:27.769073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:27.769671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:27.769860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:27.773557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:27.773724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:27.774942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:27.774999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:27.775156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:27.775202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:27.775305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:27.775452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:27.930930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.932000Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.932131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.932226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.932293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.932400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.932488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.932546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.932627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.932699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.932766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.932853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.932928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.933029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:27.933844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
eTx for txid 103:0 type: TxAlterUserAttributes target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:33:28.950423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:33:28.950488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2025-12-23T18:33:28.963649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:28.963837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2025-12-23T18:33:28.964102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:33:28.964153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:97: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:33:28.964204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-12-23T18:33:28.964331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:33:28.975246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-12-23T18:33:28.975418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2025-12-23T18:33:28.975782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:28.975919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:28.975971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:114: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000040, at schemeshard: 72057594046678944 2025-12-23T18:33:28.976179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done 
id#103:0 progress is 1/1 2025-12-23T18:33:28.976225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:33:28.976263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:33:28.976292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:33:28.976367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:33:28.976452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-12-23T18:33:28.976515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:28.976553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:33:28.976586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:33:28.976616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:33:28.976666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:33:28.976700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-12-23T18:33:28.976731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-12-23T18:33:28.988098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:28.988194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:28.988442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:28.988495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 103, path id: 1 FAKE_COORDINATOR: Erasing txId 103 2025-12-23T18:33:28.989069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:33:28.989200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:33:28.989241Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:33:28.989278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-12-23T18:33:28.989341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:33:28.989436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-12-23T18:33:28.997149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:33:28.997479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:33:28.997524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:33:28.997951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:33:28.998077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:33:28.998115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:749:2738] TestWaitNotification: OK eventTxId 103 2025-12-23T18:33:28.998616Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:28.998872Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 280us result status StatusSuccess 2025-12-23T18:33:28.999425Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2025-12-23T18:32:39.509146Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123120443011300:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:39.509214Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:32:39.575352Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00521b/r3tmp/tmpefLElH/pdisk_1.dat 2025-12-23T18:32:39.905841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:39.917628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:39.917761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:39.925759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:40.063023Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3508, node 1 2025-12-23T18:32:40.280025Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:40.285209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-12-23T18:32:40.285225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:40.285234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:40.285295Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:40.539816Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:2070 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514760136 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:40.757408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:2070 2025-12-23T18:32:43.412612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123137622881881:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:43.412704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:43.413002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123137622881891:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:43.413049Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:43.722840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:44.046770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123141917849365:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.047190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.048672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123141917849371:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.060048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123141917849372:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.063265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:44.065148Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.070165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123141917849416:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.070265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123141917849414:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.070308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123141917849418:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.070387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123141917849419:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.079571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.099911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123141917849429:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.100011Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.111734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123141917849432:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.112297Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123141917849439:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.112364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.112421Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123141917849447:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.112762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123141917849446:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:44.124355Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123141917849437:3098] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" s ... ingChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-12-23T18:33:24.282716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715694:0 progress is 1/1 2025-12-23T18:33:24.282792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715694:0 progress is 1/1 2025-12-23T18:33:24.282833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715694:0 2025-12-23T18:33:24.289622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715694:0 2025-12-23T18:33:24.290238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715695:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-12-23T18:33:24.290323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715695:0 progress is 1/1 2025-12-23T18:33:24.290350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715695:0 progress is 1/1 2025-12-23T18:33:24.290405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715695:0 TClient::Ls request: /Root/Foo 2025-12-23T18:33:24.295933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715695:0 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766514763839 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... 
(TRUNCATED) 2025-12-23T18:33:24.355085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037892, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2025-12-23T18:33:24.355308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, next wakeup# 14.999747s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2025-12-23T18:33:24.355405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037893, next wakeup# 14.999647s, rate# 0, in queue# 1 shards, running# 2 shards at schemeshard 72057594046644480 2025-12-23T18:33:24.355496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037894, next wakeup# 14.999556s, rate# 0, in queue# 1 shards, running# 3 shards at schemeshard 72057594046644480 2025-12-23T18:33:24.355573Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123120443011667:2210] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037892 2025-12-23T18:33:24.355639Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123120443011667:2210] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037891 2025-12-23T18:33:24.355752Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123120443011667:2210] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037893 2025-12-23T18:33:24.355824Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123120443011667:2210] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037894 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766514763839 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... 
(TRUNCATED) 2025-12-23T18:33:24.390724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037893, shardIdx# 72057594046644480:6 in# 35, next wakeup# 14.964350s, rate# 0, in queue# 0 shards, running# 3 shards at schemeshard 72057594046644480 2025-12-23T18:33:24.392690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, shardIdx# 72057594046644480:4 in# 37, next wakeup# 14.962376s, rate# 0, in queue# 0 shards, running# 2 shards at schemeshard 72057594046644480 2025-12-23T18:33:24.396830Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037893 CompletedLoansChanged 2025-12-23T18:33:24.397217Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037891 CompletedLoansChanged 2025-12-23T18:33:24.404548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037892, shardIdx# 72057594046644480:5 in# 49, next wakeup# 14.950519s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2025-12-23T18:33:24.406760Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037892 CompletedLoansChanged 2025-12-23T18:33:24.411944Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-12-23T18:33:24.414615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037894, shardIdx# 72057594046644480:7 in# 59, next wakeup# 14.940451s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2025-12-23T18:33:24.418084Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:33:24.418887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:33:24.422609Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037894 CompletedLoansChanged 2025-12-23T18:33:24.425966Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-12-23T18:33:24.426640Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-12-23T18:33:24.427814Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2025-12-23T18:33:24.427911Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2025-12-23T18:33:24.428806Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-12-23T18:33:24.432785Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:33:24.433312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: 
Offline, at schemeshard: 72057594046644480 2025-12-23T18:33:24.440165Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-12-23T18:33:24.441251Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-12-23T18:33:24.443188Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2025-12-23T18:33:24.443298Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2025-12-23T18:33:24.490614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037893, next wakeup# 14.864460s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2025-12-23T18:33:24.490848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, next wakeup# 14.864207s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2025-12-23T18:33:24.491370Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123120443011667:2210] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037893 2025-12-23T18:33:24.491489Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123120443011667:2210] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037891 2025-12-23T18:33:24.491691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037893, shardIdx# 72057594046644480:6 in# 1, next wakeup# 14.863366s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2025-12-23T18:33:24.491739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, shardIdx# 72057594046644480:4 in# 0, next wakeup# 14.863312s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 Table has 4 shards |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |92.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/cfg/bin/ydb_configure >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime |92.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/cfg/bin/ydb_configure |92.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.1%| [LD] {RESULT} $(B)/ydb/tools/cfg/bin/ydb_configure |92.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |92.1%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |92.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica >> TGRpcCmsTest::DescribeOptionsTest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] Test command err: 2025-12-23T18:31:58.579255Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:31:58.625521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:31:58.625800Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:31:58.636078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:31:58.636588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:31:58.636878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:31:58.637027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:31:58.637165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:31:58.637276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:31:58.637423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:31:58.637554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:31:58.637672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:31:58.637789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:31:58.637917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:31:58.638038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:31:58.638143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:31:58.670707Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:31:58.671013Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:31:58.671113Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:31:58.671311Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:58.671469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:31:58.671549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:31:58.671602Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:31:58.671718Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:31:58.671794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:31:58.671837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:31:58.671874Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:31:58.672090Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:58.672179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:31:58.672245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:31:58.672295Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:31:58.672399Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:31:58.672459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:31:58.672506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:31:58.672535Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:31:58.672604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:31:58.672642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:31:58.672673Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:31:58.672744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:31:58.672792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:31:58.672827Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:31:58.673083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:31:58.673140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:31:58.673169Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:31:58.673297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:31:58.673380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:31:58.673412Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:31:58.673469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:31:58.673508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:31:58.673537Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:31:58.673591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:31:58.673626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:31:58.673661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:31:58.673818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:31:58.673883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ry_size=286;data_size=262;sum=25914;count=92;size_of_portion=192; 2025-12-23T18:33:25.231233Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=9005; 2025-12-23T18:33:25.231331Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2025-12-23T18:33:25.232482Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1080; 2025-12-23T18:33:25.232561Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=10506; 2025-12-23T18:33:25.232613Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=10660; 2025-12-23T18:33:25.232690Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=21; 2025-12-23T18:33:25.232808Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=57; 2025-12-23T18:33:25.232860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=11473; 2025-12-23T18:33:25.233037Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=119; 2025-12-23T18:33:25.233168Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=87; 2025-12-23T18:33:25.233371Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=163; 2025-12-23T18:33:25.233592Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=155; 2025-12-23T18:33:25.236502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2851; 2025-12-23T18:33:25.239989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=3399; 2025-12-23T18:33:25.240083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-12-23T18:33:25.240135Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-12-23T18:33:25.240195Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:33:25.240275Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2025-12-23T18:33:25.240313Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:33:25.240419Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=72; 2025-12-23T18:33:25.240466Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-12-23T18:33:25.240556Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=43; 2025-12-23T18:33:25.240654Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=50; 2025-12-23T18:33:25.240755Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=65; 2025-12-23T18:33:25.240806Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=30153; 2025-12-23T18:33:25.241009Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110400112;raw_bytes=155849958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:33:25.241193Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:33:25.241296Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:33:25.241381Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:33:25.241451Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:33:25.241601Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:33:25.241674Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:33:25.241724Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:33:25.241775Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:33:25.241851Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:33:25.241904Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:25.241965Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:25.242087Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:33:25.242351Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.039000s; 2025-12-23T18:33:25.246517Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:33:25.247189Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:33:25.247272Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:33:25.247366Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:33:25.247433Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:33:25.247518Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:33:25.247585Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:25.247647Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:25.247738Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2025-12-23T18:33:25.247809Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:33:25.249496Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.162000s; 2025-12-23T18:33:25.249551Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.2%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/cfg |92.2%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/cfg >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> TGRpcCmsTest::RemoveWithAnotherTokenTest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] Test command err: 2025-12-23T18:33:20.879648Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123297776690496:2152];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:20.879783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:20.916265Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.009091s test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0038af/r3tmp/tmpEviB58/pdisk_1.dat 2025-12-23T18:33:21.335469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:21.384278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:21.384381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:21.463044Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:21.556401Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:21.570610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 5777, node 1 2025-12-23T18:33:21.719596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:21.719626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:21.719633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:21.719706Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:33:21.887761Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514801618 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:22.346647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:33:22.456929Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7587123306366626098:2301], Recipient [1:7587123302071658153:2213]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:55540" } 2025-12-23T18:33:22.456984Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-12-23T18:33:22.457008Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:22.457024Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:22.457140Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:55540" 2025-12-23T18:33:22.457287Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1766514802456813) 2025-12-23T18:33:22.457771Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1766514802456813 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-12-23T18:33:22.457949Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-12-23T18:33:22.485465Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2025-12-23T18:33:22.486115Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514802456813&action=1" } } } 2025-12-23T18:33:22.488699Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:22.488793Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-12-23T18:33:22.489015Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-12-23T18:33:22.499248Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285139, Sender [1:7587123306366626098:2301], Recipient [1:7587123302071658153:2213]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514802456813&action=1" } UserToken: "" PeerName: "ipv6:[::1]:55540" } 2025-12-23T18:33:22.499308Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:968: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 
2025-12-23T18:33:22.499542Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:3434: Add subscription to /Root/users/user-1 for [1:7587123306366626098:2301] 2025-12-23T18:33:22.499693Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3442: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514802456813&action=1" } } 2025-12-23T18:33:22.500300Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-12-23T18:33:22.500475Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-12-23T18:33:22.505971Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-12-23T18:33:22.506030Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:22.506116Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7587123306366626103:2213], Recipient [1:7587123302071658153:2213]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:22.506140Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:22.506153Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:22.506161Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:22.506198Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-12-23T18:33:22.506220Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-12-23T18:33:22.506280Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-12-23T18:33:22.520511Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-12-23T18:33:22.520544Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:22.520553Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:22.520561Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:22.520613Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-12-23T18:33:22.520644Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1766514802456813 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:22.523650Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for 
/Root/users/user-1 2025-12-23T18:33:22.523818Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:22.523855Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-12-23T18:33:22.523866Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2025-12-23T18:33:22.533505Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggr ... 186224037888 shardIdx 72057594046644480:2 2025-12-23T18:33:23.770204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:7 2025-12-23T18:33:23.770238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:4 2025-12-23T18:33:23.770268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:10 2025-12-23T18:33:23.789629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2025-12-23T18:33:23.789768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:6 2025-12-23T18:33:23.789825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2025-12-23T18:33:23.789871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2025-12-23T18:33:23.789944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2025-12-23T18:33:23.790205Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2025-12-23T18:33:23.790215Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2025-12-23T18:33:23.790268Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-12-23T18:33:23.790416Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7587123310661593972:2213], Recipient [1:7587123302071658153:2213]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-12-23T18:33:23.790445Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-12-23T18:33:23.790476Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:23.790493Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:23.790534Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-12-23T18:33:23.790568Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1766514803574867 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:23.790634Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1766514803574867 issue= 2025-12-23T18:33:23.793837Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-12-23T18:33:23.812228Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-12-23T18:33:23.812317Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-12-23T18:33:23.812340Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:23.812790Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7587123302071658034:2203], Recipient [1:7587123302071658153:2213]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-12-23T18:33:23.812886Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-12-23T18:33:23.812912Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:23.812923Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:23.812951Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-12-23T18:33:23.812980Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1766514803574867 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:23.824441Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-12-23T18:33:23.824492Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:23.824540Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-12-23T18:33:23.824679Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-12-23T18:33:23.825783Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } 
Success: true ConfigTxSeqNo: 13 2025-12-23T18:33:23.825872Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-12-23T18:33:23.846030Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-12-23T18:33:23.846169Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7587123310661594099:2213], Recipient [1:7587123302071658153:2213]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-12-23T18:33:23.846206Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-12-23T18:33:23.846222Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:23.846232Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:23.846341Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-12-23T18:33:23.846368Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-12-23T18:33:23.877698Z node 3 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [170b7153d33525b7] Result# TEvPutResult {Id# [72075186224037888:1:16:0:0:142:0] Status# ERROR StatusFlags# { } ErrorReason# "Request got Poison" ApproximateFreeSpaceShare# 0} GroupId# 2181038081 Marker# BPP12 2025-12-23T18:33:23.877894Z node 3 :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038081 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.279 sample PartId# [72075186224037888:1:16:0:0:142:1] QueryCount# 1 VDiskId# [82000001:2:0:0:0] NodeId# 1 } ] } 2025-12-23T18:33:23.901057Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-12-23T18:33:23.901092Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:23.901106Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:23.901114Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:23.901188Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1766514803574867 2025-12-23T18:33:23.901200Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1766514803574867 issue= 2025-12-23T18:33:23.901217Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1766514803574867 issue= 2025-12-23T18:33:23.901225Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2025-12-23T18:33:23.901314Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1766514803574867 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:23.904829Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 
2025-12-23T18:33:23.905098Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2422: Send /Root/users/user-1 notification to [1:7587123310661593947:2350]: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514803574867&action=2" ready: true status: SUCCESS } } 2025-12-23T18:33:23.905213Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:23.959693Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7587123310661594130:2353], Recipient [1:7587123302071658153:2213]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" PeerName: "ipv6:[::1]:55540" } 2025-12-23T18:33:23.959731Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-12-23T18:33:23.959894Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3368: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-12-23T18:33:23.968299Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285123, Sender [1:7587123310661594133:2354], Recipient [1:7587123302071658153:2213]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:55540" } 2025-12-23T18:33:23.968353Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:967: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-12-23T18:33:23.968575Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3412: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-12-23T18:33:23.986513Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-12-23T18:33:23.986722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected |92.2%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> KqpRboPg::Aggregation+ColumnStore [GOOD] >> KqpRboPg::Aggregation-ColumnStore |92.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-12-23T18:33:26.329826Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.329865Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.329889Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:26.330534Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:26.331060Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:26.342271Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.342961Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-12-23T18:33:26.344523Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:33:26.344748Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-12-23T18:33:26.345084Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-12-23T18:33:26.345307Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-12-23T18:33:26.345425Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:26.345470Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:33:26.345521Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-12-23T18:33:26.345658Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-12-23T18:33:26.345715Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-12-23T18:33:26.345745Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-12-23T18:33:26.345763Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:33:26.345886Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-12-23T18:33:26.345964Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-12-23T18:33:26.345982Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-12-23T18:33:26.345999Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-12-23T18:33:26.346063Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-12-23T18:33:26.346085Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-12-23T18:33:26.346106Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-12-23T18:33:26.346126Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:33:26.346205Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-12-23T18:33:26.347653Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.347675Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.347696Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:26.362521Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:26.365217Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:26.365495Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:26.370958Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-12-23T18:33:26.372057Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:33:26.372293Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-12-23T18:33:26.372657Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-12-23T18:33:26.372869Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-12-23T18:33:26.373105Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:26.373153Z :DEBUG: Take Data. Partition 1. 
Read: {0, 1} (2-2) 2025-12-23T18:33:26.373202Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-12-23T18:33:26.373352Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-12-23T18:33:26.373389Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-12-23T18:33:26.373434Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-12-23T18:33:26.373461Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:33:26.373581Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-12-23T18:33:26.373654Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-12-23T18:33:26.373676Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-12-23T18:33:26.373692Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-12-23T18:33:26.373756Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-12-23T18:33:26.373781Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-12-23T18:33:26.373799Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (8-8) 2025-12-23T18:33:26.373817Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00 ... tream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:33:30.769792Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2025-12-23T18:33:30.974494Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-12-23T18:33:30.974544Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-12-23T18:33:30.974580Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:30.994067Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:30.994704Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:30.995659Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-12-23T18:33:31.003931Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-12-23T18:33:31.354685Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-12-23T18:33:31.358508Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:31.360593Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:33:31.363880Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-12-23T18:33:31.364863Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-12-23T18:33:31.370174Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-12-23T18:33:31.371160Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-12-23T18:33:31.372114Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-12-23T18:33:31.373089Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-12-23T18:33:31.383374Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-12-23T18:33:31.384349Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-12-23T18:33:31.384427Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-12-23T18:33:31.384619Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:33:31.388656Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). 
Partition stream id: 1 2025-12-23T18:33:31.404580Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:31.404608Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:31.404638Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:31.410966Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:31.422998Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:31.423205Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:31.425853Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:31.426358Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-12-23T18:33:31.427679Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:31.427706Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:31.427728Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:31.428062Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:31.438411Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:31.438620Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:31.443478Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:31.447095Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:33:31.450848Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:31.450924Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-12-23T18:33:31.454306Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 |92.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] Test command err: 2025-12-23T18:31:53.923503Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:31:53.963225Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:31:53.963516Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:31:53.972270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:31:53.972579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:31:53.972835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:31:53.972973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:31:53.973127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:31:53.973267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:31:53.973396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:31:53.973519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:31:53.973622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:31:53.973726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:31:53.973850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:31:53.973975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:31:53.974087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:31:54.007907Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:31:54.008175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:31:54.008270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:31:54.008487Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:54.008664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:31:54.008752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:31:54.008807Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:31:54.008937Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:31:54.009020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:31:54.009073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:31:54.009113Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:31:54.009316Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:54.009401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:31:54.009458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:31:54.009508Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:31:54.009627Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:31:54.009706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:31:54.009754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:31:54.009787Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:31:54.009852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:31:54.009904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:31:54.009938Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:31:54.010010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:31:54.010062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:31:54.010103Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:31:54.010389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:31:54.010474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:31:54.010513Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:31:54.010685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:31:54.010758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:31:54.010817Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:31:54.010886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:31:54.010933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:31:54.010969Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:31:54.011022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:31:54.011064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:31:54.011100Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:31:54.011301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:31:54.011365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=8903; 2025-12-23T18:33:24.828166Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2025-12-23T18:33:24.829104Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=881; 2025-12-23T18:33:24.829167Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=10165; 2025-12-23T18:33:24.829216Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=10323; 2025-12-23T18:33:24.829288Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=15; 2025-12-23T18:33:24.829403Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=64; 
2025-12-23T18:33:24.829454Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=11220; 2025-12-23T18:33:24.829645Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=122; 2025-12-23T18:33:24.829785Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=92; 2025-12-23T18:33:24.829972Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=146; 2025-12-23T18:33:24.830175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=142; 2025-12-23T18:33:24.832581Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2338; 2025-12-23T18:33:24.835194Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2531; 2025-12-23T18:33:24.835280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2025-12-23T18:33:24.835336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2025-12-23T18:33:24.835378Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-12-23T18:33:24.835465Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2025-12-23T18:33:24.835512Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-12-23T18:33:24.835625Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=62; 2025-12-23T18:33:24.835672Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-12-23T18:33:24.835746Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=39; 2025-12-23T18:33:24.835839Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=55; 2025-12-23T18:33:24.835969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=94; 2025-12-23T18:33:24.836015Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=30865; 2025-12-23T18:33:24.836195Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110437896;raw_bytes=171489958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:33:24.836328Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:33:24.836397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:33:24.836495Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:33:24.836569Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:33:24.836721Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:33:24.836804Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:33:24.836849Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:33:24.836917Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:33:24.836988Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:33:24.837044Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:24.837087Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:24.837205Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:33:24.837438Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.125000s; 2025-12-23T18:33:24.839701Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:33:24.843820Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:33:24.843922Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:33:24.844027Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:33:24.844092Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:33:24.844179Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:33:24.844271Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:33:24.844344Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:24.844398Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:24.844502Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:33:24.844572Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:33:24.845336Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.076000s; 2025-12-23T18:33:24.845390Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] >> IndexBuildTest::CancelBuildUniq [GOOD] |92.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |92.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |92.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} >> TIncrHugeBlobIdDict::Basic [GOOD] |92.2%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> TGRpcCmsTest::SimpleTenantsTest >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] >> TIncrHugeBasicTest::Recovery [GOOD] >> TIncrHugeBasicTest::Defrag |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBlobIdDict::Basic [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/incrhuge/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/incrhuge/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::Recovery [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/incrhuge/ut/unittest >> IncrementalBackup::DropBackupCollectionSqlNonExistent [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore+WithIncremental |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest |92.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |92.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |92.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2025-12-23T18:32:18.250017Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123029869554096:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:18.283355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00522d/r3tmp/tmp3V7QEk/pdisk_1.dat 2025-12-23T18:32:18.635487Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:18.768321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:18.768427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:18.789009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:18.830721Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:18.848310Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 65341, node 1 2025-12-23T18:32:19.155374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:19.155400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:19.155408Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:19.158810Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:19.287998Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25615 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514738898 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:19.633451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... Triggering split by load TClient is connected to server localhost:25615 2025-12-23T18:32:23.210131Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123029869554096:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:23.210189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:23.601275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123051344391995:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:23.601374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:23.602146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123051344392005:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:23.602192Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:23.921181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:24.194921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123055639359475:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.195030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.199120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123055639359478:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.199213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.226942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766514744064 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766514744064 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) 2025-12-23T18:32:24.564518Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123055639359579:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.564717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.567056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123055639359603:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.567101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123055639359605:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.567162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123055639359606:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.567218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123055639359607:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.567245Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123055639359608:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { ... tPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) 2025-12-23T18:33:29.294248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:560: Propose merge request: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 } Internal: true FailOnExist: false } TxId: 281474976710694 TabletId: 72057594046644480, reason: shard with tabletId: 72075186224037890 merge by load (shardLoad: 0.02), shardToMergeCount: 2, totalSize: 0, sizeToMerge: 0, totalLoad: 0.04, loadThreshold: 0.07 2025-12-23T18:33:29.294475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976710694:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2025-12-23T18:33:29.295195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976710694:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000+\250\313\201\000\000\000\200" TabletID: 72075186224037889 ShardIdx: 2 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000+\250\313\201\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037890 ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 4 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2025-12-23T18:33:29.295254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710694:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:33:29.300052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710694:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-12-23T18:33:29.311514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710694:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-12-23T18:33:29.311608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710694:0 2 -> 3 2025-12-23T18:33:29.316218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: 
TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710694:0, at schemeshard: 72057594046644480 2025-12-23T18:33:29.327996Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7587123334812342677:6463] 2025-12-23T18:33:29.371719Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2025-12-23T18:33:29.371853Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-12-23T18:33:29.372039Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2025-12-23T18:33:29.381097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976710694:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710694:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710694 TabletId: 72075186224037891 2025-12-23T18:33:29.381140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710694:0 3 -> 131 2025-12-23T18:33:29.383762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976710694:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:33:29.424421Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2025-12-23T18:33:29.424552Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:33:29.424601Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2025-12-23T18:33:29.424625Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037891 2025-12-23T18:33:29.424892Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2025-12-23T18:33:29.428695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710694 TabletId: 72075186224037889 2025-12-23T18:33:29.431221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976710694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710694 TabletId: 72075186224037890 2025-12-23T18:33:29.431529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710694:0 131 -> 132 2025-12-23T18:33:29.435820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:33:29.436104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:33:29.436166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976710694:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:33:29.438824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 
72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710694 2025-12-23T18:33:29.438876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710694 2025-12-23T18:33:29.438898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710694, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2025-12-23T18:33:29.448097Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-12-23T18:33:29.450427Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-12-23T18:33:29.452423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-12-23T18:33:29.452704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-12-23T18:33:29.452768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710694:0 progress is 1/1 2025-12-23T18:33:29.452791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710694:0 progress is 1/1 2025-12-23T18:33:29.452830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710694:0 2025-12-23T18:33:29.456687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710694:0 2025-12-23T18:33:29.458266Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:33:29.460613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:33:29.461449Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:33:29.462511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:33:29.474462Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-12-23T18:33:29.474511Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-12-23T18:33:29.475147Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-12-23T18:33:29.475180Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-12-23T18:33:29.476447Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:256: OnTabletDead: 72075186224037889 2025-12-23T18:33:29.476536Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2025-12-23T18:33:29.477570Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2025-12-23T18:33:29.477637Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766514744064 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/incrhuge/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/incrhuge/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/incrhuge/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] Test command err: 2025-12-23T18:31:59.010783Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:31:59.043228Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:31:59.043625Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:31:59.054687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:31:59.055063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:31:59.055349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:31:59.055524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:31:59.055673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:31:59.055813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:31:59.055933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:31:59.056064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:31:59.056214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:31:59.056347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:31:59.056459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:31:59.056609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:31:59.056754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:31:59.095103Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:31:59.095302Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:31:59.095356Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:31:59.095583Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:59.095765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:31:59.095856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:31:59.095910Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:31:59.096030Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:31:59.096105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:31:59.096174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:31:59.096228Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:31:59.096451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:59.096531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:31:59.096580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:31:59.096616Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:31:59.096721Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:31:59.096786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:31:59.096861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:31:59.096906Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:31:59.096974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:31:59.097022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:31:59.097055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:31:59.097127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:31:59.097172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:31:59.097220Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:31:59.097497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:31:59.097563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:31:59.097599Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:31:59.097750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:31:59.097797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:31:59.097842Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:31:59.097895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:31:59.097941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:31:59.097975Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:31:59.098030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:31:59.098087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:31:59.098131Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:31:59.098280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:31:59.098327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ent=StartCleanup;portions_count=2; 2025-12-23T18:33:09.891363Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=20;drop=0;skip=0;portions_counter=20;chunks=2240;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:33:09.891515Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=manager.cpp:10;event=lock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::d4ec9e68-e02d11f0-b0e49d47-2746a8e7; 2025-12-23T18:33:09.891582Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=ro_controller.cpp:41;event=CS::CLEANUP::PORTIONS;tablet_id=9437184; 2025-12-23T18:33:09.891696Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=abstract.cpp:13;event=new_stage;stage=Started;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7; 2025-12-23T18:33:09.892416Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessorResources;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7; 2025-12-23T18:33:09.892570Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessors;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7; 2025-12-23T18:33:09.892766Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:09.892870Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.626500s; 2025-12-23T18:33:09.892964Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:33:09.893536Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7; 2025-12-23T18:33:09.893868Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
event=on_execution;consumer=CLEANUP_PORTIONS;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_finished;consumer=CLEANUP_PORTIONS;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7; Cleanup old portions: 2 4 9 8 1 6 3 7 10 5 19 14 18 13 21 11 15 22 20 17 2025-12-23T18:33:09.894183Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2025-12-23T18:33:09.894250Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7; 2025-12-23T18:33:09.894400Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[183] (CS::CLEANUP::PORTIONS) apply at tablet 9437184 2025-12-23T18:33:09.895300Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:345:2353];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7; 2025-12-23T18:33:09.895854Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=148108480;raw_bytes=150092142;count=25;records=1800002} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12353440;raw_bytes=14738900;count=2;records=150000} inactive {blob_bytes=123422240;raw_bytes=125076680;count=20;records=1500000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:33:09.909100Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7; 2025-12-23T18:33:09.909198Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2025-12-23T18:33:09.909610Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::d4ec9e68-e02d11f0-b0e49d47-2746a8e7; 2025-12-23T18:33:09.909694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:33:09.909776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2025-12-23T18:33:09.909855Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:33:09.909959Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:33:09.910025Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:09.910082Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:09.910183Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.625000s; 2025-12-23T18:33:09.910256Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=d4ec9e68-e02d11f0-b0e49d47-2746a8e7;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:33:09.910370Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:20:4:0:6171112:0] 2025-12-23T18:33:09.910465Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:2:4:0:6171112:0] 2025-12-23T18:33:09.910532Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:5:4:0:6171112:0] 2025-12-23T18:33:09.910577Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:16:3:0:6171112:0] 2025-12-23T18:33:09.910620Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:19:3:0:6171112:0] 2025-12-23T18:33:09.910660Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:1:3:0:6171112:0] 2025-12-23T18:33:09.910703Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:4:3:0:6171112:0] 2025-12-23T18:33:09.910765Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:12:2:0:6171112:0] 2025-12-23T18:33:09.910819Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:7:3:0:6171112:0] 2025-12-23T18:33:09.910865Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:6171112:0] 2025-12-23T18:33:09.910907Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:3:2:0:6171112:0] 2025-12-23T18:33:09.910947Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:6:2:0:6171112:0] 2025-12-23T18:33:09.910985Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:8:4:0:6171112:0] 2025-12-23T18:33:09.911030Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:18:2:0:6171112:0] 2025-12-23T18:33:09.911085Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:9:2:0:6171112:0] 2025-12-23T18:33:09.911137Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete 
Blob DS:0:[9437184:3:17:4:0:6171112:0] 2025-12-23T18:33:09.911179Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:13:3:0:6171112:0] 2025-12-23T18:33:09.911222Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:11:4:0:6171112:0] 2025-12-23T18:33:09.911266Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:6171112:0] 2025-12-23T18:33:09.911307Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:15:2:0:6171112:0] GC for channel 2 deletes blobs: WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 Compactions happened: 2 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/incrhuge/ut/unittest >> IncrementalBackup::E2EMultipleBackupRestoreCycles [GOOD] >> IncrementalBackup::DropBackupCollectionSqlPathResolution |92.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |92.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |92.2%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> KqpRboYql::TPCH_YDB_PERF [GOOD] >> KqpRboYql::TPCH_YQL >> KqpRboYql::BasicJoins [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TReplicaTest::Commit |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/incrhuge/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2025-12-23T18:31:58.457662Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:31:58.495268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:31:58.495562Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:31:58.504924Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:31:58.505264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:31:58.505540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:31:58.505690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:31:58.505843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:31:58.505977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:31:58.506131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:31:58.506269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:31:58.506381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:31:58.506517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:31:58.506652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:31:58.506821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:31:58.506952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:31:58.548201Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:31:58.548446Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:31:58.548523Z node 1 
:TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:31:58.548732Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:58.548917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:31:58.549009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:31:58.549063Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:31:58.549163Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:31:58.549251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:31:58.549310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:31:58.549354Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:31:58.549589Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:31:58.549671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:31:58.549743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:31:58.549794Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:31:58.549900Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:31:58.549966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:31:58.550049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:31:58.550087Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:31:58.550154Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:31:58.550205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:31:58.550236Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:31:58.550302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:31:58.550353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:31:58.550432Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:31:58.550702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:31:58.550862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:31:58.550903Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:31:58.551145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:31:58.551223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:31:58.551262Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:31:58.551328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:31:58.551375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:31:58.551406Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:31:58.551455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:31:58.551493Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:31:58.551524Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:31:58.551711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:31:58.551768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=19723; 2025-12-23T18:33:31.417942Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=11; 2025-12-23T18:33:31.431061Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=13026; 2025-12-23T18:33:31.431182Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=33252; 2025-12-23T18:33:31.431242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=33415; 2025-12-23T18:33:31.431317Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2025-12-23T18:33:31.431413Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=53; 2025-12-23T18:33:31.431458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=44751; 2025-12-23T18:33:31.431666Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=145; 2025-12-23T18:33:31.431814Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=97; 2025-12-23T18:33:31.431994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=137; 2025-12-23T18:33:31.432190Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=149; 2025-12-23T18:33:31.443698Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=11421; 2025-12-23T18:33:31.446729Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2861; 2025-12-23T18:33:31.454429Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=21; 2025-12-23T18:33:31.454497Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2025-12-23T18:33:31.454536Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-12-23T18:33:31.454654Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=87; 2025-12-23T18:33:31.454695Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:33:31.454805Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=75; 2025-12-23T18:33:31.454849Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-12-23T18:33:31.454922Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=41; 2025-12-23T18:33:31.455055Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=74; 2025-12-23T18:33:31.455186Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=89; 2025-12-23T18:33:31.455246Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=105665; 2025-12-23T18:33:31.455408Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110400112;raw_bytes=155849958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:33:31.455550Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:33:31.455627Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:33:31.455738Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:33:31.455805Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:33:31.455950Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:33:31.456030Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:33:31.456073Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:33:31.456131Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:33:31.456205Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:33:31.456255Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:31.456303Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:31.456448Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:33:31.456694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.084000s; 2025-12-23T18:33:31.467072Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:33:31.471364Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:33:31.471465Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:33:31.471564Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:33:31.471616Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:33:31.471681Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:33:31.471762Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:33:31.471829Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:31.471896Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:33:31.471994Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:33:31.472084Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:33:31.472977Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.102000s; 2025-12-23T18:33:31.473035Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic >> TGRpcCmsTest::DisabledTxTest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TReplicaTest::Unsubscribe >> TReplicaTest::Update >> TReplicaTest::Commit [GOOD] >> TReplicaTest::AckNotifications >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental >> TReplicaTest::Unsubscribe [GOOD] >> TReplicaTest::UnsubscribeUnknownPath >> TReplicaTest::Handshake >> TReplicaTest::AckNotifications [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge >> TGRpcCmsTest::DescribeOptionsTest [GOOD] >> TReplicaTest::UnsubscribeUnknownPath [GOOD] >> TReplicaTest::Update [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] >> TGRpcCmsTest::AuthTokenTest >> TReplicaTest::Handshake [GOOD] >> TReplicaTest::DoubleUnsubscribe >> TGRpcCmsTest::AlterRemoveTest >> 
TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] >> TReplicaTest::DoubleUnsubscribe [GOOD] >> TReplicaTest::DoubleDelete >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink >> IncrementalBackup::OmitIndexesIncrementalBackup [GOOD] >> IncrementalBackup::MultipleIndexesIncrementalRestore >> TReplicaTest::DoubleDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeUnknownPath [GOOD] Test command err: 2025-12-23T18:33:38.450595Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2025-12-23T18:33:38.450680Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:38.450848Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2025-12-23T18:33:38.450890Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# path 2025-12-23T18:33:38.451019Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:38.451114Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:10:2057] 2025-12-23T18:33:38.451155Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:10:2057], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:38.451279Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:38.451333Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:38.458182Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:38.471049Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:9:2056] 2025-12-23T18:33:38.471134Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:9:2056], path# path 2025-12-23T18:33:38.471238Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 40 2025-12-23T18:33:38.471305Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-12-23T18:33:38.471346Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [1:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:38.722900Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> 
TReplicaTest::Subscribe >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink >> TReplicaTest::Subscribe [GOOD] >> TReplicaTest::SubscribeUnknownPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] Test command err: 2025-12-23T18:33:38.129269Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2025-12-23T18:33:38.129348Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:38.129484Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2025-12-23T18:33:38.129520Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:7:2054] Commit generation: owner# 1, generation# 1 2025-12-23T18:33:38.129566Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:8:2055] 2025-12-23T18:33:38.129607Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 2 2025-12-23T18:33:38.505198Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:9:2056] 2025-12-23T18:33:38.505261Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# path 2025-12-23T18:33:38.505399Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:9:2056], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-12-23T18:33:38.505513Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-12-23T18:33:38.505550Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:38.505666Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:38.505703Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:38.516676Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:38.516925Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:7:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:9:2056] 2025-12-23T18:33:38.517039Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 40 2025-12-23T18:33:38.517084Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-12-23T18:33:38.517125Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [2:7:2054] Delete description: path# 
path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:38.517199Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:7:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [2:9:2056] 2025-12-23T18:33:38.843684Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2025-12-23T18:33:38.843760Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:38.843888Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:38.843928Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:38.844027Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:38.844172Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2025-12-23T18:33:38.844279Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-12-23T18:33:38.844393Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:38.844426Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:38.844479Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 3, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:38.844665Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:38.844701Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-12-23T18:33:38.844733Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:38.844817Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7:2054] Upsert description: path# path 2025-12-23T18:33:38.844897Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-12-23T18:33:38.844949Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , 
PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:38.845048Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [3:9:2056] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::BasicJoins [GOOD] Test command err: Trying to start YDB, gRPC: 4260, MsgBus: 7586 2025-12-23T18:32:42.561030Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123133690092589:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:42.561225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d31/r3tmp/tmpVblgLN/pdisk_1.dat 2025-12-23T18:32:42.962263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:42.962383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:42.971182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:42.972641Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123133690092459:2081] 1766514762540142 != 1766514762540145 2025-12-23T18:32:42.991477Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:42.999397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4260, node 1 2025-12-23T18:32:43.143581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:43.143607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:43.143623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:43.143757Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:43.213972Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7586 2025-12-23T18:32:43.576276Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7586 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514763048 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:43.746184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:43.754977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:32:46.154024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123150869962539:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:46.154144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:46.154984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123150869962549:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:46.155056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:46.584621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:32:47.329332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:32:47.329663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:32:47.329948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:32:47.330072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:32:47.330174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:32:47.330268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:32:47.330381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:32:47.330485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:32:47.330605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:32:47.330706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:32:47.335651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2025-12-23T18:32:47.335826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:32:47.335948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7587123150869963114:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:32:47.386430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587123150869963152:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:32:47.386528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587123150869963152:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:32:47.386782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587123150869963152:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:32:47.386913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587123150869963152:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:32:47.387021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587123150869963152:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:32:47.387141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587123150869963152:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:32:47.387251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587123150869963152:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:32:47.387353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587123150869963152:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:32:47.387446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224 ... 
2-23T18:33:11.046051Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.055114Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.055198Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.055224Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.060154Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.060212Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.060243Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.063874Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038056;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.063942Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038056;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.063960Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038056;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.067552Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.067654Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.067683Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.077692Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.077764Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.077784Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.087506Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.087584Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.087603Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.096993Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.097076Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.097098Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.108457Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.108550Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.108572Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.108626Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.108706Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.108752Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2025-12-23T18:33:11.242848Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123255553581856:3344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:11.242966Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:11.243220Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123255553581861:3347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:11.243221Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123255553581862:3348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:11.243291Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:11.246907Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:33:11.279063Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587123255553581865:3349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-12-23T18:33:11.357975Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587123255553581916:6618] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:33:13.022629Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:33:13.022666Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:33.992190Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7587123350042872505:11426], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
:3:135: Fatal: ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:592: Condition violated: `(isLeftSidePredicate && !isRightSidePredicate) || (isRightSidePredicate && !isLeftSidePredicate)', code: 1 2025-12-23T18:33:33.995461Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=3&id=ODY5ZDlmYzgtYTE4ZjM2NzUtNjU2M2U1OTctMjU5NjNlMzM=, ActorId: [3:7587123350042872502:11424], ActorState: ExecuteState, TraceId: 01kd67sttw8jdy9qts8s80j55y, ReplyQueryCompileError, status: INTERNAL_ERROR, issues: { message: "Execution" issue_code: 1060 issues { position { row: 3 column: 135 } message: "ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:592: Condition violated: `(isLeftSidePredicate && !isRightSidePredicate) || (isRightSidePredicate && !isLeftSidePredicate)\'" end_position { row: 3 column: 135 } issue_code: 1 } }, remove tx with tx_id: 2025-12-23T18:33:34.166793Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7587123354337839819:11435], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
:3:143: Fatal: ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:531: OR in join predicate is not supported, code: 1 2025-12-23T18:33:34.171818Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=3&id=NTZhMWM5NzMtZTM3OTY5MTUtNzU0ZWNhM2MtOWU5MzNjZGE=, ActorId: [3:7587123354337839816:11433], ActorState: ExecuteState, TraceId: 01kd67sv3hagj3t33r47mgnhw0, ReplyQueryCompileError, status: INTERNAL_ERROR, issues: { message: "Execution" issue_code: 1060 issues { position { row: 3 column: 143 } message: "ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:531: OR in join predicate is not supported" end_position { row: 3 column: 143 } issue_code: 1 } }, remove tx with tx_id: |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SubscribeUnknownPath [GOOD] >> TReplicaTest::SyncVersion |92.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest |92.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |92.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] Test command err: 2025-12-23T18:33:38.542289Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2025-12-23T18:33:38.542369Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:38.542591Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:38.542634Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:38.550346Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:38.550551Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-12-23T18:33:38.550637Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:38.550873Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2025-12-23T18:33:38.550929Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# path 2025-12-23T18:33:38.551032Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:8:2055] 2025-12-23T18:33:38.551091Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 
2025-12-23T18:33:38.551226Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:8:2055] 2025-12-23T18:33:38.551275Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:38.915629Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-12-23T18:33:38.915710Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:38.915881Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:38.915932Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:38.916018Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:38.916107Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] Test command err: 2025-12-23T18:33:31.962003Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.007310s 2025-12-23T18:33:32.123377Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123347021941820:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:32.123433Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0038a2/r3tmp/tmpWVXp3l/pdisk_1.dat 2025-12-23T18:33:32.855024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:32.985440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:32.985538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:33.119003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:33.140281Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:33.154819Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:33.159225Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TServer::EnableGrpc on GrpcPort 11327, node 1 2025-12-23T18:33:33.579455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:33.579480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:33.579488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:33.579572Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10380 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514813217 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:34.118223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
TClient is connected to server localhost:10380 2025-12-23T18:33:34.470860Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:131: TTxProcessor(tenants) is now locking 2025-12-23T18:33:34.470880Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:143: TTxProcessor(tenants) is now locked by parent 2025-12-23T18:33:34.482927Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:102: TTxProcessor(tenants) is now active 2025-12-23T18:33:34.574479Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285140, Sender [1:7587123355611877477:2303], Recipient [1:7587123347021942268:2216]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:44812" } 2025-12-23T18:33:34.574537Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:964: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2025-12-23T18:33:34.577133Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3326: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleDelete [GOOD] Test command err: 2025-12-23T18:33:38.816699Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2025-12-23T18:33:38.816789Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:39.097527Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-12-23T18:33:39.097596Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:39.097744Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:39.097782Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:39.120730Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId 
[OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:39.120955Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2025-12-23T18:33:39.121041Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:39.121183Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] 2025-12-23T18:33:39.121236Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:8:2055], path# path 2025-12-23T18:33:39.121290Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] 2025-12-23T18:33:39.534723Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2025-12-23T18:33:39.538935Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:39.539132Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2025-12-23T18:33:39.539171Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7:2054] Upsert description: path# path 2025-12-23T18:33:39.539248Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:39.539491Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:39.539541Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:39.539608Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:39.539841Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2025-12-23T18:33:39.539895Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-12-23T18:33:39.539938Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:39.540089Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:10:2057] 2025-12-23T18:33:39.540154Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:10:2057], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:39.540257Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 
Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2025-12-23T18:33:39.540295Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true |92.2%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> KqpRboPg::TPCH [GOOD] >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuildUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:30:23.907684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:30:23.907759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:23.907791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:30:23.907844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:30:23.907885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:30:23.907911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:30:23.907961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:30:23.908035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:30:23.908871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:30:23.909175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:30:24.002244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:30:24.002305Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:24.008294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:30:24.008500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:30:24.008634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2025-12-23T18:30:24.012490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:30:24.012714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:30:24.013455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:30:24.013658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:30:24.015539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:24.015726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:30:24.016934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:30:24.016995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:30:24.017128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:30:24.017190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:30:24.017240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:30:24.017413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:30:24.175798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.176770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.176878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.176962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.177030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.177144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.177222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.177296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.177375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.177446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.177509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.177590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.177664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.177760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:30:24.177842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
fy, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-12-23T18:33:32.524530Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:127:2152] message: TxId: 281474976710760 2025-12-23T18:33:32.524596Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:33:32.524642Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2025-12-23T18:33:32.524674Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710760:0 2025-12-23T18:33:32.524788Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-12-23T18:33:32.532841Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-12-23T18:33:32.532974Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710760 2025-12-23T18:33:32.533071Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2025-12-23T18:33:32.533237Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [6:1549:3411], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2025-12-23T18:33:32.540676Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking 2025-12-23T18:33:32.540926Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [6:1549:3411], AlterMainTableTxId: 0, 
AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:33:32.541018Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-12-23T18:33:32.548122Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled 2025-12-23T18:33:32.548336Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [6:1549:3411], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:33:32.548400Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-12-23T18:33:32.548637Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:33:32.548709Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:1646:3497] TestWaitNotification: OK eventTxId 102 2025-12-23T18:33:32.557856Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-12-23T18:33:32.558305Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { 
source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2025-12-23T18:33:32.572135Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:33:32.572537Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 470us result status StatusSuccess 2025-12-23T18:33:32.573191Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:33:32.589312Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:33:32.589705Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 436us result status StatusPathDoesNotExist 2025-12-23T18:33:32.589988Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 39 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> OperationMapping::IndexBuildCanceled >> OperationMapping::IndexBuildCanceled [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD] Test command err: 2025-12-23T18:33:40.183712Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2025-12-23T18:33:40.183817Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:40.184031Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:40.184096Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:40.192236Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:40.192445Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-12-23T18:33:40.192530Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:40.192682Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 40 2025-12-23T18:33:40.192727Z node 1 :SCHEME_BOARD_REPLICA 
NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-12-23T18:33:40.192769Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [1:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:40.504990Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2025-12-23T18:33:40.505072Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# path 2025-12-23T18:33:40.505143Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:40.849846Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2025-12-23T18:33:40.849932Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:40.850075Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 76 2025-12-23T18:33:40.850134Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:40.850212Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 100500, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 32} 2025-12-23T18:33:40.850341Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-12-23T18:33:40.850423Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:40.850601Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:8:2055], cookie# 1 |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] |92.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |92.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |92.2%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> OperationMapping::IndexBuildSuccess |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2025-12-23T18:32:18.631794Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123028911694026:2082];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:18.633930Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00521f/r3tmp/tmpPuYJTY/pdisk_1.dat 2025-12-23T18:32:18.942734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:18.969578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:18.970125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:18.999870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:19.099631Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:19.148132Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 61997, node 1 2025-12-23T18:32:19.413011Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:19.413038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:19.413046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:19.413261Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:19.650870Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:11351 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514739157 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:19.919280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:19.942047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:11351 2025-12-23T18:32:23.634825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123028911694026:2082];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:23.634917Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:23.651409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123050386531894:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:23.651678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:23.655720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123050386531904:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:23.655877Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.052425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:24.310791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123054681499366:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.310919Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.311385Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123054681499372:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.311408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123054681499371:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.311495Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.315254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:24.350942Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123054681499375:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:32:24.442244Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123054681499463:3107] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:32:33.911272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:32:33.911302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:12.631707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976712661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766514744211 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766514744211 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) 2025-12-23T18:33:14.722732Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 1.7709 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 1 at datashard: 72075186224037888 table: [/Root/Foo] TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 7205759 ... 
nder: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:33:34.935873Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:33:34.941580Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2025-12-23T18:33:34.941685Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-12-23T18:33:34.941845Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2025-12-23T18:33:34.956466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715693 TabletId: 72075186224037890 2025-12-23T18:33:34.956782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715693 TabletId: 72075186224037889 2025-12-23T18:33:34.956809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 3 -> 131 2025-12-23T18:33:34.963135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:33:35.012198Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037889 2025-12-23T18:33:35.012331Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:33:35.012384Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2025-12-23T18:33:35.012414Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2025-12-23T18:33:35.012602Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037890 2025-12-23T18:33:35.012680Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:33:35.012725Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2025-12-23T18:33:35.012742Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:33:35.012746Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2025-12-23T18:33:35.012960Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2025-12-23T18:33:35.018695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715693:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715693 TabletId: 72075186224037888 2025-12-23T18:33:35.019154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 131 
-> 132 2025-12-23T18:33:35.021867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:33:35.022121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:33:35.022192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:33:35.025652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715693 2025-12-23T18:33:35.025707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715693 2025-12-23T18:33:35.025725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2025-12-23T18:33:35.036905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-12-23T18:33:35.037333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2025-12-23T18:33:35.037374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2025-12-23T18:33:35.037431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2025-12-23T18:33:35.043635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715693:0 2025-12-23T18:33:35.120836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2025-12-23T18:33:35.121007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 14.999777s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2025-12-23T18:33:35.121918Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123028911694396:2217] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2025-12-23T18:33:35.122134Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123028911694396:2217] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037890 2025-12-23T18:33:35.166844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 
72057594046644480:2 in# 45, next wakeup# 14.953959s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2025-12-23T18:33:35.173136Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037889 CompletedLoansChanged 2025-12-23T18:33:35.197929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 77, next wakeup# 14.922875s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2025-12-23T18:33:35.208961Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037890 CompletedLoansChanged 2025-12-23T18:33:35.220647Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2025-12-23T18:33:35.234724Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:33:35.255181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:33:35.270867Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-12-23T18:33:35.271955Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-12-23T18:33:35.274612Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2025-12-23T18:33:35.283087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 14.837715s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2025-12-23T18:33:35.283773Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-12-23T18:33:35.287929Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7587123028911694396:2217] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2025-12-23T18:33:35.288913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 5, next wakeup# 14.831879s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766514744211 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: 
false } Columns { Name: "Versi... (TRUNCATED) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766514744211 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versi... (TRUNCATED) Table has 2 shards >> IncrementalBackup::DropBackupCollectionSqlPathResolution [GOOD] >> IncrementalBackup::DropBackupCollectionSqlWithDatabaseLikeNames ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] Test command err: 2025-12-23T18:33:34.563279Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123354947790242:2090];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:34.568843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00389c/r3tmp/tmpenig80/pdisk_1.dat 2025-12-23T18:33:34.922840Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:35.004062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:35.004204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:35.019801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:35.060394Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7038, node 1 2025-12-23T18:33:35.303716Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:35.405202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:35.405227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:35.405236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:35.406397Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:33:35.585187Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22418 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514815170 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:36.213986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:33:36.318644Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7587123363537725904:2301], Recipient [1:7587123354947790695:2223]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:58438" } 2025-12-23T18:33:36.318691Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-12-23T18:33:36.318720Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:36.318735Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:36.322986Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:58438" 2025-12-23T18:33:36.323192Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1766514816322806) 2025-12-23T18:33:36.323804Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1766514816322806 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-12-23T18:33:36.324014Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-12-23T18:33:36.334264Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 
2025-12-23T18:33:36.335107Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514816322806&action=1" } } } 2025-12-23T18:33:36.335264Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:36.335347Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-12-23T18:33:36.335560Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-12-23T18:33:36.336236Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-12-23T18:33:36.336369Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-12-23T18:33:36.345167Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-12-23T18:33:36.345233Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:36.345335Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7587123363537725909:2223], Recipient [1:7587123354947790695:2223]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:36.345351Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:36.345378Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:36.345386Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:36.345432Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-12-23T18:33:36.345452Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-12-23T18:33:36.345520Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-12-23T18:33:36.350015Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-12-23T18:33:36.350067Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:36.350077Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:36.350087Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:36.350152Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-12-23T18:33:36.350196Z node 1 
:CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1766514816322806 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:36.356453Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-12-23T18:33:36.356637Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:36.356668Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-12-23T18:33:36.356677Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2025-12-23T18:33:36.365736Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:58438" 2025-12-23T18:33:36.377620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:33:36.378517Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7587123363537725922:2302], Recipient [1:7587123354947790695:2223]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514816322806&action=1" } UserToken: "" } 2025-12-23T18:33:36.378541Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-12-23T18:33:36.378810Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/use ... 
:2223]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-12-23T18:33:37.410799Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-12-23T18:33:37.410817Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:37.410840Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:37.410876Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-12-23T18:33:37.410894Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1766514817363290 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:37.410955Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1766514817363290 issue= 2025-12-23T18:33:37.412330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2025-12-23T18:33:37.412398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:6 2025-12-23T18:33:37.412439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2025-12-23T18:33:37.412469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2025-12-23T18:33:37.412506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2025-12-23T18:33:37.419951Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-12-23T18:33:37.422701Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-12-23T18:33:37.422855Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-12-23T18:33:37.422901Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:37.423373Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7587123354947790576:2204], Recipient [1:7587123354947790695:2223]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-12-23T18:33:37.423393Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-12-23T18:33:37.423417Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:37.423432Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:37.423467Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to 
REMOVING_POOLS 2025-12-23T18:33:37.423499Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1766514817363290 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:37.426259Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-12-23T18:33:37.426311Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:37.426341Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-12-23T18:33:37.426463Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-12-23T18:33:37.426969Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-12-23T18:33:37.427140Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-12-23T18:33:37.431141Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-12-23T18:33:37.431229Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7587123367832694016:2223], Recipient [1:7587123354947790695:2223]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-12-23T18:33:37.431260Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-12-23T18:33:37.431274Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:37.431283Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:37.431322Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-12-23T18:33:37.431348Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-12-23T18:33:37.450926Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008004s 2025-12-23T18:33:37.458433Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7587123367832694031:2386], Recipient [1:7587123354947790695:2223]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514817363290&action=2" } UserToken: "" } 2025-12-23T18:33:37.458475Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-12-23T18:33:37.458703Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: 
Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514817363290&action=2" } } 2025-12-23T18:33:37.463593Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-12-23T18:33:37.463622Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:37.463627Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:37.463632Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:37.463690Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1766514817363290 2025-12-23T18:33:37.463710Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1766514817363290 issue= 2025-12-23T18:33:37.463725Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1766514817363290 issue= 2025-12-23T18:33:37.463741Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2025-12-23T18:33:37.463824Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1766514817363290 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:37.483407Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2025-12-23T18:33:37.483502Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:37.495140Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:37.519458Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7587123367832694039:2388], Recipient [1:7587123354947790695:2223]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514817363290&action=2" } UserToken: "" } 2025-12-23T18:33:37.519492Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-12-23T18:33:37.519709Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514817363290&action=2" ready: true status: SUCCESS } } 2025-12-23T18:33:37.531431Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7587123367832694042:2390], Recipient [1:7587123354947790695:2223]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" PeerName: "ipv6:[::1]:58438" } 2025-12-23T18:33:37.531464Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-12-23T18:33:37.531671Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3368: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-12-23T18:33:37.539601Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, 
received event# 273285123, Sender [1:7587123367832694045:2391], Recipient [1:7587123354947790695:2223]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:58438" } 2025-12-23T18:33:37.539636Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:967: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-12-23T18:33:37.539937Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3412: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-12-23T18:33:37.558653Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-12-23T18:33:37.558897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:33:41.467606Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587123363688749139:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:41.467687Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout; |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2025-12-23T18:33:32.455726Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123346139019779:2180];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:32.455779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0038a1/r3tmp/tmpNCe4mr/pdisk_1.dat 2025-12-23T18:33:33.273285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:33.369393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:33.369505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:33.523674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:33.543814Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:33.589161Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:33.619093Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 3623, node 1 2025-12-23T18:33:34.059499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:34.059523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:34.059535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:34.059625Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7917 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514813693 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:34.481044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
TClient is connected to server localhost:7917 2025-12-23T18:33:34.838925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:33:34.983504Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7587123354728955423:2302], Recipient [1:7587123350433987461:2225]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" PeerName: "ipv6:[::1]:60686" } 2025-12-23T18:33:34.983557Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-12-23T18:33:34.983579Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:34.983592Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:34.983705Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" PeerName: "ipv6:[::1]:60686" 2025-12-23T18:33:34.983903Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1766514814983352) 2025-12-23T18:33:34.984425Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1766514814983352 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-12-23T18:33:34.984602Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-12-23T18:33:34.995426Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2025-12-23T18:33:34.996327Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514814983352&action=1" } } } 2025-12-23T18:33:34.996483Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:34.996544Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-12-23T18:33:34.996701Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 
2025-12-23T18:33:35.003045Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7587123359023922727:2303], Recipient [1:7587123350433987461:2225]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514814983352&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" } 2025-12-23T18:33:35.003084Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-12-23T18:33:35.003337Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514814983352&action=1" } } 2025-12-23T18:33:35.006188Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-12-23T18:33:35.006588Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-12-23T18:33:35.013227Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-12-23T18:33:35.013293Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:35.013404Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7587123354728955428:2225], Recipient [1:7587123350433987461:2225]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:35.013428Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:35.013442Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:35.013453Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:35.013519Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-12-23T18:33:35.013538Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-12-23T18:33:35.013600Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-12-23T18:33:35.020545Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-12-23T18:33:35.020575Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:35.020586Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:35.020601Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:35.020675Z node 1 
:CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-12-23T18:33:35.020705Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1766514814983352 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:35.027717Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-12-23T18:33:35.027926Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:35.028006Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-12-23T18:33:35.028022Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2025-12-23T18:33:35.033301Z node 1 :CMS_TENANTS TRACE: console_tenants_manager. ... chemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:2 2025-12-23T18:33:36.777124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:7 2025-12-23T18:33:36.777166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:4 2025-12-23T18:33:36.777194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:10 2025-12-23T18:33:36.782465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2025-12-23T18:33:36.782530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:6 2025-12-23T18:33:36.782557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2025-12-23T18:33:36.782590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2025-12-23T18:33:36.782623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2025-12-23T18:33:36.792464Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715663 2025-12-23T18:33:36.792507Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2025-12-23T18:33:36.792561Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-12-23T18:33:36.792656Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7587123363318890676:2225], Recipient [1:7587123350433987461:2225]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-12-23T18:33:36.792679Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-12-23T18:33:36.792694Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:36.792703Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:36.792736Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-12-23T18:33:36.792758Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1766514816698788 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-12-23T18:33:36.792829Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1766514816698788 issue=AccessDenied: Access denied for request 2025-12-23T18:33:36.794213Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-12-23T18:33:36.799047Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-12-23T18:33:36.799114Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-12-23T18:33:36.799143Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:36.799350Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7587123350433987355:2223], Recipient [1:7587123350433987461:2225]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-12-23T18:33:36.799373Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-12-23T18:33:36.799390Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:36.799398Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:36.799421Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-12-23T18:33:36.799445Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1766514816698788 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-12-23T18:33:36.801887Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7587123363318890740:2371], Recipient [1:7587123350433987461:2225]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514816698788&action=2" } UserToken: "" } 2025-12-23T18:33:36.801946Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-12-23T18:33:36.802134Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: 
Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514816698788&action=2" } } 2025-12-23T18:33:36.807791Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-12-23T18:33:36.807838Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:36.807870Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-12-23T18:33:36.808006Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-12-23T18:33:36.809789Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2025-12-23T18:33:36.809897Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2025-12-23T18:33:36.817654Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2025-12-23T18:33:36.817779Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7587123363318890770:2225], Recipient [1:7587123350433987461:2225]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-12-23T18:33:36.817832Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-12-23T18:33:36.817848Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:36.817858Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:36.817895Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-12-23T18:33:36.817938Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-12-23T18:33:36.843484Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-12-23T18:33:36.843515Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:36.843523Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:36.843529Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:36.843607Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1766514816698788 2025-12-23T18:33:36.843624Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1766514816698788 issue=AccessDenied: Access denied for request 2025-12-23T18:33:36.843639Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1766514816698788 issue=AccessDenied: Access denied for request 2025-12-23T18:33:36.843684Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2025-12-23T18:33:36.843763Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1766514816698788 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-12-23T18:33:36.854037Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2025-12-23T18:33:36.854097Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:36.872119Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7587123363318890788:2374], Recipient [1:7587123350433987461:2225]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514816698788&action=2" } UserToken: "" } 2025-12-23T18:33:36.872154Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-12-23T18:33:36.872338Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514816698788&action=2" ready: true status: SUCCESS } } 2025-12-23T18:33:36.903860Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-12-23T18:33:36.904068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:33:40.131938Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587123359927539193:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:40.132006Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout; >> KqpRboPg::Aggregation-ColumnStore [GOOD] >> KqpRboPg::AliasesRenames |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::TPCH [GOOD] Test command err: Trying to start YDB, gRPC: 27412, MsgBus: 6939 2025-12-23T18:32:53.247191Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123180555181071:2104];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:53.250842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:32:53.297666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d0d/r3tmp/tmpamJKxT/pdisk_1.dat 2025-12-23T18:32:53.634895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:53.790756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:53.790873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:53.806463Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:53.806624Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123180555180994:2081] 1766514773208345 != 1766514773208348 2025-12-23T18:32:53.871560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:53.873489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 27412, node 1 2025-12-23T18:32:54.136404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:54.136424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:54.136436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:54.136511Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:54.254920Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6939 TClient is connected to server localhost:6939 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514773919 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:54.958914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:32:54.969198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:32:57.711577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123197735051071:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:57.711683Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:57.718899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123197735051080:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:57.718993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:58.106990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:32:58.231843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123180555181071:2104];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:58.231975Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:58.939172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7587123202030018899:2335];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:32:58.939449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7587123202030018899:2335];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:32:58.939698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7587123202030018899:2335];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:32:58.939996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7587123202030018899:2335];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:32:58.940127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7587123202030018899:2335];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:32:58.940177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7587123202030019033:2350];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:32:58.940266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7587123202030018899:2335];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:32:58.940271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7587123202030019033:2350];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:32:58.941100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7587123202030018899:2335];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:32:58.941140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037930;self_id=[1:7587123202030019033:2350];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:32:58.941232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7587123202030018899:2335];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:32:58.941234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7587123202030019033:2350];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:32:58.941330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7587123202030019033:2350];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:32:58.941331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7587123202030018899:2335];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:32:58.941411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7587123202030019033:2350];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:32:58.941419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7587123202030018899:2335];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:32:58.941486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7587123202030019033:2350];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:32:58.941507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7587123202030018899:2335];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:32:58.941570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7587123202030019033:2350];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025- ... 
_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.617820Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.617893Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.617915Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.618910Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.618963Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.618981Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.627976Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.631345Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.631670Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.631694Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.635015Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.635045Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.649469Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.649541Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.649562Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.653447Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.653518Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.653540Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.669896Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.669974Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.669994Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:31.699996Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd67sem5dz4hpqb8h54h2h5s", SessionId: ydb://session/3?node_id=3&id=NGQ5Yzg4Y2ItMzU1ZmZmMDQtODZjZmUyZTItYjc5YmM5MDY=, Slow query, duration: 10.411740s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE `/Root/customer` (\n c_acctbal Double,\n c_address String,\n c_comment String,\n c_custkey Int32 NOT NULL, -- Identifier\n c_mktsegment String ,\n c_name String ,\n c_nationkey Int32 , -- FK to N_NATIONKEY\n c_phone String ,\n PRIMARY KEY (c_custkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/lineitem` (\n l_comment String ,\n l_commitdate Date ,\n l_discount Double , -- it should be Decimal(12, 2)\n l_extendedprice Double , -- it should be Decimal(12, 2)\n l_linenumber Int32 NOT NULL,\n l_linestatus String ,\n l_orderkey Int32 NOT NULL, -- FK to O_ORDERKEY\n l_partkey Int32 , -- FK to P_PARTKEY, first part of the compound FK to (PS_PARTKEY, PS_SUPPKEY) with L_SUPPKEY\n l_quantity Double , -- it should be Decimal(12, 2)\n l_receiptdate Date ,\n l_returnflag String ,\n l_shipdate Date ,\n l_shipinstruct String ,\n l_shipmode String ,\n l_suppkey Int32 , -- FK to S_SUPPKEY, second part of the compound FK to (PS_PARTKEY, PS_SUPPKEY) with L_PARTKEY\n l_tax Double , -- it should be Decimal(12, 2)\n PRIMARY KEY (l_orderkey, l_linenumber)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/nation` (\n n_comment String ,\n n_name String ,\n n_nationkey Int32 
NOT NULL, -- Identifier\n n_regionkey Int32 , -- FK to R_REGIONKEY\n PRIMARY KEY(n_nationkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/orders` (\n o_clerk String ,\n o_comment String ,\n o_custkey Int32 , -- FK to C_CUSTKEY\n o_orderdate Date ,\n o_orderkey Int32 NOT NULL, -- Identifier\n o_orderpriority String ,\n o_orderstatus String ,\n o_shippriority Int32 ,\n o_totalprice Double , -- it should be Decimal(12, 2)\n PRIMARY KEY (o_orderkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/part` (\n p_brand String ,\n p_comment String ,\n p_container String ,\n p_mfgr String ,\n p_name String ,\n p_partkey Int32 NOT NULL, -- Identifier\n p_retailprice Double , -- it should be Decimal(12, 2)\n p_size Int32 ,\n p_type String ,\n PRIMARY KEY(p_partkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/partsupp` (\n ps_availqty Int32 ,\n ps_comment String ,\n ps_partkey Int32 NOT NULL, -- FK to P_PARTKEY\n ps_suppkey Int32 NOT NULL, -- FK to S_SUPPKEY\n ps_supplycost Double , -- it should be Decimal(12, 2)\n PRIMARY KEY(ps_partkey, ps_suppkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/region` (\n r_comment String ,\n r_name String ,\n r_regionkey Int32 NOT NULL, -- Identifier\n PRIMARY KEY(r_regionkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n\nCREATE TABLE `/Root/supplier` (\n s_acctbal Double , -- it should be Decimal(12, 2)\n s_address String ,\n s_comment String ,\n s_name String ,\n s_nationkey Int32 , -- FK to N_NATIONKEY\n s_phone String ,\n s_suppkey Int32 NOT NULL, -- Identifier\n PRIMARY KEY(s_suppkey)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 16);;\n", parameters: 0b 2025-12-23T18:33:31.767384Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123342680908896:3028], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:31.767494Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:31.768019Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123342680908901:3031], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:31.768154Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587123342680908902:3032], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:31.768261Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:31.773175Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710666:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:33:31.817336Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587123342680908905:3033], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710666 completed, doublechecking } 2025-12-23T18:33:31.882122Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587123342680908956:5387] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 49], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:33:32.275822Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:33:32.275852Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |92.2%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> DataShardSnapshots::ShardRestartWholeShardLockBasic [GOOD] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] |92.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] |92.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |92.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] >> TPQCachingProxyTest::TestPublishAndForget |92.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |92.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |92.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |92.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |92.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |92.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning >> TGRpcCmsTest::DisabledTxTest [GOOD] |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] |92.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |92.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |92.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots >> 
BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> CompressExecutor::TestReorderedExecutor >> TPQCachingProxyTest::TestPublishAndForget [GOOD] >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::BrokenCredentialsProvider |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |92.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |92.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |92.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |92.3%| [TA] $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink |92.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBlobStorageWardenTest::TestDeleteStoragePool >> TDistconfGenerateConfigTest::GenerateConfig1DCBigCases >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2025-12-23T18:33:39.282305Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123376386494081:2147];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:39.282365Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:39.362524Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00389b/r3tmp/tmpYAJWLD/pdisk_1.dat 2025-12-23T18:33:39.714895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:39.941108Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:40.079900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:40.080006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-12-23T18:33:40.140040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:40.348035Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:40.363539Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20692, node 1 2025-12-23T18:33:40.697873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:40.777133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:40.777155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:40.777168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:40.777265Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514820455 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:41.439688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:33:41.657801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) 2025-12-23T18:33:41.720806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |92.3%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TBlobStorageWardenTest::TestFilterBadSerials >> TBlobStorageWardenTest::TestSendToInvalidGroupId >> TBlobStorageWardenTest::TestFilterBadSerials [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError >> TDistconfGenerateConfigTest::GenerateConfig1DCBigCases [GOOD] >> TDistconfGenerateConfigTest::BadRack [GOOD] >> TDistconfGenerateConfigTest::ExtraDCHelp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2025-12-23T18:33:46.113946Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:33:46.235003Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:33:46.235103Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:33:46.235188Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:33:46.235293Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:33:46.254393Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:33:46.254556Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-12-23T18:33:46.254716Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-12-23T18:33:46.254800Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-12-23T18:33:46.254926Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 |92.3%| [TA] {RESULT} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TDistconfGenerateConfigTest::GenerateConfigSimpleCases >> BindQueue::Basic >> TDistconfGenerateConfigTest::GenerateConfigSimpleCases [GOOD] >> TDistconfGenerateConfigTest::IgnoreNodes [GOOD] >> TDistconfGenerateConfigTest::GenerateConfigReplicasSpecificVolume [GOOD] |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> TDistconfGenerateConfigTest::GenerateConfig3DCCases [GOOD] >> TDistconfGenerateConfigTest::GenerateConfig3DCBigCases >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring >> TBlobStorageWardenTest::TestHttpMonPage >> TDistconfGenerateConfigTest::GenerateConfig3DCBigCases [GOOD] >> TDistconfGenerateConfigTest::GenerateConfigReplicasOverrides [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: 2025-12-23T18:33:39.632142Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123376235475097:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:39.632204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003898/r3tmp/tmpHOD2c9/pdisk_1.dat 2025-12-23T18:33:40.418967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:40.644279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:40.644373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:40.814949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:41.023822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:41.024235Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:41.164100Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26683, node 1 2025-12-23T18:33:41.497869Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:41.535527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:41.535559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:41.535575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:41.535681Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64174 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514821141 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:42.200554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:33:42.302282Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7587123389120378078:2302], Recipient [1:7587123380530442922:2209]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:48054" } 2025-12-23T18:33:42.302334Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-12-23T18:33:42.302356Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:42.302372Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:42.302497Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:48054" 2025-12-23T18:33:42.302644Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1766514822302615) 2025-12-23T18:33:42.303554Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1766514822302615 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-12-23T18:33:42.303785Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-12-23T18:33:42.323511Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2025-12-23T18:33:42.324313Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: 
NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514822302615&action=1" } } } 2025-12-23T18:33:42.324453Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:42.324521Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-12-23T18:33:42.324654Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-12-23T18:33:42.325101Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-12-23T18:33:42.325216Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-12-23T18:33:42.356663Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-12-23T18:33:42.356727Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:42.356791Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7587123389120378083:2209], Recipient [1:7587123380530442922:2209]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:42.356805Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:42.356835Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:42.356844Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:42.356887Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-12-23T18:33:42.356906Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-12-23T18:33:42.356968Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-12-23T18:33:42.362638Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7587123389120378086:2303], Recipient [1:7587123380530442922:2209]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514822302615&action=1" } UserToken: "" } 2025-12-23T18:33:42.362685Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-12-23T18:33:42.362947Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514822302615&action=1" } } 2025-12-23T18:33:42.372653Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-12-23T18:33:42.372697Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:42.372706Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:42.372713Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:42.372773Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-12-23T18:33:42.372798Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1766514822302615 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:42.379663Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-12-23T18:33:42.379845Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:42.379882Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-12-23T18:33:42.379891Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2025-12-23T18:33:42.392987Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:48054" 2025-12-23T18:33:42.395009Z node 1 :FLAT_TX_SCHEMESHARD WARN: s ... 
S_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435076, Sender [1:7587123389120378194:2209], Recipient [1:7587123380530442922:2209]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-12-23T18:33:42.561990Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:979: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-12-23T18:33:42.562004Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:3652: Ignoring ready subdomain for tenant /Root/users/user-1 in REMOVING_SUBDOMAIN state 2025-12-23T18:33:42.569842Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660 2025-12-23T18:33:42.599737Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-12-23T18:33:42.599864Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2025-12-23T18:33:42.599925Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-12-23T18:33:42.599972Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found 2025-12-23T18:33:42.600156Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found 2025-12-23T18:33:42.600184Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-12-23T18:33:42.600211Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-12-23T18:33:42.600246Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found 2025-12-23T18:33:42.600292Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2025-12-23T18:33:42.606291Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2025-12-23T18:33:42.606321Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2025-12-23T18:33:42.606373Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-12-23T18:33:42.606480Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7587123389120378266:2209], Recipient [1:7587123380530442922:2209]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-12-23T18:33:42.606499Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-12-23T18:33:42.606520Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:42.606534Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:42.606574Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-12-23T18:33:42.606594Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1766514822525695 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:42.606665Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1766514822525695 issue= 2025-12-23T18:33:42.628829Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7587123389120378357:2315], Recipient [1:7587123380530442922:2209]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514822525695&action=2" } UserToken: "" } 2025-12-23T18:33:42.628865Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-12-23T18:33:42.629068Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514822525695&action=2" } } 2025-12-23T18:33:42.636797Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-12-23T18:33:42.636885Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-12-23T18:33:42.636899Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:42.637263Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7587123380530442754:2207], Recipient [1:7587123380530442922:2209]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-12-23T18:33:42.637279Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-12-23T18:33:42.637300Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:42.637316Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:42.637338Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-12-23T18:33:42.637360Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1766514822525695 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:42.645146Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2025-12-23T18:33:42.645202Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:42.645233Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-12-23T18:33:42.645388Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-12-23T18:33:42.646009Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 
2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-12-23T18:33:42.646126Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-12-23T18:33:42.657143Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-12-23T18:33:42.657250Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7587123389120378405:2209], Recipient [1:7587123380530442922:2209]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-12-23T18:33:42.657279Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-12-23T18:33:42.657292Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:42.657302Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:42.657357Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-12-23T18:33:42.657407Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-12-23T18:33:42.683380Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-12-23T18:33:42.683412Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:42.683422Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:42.683428Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:42.683499Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1766514822525695 2025-12-23T18:33:42.683513Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1766514822525695 issue= 2025-12-23T18:33:42.690896Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1766514822525695 issue= 2025-12-23T18:33:42.690948Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2025-12-23T18:33:42.691033Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1766514822525695 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:42.705722Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639260 Duration# 0.005808s 2025-12-23T18:33:42.703720Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7587123389120378422:2318], Recipient [1:7587123380530442922:2209]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514822525695&action=2" } UserToken: "" } 2025-12-23T18:33:42.703741Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event 
TEvConsole::TEvGetOperationRequest 2025-12-23T18:33:42.703978Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514822525695&action=2" } } 2025-12-23T18:33:42.706938Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2025-12-23T18:33:42.709116Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:42.760108Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7587123389120378425:2320], Recipient [1:7587123380530442922:2209]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514822525695&action=2" } UserToken: "" } 2025-12-23T18:33:42.760137Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-12-23T18:33:42.760338Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514822525695&action=2" ready: true status: SUCCESS } } |92.3%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::ExtraDCHelp [GOOD] Test command err: Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 15 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 11 } } } Expected: NToSelect: 9 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::GenerateConfigReplicasSpecificVolume [GOOD] Test command err: Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } Ring { Node: 2 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 
} } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 6 } Ring { Node: 11 } Ring { Node: 16 } Ring { Node: 21 } Ring { Node: 26 } Ring { Node: 31 } Ring { Node: 36 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 11 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 >> TGRpcCmsTest::AuthTokenTest [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::UsedNodes >> TDistconfGenerateConfigTest::UsedNodes [GOOD] >> TDistconfGenerateConfigTest::UseOldNodesInDisconnectedDC [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::GenerateConfigReplicasOverrides [GOOD] Test command err: Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 3 } Ring { Node: 5 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 19 } Ring { Node: 20 } Ring { Node: 21 } Ring { Node: 37 } Ring { Node: 38 } Ring { Node: 39 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring 
{ Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 10 } Ring { Node: 19 } } } Expected: NToSelect: 3 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings >> TPQCachingProxyTest::OutdatedSession |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore+WithIncremental [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::UseOldNodesInDisconnectedDC [GOOD] Test command err: Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 5 Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 10 } Ring { Node: 11 } Ring { Node: 12 } Ring { Node: 13 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 5 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 17 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } Ring { Node: 11 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 12 } Ring { Node: 15 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 17 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden >> TPQCachingProxyTest::TestWrongSessionOrGeneration >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::ShopDemoIncrementalBackupScenario >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] >> 
TBlobStorageWardenTest::TestEvVGenerationChangeRace >> TPQCachingProxyTest::OutdatedSession [GOOD] >> TPQCachingProxyTest::TestDeregister ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD] Test command err: 2025-12-23T18:33:39.571617Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123377950488645:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:39.571677Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:39.742325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:33:39.805236Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00389a/r3tmp/tmpgvy5pc/pdisk_1.dat 2025-12-23T18:33:40.518289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:40.574962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:40.780394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:40.780509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:40.837552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:40.957050Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:40.999588Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:41.010634Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 21086, node 1 2025-12-23T18:33:41.320047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:41.320073Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:41.320081Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:41.320219Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25767 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514820882 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:41.955727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:33:42.106925Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7587123390835391637:2302], Recipient [1:7587123382245456455:2203]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39812" } 2025-12-23T18:33:42.106975Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-12-23T18:33:42.107009Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:42.107025Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:42.107174Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39812" 2025-12-23T18:33:42.107458Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1766514822106794) 2025-12-23T18:33:42.108167Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1766514822106794 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-12-23T18:33:42.108818Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 
2025-12-23T18:33:42.119571Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2025-12-23T18:33:42.120384Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514822106794&action=1" } } } 2025-12-23T18:33:42.120527Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:42.120615Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-12-23T18:33:42.120758Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-12-23T18:33:42.121274Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-12-23T18:33:42.121457Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-12-23T18:33:42.127555Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7587123390835391646:2303], Recipient [1:7587123382245456455:2203]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514822106794&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" } 2025-12-23T18:33:42.127588Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-12-23T18:33:42.127789Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1766514822106794&action=1" } } 2025-12-23T18:33:42.144104Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-12-23T18:33:42.144165Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:42.144237Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7587123390835391642:2203], Recipient [1:7587123382245456455:2203]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:42.144255Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-12-23T18:33:42.144279Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:42.144287Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:42.144327Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 
2025-12-23T18:33:42.144346Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-12-23T18:33:42.144422Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-12-23T18:33:42.167318Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-12-23T18:33:42.167350Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:42.167361Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:42.167366Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:42.174978Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-12-23T18:33:42.175019Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1766514822106794 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-12-23T18:33:42.196305Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7587123390835391662:2306], Recipient [1:7587123382245456455:2203]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=720575940379 ... 455:2203]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-12-23T18:33:43.243573Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-12-23T18:33:43.244786Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-12-23T18:33:43.261643Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7587123395130359528:2370], Recipient [1:7587123382245456455:2203]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39812" } 2025-12-23T18:33:43.261675Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-12-23T18:33:43.261712Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-12-23T18:33:43.262671Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7587123377950489011:2207], Recipient [1:7587123382245456455:2203]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-12-23T18:33:43.262692Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-12-23T18:33:43.267981Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send 
TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-12-23T18:33:43.304544Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7587123395130359547:2371], Recipient [1:7587123382245456455:2203]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39812" } 2025-12-23T18:33:43.304573Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-12-23T18:33:43.304615Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-12-23T18:33:43.304714Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7587123377950489011:2207], Recipient [1:7587123382245456455:2203]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-12-23T18:33:43.304727Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-12-23T18:33:43.305227Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-12-23T18:33:43.341635Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715659 2025-12-23T18:33:43.341662Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2025-12-23T18:33:43.342161Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-12-23T18:33:43.342318Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435076, Sender [1:7587123390835391759:2203], Recipient [1:7587123382245456455:2203]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-12-23T18:33:43.342376Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:979: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-12-23T18:33:43.342401Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2025-12-23T18:33:43.342447Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2025-12-23T18:33:43.342494Z node 1 :CMS_TENANTS DEBUG: console__update_confirmed_subdomain.cpp:22: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2025-12-23T18:33:43.342530Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1766514822106794 errorcode=STATUS_CODE_UNSPECIFIED issue= 
2025-12-23T18:33:43.342617Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2904: Update database for /Root/users/user-1 confirmedsubdomain=2 2025-12-23T18:33:43.349832Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7587123395130359558:2372], Recipient [1:7587123382245456455:2203]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39812" } 2025-12-23T18:33:43.349889Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-12-23T18:33:43.349936Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-12-23T18:33:43.350030Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7587123377950489011:2207], Recipient [1:7587123382245456455:2203]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-12-23T18:33:43.350042Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-12-23T18:33:43.350655Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-12-23T18:33:43.363483Z node 1 :CMS_TENANTS DEBUG: console__update_confirmed_subdomain.cpp:42: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2025-12-23T18:33:43.363525Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2025-12-23T18:33:43.367755Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7587123395130359566:2373], Recipient [1:7587123382245456455:2203]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:39812" } 2025-12-23T18:33:43.367783Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-12-23T18:33:43.367827Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-12-23T18:33:43.368011Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7587123377950489011:2207], Recipient [1:7587123382245456455:2203]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-12-23T18:33:43.368031Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-12-23T18:33:43.368569Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: 
"/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-12-23T18:33:43.411568Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25767 TClient::Ls request: /Root/users/user-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root/users/user-1" PathId: 1 SchemeshardId: 72075186224037897 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 3 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186224037897 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976725657 CreateStep: 1766514823270 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 720575940466... (TRUNCATED) 2025-12-23T18:33:43.961667Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-12-23T18:33:43.962243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:33:47.375050Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587123391067049034:2083];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:47.375123Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:33:47.888979Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7587123412541886507:2338], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:33:47.889874Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:33:47.890147Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes |92.3%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TPQCachingProxyTest::MultipleSessions >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] >> TReplicaTest::UpdateWithoutHandshake >> TPQCachingProxyTest::TestDeregister [GOOD] >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountPureFunction >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot >> TPQCachingProxyTest::MultipleSessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2025-12-23T18:33:50.095414Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:33:50.202733Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:33:50.202824Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:33:50.202892Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:33:50.202958Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:33:50.241596Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:33:50.241740Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-12-23T18:33:50.241843Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-12-23T18:33:50.241913Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-12-23T18:33:50.242020Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:293: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation >> TReplicaTest::UpdateWithoutHandshake [GOOD] >> TReplicaTest::UpdateWithStaleGeneration |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> TReplicaTest::HandshakeWithStaleGeneration >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict >> TReplicaTest::UpdateWithStaleGeneration [GOOD] >> TReplicaTest::Merge >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TReplicaTest::HandshakeWithStaleGeneration [GOOD] >> TReplicaTest::IdempotencyUpdatesAliveSubscriber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test 
command err: 2025-12-23T18:33:50.871871Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:33:50.960441Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:33:50.960560Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:33:50.960641Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:33:50.960710Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:33:50.978756Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:33:50.978882Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 2 2025-12-23T18:33:50.979038Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-12-23T18:33:50.979094Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 2 2025-12-23T18:33:50.979175Z node 1 :PQ_READ_PROXY INFO: caching_service.cpp:297: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2025-12-23T18:33:50.979237Z node 1 :PQ_READ_PROXY ALERT: caching_service.cpp:159: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. Data ignored 2025-12-23T18:33:50.979283Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-12-23T18:33:50.979386Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden [GOOD] >> YdbTableSplit::RenameTablesAndSplit [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2025-12-23T18:33:51.018279Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:33:51.094473Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:33:51.094546Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:33:51.094638Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:33:51.094707Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:33:51.113115Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:33:51.113235Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-12-23T18:33:51.113286Z node 1 :PQ_READ_PROXY DEBUG: 
caching_service.cpp:283: Direct read cache: registered server session: session2:1 with generation 1 2025-12-23T18:33:51.113427Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: session1 >> TReplicaTest::Merge [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers >> TBlobStorageWardenTest::TestInferPDiskSlotCountPureFunction [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig >> IncrementalBackup::DropBackupCollectionSqlWithDatabaseLikeNames [GOOD] >> IncrementalBackup::IncrementalBackupNonExistentTable >> TReplicaTest::IdempotencyUpdatesAliveSubscriber [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers [GOOD] >> TReplicaTest::StrongNotificationAfterCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2025-12-23T18:33:51.347141Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:33:51.419334Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:33:51.419419Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:33:51.419508Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:33:51.419586Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:33:51.438176Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:33:51.438320Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2025-12-23T18:33:51.438426Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2025-12-23T18:33:51.438502Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 2 for session: session1 2025-12-23T18:33:51.438555Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-12-23T18:33:51.438647Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 2 for session session1, Generation: 1 2025-12-23T18:33:51.438716Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session2:1 with generation 2 2025-12-23T18:33:51.438827Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 3 for session: session2 2025-12-23T18:33:51.438876Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 3 for session session2, Generation: 2 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation >> KqpRboPg::AliasesRenames [GOOD] >> KqpRboPg::Bench_10Joins >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] >> TReplicaTest::StrongNotificationAfterCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] Test command err: 2025-12-23T18:33:47.980285Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:47.981864Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:47.982381Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/8ptz/004c1f/r3tmp/tmpqWHWlH/pdisk_1.dat 2025-12-23T18:33:48.117652Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.117766Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.117821Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.907452Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [5a9a1d6240d04444] bootstrap ActorId# [1:485:2464] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1350:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-12-23T18:33:48.907609Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:48.907653Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:48.907684Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:48.907710Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:48.907736Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:48.907759Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:48.907799Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [5a9a1d6240d04444] 
restore Id# [72057594037932033:2:8:0:0:1350:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:33:48.907863Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1350:1] Marker# BPG33 2025-12-23T18:33:48.907909Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1350:1] Marker# BPG32 2025-12-23T18:33:48.907949Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1350:2] Marker# BPG33 2025-12-23T18:33:48.907977Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1350:2] Marker# BPG32 2025-12-23T18:33:48.908004Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1350:3] Marker# BPG33 2025-12-23T18:33:48.908030Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1350:3] Marker# BPG32 2025-12-23T18:33:48.908199Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:47:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1350:3] FDS# 1350 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:48.908271Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:40:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1350:2] FDS# 1350 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:48.908314Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:61:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1350:1] FDS# 1350 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:48.926422Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1350:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90629 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-12-23T18:33:48.926726Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1350:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 4 } Cost# 90629 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 5 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-12-23T18:33:48.926858Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1350:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90629 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-12-23T18:33:48.926950Z node 1 :BS_PROXY_PUT 
DEBUG: dsproxy_put_impl.cpp:72: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1350:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-12-23T18:33:48.927041Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1350:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:33:48.927272Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.02 sample PartId# [72057594037932033:2:8:0:0:1350:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.02 sample PartId# [72057594037932033:2:8:0:0:1350:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.021 sample PartId# [72057594037932033:2:8:0:0:1350:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 19.218 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 19.463 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 19.574 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-12-23T18:33:49.043442Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [b6b2c6548553d7a5] bootstrap ActorId# [1:531:2502] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-12-23T18:33:49.043594Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.043639Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.043667Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.043695Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.043720Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.043745Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.043784Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:33:49.043850Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2025-12-23T18:33:49.043895Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 
2025-12-23T18:33:49.043935Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2025-12-23T18:33:49.043959Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2025-12-23T18:33:49.043986Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2025-12-23T18:33:49.044009Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2025-12-23T18:33:49.044155Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:40:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.044227Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:61:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.044272Z node 1 :BS_PROXY DEBUG: group_sessions.h ... # 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-12-23T18:33:51.266314Z node 2 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [2:609:2512] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:51.277650Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [cd65997ea3b51537] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2025-12-23T18:33:51.277979Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [cd65997ea3b51537] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-12-23T18:33:51.278090Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [cd65997ea3b51537] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:33:51.278826Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:33:51.278878Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:58: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-12-23T18:33:51.279055Z node 3 :BS_PROXY DEBUG: dsproxy_impl.h:219: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-12-23T18:33:51.279423Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:221} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/8ptz/004c1f/r3tmp/tmprqCxGZ//key.txt" MainKey.Version# 1 GroupKeyNonce# 
2181038082 2025-12-23T18:33:51.280489Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 2 IsLimitedKeyless# true Marker# DSP02 2025-12-23T18:33:51.280539Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:33:51.282933Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:620:2105] Create Queue# [3:622:2106] targetNodeId# 2 Marker# DSP01 2025-12-23T18:33:51.283122Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:620:2105] Create Queue# [3:623:2107] targetNodeId# 2 Marker# DSP01 2025-12-23T18:33:51.283248Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:620:2105] Create Queue# [3:624:2108] targetNodeId# 2 Marker# DSP01 2025-12-23T18:33:51.283369Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:620:2105] Create Queue# [3:625:2109] targetNodeId# 2 Marker# DSP01 2025-12-23T18:33:51.283485Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:620:2105] Create Queue# [3:626:2110] targetNodeId# 2 Marker# DSP01 2025-12-23T18:33:51.283601Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:620:2105] Create Queue# [3:627:2111] targetNodeId# 2 Marker# DSP01 2025-12-23T18:33:51.283721Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:620:2105] Create Queue# [3:628:2112] targetNodeId# 2 Marker# DSP01 2025-12-23T18:33:51.283757Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:33:51.285618Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:51.285955Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:51.286066Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:51.286192Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:51.286304Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true 
ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:51.286361Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:51.286420Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:51.286454Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:228: Group# 2181038082 -> StateWork Marker# DSP11 2025-12-23T18:33:51.286513Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2025-12-23T18:33:51.286678Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:150: [efc53170c63234c6] bootstrap ActorId# [3:629:2113] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-12-23T18:33:51.286759Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:111: [efc53170c63234c6] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:2:0:0:0] node# 2 Marker# DSPB03 2025-12-23T18:33:51.286963Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:622:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 6710916250758050253 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-12-23T18:33:51.288367Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:43: [efc53170c63234c6] Handle TEvVBlockResult status# OK From# [82000002:2:0:0:0] NodeId# 2 Marker# DSPB01 2025-12-23T18:33:51.288442Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:100: [efc53170c63234c6] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-12-23T18:33:51.288812Z node 3 :BS_PROXY INFO: dsproxy_impl.h:329: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-12-23T18:33:51.289077Z node 3 :BS_PROXY DEBUG: dsproxy_impl.h:329: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-12-23T18:33:51.289452Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [c85e1a21dcb31b54] bootstrap ActorId# [2:630:2522] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-12-23T18:33:51.289613Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [c85e1a21dcb31b54] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:51.289672Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [c85e1a21dcb31b54] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:33:51.289736Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [c85e1a21dcb31b54] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-12-23T18:33:51.289783Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [c85e1a21dcb31b54] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-12-23T18:33:51.289928Z node 2 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [2:609:2512] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:51.290214Z node 2 :BS_VDISK_PUT ERROR: blobstorage_skeleton.cpp:578: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-12-23T18:33:51.290548Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:265: [c85e1a21dcb31b54] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2025-12-23T18:33:51.290648Z node 2 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [c85e1a21dcb31b54] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:2:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-12-23T18:33:51.290711Z node 2 :BS_PROXY_PUT NOTICE: dsproxy_put.cpp:499: [c85e1a21dcb31b54] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:2:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:33:51.291258Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:622:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] Test command err: 2025-12-23T18:33:51.318449Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:51.318541Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:806: [1:7:2054] Reject update from unknown populator: sender# [1:8:2055], owner# 1, 
generation# 1 2025-12-23T18:33:51.318658Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-12-23T18:33:51.318723Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# path 2025-12-23T18:33:51.319298Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:51.319449Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2025-12-23T18:33:51.319515Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# path 2025-12-23T18:33:51.319586Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:8:2055] 2025-12-23T18:33:51.319647Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:51.319708Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-12-23T18:33:51.319793Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:8:2055] 2025-12-23T18:33:51.319848Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:51.651273Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-12-23T18:33:51.651361Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:51.651493Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 0 }: sender# [2:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:51.651539Z node 2 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:814: [2:7:2054] Reject update from stale populator: sender# [2:8:2055], owner# 1, generation# 0, pending generation# 1 2025-12-23T18:33:51.651643Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2025-12-23T18:33:51.651696Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# path 2025-12-23T18:33:51.651780Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:51.651957Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] 2025-12-23T18:33:51.652012Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:8:2055], path# path 2025-12-23T18:33:51.652094Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2025-12-23T18:33:51.652147Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# [OwnerId: 1, 
LocalPathId: 1] 2025-12-23T18:33:51.652214Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-12-23T18:33:51.652323Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:8:2055] 2025-12-23T18:33:51.652472Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1] |92.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] >> TBlobStorageWardenTest::TestEvVGenerationChangeRace [GOOD] |92.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |92.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |92.3%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |92.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |92.3%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden [GOOD] Test command err: 2025-12-23T18:33:48.595044Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.596691Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.599803Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.601065Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.601253Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.604368Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/8ptz/004c17/r3tmp/tmpw7QFVJ/pdisk_1.dat 2025-12-23T18:33:49.455524Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [abc2fc901918ac71] bootstrap ActorId# [1:554:2468] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1346:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 
2025-12-23T18:33:49.455739Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1346:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.455797Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1346:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.455829Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1346:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.455858Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1346:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.455889Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1346:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.455923Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1346:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.455973Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [abc2fc901918ac71] restore Id# [72057594037932033:2:8:0:0:1346:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:33:49.456060Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1346:1] Marker# BPG33 2025-12-23T18:33:49.456118Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [abc2fc901918ac71] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1346:1] Marker# BPG32 2025-12-23T18:33:49.456166Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1346:2] Marker# BPG33 2025-12-23T18:33:49.456200Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [abc2fc901918ac71] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1346:2] Marker# BPG32 2025-12-23T18:33:49.456268Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1346:3] Marker# BPG33 2025-12-23T18:33:49.456308Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [abc2fc901918ac71] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1346:3] Marker# BPG32 2025-12-23T18:33:49.456526Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:67:2092] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1346:3] FDS# 1346 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.456607Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:60:2085] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1346:2] FDS# 1346 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.456664Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:81:2106] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1346:1] FDS# 1346 HandleClass# TabletLog {MsgQoS 
ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.460296Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1346:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90598 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-12-23T18:33:49.460625Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1346:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90598 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-12-23T18:33:49.460754Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1346:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90598 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-12-23T18:33:49.460846Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [abc2fc901918ac71] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1346:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-12-23T18:33:49.460919Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [abc2fc901918ac71] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1346:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:33:49.461149Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.349 sample PartId# [72057594037932033:2:8:0:0:1346:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.349 sample PartId# [72057594037932033:2:8:0:0:1346:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.35 sample PartId# [72057594037932033:2:8:0:0:1346:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 5.054 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 5.318 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 5.446 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-12-23T18:33:49.555683Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [a55b41de52eb2a08] bootstrap ActorId# [1:600:2506] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-12-23T18:33:49.555917Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.555979Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.556016Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# 
[72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.556053Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.556085Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.556122Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.556175Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [a55b41de52eb2a08] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:33:49.556270Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2025-12-23T18:33:49.556333Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [a55b41de52eb2a08] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2025-12-23T18:33:49.556388Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2025-12-23T18:33:49.556425Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [a55b41de52eb2a08] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2025-12-23T18:33:49.556463Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2025-12-23T18:33:49.556497Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [a55b41de52eb2a08] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2025-12-23T18:33:49.556700Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:60:2085] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.556842Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:81:2106] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.556907Z node 1 :BS_PROXY DEBUG: group_sessions.h:19 ... 
eHeader writing ChunkIdx# 0 SectorIdx# 246 PDiskId# 1002 2025-12-23T18:33:51.939772Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 249 PDiskId# 1002 2025-12-23T18:33:51.939808Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 252 PDiskId# 1002 2025-12-23T18:33:51.939856Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 255 PDiskId# 1002 2025-12-23T18:33:51.939896Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 258 PDiskId# 1002 2025-12-23T18:33:51.939933Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 261 PDiskId# 1002 2025-12-23T18:33:51.939973Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 264 PDiskId# 1002 2025-12-23T18:33:51.940018Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 267 PDiskId# 1002 2025-12-23T18:33:51.940060Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 270 PDiskId# 1002 2025-12-23T18:33:51.940104Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 273 PDiskId# 1002 2025-12-23T18:33:51.940143Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 276 PDiskId# 1002 2025-12-23T18:33:51.940186Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 279 PDiskId# 1002 2025-12-23T18:33:51.940224Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 282 PDiskId# 1002 2025-12-23T18:33:51.940263Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 285 PDiskId# 1002 2025-12-23T18:33:51.940304Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 288 PDiskId# 1002 2025-12-23T18:33:51.940344Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 291 PDiskId# 1002 2025-12-23T18:33:51.940385Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 294 PDiskId# 1002 2025-12-23T18:33:51.940450Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 297 PDiskId# 1002 2025-12-23T18:33:51.940494Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 300 PDiskId# 1002 2025-12-23T18:33:51.940534Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 303 PDiskId# 1002 2025-12-23T18:33:51.940582Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 306 PDiskId# 1002 2025-12-23T18:33:51.940622Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 309 PDiskId# 1002 2025-12-23T18:33:51.940664Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 312 PDiskId# 1002 2025-12-23T18:33:51.940719Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader 
writing ChunkIdx# 0 SectorIdx# 315 PDiskId# 1002 2025-12-23T18:33:51.940795Z node 3 :BS_PDISK DEBUG: {BPD66@blobstorage_pdisk_writer.h:388} TSectorWriter TerminateLog large SectorBytesFree# 2119 ChunkIdx# 0 SectorIdx# 315 SectorOffset# 1290240 PDiskId# 1002 2025-12-23T18:33:51.942932Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_completion_impl.cpp:423: TCompletionEventSender {EvLogInitResult} 2025-12-23T18:33:51.943010Z node 3 :BS_PDISK DEBUG: {BPD69@blobstorage_pdisk_impl_log.cpp:846} WriteSysLogRestorePoint FirstLogChunkToParseCommits# 1 CommonLogger# 0x00007CFAC360F480 "LogChunks.size()"# 1 "LogChunks.front().ChunkIdx"# 1 BeginSectorIdx# 171 EndSectorIdx# 318 PDiskId# 1002 2025-12-23T18:33:51.943129Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:217} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10020000005 PDiskId# 1002 2025-12-23T18:33:51.943197Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 0 ReadOffset# 2576760176640 BytesToRead# 4096 ReqId# 2565120001604 PDiskId# 1002 2025-12-23T18:33:51.943254Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1759} PDisk have successfully started PDiskId# 1002 2025-12-23T18:33:51.943954Z node 3 :BS_PDISK INFO: {BPD01@blobstorage_pdisk_impl_log.cpp:1760} StartupOwnerInfo# { PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..18902} PDiskId# 1002 2025-12-23T18:33:51.944035Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:51.944278Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1002 ReqId# 2565120001604 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 8286.658629 2025-12-23T18:33:51.944395Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2025-12-23T18:33:51.944425Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1002 ReqId# 2565120001604 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2025-12-23T18:33:51.944450Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:51.944487Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 0 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1002 2025-12-23T18:33:51.944538Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:217} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10020000006 PDiskId# 1002 2025-12-23T18:33:51.944580Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 16640 ReadOffset# 2576828334080 BytesToRead# 4096 ReqId# 2565120001860 PDiskId# 1002 2025-12-23T18:33:51.944632Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:51.944707Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1002 ReqId# 2565120001860 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 8286.660030 2025-12-23T18:33:51.944751Z node 3 :BS_PDISK DEBUG: 
{BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2025-12-23T18:33:51.944782Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1002 ReqId# 2565120001860 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2025-12-23T18:33:51.944800Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:51.944822Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 16640 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1002 2025-12-23T18:33:51.944868Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:51.955293Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:51.965570Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:51.975814Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:51.986072Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:51.998843Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:52.009159Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:52.019157Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:217} CreateReqFromArgs Req# NKikimr::NPDisk::TWhiteboardReport ReqId# 10020000007 PDiskId# 1002 2025-12-23T18:33:52.019275Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1002 ReqId# 2565120002104 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 8286.734704 2025-12-23T18:33:52.019450Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2025-12-23T18:33:52.019528Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1002 ReqId# 2565120002104 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2025-12-23T18:33:52.019565Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:52.019673Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1002 Path: "SectorMap:TestInferPDiskSlotCount:2400" AvailableSize: 2576487546880 TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 12 NumActiveSlots: 0 SlotSizeInUnits: 2 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 0 TotalSize: 0 
MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 0 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialCommonLogRead SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 0 } 2025-12-23T18:33:52.029848Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:52.042493Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2025-12-23T18:33:52.052966Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] Test command err: 2025-12-23T18:33:51.802321Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:8:2055] 2025-12-23T18:33:51.802412Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 2 2025-12-23T18:33:51.802515Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2025-12-23T18:33:51.802572Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:772: [1:7:2054] Reject handshake from stale populator: sender# [1:8:2055], owner# 1, generation# 1, pending generation# 2 2025-12-23T18:33:52.087622Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-12-23T18:33:52.087689Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:52.087838Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-12-23T18:33:52.087914Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:52.088031Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-12-23T18:33:52.088173Z node 2 :SCHEME_BOARD_REPLICA DEBUG: 
replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:52.088214Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:52.096461Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:52.096721Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 40 2025-12-23T18:33:52.096771Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-12-23T18:33:52.096809Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [2:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:52.096907Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:52.096954Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:52.097021Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [2:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:52.097114Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:52.097152Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-12-23T18:33:52.097213Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:52.097354Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:10:2057] 2025-12-23T18:33:52.097414Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:10:2057], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-12-23T18:33:52.452199Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2025-12-23T18:33:52.452280Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:52.452404Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:52.452441Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update 
description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:52.452520Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:52.452625Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:52.452660Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-12-23T18:33:52.452692Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:52.452740Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:52.452828Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2025-12-23T18:33:52.452885Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# true 2025-12-23T18:33:52.452927Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 2] 2025-12-23T18:33:52.453010Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:52.453046Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:52.453080Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [3:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:52.453154Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:52.453194Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-12-23T18:33:52.453243Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [3:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 2] |92.3%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest |92.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2025-12-23T18:33:48.147332Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.147492Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.148569Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.150692Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.152442Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.153113Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.153210Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/8ptz/004c22/r3tmp/tmpHxwIcm/pdisk_1.dat 2025-12-23T18:33:49.015643Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [5a9a1d6240d04444] bootstrap ActorId# [1:489:2468] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1350:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-12-23T18:33:49.015842Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.015909Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.015941Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.015971Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.016001Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 3 part# 1 situation# 
ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.016030Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1350:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.016075Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [5a9a1d6240d04444] restore Id# [72057594037932033:2:8:0:0:1350:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:33:49.016155Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1350:1] Marker# BPG33 2025-12-23T18:33:49.016208Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1350:1] Marker# BPG32 2025-12-23T18:33:49.016258Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1350:2] Marker# BPG33 2025-12-23T18:33:49.016287Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1350:2] Marker# BPG32 2025-12-23T18:33:49.016321Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1350:3] Marker# BPG33 2025-12-23T18:33:49.016350Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1350:3] Marker# BPG32 2025-12-23T18:33:49.016571Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:47:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1350:3] FDS# 1350 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.016660Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:40:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1350:2] FDS# 1350 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.016709Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:61:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1350:1] FDS# 1350 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.018782Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1350:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90629 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-12-23T18:33:49.019076Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1350:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90629 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-12-23T18:33:49.019179Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5a9a1d6240d04444] received {EvVPutResult Status# 
OK ID# [72057594037932033:2:8:0:0:1350:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90629 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-12-23T18:33:49.019291Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1350:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-12-23T18:33:49.019359Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1350:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:33:49.019575Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.281 sample PartId# [72057594037932033:2:8:0:0:1350:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.282 sample PartId# [72057594037932033:2:8:0:0:1350:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.282 sample PartId# [72057594037932033:2:8:0:0:1350:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.415 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.661 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.76 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } ] } 2025-12-23T18:33:49.078700Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2025-12-23T18:33:49.081636Z node 1 :BS_PROXY CRIT: dsproxy_impl.h:329: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2025-12-23T18:33:49.082068Z node 1 :BS_PROXY DEBUG: dsproxy_impl.h:329: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2025-12-23T18:33:49.082273Z node 1 :BS_PROXY DEBUG: dsproxy_impl.h:329: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). 
It happens when the request was sent for an invalid groupID"} Marker# DSP31 2025-12-23T18:33:49.870639Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/8ptz/004c22/r3tmp/tmpQNmVRY/pdisk_1.dat 2025-12-23T18:33:49.949625Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:49.949755Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:49.952475Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:49.952586Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:49.959527Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Ha ... stablishingSessions Marker# DSP03 2025-12-23T18:33:52.168092Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.168268Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.168475Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.168582Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.168662Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# 
{VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.168728Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.168786Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.168818Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:228: Group# 2181038082 -> StateWork Marker# DSP11 2025-12-23T18:33:52.168858Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2025-12-23T18:33:52.168910Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:338: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-12-23T18:33:52.169787Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [d70ef3c23a1a2346] bootstrap ActorId# [3:616:2519] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-12-23T18:33:52.169890Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [d70ef3c23a1a2346] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:52.169923Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [d70ef3c23a1a2346] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:33:52.169963Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [d70ef3c23a1a2346] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-12-23T18:33:52.169990Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [d70ef3c23a1a2346] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-12-23T18:33:52.170116Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:609:2512] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:52.173786Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [d70ef3c23a1a2346] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2025-12-23T18:33:52.173914Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [d70ef3c23a1a2346] Result# 
TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-12-23T18:33:52.173970Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [d70ef3c23a1a2346] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:33:52.174660Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:33:52.174711Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:58: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-12-23T18:33:52.174857Z node 4 :BS_PROXY DEBUG: dsproxy_impl.h:219: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# DSP17 2025-12-23T18:33:52.175310Z node 4 :BS_NODE ERROR: {NW19@node_warden_group.cpp:221} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/8ptz/004c22/r3tmp/tmp0hLeQx//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-12-23T18:33:52.176583Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 2 IsLimitedKeyless# true Marker# DSP02 2025-12-23T18:33:52.176640Z node 4 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:33:52.180230Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:620:2106] targetNodeId# 3 Marker# DSP01 2025-12-23T18:33:52.180413Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:621:2107] targetNodeId# 3 Marker# DSP01 2025-12-23T18:33:52.180553Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:622:2108] targetNodeId# 3 Marker# DSP01 2025-12-23T18:33:52.180682Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:623:2109] targetNodeId# 3 Marker# DSP01 2025-12-23T18:33:52.180850Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:624:2110] targetNodeId# 3 Marker# DSP01 2025-12-23T18:33:52.180987Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:625:2111] targetNodeId# 3 Marker# DSP01 2025-12-23T18:33:52.181121Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:618:2105] Create Queue# [4:626:2112] targetNodeId# 3 Marker# DSP01 2025-12-23T18:33:52.181152Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:33:52.183658Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.183843Z node 4 
:BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.183988Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.184304Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.184400Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.184461Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.184517Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:257: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2025-12-23T18:33:52.184557Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:228: Group# 2181038082 -> StateWork Marker# DSP11 2025-12-23T18:33:52.184598Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2025-12-23T18:33:52.184804Z node 4 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [4:620:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::StrongNotificationAfterCommit [GOOD] Test command err: 2025-12-23T18:33:51.957178Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2025-12-23T18:33:51.957275Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# path 2025-12-23T18:33:51.957412Z node 1 
:SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:51.957540Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:10:2057] 2025-12-23T18:33:51.957585Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:51.957639Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:10:2057], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-12-23T18:33:51.957732Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2025-12-23T18:33:51.957769Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:51.957891Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:51.957928Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:51.964992Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:51.965334Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 40 2025-12-23T18:33:51.965380Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-12-23T18:33:51.965415Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [1:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:52.263570Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-12-23T18:33:52.263648Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:52.263761Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-12-23T18:33:52.263809Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:52.263887Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-12-23T18:33:52.264054Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:52.264097Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 
2025-12-23T18:33:52.264154Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:52.264290Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 40 2025-12-23T18:33:52.264332Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-12-23T18:33:52.264367Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [2:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:52.264470Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:9:2056] 2025-12-23T18:33:52.264532Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:52.264616Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:52.264663Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-12-23T18:33:52.264705Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [2:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-12-23T18:33:52.264771Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:52.264807Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-12-23T18:33:52.264865Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:52.264947Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:10:2057] 2025-12-23T18:33:52.264995Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:10:2057], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-12-23T18:33:52.581577Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 1 }: sender# [3:9:2056] 2025-12-23T18:33:52.581643Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7:2054] Upsert description: path# path 2025-12-23T18:33:52.581717Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 1, capabilities# 2025-12-23T18:33:52.581836Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle 
NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2025-12-23T18:33:52.581881Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:52.581957Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2025-12-23T18:33:52.581989Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:7:2054] Commit generation: owner# 1, generation# 1 2025-12-23T18:33:52.582127Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1006: [3:7:2054] Handle NKikimr::NSchemeBoard::TReplica::TEvPrivate::TEvSendStrongNotifications { Owner: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2025-12-23T18:32:18.826511Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123031088841982:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:18.826556Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:32:18.897809Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005222/r3tmp/tmpkp8iHj/pdisk_1.dat 2025-12-23T18:32:19.208353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:19.288979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:19.289067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:19.391530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:19.464503Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:19.493952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 12471, node 1 2025-12-23T18:32:19.826420Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:32:19.843035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:19.843065Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:19.843074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:19.843188Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10783 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514739542 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:20.290536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:23.826814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123031088841982:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:23.826888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:23.831654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123052563679676:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:23.831782Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:23.832193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123052563679686:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:23.832277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.080077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/Dir, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-12-23T18:32:24.080375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:32:24.080492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/Dir/Foo, opId: 281474976715658:1, at schemeshard: 72057594046644480 2025-12-23T18:32:24.081926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:32:24.081970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:24.084861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Dir/Foo 2025-12-23T18:32:24.208704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766514744253, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:32:24.334727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715658:0 2025-12-23T18:32:24.334859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715658:1 2025-12-23T18:32:24.371325Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123056858647230:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.371475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.374545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123056858647233:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.374634Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:24.393146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /Root/Dir/Foo, pathId: , opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-12-23T18:32:24.393749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:32:24.393806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:32:24.398975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Dir/Foo 2025-12-23T18:32:24.415872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766514744463, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:32:24.429286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715659:0 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2025-12-23T18:32:34.154370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:32:34.154395Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:34.283058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Dir/Foo, tableId: , opId: 281474976710693:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Dir/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2025-12-23T18:32:34.283732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Dir/Foo, tableId: , opId: 281474976710693:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" TabletID: 72075186224037888 ShardIdx: 1 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037889 ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Dir/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 7 ... 
mentPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 5 2025-12-23T18:33:50.980308Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7587123426225838858:2820], serverId# [1:7587123426225838863:5147], sessionId# [0:0:0] 2025-12-23T18:33:50.980355Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037892 state PreOffline 2025-12-23T18:33:50.980393Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037892 Got TEvSchemaChangedResult from SS at 72075186224037892 2025-12-23T18:33:50.980523Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037891 state PreOffline 2025-12-23T18:33:50.980545Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-12-23T18:33:50.980977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715664:0 progress is 1/1 2025-12-23T18:33:50.981014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715664 ready parts: 1/1 2025-12-23T18:33:50.981036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715664:0 progress is 1/1 2025-12-23T18:33:50.981045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715664 ready parts: 1/1 2025-12-23T18:33:50.981061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715664, ready parts: 1/1, is published: true 2025-12-23T18:33:50.981127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7587123426225838795:2815] message: TxId: 281474976715664 2025-12-23T18:33:50.981163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715664 ready parts: 1/1 2025-12-23T18:33:50.981180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715664:0 2025-12-23T18:33:50.981190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976715664:0 2025-12-23T18:33:50.981297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2025-12-23T18:33:50.984127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587123099808320604 RawX2: 4503603922340186 } TabletId: 72075186224037890 State: 4 2025-12-23T18:33:50.984201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:33:50.985843Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-12-23T18:33:50.985912Z node 1 
:TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-12-23T18:33:50.987355Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-12-23T18:33:50.987403Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-12-23T18:33:50.995579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:33:50.995580Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:33:50.995628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:33:50.995685Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:33:50.995819Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-12-23T18:33:50.996340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587123421930871154 RawX2: 4503603922340577 } TabletId: 72075186224037891 State: 4 2025-12-23T18:33:50.996386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:33:50.996564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587123421930871149 RawX2: 4503603922340576 } TabletId: 72075186224037892 State: 4 2025-12-23T18:33:50.996613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:33:50.999235Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-12-23T18:33:50.999241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:33:50.999262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:33:50.999361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:33:50.999381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:33:50.999926Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-12-23T18:33:51.000752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK 
Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:33:51.000957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2025-12-23T18:33:51.002523Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-12-23T18:33:51.002605Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7587123108398255550:3556], serverId# [1:7587123108398255551:3557], sessionId# [0:0:0] 2025-12-23T18:33:51.002893Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-12-23T18:33:51.003227Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2025-12-23T18:33:51.003364Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2025-12-23T18:33:51.004897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:33:51.004927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-12-23T18:33:51.005092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-12-23T18:33:51.005270Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-12-23T18:33:51.005291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2025-12-23T18:33:51.005324Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-12-23T18:33:51.005487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-12-23T18:33:51.005634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2025-12-23T18:33:51.005785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:33:51.005805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2025-12-23T18:33:51.005850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:33:51.006130Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2025-12-23T18:33:51.006222Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2025-12-23T18:33:51.006280Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from 
node 1, TabletId: 72075186224037891 not found 2025-12-23T18:33:51.006923Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-12-23T18:33:51.008081Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2025-12-23T18:33:51.008152Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2025-12-23T18:33:51.012067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:33:51.012093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-12-23T18:33:51.012129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2025-12-23T18:33:51.012143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-12-23T18:33:51.012191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestEvVGenerationChangeRace [GOOD] Test command err: 2025-12-23T18:33:48.205308Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.207000Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.207566Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/8ptz/004c1a/r3tmp/tmprtEZUd/pdisk_1.dat 2025-12-23T18:33:48.346426Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.346506Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.346555Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 Formatting PDisk with guid1 18307490054771045764 Creating PDisk with guid2 9143346725603891810 Creating pdisk 2025-12-23T18:33:49.244225Z node 1 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:570} PDiskId# 1001 
Can't start due to a guid error expected# 9143346725603891810 on-disk# 18307490054771045764 PDiskId# 1001 2025-12-23T18:33:49.267892Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [e2e5f1b9c917f854] bootstrap ActorId# [1:486:2464] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:352:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-12-23T18:33:49.268078Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:352:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.268129Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:352:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.268158Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:352:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.268186Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:352:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.268214Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:352:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.268243Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:352:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:33:49.268284Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:352:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:33:49.268362Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:352:1] Marker# BPG33 2025-12-23T18:33:49.268413Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:352:1] Marker# BPG32 2025-12-23T18:33:49.268456Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:352:2] Marker# BPG33 2025-12-23T18:33:49.268486Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:352:2] Marker# BPG32 2025-12-23T18:33:49.268519Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:352:3] Marker# BPG33 2025-12-23T18:33:49.268546Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:352:3] Marker# BPG32 2025-12-23T18:33:49.268717Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:47:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:352:3] FDS# 352 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.268793Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:40:2084] 
NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:352:2] FDS# 352 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.271147Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:61:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:352:1] FDS# 352 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:33:49.279828Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:352:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 82771 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-12-23T18:33:49.280065Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:352:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 4 } Cost# 82771 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 5 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-12-23T18:33:49.280165Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:352:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 82771 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-12-23T18:33:49.280268Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:352:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-12-23T18:33:49.280336Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:352:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:33:49.280542Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 3.556 sample PartId# [72057594037932033:2:8:0:0:352:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 3.557 sample PartId# [72057594037932033:2:8:0:0:352:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 3.558 sample PartId# [72057594037932033:2:8:0:0:352:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 12.265 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 12.441 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 12.542 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } Verify that PDisk returns ERROR YardInitResult: {EvYardInitResult Status# CORRUPTED ErrorReason# "PDisk is in StateError, reason# PDiskId# 1001 Can't start due to a guid error expected# 9143346725603891810 on-disk# 18307490054771045764" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 0 ownerRound# 0 SlotSizeInUnits# 0 ChunkSize# 0 AppendBlockSize# 0 RecommendedReadSize# 0 SeekTimeUs# 0 ReadSpeedBps# 0 
WriteSpeedBps# 0 ReadBlockSize# 0 WriteBlockSize# 0 BulkWriteBlockSize# 0 PrefetchSizeBytes# 0 GlueRequestDistanceBytes# 0 IsTinyDisk# 0}} OwnedChunks# {}} 2025-12-23T18:33:50.851832Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/8ptz/004c1a/r3tmp/tmpXUxFqr/pdisk_1.dat 2025-12-23T18:33:51.007523Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:51.007642Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:51.010665Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:51.010802Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:51.023958Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 Starting test 2025-12-23T18:33:51.559264Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:2534: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON START Marker# BSVS37 2025-12-23T18:33:51.559549Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_public.cpp:707: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) LocalRecovery START 2025-12-23T18:33:51.566615Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_public.cpp:190: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-12-23T18:33:51.567524Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_logreplay.cpp:83: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TRecoveryLogReplayer: START 2025-12-23T18:33:51.568076Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_public.cpp:143: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) LocalRecovery FINISHED: {RecoveryDuration# 0.002000s RecoveredLogStartLsn# 0 SuccessfulRecovery# true EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 
LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {{RecsN# 0 Lsns# [0 0]}}} ... blocking NKikimr::TEvBlobStorage::TEvLocalRecoveryDone from to VDISK_SKELETON cookie 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } PDiskId: 1000 VDiskSlotId: 1002 Guid: 12036270616136454551 Kind: 0 StoragePoolName: "testEvVGenerationChangeRace" InstanceGuid: 16965840344674344464 GroupSizeInUnits: 0 2025-12-23T18:33:51.654914Z node 2 :BS_SKELETON INFO: blobstorage_skeletonfront.cpp:1727: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) VDisk Generation Change success; new VDiskId# [82000002:2:0:0:0] Marker# BSVSF02 TEvControllerConfigResponse# NKikimrBlobStorage.TEvControllerConfigResponse Response { Status { Success: true } Success: true ConfigTxSeqNo: 5 } Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: Initial DiskSpace: Green Replicated: false UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 16965840344674344464 ReplicationProgress: nan ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 16965840344674344464 GroupSizeInUnits: 0 ... 
unblocking NKikimr::TEvBlobStorage::TEvLocalRecoveryDone from to VDISK_SKELETON 2025-12-23T18:33:51.655780Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:1963: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON LOCAL RECOVERY SUCCEEDED Marker# BSVS29 2025-12-23T18:33:51.671310Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:2131: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON SYNC GUID RECOVERY SUCCEEDED Marker# BSVS31 2025-12-23T18:33:51.671437Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:1842: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON IS UP AND RUNNING Marker# BSVS28 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } IncarnationGuid: 16349739802483488852 InstanceGuid: 16965840344674344464 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 2 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 9291663407116100381 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 3 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 17896166834918957343 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 1 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 4004046642820941808 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 3373745326798022930 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 9291663407116100381 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 17896166834918957343 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 4004046642820941808 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 3373745326798022930 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 
33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 9291663407116100381 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 17896166834918957343 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 4004046642820941808 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 3373745326798022930 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 781801286752660147 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 8242401636566958354 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } ReadThroughput: 0 WriteThroughput: 0 InstanceGuid: 781801286752660147 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } ReadThroughput: 0 WriteThroughput: 0 InstanceGuid: 8242401636566958354 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 16965840344674344464 AvailableSize: 34225520640 GroupSizeInUnits: 2 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig [GOOD] |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] Test command err: 2025-12-23T18:33:51.592207Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2025-12-23T18:33:51.592305Z node 1 :SCHEME_BOARD_REPLICA NOTICE: 
replica.cpp:778: [1:7:2054] Successful handshake: owner# 800, generation# 1 2025-12-23T18:33:51.592439Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2025-12-23T18:33:51.592475Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:7:2054] Commit generation: owner# 800, generation# 1 2025-12-23T18:33:51.592566Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:9:2056] 2025-12-23T18:33:51.592598Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 800, generation# 1 2025-12-23T18:33:51.592668Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:9:2056] 2025-12-23T18:33:51.592707Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:7:2054] Commit generation: owner# 800, generation# 1 2025-12-23T18:33:51.592923Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 103 2025-12-23T18:33:51.592961Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-12-23T18:33:51.604320Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-12-23T18:33:51.604610Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:9:2056], cookie# 0, event size# 103 2025-12-23T18:33:51.604654Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-12-23T18:33:51.604723Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-12-23T18:33:51.604845Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:10:2057] 2025-12-23T18:33:51.604917Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:10:2057], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-12-23T18:33:51.649509Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:11:2058] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:12:2059] 
2025-12-23T18:33:51.649569Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:11:2058] Successful handshake: owner# 800, generation# 1 2025-12-23T18:33:51.649739Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:11:2058] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:12:2059] 2025-12-23T18:33:51.649953Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:11:2058] Commit generation: owner# 800, generation# 1 2025-12-23T18:33:51.650036Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:11:2058] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:13:2060] 2025-12-23T18:33:51.650078Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:11:2058] Successful handshake: owner# 900, generation# 1 2025-12-23T18:33:51.650139Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:11:2058] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:13:2060] 2025-12-23T18:33:51.650164Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:11:2058] Commit generation: owner# 900, generation# 1 2025-12-23T18:33:51.650273Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:11:2058] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:12:2059], cookie# 0, event size# 103 2025-12-23T18:33:51.650307Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:11:2058] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-12-23T18:33:51.650357Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:11:2058] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-12-23T18:33:51.650459Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:11:2058] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:13:2060], cookie# 0, event size# 103 2025-12-23T18:33:51.650491Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:11:2058] Update description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], deletion# false 2025-12-23T18:33:51.650556Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:893: [1:11:2058] Replace GSS by TSS description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], domainId# [OwnerId: 800, LocalPathId: 2], curPathId# [OwnerId: 800, LocalPathId: 2], curDomainId# [OwnerId: 800, LocalPathId: 2] 2025-12-23T18:33:51.650633Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:11:2058] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 900, LocalPathId: 1], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-12-23T18:33:51.650757Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:11:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:14:2061] 2025-12-23T18:33:51.650801Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:11:2058] Subscribe: subscriber# [1:14:2061], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 
800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-12-23T18:33:51.651154Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:15:2062] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2025-12-23T18:33:51.651199Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:15:2062] Successful handshake: owner# 800, generation# 1 2025-12-23T18:33:51.651264Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:15:2062] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2025-12-23T18:33:51.651294Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:15:2062] Commit generation: owner# 800, generation# 1 2025-12-23T18:33:51.651347Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:15:2062] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:17:2064] 2025-12-23T18:33:51.651368Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:15:2062] Successful handshake: owner# 800, generation# 1 2025-12-23T18:33:51.651431Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:15:2062] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:17:2064] 2025-12-23T18:33:51.651455Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:15:2062] Commit generation: owner# 800, generation# 1 2025-12-23T18:33:51.651513Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:15:2062] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:16:2063], cookie# 0, event size# 103 2025-12-23T18:33:51.651539Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:15:2062] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-12-23T18:33:51.651592Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:15:2062] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-12-23T18:33:51.651659Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:15:2062] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:17:2064], cookie# 0, event size# 103 2025-12-23T18:33:51.651688Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:15:2062] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-12-23T18:33:51.651724Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:15:2062] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 2, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-12-23T18:33:51.651836Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:15:2062] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:18:2065] 2025-12-23T18:33:51.651870Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:15:2062] Subscribe: subscriber# [1:18:2065], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" 
PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2025-12-23T18:33:51.652614Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:19:2066] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:20:2067] 2025-12-23T18:33:51.652709Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:19:2066] Successful handshake: owner# 800, generation# 1 2025-12-23T18:33:51.652769Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:19:2066] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:20:2067] 2025-12-23T18:33:51.652828Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:19:2066] Commit generation: owner# 800, generation# 1 2025-12-23T18:33:51.652882Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:19:2066] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Gener ... DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-12-23T18:33:52.355786Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:399:2446] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2025-12-23T18:33:52.355851Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:399:2446] Successful handshake: owner# 910, generation# 1 2025-12-23T18:33:52.355957Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:399:2446] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2025-12-23T18:33:52.355992Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:399:2446] Commit generation: owner# 910, generation# 1 2025-12-23T18:33:52.356115Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:399:2446] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:401:2448] 2025-12-23T18:33:52.356148Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:399:2446] Successful handshake: owner# 910, generation# 1 2025-12-23T18:33:52.356206Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:399:2446] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:401:2448] 2025-12-23T18:33:52.356243Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:399:2446] Commit generation: owner# 910, generation# 1 2025-12-23T18:33:52.356333Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:399:2446] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:400:2447], cookie# 0, event size# 64 2025-12-23T18:33:52.356380Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:399:2446] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-12-23T18:33:52.356422Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:399:2446] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2025-12-23T18:33:52.356518Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:399:2446] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:401:2448], cookie# 0, event 
size# 130 2025-12-23T18:33:52.356549Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:399:2446] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# false 2025-12-23T18:33:52.356585Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [2:399:2446] Path was explicitly deleted, ignoring: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2025-12-23T18:33:52.356658Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:399:2446] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:402:2449] 2025-12-23T18:33:52.356695Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:399:2446] Upsert description: path# /Root/Tenant/table_inside 2025-12-23T18:33:52.356753Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:399:2446] Subscribe: subscriber# [2:402:2449], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-12-23T18:33:52.359649Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:403:2450] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2025-12-23T18:33:52.359714Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:403:2450] Successful handshake: owner# 910, generation# 1 2025-12-23T18:33:52.359794Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:403:2450] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2025-12-23T18:33:52.359834Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:403:2450] Commit generation: owner# 910, generation# 1 2025-12-23T18:33:52.359901Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:403:2450] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:405:2452] 2025-12-23T18:33:52.359932Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:403:2450] Successful handshake: owner# 910, generation# 1 2025-12-23T18:33:52.360056Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:403:2450] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:405:2452] 2025-12-23T18:33:52.360096Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:403:2450] Commit generation: owner# 910, generation# 1 2025-12-23T18:33:52.360202Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:403:2450] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:404:2451], cookie# 0, event size# 64 2025-12-23T18:33:52.360235Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:403:2450] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-12-23T18:33:52.360268Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:403:2450] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 
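[editor's note] The "Left == / Right == / WIN ==" summaries and the "Path was explicitly deleted, ignoring" record above show the replica keeping an explicit deletion (PathVersion 18446744073709551615) and discarding a later re-creation of the same pathId. The snippet below is a minimal, hypothetical C++ sketch of that precedence as it can be read off this log; the type and function names are invented for illustration and this is not the actual replica.cpp logic.

// Hypothetical sketch inferred from the "Left == / Right == / WIN ==" lines above;
// not taken from ydb/core/tx/scheme_board/replica.cpp.
#include <cstdint>
#include <iostream>

struct TDescr {
    uint64_t Version = 0;    // PathVersion; 18446744073709551615 marks an explicit deletion
    bool IsDeletion = false;
};

// Once an explicit deletion was recorded, a later non-deletion update for the same
// pathId is ignored ("Path was explicitly deleted, ignoring" in the log above).
const TDescr& Winner(const TDescr& stored, const TDescr& update) {
    if (stored.IsDeletion && stored.Version == UINT64_MAX) {
        return stored;                           // deletion with max version stays the winner
    }
    return update.Version >= stored.Version ? update : stored;
}

int main() {
    TDescr deleted{UINT64_MAX, true};            // Left:  PathVersion 18446744073709551615, deletion
    TDescr recreated{2, false};                  // Right: PathVersion 2, re-created path
    std::cout << (Winner(deleted, recreated).IsDeletion ? "deletion wins\n" : "update wins\n");
    return 0;
}

Run against the values printed in this test, the sketch reproduces the logged outcome (the deleted description wins), but the real replica also compares domain ids and owner ids, which are omitted here.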
2025-12-23T18:33:52.360374Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:403:2450] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:405:2452], cookie# 0, event size# 64 2025-12-23T18:33:52.360428Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:403:2450] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2025-12-23T18:33:52.360535Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:403:2450] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:406:2453] 2025-12-23T18:33:52.360595Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:403:2450] Upsert description: path# /Root/Tenant/table_inside 2025-12-23T18:33:52.360659Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:403:2450] Subscribe: subscriber# [2:406:2453], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2025-12-23T18:33:52.657213Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:8:2055] 2025-12-23T18:33:52.657290Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 800, generation# 1 2025-12-23T18:33:52.657394Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:8:2055] 2025-12-23T18:33:52.657462Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:7:2054] Commit generation: owner# 800, generation# 1 2025-12-23T18:33:52.657542Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:9:2056] 2025-12-23T18:33:52.657580Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 900, generation# 1 2025-12-23T18:33:52.657663Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:9:2056] 2025-12-23T18:33:52.657713Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:7:2054] Commit generation: owner# 900, generation# 1 2025-12-23T18:33:52.657915Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 118 2025-12-23T18:33:52.657981Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-12-23T18:33:52.658154Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], 
pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-12-23T18:33:52.658285Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [3:9:2056], cookie# 0, event size# 117 2025-12-23T18:33:52.658328Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2025-12-23T18:33:52.658374Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:893: [3:7:2054] Update description by newest path form tenant schemeshard: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], domainId# [OwnerId: 800, LocalPathId: 1], curPathId# [OwnerId: 800, LocalPathId: 1111], curDomainId# [OwnerId: 800, LocalPathId: 1] 2025-12-23T18:33:52.658415Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111] 2025-12-23T18:33:52.658477Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-12-23T18:33:52.658582Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 0 }: sender# [3:10:2057] 2025-12-23T18:33:52.658643Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:10:2057], path# /root/db/dir_inside, domainOwnerId# 0, capabilities# =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 1111 PathOwnerId: 800 =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 11 PathOwnerId: 900 =========== DomainId: [OwnerId: 800, LocalPathId: 1] IsDeletion: 0 PathId: [OwnerId: 900, LocalPathId: 11] Versions: 1 |92.3%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> TReplicaTest::CommitWithoutHandshake |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> BindQueue::Basic [GOOD] >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop >> TReplicaTest::CommitWithoutHandshake [GOOD] >> TReplicaTest::CommitWithStaleGeneration |92.3%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable >> TReplicaTest::CommitWithStaleGeneration [GOOD] >> TReplicaTest::Delete >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi >> TReplicaTest::Delete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig [GOOD] Test command err: 2025-12-23T18:33:48.522863Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.524249Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.524712Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/8ptz/004c16/r3tmp/tmpaP9SEI/pdisk_1.dat 2025-12-23T18:33:48.701954Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.702087Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:48.702141Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:50.341388Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:50.342972Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:50.344013Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:50.344508Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:50.345287Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): 
FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:33:50.346522Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/8ptz/004c16/r3tmp/tmpzDpoVT/pdisk_1.dat driveSize# 7900 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 8 SlotSizeInUnits# 1 relativeError# -0.0125 driveSize# 8000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 8 SlotSizeInUnits# 1 relativeError# 0 driveSize# 8100 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 8 SlotSizeInUnits# 1 relativeError# 0.0125 driveSize# 16000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 16 SlotSizeInUnits# 1 relativeError# 0 driveSize# 24000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 12 SlotSizeInUnits# 2 relativeError# 0 driveSize# 31000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 16 SlotSizeInUnits# 2 relativeError# -0.03125 driveSize# 50000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 13 SlotSizeInUnits# 4 relativeError# -0.03846153846 driveSize# 50000 unitSizeInBytes# 100 maxSlots# 16 -> ExpectedSlotCount# 16 SlotSizeInUnits# 32 relativeError# -0.0234375 driveSize# 18000 unitSizeInBytes# 200 maxSlots# 16 -> ExpectedSlotCount# 11 SlotSizeInUnits# 8 relativeError# 0.02272727273 driveSize# 1 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 1 relativeError# 0 driveSize# 2 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 2 relativeError# 0 driveSize# 3 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 4 relativeError# -0.25 driveSize# 4 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 4 relativeError# 0 driveSize# 5 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 4 relativeError# 0.25 driveSize# 6 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# -0.25 driveSize# 7 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# -0.125 driveSize# 8 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0 driveSize# 9 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0.125 driveSize# 10 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0.25 driveSize# 11 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0.375 driveSize# 12 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.25 driveSize# 13 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.1875 driveSize# 14 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.125 driveSize# 15 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.0625 driveSize# 16 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0 driveSize# 17 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.0625 driveSize# 18 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.125 driveSize# 19 unitSizeInBytes# 1 maxSlots# 
1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.1875 driveSize# 20 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.25 driveSize# 21 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.3125 driveSize# 22 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.375 driveSize# 23 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.4375 driveSize# 24 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.25 driveSize# 25 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.21875 driveSize# 26 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.1875 driveSize# 27 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.15625 driveSize# 28 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.125 driveSize# 29 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.09375 driveSize# 30 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.0625 driveSize# 31 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.03125 driveSize# 32 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0 driveSize# 33 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.03125 driveSize# 34 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.0625 driveSize# 35 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.09375 driveSize# 36 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.125 driveSize# 37 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.15625 driveSize# 38 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.1875 driveSize# 39 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.21875 driveSize# 40 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.25 driveSize# 41 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.28125 driveSize# 42 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.3125 driveSize# 43 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.34375 driveSize# 44 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.375 driveSize# 45 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.40625 driveSize# 46 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.4375 driveSize# 47 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.46875 driveSize# 48 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.25 driveSize# 49 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.234375 driveSize# 50 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.21875 driveSize# 51 unitSizeInBytes# 1 
maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.203125 driveSize# 52 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.1875 driveSize# 53 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.171875 driveSize# 54 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.15625 driveSize# 55 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.140625 driveSize# 56 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.125 driveSize# 57 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.109375 driveSize# 58 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.09375 driveSize# 59 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.078125 driveSize# 60 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.0625 driveSize# 61 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.046875 driveSize# 62 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.03125 driveSize# 63 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.015625 driveSize# 64 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# 0 driveSize# 65 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# 0.015625 driveSize# 66 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# 0.03125 driveSize# 67 unitSizeInBytes# 1 ... 
12-23T18:33:53.983280Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 225 PDiskId# 1001 2025-12-23T18:33:53.983314Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 228 PDiskId# 1001 2025-12-23T18:33:53.983354Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 231 PDiskId# 1001 2025-12-23T18:33:53.983394Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 234 PDiskId# 1001 2025-12-23T18:33:53.983453Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 237 PDiskId# 1001 2025-12-23T18:33:53.983498Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 240 PDiskId# 1001 2025-12-23T18:33:53.983533Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 243 PDiskId# 1001 2025-12-23T18:33:53.983572Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 246 PDiskId# 1001 2025-12-23T18:33:53.983615Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 249 PDiskId# 1001 2025-12-23T18:33:53.983651Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 252 PDiskId# 1001 2025-12-23T18:33:53.983686Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 255 PDiskId# 1001 2025-12-23T18:33:53.983723Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 258 PDiskId# 1001 2025-12-23T18:33:53.983767Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 261 PDiskId# 1001 2025-12-23T18:33:53.983806Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 264 PDiskId# 1001 2025-12-23T18:33:53.983843Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 267 PDiskId# 1001 2025-12-23T18:33:53.983906Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 270 PDiskId# 1001 2025-12-23T18:33:53.983948Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 273 PDiskId# 1001 2025-12-23T18:33:53.983983Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 276 PDiskId# 1001 2025-12-23T18:33:53.984020Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 279 PDiskId# 1001 2025-12-23T18:33:53.984059Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 282 PDiskId# 1001 2025-12-23T18:33:53.984097Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 285 PDiskId# 1001 2025-12-23T18:33:53.984136Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 288 PDiskId# 1001 2025-12-23T18:33:53.984174Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 291 PDiskId# 1001 
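[editor's note] The driveSize# / unitSizeInBytes# / ExpectedSlotCount# / SlotSizeInUnits# / relativeError# table printed earlier by TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig is consistent with relativeError being the deviation of the drive size from the capacity implied by the chosen slot layout. The self-contained check below only verifies that relationship against a few rows copied from the log; the formula is an inference from those printed numbers, not the node warden's actual slot-count inference code.

// Checks: relativeError == (driveSize - count*slotSizeInUnits*unitSizeInBytes)
//                          / (count*slotSizeInUnits*unitSizeInBytes)
// Rows copied from the TestInferPDiskSlotCountExplicitConfig output above.
#include <cassert>
#include <cmath>

struct Row {
    double driveSize, unitSizeInBytes;
    double slotCount, slotSizeInUnits, relativeError;
};

int main() {
    const Row rows[] = {
        {7900,  1000,  8, 1, -0.0125},
        {31000, 1000, 16, 2, -0.03125},
        {50000, 1000, 13, 4, -0.03846153846},
        {18000,  200, 11, 8,  0.02272727273},
    };
    for (const Row& r : rows) {
        const double slotted = r.slotCount * r.slotSizeInUnits * r.unitSizeInBytes;
        const double err = (r.driveSize - slotted) / slotted;
        assert(std::fabs(err - r.relativeError) < 1e-9);  // matches the logged value
    }
    return 0;
}

All four rows satisfy the relation to the precision printed in the log; how ExpectedSlotCount and SlotSizeInUnits themselves are chosen (powers of two, capped by maxSlots) is not reproduced here.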
2025-12-23T18:33:53.984208Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 294 PDiskId# 1001 2025-12-23T18:33:53.984263Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 297 PDiskId# 1001 2025-12-23T18:33:53.984309Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 300 PDiskId# 1001 2025-12-23T18:33:53.984343Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 303 PDiskId# 1001 2025-12-23T18:33:53.984407Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 306 PDiskId# 1001 2025-12-23T18:33:53.984458Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 309 PDiskId# 1001 2025-12-23T18:33:53.984499Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 312 PDiskId# 1001 2025-12-23T18:33:53.984546Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 315 PDiskId# 1001 2025-12-23T18:33:53.984617Z node 3 :BS_PDISK DEBUG: {BPD66@blobstorage_pdisk_writer.h:388} TSectorWriter TerminateLog large SectorBytesFree# 2119 ChunkIdx# 0 SectorIdx# 315 SectorOffset# 1290240 PDiskId# 1001 2025-12-23T18:33:53.985804Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_completion_impl.cpp:423: TCompletionEventSender {EvLogInitResult} 2025-12-23T18:33:53.985972Z node 3 :BS_PDISK DEBUG: {BPD69@blobstorage_pdisk_impl_log.cpp:846} WriteSysLogRestorePoint FirstLogChunkToParseCommits# 1 CommonLogger# 0x00007CBBF3AF9880 "LogChunks.size()"# 1 "LogChunks.front().ChunkIdx"# 1 BeginSectorIdx# 171 EndSectorIdx# 318 PDiskId# 1001 2025-12-23T18:33:53.986077Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:217} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10010000004 PDiskId# 1001 2025-12-23T18:33:53.986181Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 0 ReadOffset# 2576760176640 BytesToRead# 4096 ReqId# 2562560001348 PDiskId# 1001 2025-12-23T18:33:53.986245Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1759} PDisk have successfully started PDiskId# 1001 2025-12-23T18:33:53.987129Z node 3 :BS_PDISK INFO: {BPD01@blobstorage_pdisk_impl_log.cpp:1760} StartupOwnerInfo# { PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..18902} PDiskId# 1001 2025-12-23T18:33:53.987220Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2025-12-23T18:33:53.987314Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1001 ReqId# 2562560001348 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 8288.219225 2025-12-23T18:33:53.987419Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2025-12-23T18:33:53.987458Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1001 ReqId# 2562560001348 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2025-12-23T18:33:53.987489Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 
totalChunkReqs# 0 PDiskId# 1001 2025-12-23T18:33:53.987527Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 0 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1001 2025-12-23T18:33:53.987587Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:217} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10010000005 PDiskId# 1001 2025-12-23T18:33:53.987683Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 16640 ReadOffset# 2576828334080 BytesToRead# 4096 ReqId# 2562560001604 PDiskId# 1001 2025-12-23T18:33:53.987790Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2025-12-23T18:33:53.988421Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1001 ReqId# 2562560001604 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 8288.220748 2025-12-23T18:33:53.988536Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2025-12-23T18:33:53.988583Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1001 ReqId# 2562560001604 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2025-12-23T18:33:53.988616Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2025-12-23T18:33:53.988655Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 16640 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1001 2025-12-23T18:33:53.988703Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2025-12-23T18:33:53.999201Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2025-12-23T18:33:54.007038Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:217} CreateReqFromArgs Req# NKikimr::NPDisk::TWhiteboardReport ReqId# 10010000006 PDiskId# 1001 2025-12-23T18:33:54.007145Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1001 ReqId# 2562560001848 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 8288.240222 2025-12-23T18:33:54.007369Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2025-12-23T18:33:54.007418Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1001 ReqId# 2562560001848 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2025-12-23T18:33:54.007461Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2025-12-23T18:33:54.007562Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1001 Path: "SectorMap:TestInferPDiskSlotCountExplicitConfig:2400" AvailableSize: 2576487546880 
TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 13 NumActiveSlots: 0 SlotSizeInUnits: 0 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 0 } 2025-12-23T18:33:54.018885Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest |92.3%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbIndexTable::OnlineBuild |92.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink >> YdbIndexTable::MultiShardTableOneIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Delete [GOOD] Test command err: 2025-12-23T18:33:54.288643Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2025-12-23T18:33:54.288727Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:978: [1:7:2054] Reject commit from unknown populator: sender# [1:8:2055], owner# 1, generation# 1 2025-12-23T18:33:54.288804Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2025-12-23T18:33:54.288844Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:54.686802Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 0 }: sender# [2:8:2055] 2025-12-23T18:33:54.686869Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 0 2025-12-23T18:33:54.686957Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:9:2056] 2025-12-23T18:33:54.687007Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful 
handshake: owner# 1, generation# 1 2025-12-23T18:33:54.687089Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [2:9:2056] 2025-12-23T18:33:54.687149Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:7:2054] Commit generation: owner# 1, generation# 1 2025-12-23T18:33:54.687203Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 0 }: sender# [2:8:2055] 2025-12-23T18:33:54.687238Z node 2 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:988: [2:7:2054] Reject commit from stale populator: sender# [2:8:2055], owner# 1, generation# 0, pending generation# 1 2025-12-23T18:33:54.687284Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [2:8:2055] 2025-12-23T18:33:54.687315Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 2 2025-12-23T18:33:55.049917Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2025-12-23T18:33:55.050004Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2025-12-23T18:33:55.050146Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2025-12-23T18:33:55.050189Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# false 2025-12-23T18:33:55.057449Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 42, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-12-23T18:33:55.057651Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2025-12-23T18:33:55.057748Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:55.057895Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:10:2057] 2025-12-23T18:33:55.057954Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:10:2057], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-12-23T18:33:55.058115Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2025-12-23T18:33:55.058156Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# true 2025-12-23T18:33:55.058193Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 42, LocalPathId: 1] 2025-12-23T18:33:55.058318Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { 
Path: path DomainOwnerId: 0 }: sender# [3:11:2058] 2025-12-23T18:33:55.058389Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:11:2058], path# path, domainOwnerId# 0, capabilities# 2025-12-23T18:33:55.058494Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:12:2059] 2025-12-23T18:33:55.058548Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:12:2059], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-12-23T18:33:55.058666Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:13:2060] 2025-12-23T18:33:55.058705Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:13:2060], path# path, domainOwnerId# 0, capabilities# |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> YdbIndexTable::MultiShardTableOneUniqIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] Test command err: Delete nodeId# 52 Disable nodeId# 3 Delete nodeId# 56 Enable nodeId# 3 Delete nodeId# 88 Add nodeId# 101 Pick Add nodeId# 102 Delete nodeId# 20 Add nodeId# 103 Pick Disable nodeId# 82 Enable nodeId# 82 Add nodeId# 104 Disable nodeId# 102 Delete nodeId# 19 Delete nodeId# 31 Delete nodeId# 83 Disable nodeId# 36 Enable nodeId# 102 Delete nodeId# 14 Enable nodeId# 36 Pick Add nodeId# 105 Disable nodeId# 93 Delete nodeId# 93 Pick Pick Delete nodeId# 90 Delete nodeId# 3 Add nodeId# 106 Disable nodeId# 89 Add nodeId# 107 Pick Enable nodeId# 89 Delete nodeId# 37 Add nodeId# 108 Pick Delete nodeId# 40 Pick Delete nodeId# 61 Pick Disable nodeId# 47 Disable nodeId# 16 Pick Add nodeId# 109 Delete nodeId# 47 Delete nodeId# 99 Add nodeId# 110 Pick Delete nodeId# 105 Add nodeId# 111 Disable nodeId# 4 Add nodeId# 112 Disable nodeId# 72 Delete nodeId# 70 Delete nodeId# 65 Disable nodeId# 59 Enable nodeId# 16 Delete nodeId# 26 Add nodeId# 113 Pick Add nodeId# 114 Delete nodeId# 92 Disable nodeId# 34 Delete nodeId# 79 Pick Add nodeId# 115 Delete nodeId# 108 Pick Pick Pick Add nodeId# 116 Disable nodeId# 35 Enable nodeId# 34 Enable nodeId# 72 Delete nodeId# 95 Enable nodeId# 4 Add nodeId# 117 Add nodeId# 118 Enable nodeId# 35 Enable nodeId# 59 Pick Disable nodeId# 53 Add nodeId# 119 Pick Add nodeId# 120 Delete nodeId# 25 Delete nodeId# 11 Pick Delete nodeId# 16 Pick Delete nodeId# 114 Enable nodeId# 53 Add nodeId# 121 Add nodeId# 122 Delete nodeId# 81 Disable nodeId# 23 Delete nodeId# 23 Pick Disable nodeId# 24 Pick Pick Delete nodeId# 89 Pick Delete nodeId# 44 Pick Disable nodeId# 22 Delete nodeId# 94 Add nodeId# 123 Add nodeId# 124 Add nodeId# 125 Enable nodeId# 22 Pick Pick Disable nodeId# 2 Add nodeId# 126 Pick Pick Disable nodeId# 85 Pick Pick Delete nodeId# 122 Disable nodeId# 97 Delete nodeId# 36 Disable nodeId# 32 Add nodeId# 127 Delete nodeId# 8 Add nodeId# 128 Disable nodeId# 53 Delete nodeId# 30 Add nodeId# 129 Enable nodeId# 97 Disable nodeId# 43 Pick Pick Delete nodeId# 5 Pick Add nodeId# 130 Disable nodeId# 118 Delete nodeId# 107 Add nodeId# 131 Enable nodeId# 32 Pick Delete nodeId# 33 Disable nodeId# 128 Enable nodeId# 118 Delete nodeId# 39 Disable nodeId# 18 Add nodeId# 132 Pick Add nodeId# 133 Disable nodeId# 68 Add nodeId# 134 Pick 
Enable nodeId# 128 Delete nodeId# 104 Delete nodeId# 125 Pick Add nodeId# 135 Pick Pick Enable nodeId# 43 Add nodeId# 136 Add nodeId# 137 Pick Enable nodeId# 53 Pick Disable nodeId# 87 Delete nodeId# 32 Disable nodeId# 135 Delete nodeId# 48 Enable nodeId# 85 Enable nodeId# 18 Enable nodeId# 87 Disable nodeId# 62 Enable nodeId# 24 Pick Delete nodeId# 98 Pick Enable nodeId# 62 Add nodeId# 138 Add nodeId# 139 Add nodeId# 140 Delete nodeId# 42 Pick Disable nodeId# 15 Disable nodeId# 53 Enable nodeId# 135 Delete nodeId# 49 Add nodeId# 141 Disable nodeId# 136 Delete nodeId# 34 Enable nodeId# 53 Pick Disable nodeId# 138 Disable nodeId# 139 Add nodeId# 142 Enable nodeId# 136 Add nodeId# 143 Add nodeId# 144 Add nodeId# 145 Enable nodeId# 68 Pick Pick Add nodeId# 146 Enable nodeId# 2 Enable nodeId# 139 Delete nodeId# 142 Add nodeId# 147 Enable nodeId# 15 Pick Delete nodeId# 102 Enable nodeId# 138 Pick Add nodeId# 148 Pick Delete nodeId# 78 Delete nodeId# 109 Delete nodeId# 139 Add nodeId# 149 Delete nodeId# 46 Add nodeId# 150 Add nodeId# 151 Add nodeId# 152 Pick Disable nodeId# 138 Delete nodeId# 77 Add nodeId# 153 Disable nodeId# 75 Enable nodeId# 138 Disable nodeId# 80 Delete nodeId# 110 Disable nodeId# 153 Enable nodeId# 75 Pick Enable nodeId# 153 Enable nodeId# 80 Pick Add nodeId# 154 Disable nodeId# 144 Disable nodeId# 15 Enable nodeId# 144 Pick Enable nodeId# 15 Disable nodeId# 75 Pick Add nodeId# 155 Enable nodeId# 75 Pick Pick Add nodeId# 156 Disable nodeId# 152 Pick Disable nodeId# 2 Disable nodeId# 68 Delete nodeId# 152 Enable nodeId# 2 Enable nodeId# 68 Disable nodeId# 150 Pick Enable nodeId# 150 Disable nodeId# 87 Enable nodeId# 87 Delete nodeId# 18 Disable nodeId# 112 Delete nodeId# 129 Delete nodeId# 124 Disable nodeId# 9 Disable nodeId# 135 Delete nodeId# 117 Pick Pick Add nodeId# 157 Add nodeId# 158 Disable nodeId# 157 Add nodeId# 159 Disable nodeId# 76 Disable nodeId# 21 Pick Disable nodeId# 145 Enable nodeId# 21 Enable nodeId# 135 Enable nodeId# 157 Add nodeId# 160 Delete nodeId# 106 Delete nodeId# 9 Add nodeId# 161 Pick Add nodeId# 162 Disable nodeId# 154 Disable nodeId# 29 Disable nodeId# 140 Disable nodeId# 68 Pick Enable nodeId# 112 Add nodeId# 163 Add nodeId# 164 Enable nodeId# 29 Pick Enable nodeId# 154 Delete nodeId# 161 Delete nodeId# 103 Disable nodeId# 158 Pick Pick Enable nodeId# 158 Pick Add nodeId# 165 Disable nodeId# 149 Delete nodeId# 155 Add nodeId# 166 Enable nodeId# 145 Add nodeId# 167 Delete nodeId# 121 Enable nodeId# 149 Delete nodeId# 166 Delete nodeId# 24 Disable nodeId# 118 Disable nodeId# 41 Add nodeId# 168 Enable nodeId# 41 Disable nodeId# 63 Add nodeId# 169 Enable nodeId# 76 Disable nodeId# 96 Delete nodeId# 141 Pick Add nodeId# 170 Delete nodeId# 118 Enable nodeId# 140 Enable nodeId# 96 Disable nodeId# 160 Add nodeId# 171 Delete nodeId# 101 Add nodeId# 172 Delete nodeId# 10 Disable nodeId# 153 Disable nodeId# 45 Delete nodeId# 87 Pick Add nodeId# 173 Enable nodeId# 153 Enable nodeId# 45 Delete nodeId# 156 Add nodeId# 174 Pick Add nodeId# 175 Disable nodeId# 136 Add nodeId# 176 Delete nodeId# 100 Add nodeId# 177 Enable nodeId# 68 Enable nodeId# 63 Delete nodeId# 60 Delete nodeId# 84 Disable nodeId# 64 Delete nodeId# 171 Pick Pick Enable nodeId# 160 Add nodeId# 178 Enable nodeId# 64 Add nodeId# 179 Disable nodeId# 146 Add nodeId# 180 Disable nodeId# 7 Pick Disable nodeId# 133 Disable nodeId# 165 Delete nodeId# 29 Delete nodeId# 35 Disable nodeId# 2 Enable nodeId# 133 Enable nodeId# 7 Delete nodeId# 62 Add nodeId# 181 Pick Delete nodeId# 59 Add nodeId# 182 
Pick Enable nodeId# 146 Enable nodeId# 2 Pick Add nodeId# 183 Pick Pick Delete nodeId# 50 Enable nodeId# 165 Delete nodeId# 67 Disable nodeId# 137 Enable nodeId# 137 Enable nodeId# 136 Pick Pick Add nodeId# 184 Delete nodeId# 53 Pick Add nodeId# 185 Add nodeId# 186 Delete nodeId# 179 Pick Pick Delete nodeId# 86 Disable nodeId# 182 Delete nodeId# 113 Delete nodeId# 159 Delete nodeId# 167 Enable nodeId# 182 Delete nodeId# 22 Pick Add nodeId# 187 Pick Delete nodeId# 58 Add nodeId# 188 Pick Pick Disable nodeId# 185 Delete nodeId# 164 Add nodeId# 189 Pick Delete nodeId# 41 Disable nodeId# 6 Disable nodeId# 75 Disable nodeId# 181 Add nodeId# 190 Delete nodeId# 147 Disable nodeId# 175 Disable nodeId# 180 Pick Add nodeId# 191 Delete nodeId# 130 Disable nodeId# 162 Add nodeId# 192 Add nodeId# 193 Pick Delete nodeId# 189 Delete nodeId# 13 Disable nodeId# 116 Delete nodeId# 57 Pick Add nodeId# 194 Delete nodeId# 1 Delete nodeId# 176 Enable nodeId# 175 Pick Add nodeId# 195 Pick Enable nodeId# 162 Disable nodeId# 153 Pick Add nodeId# 196 Pick Pick Enable nodeId# 6 Disable nodeId# 146 Disable nodeId# 154 Disable nodeId# 151 Add nodeId# 197 Delete nodeId# 133 Disable nodeId# 111 Disable nodeId# 27 Disable nodeId# 184 Disable nodeId# 135 Disable nodeId# 172 Enable nodeId# 185 Enable nodeId# 184 Enable nodeId# 180 Disable nodeId# 175 Disable nodeId# 148 Pick Enable nodeId# 27 Disable nodeId# 173 Delete nodeId# 112 Delete nodeId# 163 Disable nodeId# 43 Delete nodeId# 181 Enable nodeId# 175 Delete nodeId# 64 Add nodeId# 198 Disable nodeId# 80 Disable nodeId# 190 Disable nodeId# 150 Disable nodeId# 17 Add nodeId# 199 Enable nodeId# 151 Disable nodeId# 191 Enable nodeId# 150 Enable nodeId# 75 Pick Delete nodeId# 75 Add nodeId# 200 Enable nodeId# 153 Enable nodeId# 43 Disable nodeId# 85 Pick Disable nodeId# 6 Add nodeId# 201 Disable nodeId# 158 Enable nodeId# 154 Delete nodeId# 128 Add nodeId# 202 Pick Disable nodeId# 136 Delete nodeId# 73 Delete nodeId# 97 Add nodeId# 203 Pick Disable nodeId# 184 Disable nodeId# 185 Add nodeId# 204 Add nodeId# 205 Pick Pick Add nodeId# 206 Add nodeId# 207 Add nodeId# 208 Disable nodeId# 2 Disable nodeId# 200 Delete nodeId# 69 Disable nodeId# 149 Disable nodeId# 199 Enable nodeId# 85 Enable nodeId# 199 Disable nodeId# 160 Pick Add nodeId# 209 Add nodeId# 210 Disable nodeId# 134 Disable nodeId# 82 Disable nodeId# 208 Add nodeId# 211 Add nodeId# 212 Enable nodeId# 2 Add nodeId# 213 Delete nodeId# 120 Delete nodeId# 149 Delete nodeId# 15 Disable nodeId# 28 Enable nodeId# 200 Disable nodeId# 186 Pick Enable nodeId# 6 Enable nodeId# 135 Disable nodeId# 192 Pick Delete nodeId# 145 Pick Enable nodeId# 148 Add nodeId# 214 Enable nodeId# 146 Pick Pick Disable nodeId# 138 Pick Delete nodeId# 210 Disable nodeId# 119 Delete nodeId# 148 Disable nodeId# 174 Pick Delete nodeId# 209 Disable nodeId# 4 Pick Pick Disable nodeId# 180 Add nodeId# 215 Pick Add nodeId# 216 Add nodeId# 217 Delete nodeId# 157 Delete nodeId# 207 Pick Pick Enable nodeId# 116 Delete nodeId# 80 Delete nodeId# 151 Pick Add nodeId# 218 Disable nodeId# 216 Disable nodeId# 126 Disable nodeId# 196 Pick Delete nodeId# 205 Disable nodeId# 21 Pick Disable nodeId# 199 Enable nodeId# 21 Enable nodeId# 173 Disable nodeId# 218 Pick Enable nodeId# 185 Enable nodeId# 119 Delete nodeId# 196 Add nodeId# 219 Disable nodeId# 45 Disable nodeId# 96 Enable nodeId# 192 Pick Disable nodeId# 131 Delete nodeId# 28 Enable nodeId# 191 Disable nodeId# 197 Delete nodeId# 146 Disable nodeId# 198 Pick Delete nodeId# 183 Add nodeId# 220 Add nodeId# 221 
Disable nodeId# 144 Add nodeId# 222 Pick Enable nodeId# 197 Enable nodeId# 216 Delete nodeId# 123 Delete nodeId# 27 Pick Pick Enable nodeId# 199 Delete nodeId# 193 Disable nodeId# 85 Add nodeId# 223 Delete nodeId# 7 Disable nodeId# 220 Disable nodeId# 54 Enable nodeId# 180 Delete nodeId# 21 Enable nodeId# 134 Disable nodeId# 38 Pick Add nodeId# 224 Enable nodeId# 218 Enable nodeId# 172 Enable nodeId# 190 Add nodeId# 225 Pick Pick Add nodeId# 226 Disable nodeId# 180 Add nodeId# 227 Add nodeId# 228 Disable nodeId# 55 Pick Disable nodeId# 91 Pick Disable nodeId# 134 Add nodeId# 229 Pick Pick Delete nodeId# 68 Delete nodeId# 222 Add nodeId# 230 Disable nodeId# 66 Delete nodeId# 228 Pick Pick Delete nodeId# 178 Pick Add nodeId# 231 Enable nodeId# 144 Delete nodeId# 137 Delete nodeId# 203 Enable nodeId# 180 Enable nodeId# 138 Add nodeId# 232 Add nodeId# 233 Disable nodeId# 132 Delete nodeId# 182 Add nodeId# 234 Pick Pick Enable nodeId# 38 Disable nodeId# 150 Enable nodeId# 131 Delete nodeId# 215 Delete nodeId# 185 Add nodeId# 235 Disable nodeId# 180 Pick Disable nodeId# 235 Pick Pick Add nodeId# 236 Pick Enable nodeId# 180 Disable nodeId# 236 Add nodeId# 237 Enable nodeId# 55 Enable nodeId# 160 Pick Add nodeId# 238 Disable nodeId# 214 Delete nodeId# 194 Disable nodeId# 217 Add nodeId# 239 Pick Disable nodeId# 211 Add nodeId# 240 Disable nodeId# 226 Disable nodeId# 213 Enable nodeId# 158 Delete nodeId# 237 Disable nodeId# 51 Delete nodeId# 55 Enable nodeId# 45 Add nodeId# 241 Disable nodeId# 162 Delete nodeId# 119 Add nodeId# 242 Pick Add nodeId# 243 Pick Add nodeId# 244 Enable nodeId# 184 Delete nodeId# 192 Disable nodeId# 158 Disable nodeId# 6 Enable nodeId# 217 Disable nodeId# 190 Disable nodeId# 12 Delete nodeId# 165 Delete nodeId# 187 Enable nodeId# 174 Pick Enable nodeId# 85 Disable nodeId# 200 Disable nodeId# 230 Pick Delete nodeId# 38 Add nodeId# 245 Enable nodeId# 12 Disable nodeId# 184 Pick Delete nodeId# 244 Pick Enable nodeId# 236 Pick Enable nodeId# 96 Disable nodeId# 154 Delete nodeId# 191 Enable nodeId# 154 Enable nodeId# 230 Add nodeId# 246 Delete nodeId# 173 Add nodeId# 247 Disable nodeId# 85 Pick Delete nodeId# 247 Enable nodeId# 200 Enable nodeId# 162 Enable nodeId# 82 Enable nodeId# 17 Enable nodeId# 184 Enable nodeId# 54 Pick Pick Pick Add nodeId# 248 Pick Delete nodeId# 231 Pick Delete nodeId# 225 Enable nodeId# 4 Delete nodeId# 234 Pick Enable nodeId# 136 Enable nodeId# 198 Pick Disable nodeId# 72 Disable nodeId# 115 Add nodeId# 249 Disable nodeId# 197 Add nodeId# 250 Enable nodeId# 66 Add nodeId# 251 Disable nodeId# 230 Disable nodeId# 144 Disable nodeId# 140 P ... 
Disable nodeId# 20236 Add nodeId# 20312 Pick Enable nodeId# 20236 Add nodeId# 20313 Add nodeId# 20314 Disable nodeId# 20313 Enable nodeId# 20216 Pick Enable nodeId# 20262 Pick Enable nodeId# 20313 Enable nodeId# 20272 Disable nodeId# 20287 Pick Enable nodeId# 20232 Enable nodeId# 20287 Add nodeId# 20315 Pick Delete nodeId# 20236 Pick Add nodeId# 20316 Disable nodeId# 20196 Enable nodeId# 20196 Pick Pick Disable nodeId# 20090 Enable nodeId# 20090 Pick Disable nodeId# 20311 Delete nodeId# 20216 Enable nodeId# 20311 Pick Pick Add nodeId# 20317 Pick Pick Add nodeId# 20318 Add nodeId# 20319 Add nodeId# 20320 Pick Pick Delete nodeId# 20277 Delete nodeId# 20311 Delete nodeId# 20079 Disable nodeId# 20232 Enable nodeId# 20232 Disable nodeId# 20253 Enable nodeId# 20253 Disable nodeId# 20238 Add nodeId# 20321 Disable nodeId# 20164 Enable nodeId# 20164 Delete nodeId# 20321 Delete nodeId# 20239 Enable nodeId# 20238 Pick Disable nodeId# 20248 Add nodeId# 20322 Delete nodeId# 20291 Pick Pick Add nodeId# 20323 Add nodeId# 20324 Pick Delete nodeId# 20300 Delete nodeId# 20281 Pick Disable nodeId# 20196 Add nodeId# 20325 Enable nodeId# 20248 Pick Delete nodeId# 20272 Disable nodeId# 20322 Delete nodeId# 20325 Add nodeId# 20326 Add nodeId# 20327 Disable nodeId# 20090 Delete nodeId# 20194 Disable nodeId# 20284 Add nodeId# 20328 Pick Enable nodeId# 20322 Pick Disable nodeId# 20026 Delete nodeId# 20314 Pick Disable nodeId# 20156 Delete nodeId# 20293 Add nodeId# 20329 Disable nodeId# 20327 Disable nodeId# 20262 Enable nodeId# 20284 Pick Pick Enable nodeId# 20156 Disable nodeId# 20287 Pick Disable nodeId# 20318 Add nodeId# 20330 Disable nodeId# 20322 Pick Delete nodeId# 20164 Pick Add nodeId# 20331 Pick Enable nodeId# 20026 Pick Add nodeId# 20332 Delete nodeId# 20254 Enable nodeId# 20262 Enable nodeId# 20196 Disable nodeId# 20238 Enable nodeId# 20322 Enable nodeId# 20238 Disable nodeId# 20238 Enable nodeId# 20287 Disable nodeId# 20249 Disable nodeId# 20304 Add nodeId# 20333 Add nodeId# 20334 Delete nodeId# 20306 Enable nodeId# 20327 Delete nodeId# 20287 Disable nodeId# 20326 Pick Pick Add nodeId# 20335 Add nodeId# 20336 Pick Pick Pick Delete nodeId# 20313 Enable nodeId# 20326 Enable nodeId# 20304 Delete nodeId# 20288 Pick Disable nodeId# 20224 Delete nodeId# 20290 Enable nodeId# 20090 Delete nodeId# 20298 Enable nodeId# 20318 Disable nodeId# 20222 Add nodeId# 20337 Pick Pick Add nodeId# 20338 Enable nodeId# 20249 Disable nodeId# 20301 Add nodeId# 20339 Delete nodeId# 20299 Disable nodeId# 20211 Delete nodeId# 20327 Add nodeId# 20340 Add nodeId# 20341 Add nodeId# 20342 Delete nodeId# 20211 Enable nodeId# 20238 Enable nodeId# 20301 Disable nodeId# 20335 Add nodeId# 20343 Disable nodeId# 20336 Pick Disable nodeId# 20171 Add nodeId# 20344 Enable nodeId# 20222 Add nodeId# 20345 Disable nodeId# 20330 Enable nodeId# 20224 Enable nodeId# 20171 Pick Enable nodeId# 20330 Delete nodeId# 20080 Disable nodeId# 20317 Pick Enable nodeId# 20336 Delete nodeId# 20339 Disable nodeId# 20282 Delete nodeId# 20245 Delete nodeId# 20279 Pick Pick Add nodeId# 20346 Pick Disable nodeId# 20120 Delete nodeId# 20078 Add nodeId# 20347 Disable nodeId# 20285 Delete nodeId# 20345 Delete nodeId# 20326 Enable nodeId# 20120 Enable nodeId# 20335 Pick Delete nodeId# 20319 Add nodeId# 20348 Pick Add nodeId# 20349 Enable nodeId# 20282 Add nodeId# 20350 Pick Disable nodeId# 20347 Add nodeId# 20351 Add nodeId# 20352 Delete nodeId# 20262 Add nodeId# 20353 Delete nodeId# 20338 Enable nodeId# 20347 Delete nodeId# 20316 Add nodeId# 20354 Disable nodeId# 20309 
Pick Pick Delete nodeId# 20317 Delete nodeId# 20354 Delete nodeId# 20324 Pick Pick Delete nodeId# 20332 Pick Add nodeId# 20355 Add nodeId# 20356 Delete nodeId# 20248 Enable nodeId# 20285 Enable nodeId# 20309 Pick Delete nodeId# 20053 Disable nodeId# 20202 Enable nodeId# 20202 Pick Disable nodeId# 20282 Delete nodeId# 20302 Pick Pick Delete nodeId# 20318 Disable nodeId# 20196 Delete nodeId# 20344 Add nodeId# 20357 Add nodeId# 20358 Add nodeId# 20359 Enable nodeId# 20196 Disable nodeId# 20156 Delete nodeId# 20301 Add nodeId# 20360 Pick Disable nodeId# 20225 Pick Disable nodeId# 20285 Disable nodeId# 20271 Enable nodeId# 20271 Add nodeId# 20361 Pick Delete nodeId# 20259 Pick Delete nodeId# 20333 Enable nodeId# 20156 Pick Pick Add nodeId# 20362 Add nodeId# 20363 Disable nodeId# 20104 Pick Delete nodeId# 20305 Enable nodeId# 20285 Enable nodeId# 20104 Add nodeId# 20364 Pick Delete nodeId# 20359 Delete nodeId# 20349 Pick Pick Pick Add nodeId# 20365 Add nodeId# 20366 Enable nodeId# 20225 Add nodeId# 20367 Delete nodeId# 20348 Add nodeId# 20368 Delete nodeId# 20249 Delete nodeId# 20336 Enable nodeId# 20282 Add nodeId# 20369 Delete nodeId# 20358 Delete nodeId# 20297 Delete nodeId# 20367 Delete nodeId# 20285 Disable nodeId# 20237 Add nodeId# 20370 Disable nodeId# 20273 Disable nodeId# 20120 Delete nodeId# 20294 Delete nodeId# 20228 Add nodeId# 20371 Disable nodeId# 20371 Add nodeId# 20372 Delete nodeId# 20328 Add nodeId# 20373 Add nodeId# 20374 Enable nodeId# 20120 Add nodeId# 20375 Add nodeId# 20376 Disable nodeId# 20274 Delete nodeId# 20330 Add nodeId# 20377 Pick Delete nodeId# 20026 Enable nodeId# 20237 Add nodeId# 20378 Pick Enable nodeId# 20273 Disable nodeId# 20343 Disable nodeId# 20253 Disable nodeId# 20171 Disable nodeId# 20341 Enable nodeId# 20371 Pick Delete nodeId# 20364 Delete nodeId# 20374 Add nodeId# 20379 Enable nodeId# 20274 Disable nodeId# 20283 Disable nodeId# 20352 Delete nodeId# 20322 Pick Delete nodeId# 20323 Disable nodeId# 20196 Delete nodeId# 20265 Delete nodeId# 20238 Pick Enable nodeId# 20343 Pick Delete nodeId# 20138 Disable nodeId# 20351 Disable nodeId# 20307 Pick Disable nodeId# 20346 Enable nodeId# 20352 Enable nodeId# 20196 Delete nodeId# 20351 Enable nodeId# 20253 Delete nodeId# 20309 Add nodeId# 20380 Add nodeId# 20381 Disable nodeId# 20189 Add nodeId# 20382 Add nodeId# 20383 Pick Delete nodeId# 20163 Disable nodeId# 20261 Disable nodeId# 20310 Add nodeId# 20384 Pick Pick Disable nodeId# 20235 Disable nodeId# 20380 Pick Enable nodeId# 20310 Disable nodeId# 20237 Pick Add nodeId# 20385 Add nodeId# 20386 Disable nodeId# 20383 Pick Disable nodeId# 20016 Add nodeId# 20387 Enable nodeId# 20383 Pick Pick Pick Delete nodeId# 20282 Disable nodeId# 20368 Enable nodeId# 20016 Enable nodeId# 20235 Pick Disable nodeId# 20342 Disable nodeId# 20370 Disable nodeId# 20296 Disable nodeId# 20320 Enable nodeId# 20307 Add nodeId# 20388 Delete nodeId# 20320 Disable nodeId# 20158 Enable nodeId# 20342 Pick Pick Add nodeId# 20389 Add nodeId# 20390 Pick Pick Disable nodeId# 20274 Pick Pick Enable nodeId# 20370 Enable nodeId# 20380 Disable nodeId# 20016 Disable nodeId# 20278 Pick Delete nodeId# 20342 Add nodeId# 20391 Delete nodeId# 20379 Disable nodeId# 20343 Add nodeId# 20392 Delete nodeId# 20090 Enable nodeId# 20237 Pick Disable nodeId# 20329 Disable nodeId# 20237 Delete nodeId# 20156 Disable nodeId# 20030 Disable nodeId# 20380 Enable nodeId# 20016 Enable nodeId# 20274 Enable nodeId# 20283 Delete nodeId# 20377 Enable nodeId# 20261 Enable nodeId# 20189 Pick Disable nodeId# 20378 Add nodeId# 
20393 Pick Delete nodeId# 20276 Add nodeId# 20394 Delete nodeId# 20368 Add nodeId# 20395 Disable nodeId# 20366 Disable nodeId# 20335 Disable nodeId# 20304 Add nodeId# 20396 Add nodeId# 20397 Disable nodeId# 20375 Disable nodeId# 20353 Enable nodeId# 20171 Enable nodeId# 20329 Add nodeId# 20398 Delete nodeId# 20386 Delete nodeId# 19747 Delete nodeId# 20273 Enable nodeId# 20304 Disable nodeId# 20250 Pick Enable nodeId# 20378 Enable nodeId# 20375 Pick Add nodeId# 20399 Add nodeId# 20400 Pick Delete nodeId# 20222 Enable nodeId# 20335 Delete nodeId# 20271 Add nodeId# 20401 Delete nodeId# 20369 Add nodeId# 20402 Enable nodeId# 20237 Disable nodeId# 20385 Delete nodeId# 20388 Delete nodeId# 20308 Add nodeId# 20403 Add nodeId# 20404 Add nodeId# 20405 Pick Disable nodeId# 20394 Pick Pick Add nodeId# 20406 Add nodeId# 20407 Enable nodeId# 20394 Enable nodeId# 20030 Add nodeId# 20408 Disable nodeId# 20292 Enable nodeId# 20158 Pick Enable nodeId# 20341 Delete nodeId# 20385 Disable nodeId# 20224 Delete nodeId# 20347 Disable nodeId# 20393 Disable nodeId# 20373 Delete nodeId# 20380 Pick Disable nodeId# 20331 Disable nodeId# 20392 Add nodeId# 20409 Add nodeId# 20410 Disable nodeId# 20237 Pick Disable nodeId# 20158 Enable nodeId# 20158 Enable nodeId# 20392 Add nodeId# 20411 Pick Enable nodeId# 20292 Add nodeId# 20412 Disable nodeId# 20400 Add nodeId# 20413 Delete nodeId# 20410 Pick Delete nodeId# 20376 Disable nodeId# 20397 Add nodeId# 20414 Enable nodeId# 20250 Disable nodeId# 20392 Pick Disable nodeId# 20232 Delete nodeId# 20292 Add nodeId# 20415 Disable nodeId# 20356 Disable nodeId# 20413 Pick Add nodeId# 20416 Add nodeId# 20417 Enable nodeId# 20346 Delete nodeId# 20261 Disable nodeId# 20335 Delete nodeId# 20411 Delete nodeId# 20382 Enable nodeId# 20400 Disable nodeId# 20120 Delete nodeId# 20274 Enable nodeId# 20335 Enable nodeId# 20343 Enable nodeId# 20331 Disable nodeId# 20415 Enable nodeId# 20232 Disable nodeId# 20416 Add nodeId# 20418 Delete nodeId# 20312 Add nodeId# 20419 Disable nodeId# 20196 Enable nodeId# 20393 Disable nodeId# 20235 Pick Enable nodeId# 20353 Add nodeId# 20420 Delete nodeId# 20296 Disable nodeId# 20372 Enable nodeId# 20120 Disable nodeId# 20370 Delete nodeId# 20235 Pick Add nodeId# 20421 Add nodeId# 20422 Enable nodeId# 20356 Disable nodeId# 20419 Enable nodeId# 20392 Disable nodeId# 20353 Enable nodeId# 20419 Disable nodeId# 20343 Pick Delete nodeId# 20232 Pick Delete nodeId# 20401 Disable nodeId# 20352 Enable nodeId# 20416 Pick Pick Add nodeId# 20423 Add nodeId# 20424 Add nodeId# 20425 Delete nodeId# 20278 Add nodeId# 20426 Pick Pick Pick Enable nodeId# 20366 Pick Disable nodeId# 20391 Disable nodeId# 20406 Delete nodeId# 20404 Add nodeId# 20427 Delete nodeId# 20257 Add nodeId# 20428 Disable nodeId# 20381 Pick Add nodeId# 20429 Pick Disable nodeId# 20355 Enable nodeId# 20343 Enable nodeId# 20397 Enable nodeId# 20372 Add nodeId# 20430 Disable nodeId# 20016 Delete nodeId# 20429 Pick Pick Add nodeId# 20431 Enable nodeId# 20406 Add nodeId# 20432 Pick Enable nodeId# 20237 Disable nodeId# 20412 Delete nodeId# 20406 Pick Enable nodeId# 20413 Pick Add nodeId# 20433 Pick Add nodeId# 20434 Enable nodeId# 20016 Add nodeId# 20435 Pick Enable nodeId# 20381 Pick Disable nodeId# 20250 Pick Pick Add nodeId# 20436 Add nodeId# 20437 Enable nodeId# 20352 Delete nodeId# 20394 Enable nodeId# 20370 Delete nodeId# 20430 Disable nodeId# 20356 Pick Pick Pick Add nodeId# 20438 Pick Add nodeId# 20439 Enable nodeId# 20224 Delete nodeId# 20425 Add nodeId# 20440 Disable nodeId# 20284 Pick Add nodeId# 20441 
Delete nodeId# 20104 Add nodeId# 20442 Enable nodeId# 20356 Pick Delete nodeId# 20402 Disable nodeId# 20189 Enable nodeId# 20250 Enable nodeId# 20196 Pick Disable nodeId# 20158 Enable nodeId# 20189 Enable nodeId# 20355 Disable nodeId# 20384 Delete nodeId# 20438 Enable nodeId# 20415 Pick Pick Disable nodeId# 20392 Pick Enable nodeId# 20353 Enable nodeId# 20158 Enable nodeId# 20412 Delete nodeId# 20387 Add nodeId# 20443 Add nodeId# 20444 Delete nodeId# 20260 Add nodeId# 20445 Delete nodeId# 20189 Disable nodeId# 20350 Add nodeId# 20446 Enable nodeId# 20392 Add nodeId# 20447 Delete nodeId# 20416 Enable nodeId# 20391 Add nodeId# 20448 Pick Pick Pick Enable nodeId# 20284 Disable nodeId# 20310 Delete nodeId# 20399 Add nodeId# 20449 Pick Pick Disable nodeId# 20408 Disable nodeId# 20315 Enable nodeId# 20315 Delete nodeId# 20253 Delete nodeId# 20437 Pick Pick Delete nodeId# 20383 Pick Delete nodeId# 20415 Disable nodeId# 20427 Add nodeId# 20450 Disable nodeId# 20237 Enable nodeId# 20408 Delete nodeId# 20428 Disable nodeId# 20375 Pick Pick Delete nodeId# 20418 Pick Pick Disable nodeId# 20445 Add nodeId# 20451 Pick Delete nodeId# 20341 Disable nodeId# 20396 Enable nodeId# 20427 Enable nodeId# 20396 Disable nodeId# 20450 Enable nodeId# 20384 Delete nodeId# 20350 Disable nodeId# 20408 Disable nodeId# 20337 Delete nodeId# 20393 |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink |92.3%| [TA] $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative |92.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn |92.3%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |92.3%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRboYql::TPCH_YQL [GOOD] |92.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |92.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |92.4%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> DataShardSnapshots::ShardRestartLockBrokenByConflict [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> IncrementalBackup::IncrementalBackupNonExistentTable [GOOD] >> IncrementalBackup::IncrementalBackupWithIndexes >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> TSchemeShardTTLTests::ConditionalErase >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental [GOOD] >> IncrementalBackup::BasicIndexIncrementalRestore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:58.168255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:58.168360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:58.168410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:58.168446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:58.168485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:58.168526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:58.168599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:58.168683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, 
Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:58.169654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:58.169933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:58.380095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:58.380159Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:58.393295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:58.393514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:58.393644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:58.402607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:58.402840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:58.403589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:58.403810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:58.411206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:58.411414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:58.412562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:58.412629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:58.412760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:58.412809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:58.412853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:58.413016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:58.746679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:33:58.749684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.749831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.749901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.749993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.750056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.750176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.750288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.750371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.750460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.750532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.750618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.750769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.750854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.750921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:2 129 -> 240 2025-12-23T18:34:00.410119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 732 RawX2: 4294970010 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:34:00.410162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-12-23T18:34:00.410277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 732 RawX2: 4294970010 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:34:00.410321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:34:00.410428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 732 RawX2: 4294970010 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:34:00.410482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:00.410510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:00.410539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:34:00.410578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2025-12-23T18:34:00.419076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:34:00.419522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:34:00.432883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:34:00.433384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:34:00.433569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-12-23T18:34:00.433963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:00.434215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: 
TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-12-23T18:34:00.434660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-12-23T18:34:00.434709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:2 ProgressState 2025-12-23T18:34:00.438625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2025-12-23T18:34:00.438684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-12-23T18:34:00.438727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2025-12-23T18:34:00.438773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-12-23T18:34:00.438811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-12-23T18:34:00.439308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:00.439967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:00.440011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:34:00.440076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2025-12-23T18:34:00.440099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-12-23T18:34:00.440129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2025-12-23T18:34:00.440154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-12-23T18:34:00.440180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-12-23T18:34:00.440245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:781:2748] message: TxId: 101 2025-12-23T18:34:00.440291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-12-23T18:34:00.440358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:34:00.440396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:34:00.440552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:34:00.440615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2025-12-23T18:34:00.440639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5576: RemoveTx for txid 101:1 2025-12-23T18:34:00.440682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:34:00.440708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2025-12-23T18:34:00.440727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:2 2025-12-23T18:34:00.446874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2025-12-23T18:34:00.450015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:34:00.450093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:782:2749] TestWaitNotification: OK eventTxId 101 2025-12-23T18:34:00.450691Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:34:00.450974Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 310us result status StatusSuccess 2025-12-23T18:34:00.451515Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 
WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:33:58.296020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:58.296110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:58.296158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:58.296192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:58.296224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:58.296252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:58.296312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:58.296376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:58.297212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:58.297478Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:58.399058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:58.399109Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:58.410642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:58.410960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:58.411176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:58.422933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:58.423152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:58.423823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:58.424044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:58.428386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:58.428581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:58.429859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:58.429997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:58.430212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:58.430265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:58.430311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:58.430435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:58.597184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.598144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: 
EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.598288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.598378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.598435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.598492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.598550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.598613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.598719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.598870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.598952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.599034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.599106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.599180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.599246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
AT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 819 RawX2: 4294970081 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:34:00.549302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-12-23T18:34:00.549463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 819 RawX2: 4294970081 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:34:00.549526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:34:00.549613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 819 RawX2: 4294970081 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:34:00.549680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:00.549751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1090: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2025-12-23T18:34:00.552178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:34:00.552317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:34:00.564919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 718 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:34:00.564976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:34:00.565126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 718 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:34:00.565200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:34:00.565289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 718 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 
0 Generation: 2 2025-12-23T18:34:00.565345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:00.565390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:34:00.565431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:34:00.565467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:34:00.565517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:34:00.567374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:34:00.567807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:34:00.567868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-12-23T18:34:00.567929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-12-23T18:34:00.567983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-12-23T18:34:00.568065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-12-23T18:34:00.568104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2025-12-23T18:34:00.569804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:34:00.569858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:34:00.569968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:34:00.569998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:34:00.570054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:34:00.570086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:34:00.570119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:34:00.570200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:745:2722] message: TxId: 102 2025-12-23T18:34:00.570243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:34:00.570279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:34:00.570307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:34:00.570445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:34:00.570480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:34:00.574916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:34:00.574997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:847:2805] TestWaitNotification: OK eventTxId 102 2025-12-23T18:34:00.575654Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:34:00.576004Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 339us result status StatusSuccess 2025-12-23T18:34:00.576555Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 
WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |92.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::TPCH_YQL [GOOD] Test command err: Trying to start YDB, gRPC: 19023, MsgBus: 32754 2025-12-23T18:32:52.676734Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123176105008629:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:52.676830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:32:52.736134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d0e/r3tmp/tmpWqHM4J/pdisk_1.dat 2025-12-23T18:32:53.132397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:53.221143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:53.221259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:53.261874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:53.287343Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:53.296067Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123176105008596:2081] 1766514772674077 != 1766514772674080 TServer::EnableGrpc on GrpcPort 19023, node 1 2025-12-23T18:32:53.403100Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:53.511620Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:53.511641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:53.511653Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:53.511750Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:53.699335Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:32754 TClient is connected to server localhost:32754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514773310 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:54.454328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:54.463738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:32:57.262670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123197579845972:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:57.262889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:57.263424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123197579845984:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:57.263492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123197579845985:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:57.263696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:57.269119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:57.294058Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123197579845988:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:32:57.399754Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123197579846040:2541] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:32:57.677966Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123176105008629:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:57.678183Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1851, MsgBus: 27917 2025-12-23T18:32:58.933971Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123202179780030:2135];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:58.934047Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d0e/r3tmp/tmpGoR0aw/pdisk_1.dat 2025-12-23T18:32:59.027291Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:59.149374Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:59.152063Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:59.152350Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:59.178602Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587123202179779933:2081] 1766514778917750 != 1766514778917753 2025-12-23T18:32:59.190633Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1851, node 2 2025-12-23T18:32:59.271742Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:59.318644Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:59.318664Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:59.318671Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:59.318777Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27917 TClient is connected to server localhost:27917 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514779274 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:59.876249Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:59.891992Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:32:59.946969Z node 2 :TX_CONVEYO ... : tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.711498Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.711579Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.711604Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.714016Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.714092Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.714112Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.725245Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.725325Z 
node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.725347Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.725707Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.725757Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.725775Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.737229Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.737322Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.737346Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.737354Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.737407Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.737423Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.751257Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.751350Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.751376Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 
2025-12-23T18:33:53.751645Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.751717Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.751746Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.764476Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.764538Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.764562Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.764587Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.764593Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.764612Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.777289Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.777357Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.777376Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.777402Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.777411Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.777432Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2025-12-23T18:33:53.857625Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123437227417381:3028], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:53.857748Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:53.858476Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123437227417386:3031], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:53.858536Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587123437227417387:3032], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:53.858917Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:53.865270Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710666:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:33:53.890798Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587123437227417390:3033], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710666 completed, doublechecking } 2025-12-23T18:33:53.994714Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587123437227417441:5396] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 49], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest |92.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] |92.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |92.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> BasicUsage::BrokenCredentialsProvider [GOOD] >> BasicUsage::CreateTopicWithCustomName >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> CompressExecutor::TestReorderedExecutor [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages |92.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [GOOD] >> CompressExecutor::TestExecutorMemUsage >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink |92.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |92.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |92.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |92.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |92.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-12-23T18:33:25.815003Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.815035Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.815084Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.835150Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-12-23T18:33:25.838229Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:25.865997Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.867306Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-12-23T18:33:25.868893Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.868922Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.869402Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.883664Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:25.895052Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:25.895280Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.895708Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:25.896263Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.896405Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:33:25.896517Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:25.896576Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-12-23T18:33:25.897466Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.897494Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.897526Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.903377Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:25.915096Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:25.915315Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.915694Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:25.916204Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.918869Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:33:25.922877Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:25.922960Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-12-23T18:33:25.931906Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.931937Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.931966Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.943980Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:25.951022Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:25.951295Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.951706Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:25.952685Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.953004Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:33:25.953168Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:25.953229Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-12-23T18:33:25.959772Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.959806Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.959831Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.963026Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:33:25.963805Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:25.963943Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.966986Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:25.967861Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.972115Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:33:25.972256Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:25.972316Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-12-23T18:33:25.973646Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.973673Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.973700Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.983440Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-12-23T18:33:25.991256Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:33:25.991500Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.992002Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:33:25.992970Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.993434Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:33:25.993725Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:33:25.993791Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-12-23T18:33:25.993880Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 2025-12-23T18:33:26.031285Z :ReadSession INFO: Random seed for debugging is 1766514806031241 2025-12-23T18:33:26.470598Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123319839848942:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:26.470725Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:26.591052Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.006113s 2025-12-23T18:33:26.613397Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005996/r3tmp/tmpyLtYRV/pdisk_1.dat 2025-12-23T18:33:26.721397Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:33:27.262693Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:27.262886Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:27.324049Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:27.436633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:27.436795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:27.438790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:27.438867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:27.635280Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:27.650715Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:27.651998Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:27.659582Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:27.660720Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:33:27.661553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:27.713724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:27.721661Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TServer::EnableGrpc on GrpcPort 30587, node 1 2025-12-23T18:33:27.877832Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.018535s 2025-12-23T18:33:28.055009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinali ... BUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:33:59.015897Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:33:59.015910Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:33:59.015929Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:33:59.015940Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:33:59.118876Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:33:59.118915Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:33:59.118927Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:33:59.118945Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:33:59.118957Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:33:59.222895Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:33:59.222931Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:33:59.222943Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:33:59.222961Z node 2 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:33:59.222974Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:33:59.288699Z node 1 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_1_1_15099810232914795049_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset 3 2025-12-23T18:33:59.322806Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:33:59.322839Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:33:59.322850Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:33:59.322864Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:33:59.322875Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:33:59.354907Z :INFO: [/Root] [/Root] [c8b1d461-4e2f5750-35b3bbc2-6bf091d4] Closing read session. Close timeout: 0.000000s 2025-12-23T18:33:59.354988Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-12-23T18:33:59.355037Z :INFO: [/Root] [/Root] [c8b1d461-4e2f5750-35b3bbc2-6bf091d4] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16663 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:33:59.355153Z :NOTICE: [/Root] [/Root] [c8b1d461-4e2f5750-35b3bbc2-6bf091d4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T18:33:59.355197Z :DEBUG: [/Root] [/Root] [c8b1d461-4e2f5750-35b3bbc2-6bf091d4] [dc1] Abort session to cluster 2025-12-23T18:33:59.355825Z :NOTICE: [/Root] [/Root] [c8b1d461-4e2f5750-35b3bbc2-6bf091d4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:33:59.363729Z node 1 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_1_1_15099810232914795049_v1 grpc read done: success# 0, data# { } 2025-12-23T18:33:59.363760Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_1_1_15099810232914795049_v1 grpc read failed 2025-12-23T18:33:59.363789Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_1_1_15099810232914795049_v1 grpc closed 2025-12-23T18:33:59.363824Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_1_1_15099810232914795049_v1 is DEAD 2025-12-23T18:33:59.367754Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_15099810232914795049_v1 2025-12-23T18:33:59.367802Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [1:7587123388559328217:2490] destroyed 2025-12-23T18:33:59.367856Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_1_1_15099810232914795049_v1 2025-12-23T18:33:59.370550Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [1:7587123388559328214:2487] disconnected. 2025-12-23T18:33:59.370588Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [1:7587123388559328214:2487] disconnected; active server actors: 1 2025-12-23T18:33:59.370610Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [1:7587123388559328214:2487] client user disconnected session shared/user_1_1_15099810232914795049_v1 2025-12-23T18:33:59.425343Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:33:59.425381Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:33:59.425406Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:33:59.425424Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:33:59.425454Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:33:59.531205Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:33:59.531241Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:33:59.531253Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:33:59.531271Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:33:59.531282Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:33:59.761869Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1177: 
ActorId: [1:7587123461573773049:2634] TxId: 281474976710691. Ctx: { TraceId: 01kd67tk246f0agtzyj0gmwf3t, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTJjYzI4MDktMTdlYTM3MjUtYzZhYzQ3ZGQtZTI0NzhjZjg=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-12-23T18:33:59.771027Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [1:7587123461573773053:2634], TxId: 281474976710691, task: 3. Ctx: { TraceId : 01kd67tk246f0agtzyj0gmwf3t. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTJjYzI4MDktMTdlYTM3MjUtYzZhYzQ3ZGQtZTI0NzhjZjg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7587123461573773049:2634], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-12-23T18:34:01.460787Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:34:01.460834Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:34:01.460864Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:34:01.461519Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:34:01.462052Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:34:01.462351Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:34:01.471011Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:34:01.471873Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:34:01.472404Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:34:01.472705Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-12-23T18:34:01.472800Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:34:01.472863Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:34:01.472902Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-12-23T18:34:01.473059Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-12-23T18:34:01.473104Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-12-23T18:34:03.487676Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:34:03.490829Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:34:03.490899Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:34:03.491334Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:34:03.506998Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:34:03.516474Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:34:03.519586Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:34:03.519818Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:34:03.519917Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:34:03.520009Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes |92.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink |92.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] >> IncrementalBackup::MultipleIndexesIncrementalRestore [GOOD] >> IncrementalBackup::MultipleIncrementalBackupsWithIndexes >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:01.789135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:01.789228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:01.789266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:01.789304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:01.789340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:01.789371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:01.789435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:01.789507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:01.790425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:01.790702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:01.893675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:01.893739Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:01.899773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:01.900008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:01.900156Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:01.908614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:01.908835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:01.909607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:01.909840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:01.913707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:01.913910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:01.919983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:01.920077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:01.920256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:01.920308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:01.920353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:01.920496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:02.172800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.173822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.173996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:34:02.174071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.174140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.174201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.174273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.174400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.174498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.174576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.174666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.182963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.183209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.183299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:02.183371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
8Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:215: Run conditional erase, tabletId: 72075186233409551, request: TableId: 43 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640224000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2025-12-23T18:34:07.684972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7099: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-12-23T18:34:07.685538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7099: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-12-23T18:34:07.685723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7099: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-12-23T18:34:07.685996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7099: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:34:07.686296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7099: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-12-23T18:34:07.686553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7099: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:34:07.686641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:349: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-12-23T18:34:07.686694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:399: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-12-23T18:34:07.687846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:349: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-12-23T18:34:07.687885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:399: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-12-23T18:34:07.697802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:349: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-12-23T18:34:07.697855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:399: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-12-23T18:34:07.698607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:453: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-12-23T18:34:07.698762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:453: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-12-23T18:34:07.698872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:34:07.698923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:108: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.224000Z, at schemeshard: 72057594046678944 2025-12-23T18:34:07.699011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:34:07.699038Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__conditional_erase.cpp:108: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.224000Z, at schemeshard: 72057594046678944 2025-12-23T18:34:07.699237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:349: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-12-23T18:34:07.699274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:399: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:34:07.699478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:349: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-12-23T18:34:07.699505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:399: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:34:07.701484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:453: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-12-23T18:34:07.701552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-12-23T18:34:07.701583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-12-23T18:34:07.701663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:453: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-12-23T18:34:07.701738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:34:07.701769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:108: Skip conditional erase: shardIdx: 72057594046678944:5, run at: 2020-09-18T23:04:00.226000Z, at schemeshard: 72057594046678944 2025-12-23T18:34:07.701818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:453: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-12-23T18:34:07.701849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-12-23T18:34:07.701907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:34:07.701952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:108: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 2020-09-18T23:04:00.226000Z, at schemeshard: 72057594046678944 2025-12-23T18:34:07.702002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-12-23T18:34:07.702049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:34:07.702072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:108: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.226000Z, at schemeshard: 72057594046678944 2025-12-23T18:34:07.702094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-12-23T18:34:07.783114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2025-12-23T18:34:07.783319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2025-12-23T18:34:07.783404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable1, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:34:07.783515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2025-12-23T18:34:07.783581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 0 row count 0 2025-12-23T18:34:07.783610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=TTLEnabledTable2, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:34:07.783653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2025-12-23T18:34:07.783681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 41 shard idx 72057594046678944:4 data size 43 row count 1 2025-12-23T18:34:07.783715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], pathId map=TTLEnabledTable4, is column=0, is olap=0, RowCount 1, DataSize 43 2025-12-23T18:34:07.783762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable 2025-12-23T18:34:07.783795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 603 row count 2 2025-12-23T18:34:07.783825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTable3, is column=0, is olap=0, RowCount 2, DataSize 603 2025-12-23T18:34:07.783889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2025-12-23T18:34:07.783923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 42 shard idx 72057594046678944:5 data size 627 row count 2 2025-12-23T18:34:07.783960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 42], pathId map=TTLEnabledTable5, 
is column=0, is olap=0, RowCount 2, DataSize 627, with borrowed parts 2025-12-23T18:34:07.784012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409550: SplitByLoadNotEnabledForTable 2025-12-23T18:34:07.796681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:349: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-12-23T18:34:07.796751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:399: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-12-23T18:34:07.799330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:453: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-12-23T18:34:07.799476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:34:07.799525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:108: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.228000Z, at schemeshard: 72057594046678944 2025-12-23T18:34:07.799600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> TServiceAccountServiceTest::Get [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> TAccessServiceTest::PassRequestId >> TUserAccountServiceTest::Get >> FolderServiceTest::TFolderServiceAdapter >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [GOOD] >> KqpRboPg::Bench_10Joins [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] >> FolderServiceTest::TFolderServiceTransitional >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [GOOD] >> FolderServiceTest::TFolderService >> IncrementalBackup::IncrementalBackupWithIndexes [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] >> IncrementalBackup::ShopDemoIncrementalBackupScenario [GOOD] >> IncrementalBackup::IncrementalBackupWithCoveringIndex >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] >> TServiceAccountServiceTest::IssueToken [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> GenericFederatedQuery::YdbFilterPushdown [GOOD] >> ExternalIndex::Simple [GOOD] >> BasicUsage::CreateTopicWithCustomName [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] >> IncrementalBackup::BasicIndexIncrementalRestore [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> CompressExecutor::TestExecutorMemUsage [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> 
BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> IncrementalBackup::VerifyIncrementalBackupTableAttributes >> DataShardSnapshots::VolatileSnapshotTimeout >> TAccessServiceTest::PassRequestId [GOOD] >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> TUserAccountServiceTest::Get [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [GOOD] >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod >> IncrementalBackup::CdcVersionSync >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex >> Compression::WriteRAW >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables >> TIncrHugeBasicTest::Defrag [GOOD] >> IncrementalBackup::MultipleIncrementalBackupsWithIndexes [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink >> FolderServiceTest::TFolderService [GOOD] >> IncrementalBackup::IncrementalBackupWithCoveringIndex [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> IncrementalBackup::VerifyIncrementalBackupTableAttributes [GOOD] >> IncrementalBackup::ResetVsUpsertMissingColumnsTest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest |92.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink >> IncrementalBackup::CdcVersionSync [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> Compression::WriteRAW [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink >> IncrementalBackup::IncrementalBackupMultipleIndexes >> PersQueueSdkReadSessionTest::SettingsValidation >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:82:2113] Leader for TabletID 72057594037927937 is [45:82:2113] sender: [45:198:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:78:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:81:2057] recipient: [46:80:2112] Leader for TabletID 72057594037927937 is [46:82:2113] sender: [46:83:2057] recipient: [46:80:2112] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:82:2113] Leader for TabletID 72057594037927937 is [46:82:2113] sender: [46:198:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:79:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:82:2057] recipient: [47:81:2112] Leader for TabletID 72057594037927937 is [47:83:2113] sender: [47:84:2057] recipient: [47:81:2112] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! new actor is[47:83:2113] Leader for TabletID 72057594037927937 is [47:83:2113] sender: [47:199:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:82:2057] recipient: [48:39:2086] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:85:2057] recipient: [48:84:2115] Leader for TabletID 72057594037927937 is [48:86:2116] sender: [48:87:2057] recipient: [48:84:2115] !Reboot 72057594037927937 (actor [48:58:2099]) rebooted! !Reboot 72057594037927937 (actor [48:58:2099]) tablet resolver refreshed! new actor is[48:86:2116] Leader for TabletID 72057594037927937 is [48:86:2116] sender: [48:202:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:56:2057] recipient: [49:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:56:2057] recipient: [49:53:2097] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:59:2057] recipient: [49:53:2097] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:76:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:82:2057] recipient: [49:39:2086] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:85:2057] recipient: [49:84:2115] Leader for TabletID 72057594037927937 is [49:86:2116] sender: [49:87:2057] recipient: [49:84:2115] !Reboot 72057594037927937 (actor [49:58:2099]) rebooted! !Reboot 72057594037927937 (actor [49:58:2099]) tablet resolver refreshed! new actor is[49:86:2116] Leader for TabletID 72057594037927937 is [49:86:2116] sender: [49:202:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:56:2057] recipient: [50:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:56:2057] recipient: [50:53:2097] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:59:2057] recipient: [50:53:2097] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:76:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:83:2057] recipient: [50:39:2086] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:86:2057] recipient: [50:85:2115] Leader for TabletID 72057594037927937 is [50:87:2116] sender: [50:88:2057] recipient: [50:85:2115] !Reboot 72057594037927937 (actor [50:58:2099]) rebooted! !Reboot 72057594037927937 (actor [50:58:2099]) tablet resolver refreshed! new actor is[50:87:2116] Leader for TabletID 72057594037927937 is [50:87:2116] sender: [50:203:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:56:2057] recipient: [51:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:56:2057] recipient: [51:53:2097] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:59:2057] recipient: [51:53:2097] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:76:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:86:2057] recipient: [51:39:2086] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:89:2057] recipient: [51:88:2118] Leader for TabletID 72057594037927937 is [51:90:2119] sender: [51:91:2057] recipient: [51:88:2118] !Reboot 72057594037927937 (actor [51:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [51:58:2099]) tablet resolver refreshed! new actor is[51:90:2119] Leader for TabletID 72057594037927937 is [51:90:2119] sender: [51:206:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:56:2057] recipient: [52:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:56:2057] recipient: [52:53:2097] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:59:2057] recipient: [52:53:2097] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:76:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:86:2057] recipient: [52:39:2086] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:89:2057] recipient: [52:88:2118] Leader for TabletID 72057594037927937 is [52:90:2119] sender: [52:91:2057] recipient: [52:88:2118] !Reboot 72057594037927937 (actor [52:58:2099]) rebooted! !Reboot 72057594037927937 (actor [52:58:2099]) tablet resolver refreshed! new actor is[52:90:2119] Leader for TabletID 72057594037927937 is [52:90:2119] sender: [52:206:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:56:2057] recipient: [53:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:56:2057] recipient: [53:53:2097] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:59:2057] recipient: [53:53:2097] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:76:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:87:2057] recipient: [53:39:2086] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:90:2057] recipient: [53:89:2118] Leader for TabletID 72057594037927937 is [53:91:2119] sender: [53:92:2057] recipient: [53:89:2118] !Reboot 72057594037927937 (actor [53:58:2099]) rebooted! !Reboot 72057594037927937 (actor [53:58:2099]) tablet resolver refreshed! new actor is[53:91:2119] Leader for TabletID 72057594037927937 is [53:91:2119] sender: [53:207:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:56:2057] recipient: [54:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:56:2057] recipient: [54:53:2097] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:59:2057] recipient: [54:53:2097] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:76:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:90:2057] recipient: [54:39:2086] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:93:2057] recipient: [54:92:2121] Leader for TabletID 72057594037927937 is [54:94:2122] sender: [54:95:2057] recipient: [54:92:2121] !Reboot 72057594037927937 (actor [54:58:2099]) rebooted! !Reboot 72057594037927937 (actor [54:58:2099]) tablet resolver refreshed! 
new actor is[54:94:2122] Leader for TabletID 72057594037927937 is [54:94:2122] sender: [54:210:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:56:2057] recipient: [55:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:56:2057] recipient: [55:53:2097] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:59:2057] recipient: [55:53:2097] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:76:2057] recipient: [55:14:2061] !Reboot 72057594037927937 (actor [55:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:90:2057] recipient: [55:39:2086] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:93:2057] recipient: [55:92:2121] Leader for TabletID 72057594037927937 is [55:94:2122] sender: [55:95:2057] recipient: [55:92:2121] !Reboot 72057594037927937 (actor [55:58:2099]) rebooted! !Reboot 72057594037927937 (actor [55:58:2099]) tablet resolver refreshed! new actor is[55:94:2122] Leader for TabletID 72057594037927937 is [55:94:2122] sender: [55:210:2057] recipient: [55:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:56:2057] recipient: [56:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:56:2057] recipient: [56:53:2097] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:59:2057] recipient: [56:53:2097] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:76:2057] recipient: [56:14:2061] !Reboot 72057594037927937 (actor [56:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:91:2057] recipient: [56:39:2086] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:94:2057] recipient: [56:93:2121] Leader for TabletID 72057594037927937 is [56:95:2122] sender: [56:96:2057] recipient: [56:93:2121] !Reboot 72057594037927937 (actor [56:58:2099]) rebooted! !Reboot 72057594037927937 (actor [56:58:2099]) tablet resolver refreshed! 
new actor is[56:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:59:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:76:2057] recipient: [57:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 11672, MsgBus: 30126 2025-12-23T18:31:41.361765Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122870115192492:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:41.361879Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:31:41.397487Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d55/r3tmp/tmpncYL4L/pdisk_1.dat 2025-12-23T18:31:41.708820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.708947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.731931Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.749938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.785500Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:41.790110Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122870115192381:2081] 1766514701355577 != 1766514701355580 TServer::EnableGrpc on GrpcPort 11672, node 1 2025-12-23T18:31:41.873493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:41.873515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:41.873522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:41.873615Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:42.004074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30126 TClient is connected to server localhost:30126 2025-12-23T18:31:42.375894Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514701819 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:42.482993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:31:42.483319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:31:42.485120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:42.485712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:31:42.487809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766514702533, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:31:42.490623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2025-12-23T18:31:42.490677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 waiting... 
2025-12-23T18:31:42.493390Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7587122870115192902:2241] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2025-12-23T18:31:42.493598Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122870115192352:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.493714Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122870115192355:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.494237Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122870115193025:2287][/Root] Path was updated to new version: owner# [1:7587122870115193019:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.494463Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122870115193026:2288][/Root] Path was updated to new version: owner# [1:7587122870115193020:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.494720Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7587122870115192902:2241] Ack update: ack to# [1:7587122870115192739:2148], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2025-12-23T18:31:42.494938Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122870115192349:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.495038Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122870115192822:2201][/Root] Path was updated to new version: owner# [1:7587122870115192673:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.495726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-12-23T18:31:42.496825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:44.916454Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current 
level: 5 2025-12-23T18:31:44.917506Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/8ptz/004d55/r3tmp/spilling-tmp-runner/node_1_2d741e2b-85332be9-feea061c-7e411a8f, actor: [1:7587122883000095117:2305] 2025-12-23T18:31:44.917682Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/8ptz/004d55/r3tmp/spilling-tmp-runner 2025-12-23T18:31:44.921012Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122883000095131:2498][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7587122870115192673:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.921245Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd67pe5s4kgqvgxsz42qg0n5", Request has 18444977559004.630396s seconds to be completed 2025-12-23T18:31:44.925522Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd67pe5s4kgqvgxsz42qg0n5", Created new session, sessionId: ydb://session/3?node_id=1&id=ODNhOWY5YjMtMjUzN2RlZDUtNmNmMTg5ODAtZDBlOWEwNTY=, workerId: [1:7587122883000095146:2325], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:31:44.925792Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd67pe5s4kgqvgxsz42qg0n5 2025-12-23T18:31:44.926019Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:31:44.926055Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:31:44.926075Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E1223 18:31:44.927475527 1144465 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:44.927642688 1144465 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:31:44.927910Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122883000095148:2500][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7587122870115192673:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.927916Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122883000095149:2501][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7587122870115192673:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E1223 18:31:44.93025 ... 
{ uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-12-23T18:34:16.637448Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977558852.914205s seconds to be completed 2025-12-23T18:34:16.642341Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=9&id=NTM0MDBlYzktY2UxY2JkYzctM2JhMTBjMGMtYWJkNDA2ODQ=, workerId: [9:7587123535212945786:2742], database: /Root, longSession: 1, local sessions count: 2 2025-12-23T18:34:16.642780Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T18:34:16.643537Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=NTM0MDBlYzktY2UxY2JkYzctM2JhMTBjMGMtYWJkNDA2ODQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 56, targetId: [9:7587123535212945786:2742] 2025-12-23T18:34:16.643602Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 56 timeout: 300.000000s actor id: [9:7587123535212945788:3267] 2025-12-23T18:34:16.678341Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123535212945802:2733] TxId: 281474976710711. Ctx: { TraceId: 01kd67v48ybg3w6wskxx2c2jtx, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NzVhMWM3ZDYtZTI5ZWY4M2YtNzhiZDMzLTM0MmM3NjNj, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-12-23T18:34:16.685717Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710711. Ctx: { TraceId: 01kd67v48ybg3w6wskxx2c2jtx, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NzVhMWM3ZDYtZTI5ZWY4M2YtNzhiZDMzLTM0MmM3NjNj, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123535212945815:2752] 2025-12-23T18:34:16.686166Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710711. Ctx: { TraceId: 01kd67v48ybg3w6wskxx2c2jtx, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NzVhMWM3ZDYtZTI5ZWY4M2YtNzhiZDMzLTM0MmM3NjNj, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7587123535212945816:2753] 2025-12-23T18:34:16.687215Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd67v48ybg3w6wskxx2c2jtx", Forwarded response to sender actor, requestId: 54, sender: [9:7587123535212945749:2732], selfId: [9:7587123445018630329:2265], source: [9:7587123535212945750:2733] 2025-12-23T18:34:16.690688Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NzVhMWM3ZDYtZTI5ZWY4M2YtNzhiZDMzLTM0MmM3NjNj, workerId: [9:7587123535212945750:2733], local sessions count: 1 2025-12-23T18:34:16.891708Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123535212945831:2742] TxId: 281474976710712. Ctx: { TraceId: 01kd67v4p31yr6y6yv0fdf8q5w, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NTM0MDBlYzktY2UxY2JkYzctM2JhMTBjMGMtYWJkNDA2ODQ=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:34:16.895798Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710712. Ctx: { TraceId: 01kd67v4p31yr6y6yv0fdf8q5w, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NTM0MDBlYzktY2UxY2JkYzctM2JhMTBjMGMtYWJkNDA2ODQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123535212945835:2757] 2025-12-23T18:34:16.896303Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710712. Ctx: { TraceId: 01kd67v4p31yr6y6yv0fdf8q5w, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NTM0MDBlYzktY2UxY2JkYzctM2JhMTBjMGMtYWJkNDA2ODQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123535212945836:2758] 2025-12-23T18:34:16.897676Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 56, sender: [9:7587123535212945787:2743], selfId: [9:7587123445018630329:2265], source: [9:7587123535212945786:2742] 2025-12-23T18:34:16.899652Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123535212945843:2742] TxId: 281474976710713. Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=NTM0MDBlYzktY2UxY2JkYzctM2JhMTBjMGMtYWJkNDA2ODQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:34:16.900651Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NTM0MDBlYzktY2UxY2JkYzctM2JhMTBjMGMtYWJkNDA2ODQ=, workerId: [9:7587123535212945786:2742], local sessions count: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:30:00.054454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:30:00.180230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:30:00.209328Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:30:00.209868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:30:00.209969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; 2025-12-23T18:30:00.210268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:30:00.543464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:30:00.543611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:30:00.633987Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514596798219 != 1766514596798223 2025-12-23T18:30:00.645136Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:30:00.693879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:30:00.801133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005a53/r3tmp/tmpaiAomP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16795, node 1 TClient is connected to server localhost:12753 2025-12-23T18:30:01.400037Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:68:2115] Handle TEvGetProxyServicesRequest 2025-12-23T18:30:01.400702Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:68:2115] Handle TEvGetProxyServicesRequest 2025-12-23T18:30:01.405072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:30:01.407156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:30:01.407239Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:30:01.407276Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:30:01.408111Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:30:01.422023Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:30:01.576067Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2025-12-23T18:30:01.576161Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:30:01.576370Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:906:2768] 2025-12-23T18:30:01.710679Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:906:2768] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "new_column1" Type: "Uint64" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:30:01.710787Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:906:2768] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:30:01.711339Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:906:2768] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-12-23T18:30:01.711438Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:906:2768] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:30:01.711801Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:906:2768] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 
ErrorCount# 0 2025-12-23T18:30:01.711928Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:906:2768] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:30:01.712043Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:906:2768] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:30:01.715540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2025-12-23T18:30:01.716124Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:906:2768] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:30:01.716989Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:906:2768] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:30:01.717065Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:906:2768] txid# 281474976715657 SEND to# [1:905:2767] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:30:01.801138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:30:01.836600Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:30:01.836982Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037888 2025-12-23T18:30:01.846583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:30:01.847303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:30:01.847699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:30:01.847889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:30:01.848020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:30:01.848165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:30:01.848303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:30:01.848422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:30:01.848569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:30:01.848700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:30:01.848877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:30:01.849009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:30:01.849136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:30:01.885340Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 72075186224037888 2025-12-23T18:30:01.885960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:30:01.886036Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tab ... n { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 2025-12-23T18:33:44.987254Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:33:44.987352Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716216 ProcessProposeKqpTransaction 2025-12-23T18:33:45.002000Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:33:45.002073Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716217 ProcessProposeKqpTransaction 2025-12-23T18:33:45.161057Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:33:45.161134Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716218 ProcessProposeKqpTransaction 2025-12-23T18:33:45.174584Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:33:45.174661Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716219 ProcessProposeKqpTransaction 2025-12-23T18:33:45.392096Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-12-23T18:33:45.392303Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:968:2816];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-12-23T18:33:45.392384Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:971:2818];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-12-23T18:33:45.392458Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:976:2823];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple`;EXPECTATION=0 2025-12-23T18:33:55.517618Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:11795:10570], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:33:55.522180Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MzMzZGU5M2EtNGI1ZjMzNWMtMzdjZjA1MGQtNTFmN2FlMTU=, ActorId: [1:11791:10567], ActorState: ExecuteState, TraceId: 01kd67tg0w7p0e735admdsb12w, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 2025-12-23T18:33:56.230578Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:33:56.230673Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716220 ProcessProposeKqpTransaction 2025-12-23T18:33:56.280525Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:33:56.280598Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716221 ProcessProposeKqpTransaction 2025-12-23T18:33:56.465524Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:33:56.465602Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716222 ProcessProposeKqpTransaction 2025-12-23T18:33:56.486445Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:33:56.486515Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716223 ProcessProposeKqpTransaction 2025-12-23T18:33:56.720141Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-12-23T18:33:56.720303Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:968:2816];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-12-23T18:33:56.720370Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:971:2818];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-12-23T18:33:56.720434Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:976:2823];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1`;EXPECTATION=0 2025-12-23T18:34:07.797111Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:34:07.797209Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716224 ProcessProposeKqpTransaction REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/external`;RESULT=;EXPECTATION=1 2025-12-23T18:34:08.463734Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:34:08.463821Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716225 ProcessProposeKqpTransaction 2025-12-23T18:34:08.477068Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:34:08.477137Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716226 ProcessProposeKqpTransaction 2025-12-23T18:34:08.809347Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:34:08.809434Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716227 
ProcessProposeKqpTransaction 2025-12-23T18:34:08.823131Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:34:08.823209Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716228 ProcessProposeKqpTransaction 2025-12-23T18:34:09.045397Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-12-23T18:34:09.045540Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:968:2816];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-12-23T18:34:09.045604Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:971:2818];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-12-23T18:34:09.045656Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:976:2823];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/external`;EXPECTATION=1 2025-12-23T18:34:19.456358Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:34:19.456443Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716229 ProcessProposeKqpTransaction REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 2025-12-23T18:34:19.987679Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:34:19.987742Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716230 ProcessProposeKqpTransaction 2025-12-23T18:34:19.998665Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:34:19.998734Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716231 ProcessProposeKqpTransaction 2025-12-23T18:34:20.142150Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:34:20.142216Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716232 ProcessProposeKqpTransaction 2025-12-23T18:34:20.153978Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:68:2115] Handle TEvExecuteKqpTransaction 2025-12-23T18:34:20.154044Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:68:2115] TxId# 281474976716233 ProcessProposeKqpTransaction 2025-12-23T18:34:20.335709Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:960:2814];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-12-23T18:34:20.335868Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:968:2816];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-12-23T18:34:20.335958Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:971:2818];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 
2025-12-23T18:34:20.336023Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:976:2823];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 26944, MsgBus: 16244 2025-12-23T18:31:41.226609Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122869100982911:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:41.226652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d56/r3tmp/tmpyXaGwv/pdisk_1.dat 2025-12-23T18:31:41.594541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.594663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.639159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.674452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.675823Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:41.683476Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122869100982885:2081] 1766514701224084 != 1766514701224087 TServer::EnableGrpc on GrpcPort 26944, node 1 2025-12-23T18:31:41.904243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:41.922690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:41.922723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:41.922733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:41.922875Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16244 2025-12-23T18:31:42.246912Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:16244 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514701763 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:42.597694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:31:42.598130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:31:42.611659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:42.612253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:31:42.613709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766514702659, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:31:42.614460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2025-12-23T18:31:42.614484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 0 2025-12-23T18:31:42.614633Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7587122869100983402:2241] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 6 2025-12-23T18:31:42.615636Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122869100982853:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.615746Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122869100982856:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.615897Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122869100982859:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.615930Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122869100983482:2288][/Root] 
Path was updated to new version: owner# [1:7587122869100983474:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.616154Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122869100983334:2209][/Root] Path was updated to new version: owner# [1:7587122869100983168:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.616229Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122869100983481:2287][/Root] Path was updated to new version: owner# [1:7587122869100983473:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.616485Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7587122869100983402:2241] Ack update: ack to# [1:7587122869100983243:2149], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2025-12-23T18:31:42.616671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 0 waiting... 
2025-12-23T18:31:42.618317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:31:44.934051Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2025-12-23T18:31:44.939635Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/8ptz/004d56/r3tmp/spilling-tmp-runner/node_1_2c383b33-17b4df42-1d342a17-10982aac, actor: [1:7587122881985885616:2305] 2025-12-23T18:31:44.939873Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/8ptz/004d56/r3tmp/spilling-tmp-runner 2025-12-23T18:31:44.941092Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122881985885619:2497][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7587122869100983168:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.948333Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd67pe981qe10zt0w02hxd5c", Request has 18444977559004.603322s seconds to be completed E1223 18:31:44.963874368 1144244 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:44.964083013 1144244 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:31:44.957473Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd67pe981qe10zt0w02hxd5c", Created new session, sessionId: ydb://session/3?node_id=1&id=MWIzMzUxNC0zNmYzNjA1Ni1jMWVjNGQzZi1lMTI0ODIyMw==, workerId: [1:7587122881985885646:2325], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:31:44.957730Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd67pe981qe10zt0w02hxd5c 2025-12-23T18:31:44.957787Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:31:44.957824Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:31:44.957845Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2025-12-23T18:31:44.963185Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122881985885649:2501][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7587122869100983168:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.968945Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122881985885648:2500][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7587122869100983168:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E1223 18:31:44.976285691 1144243 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:44.976460909 1144243 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// ... 
t32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-12-23T18:34:17.769076Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123542341759528:2723] TxId: 281474976710709. Ctx: { TraceId: 01kd67v5efdkstsc0m0bjkr303, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Zjc2NTEyOGItYTc2ZjVhZjItM2ExOWYwMS1mYTI2NjhhZQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-12-23T18:34:17.774856Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710709. Ctx: { TraceId: 01kd67v5efdkstsc0m0bjkr303, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Zjc2NTEyOGItYTc2ZjVhZjItM2ExOWYwMS1mYTI2NjhhZQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123542341759532:2727] 2025-12-23T18:34:17.775263Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710709. Ctx: { TraceId: 01kd67v5efdkstsc0m0bjkr303, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Zjc2NTEyOGItYTc2ZjVhZjItM2ExOWYwMS1mYTI2NjhhZQ==, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7587123542341759533:2728] 2025-12-23T18:34:17.775879Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd67v5efdkstsc0m0bjkr303", Forwarded response to sender actor, requestId: 52, sender: [9:7587123542341759508:2722], selfId: [9:7587123465032345945:2224], source: [9:7587123542341759509:2723] 2025-12-23T18:34:17.776486Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=Zjc2NTEyOGItYTc2ZjVhZjItM2ExOWYwMS1mYTI2NjhhZQ==, workerId: [9:7587123542341759509:2723], local sessions count: 0 2025-12-23T18:34:17.830636Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977558851.721042s seconds to be completed 2025-12-23T18:34:17.835815Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=9&id=ZjYzYjFlZTgtNjU0YjI1YzUtMTFlMjQ5NWUtY2QwY2NlODg=, workerId: [9:7587123542341759541:2731], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:34:17.836274Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T18:34:17.836896Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZjYzYjFlZTgtNjU0YjI1YzUtMTFlMjQ5NWUtY2QwY2NlODg=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 54, targetId: [9:7587123542341759541:2731] 2025-12-23T18:34:17.836952Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 54 timeout: 300.000000s actor id: [9:7587123542341759543:3258] |92.4%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 8006, MsgBus: 21420 2025-12-23T18:31:41.509733Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122870592204371:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:41.509835Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d51/r3tmp/tmp3gMkf4/pdisk_1.dat 2025-12-23T18:31:41.787003Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.798838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.798909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.841861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.931880Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:41.938272Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7587122870592204347:2081] 1766514701508599 != 1766514701508602 TServer::EnableGrpc on GrpcPort 8006, node 1 2025-12-23T18:31:41.974202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:42.079002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:42.079156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:42.079168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:42.079280Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21420 2025-12-23T18:31:42.503455Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514701952 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:42.591563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:31:42.591970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:31:42.594767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:42.595414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
2025-12-23T18:31:42.599617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766514702645, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:31:42.601876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2025-12-23T18:31:42.601900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-12-23T18:31:42.602035Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7587122870592204875:2248] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2025-12-23T18:31:42.602396Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122870592204315:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.602632Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122870592205069:2287][/Root] Path was updated to new version: owner# [1:7587122870592205063:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.602711Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122870592204318:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.603024Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7587122870592204875:2248] Ack update: ack to# [1:7587122870592204704:2148], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2025-12-23T18:31:42.603075Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122870592204321:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.603297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2025-12-23T18:31:42.603907Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122870592204789:2200][/Root] Path was updated to new version: owner# [1:7587122870592204616:2108], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.604598Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122870592205070:2288][/Root] Path was updated to new version: owner# [1:7587122870592205064:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 
72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.604997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:44.790788Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2025-12-23T18:31:44.791863Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/8ptz/004d51/r3tmp/spilling-tmp-runner/node_1_132e6cb3-241aa959-5a93f95c-415dc107, actor: [1:7587122883477107079:2305] 2025-12-23T18:31:44.792159Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/8ptz/004d51/r3tmp/spilling-tmp-runner 2025-12-23T18:31:44.795385Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd67pe9c59rg0d2dj8eyvpb9", Request has 18444977559004.756255s seconds to be completed 2025-12-23T18:31:44.799664Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd67pe9c59rg0d2dj8eyvpb9", Created new session, sessionId: ydb://session/3?node_id=1&id=ZWY0OGMyMGMtNGRkYTk5OTItODFjMDk3MjYtZjEwOTg5YmU=, workerId: [1:7587122883477107100:2325], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:31:44.799964Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd67pe9c59rg0d2dj8eyvpb9 2025-12-23T18:31:44.800116Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:31:44.800170Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:31:44.800199Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2025-12-23T18:31:44.808555Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122883477107102:2495][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7587122870592204616:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.808813Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122883477107103:2496][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7587122870592204616:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.808910Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122883477107104:2497][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7587122870592204616:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E1223 18:31:44.812970117 1144595 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:44.813171416 1144595 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1223 18:31:44.821099611 1144595 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:44.821271783 1144595 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 20 ... 
item { type_id: STRING } } } } } error { status: SUCCESS } E1223 18:34:18.028881270 1178810 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:34:18.029067902 1178810 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-12-23T18:34:18.299581Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123543511241737:2710] TxId: 281474976710707. Ctx: { TraceId: 01kd67v5s535sr3ja12nbxkb9e, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Yzc4NDRlYTItNWIzMjI1ZjctZjAzODg2ZjEtMWIzNzJmNzg=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-12-23T18:34:18.305525Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710707. Ctx: { TraceId: 01kd67v5s535sr3ja12nbxkb9e, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Yzc4NDRlYTItNWIzMjI1ZjctZjAzODg2ZjEtMWIzNzJmNzg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123543511241741:2722] 2025-12-23T18:34:18.306316Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710707. Ctx: { TraceId: 01kd67v5s535sr3ja12nbxkb9e, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Yzc4NDRlYTItNWIzMjI1ZjctZjAzODg2ZjEtMWIzNzJmNzg=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7587123543511241742:2723] 2025-12-23T18:34:18.307066Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd67v5s535sr3ja12nbxkb9e", Forwarded response to sender actor, requestId: 50, sender: [9:7587123539216274375:2705], selfId: [9:7587123457611893624:2265], source: [9:7587123539216274398:2710] 2025-12-23T18:34:18.307567Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=Yzc4NDRlYTItNWIzMjI1ZjctZjAzODg2ZjEtMWIzNzJmNzg=, workerId: [9:7587123539216274398:2710], local sessions count: 0 >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithDataVerification >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> Compression::WriteGZIP >> BasicUsage::CreateTopicWithCustomMetricsLevel >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [GOOD] >> IncrementalBackup::ResetVsUpsertMissingColumnsTest [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2025-12-23T18:34:13.060365Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123522965266642:2196];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:13.060617Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003bc1/r3tmp/tmpHlnh1B/pdisk_1.dat 2025-12-23T18:34:13.358862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:13.375000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:13.375144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:13.427987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:13.532185Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123522965266469:2081] 1766514853043426 != 1766514853043429 2025-12-23T18:34:13.549440Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:13.584013Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18567 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514853537 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:13.952970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:13.959444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:34:14.066060Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:34:16.997107Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123538551115235:2239];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:16.998814Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003bc1/r3tmp/tmpRmRH3d/pdisk_1.dat 2025-12-23T18:34:17.027574Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:17.138932Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:17.140254Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:17.140327Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:17.140666Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587123538551115019:2081] 1766514856982889 != 1766514856982892 2025-12-23T18:34:17.169778Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:17.218632Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17413 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514857226 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:17.357475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:17.365151Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2025-12-23T18:34:14.327743Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123527598945296:2192];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:14.328354Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003bb9/r3tmp/tmpVzz1b9/pdisk_1.dat 2025-12-23T18:34:14.623108Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:14.630015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:14.630129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:14.635069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:14.762205Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:14.900469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25115 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514854797 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:15.059703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:15.100532Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c9b77d78fd0] Connect to grpc://localhost:11360 2025-12-23T18:34:15.101724Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c9b77d78fd0] Request ListFoldersRequest { id: "i_am_exists" } 2025-12-23T18:34:15.115392Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c9b77d78fd0] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2025-12-23T18:34:15.116764Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c9b77d3f750] Connect to grpc://localhost:8569 2025-12-23T18:34:15.117707Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c9b77d3f750] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-12-23T18:34:15.136272Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c9b77d3f750] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2025-12-23T18:34:15.137260Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c9b77d3f750] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-12-23T18:34:15.140312Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c9b77d3f750] Status 5 Not Found 2025-12-23T18:34:15.140985Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c9b77d78fd0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-12-23T18:34:15.145771Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c9b77d78fd0] Status 5 Not Found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 65329, MsgBus: 28125 2025-12-23T18:31:43.486584Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122878041241171:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:43.486726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # 
/home/runner/.ya/build/build_root/8ptz/004d4e/r3tmp/tmpey6viJ/pdisk_1.dat 2025-12-23T18:31:43.795253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:43.795404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:43.797738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:43.855538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:43.860682Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122878041241144:2081] 1766514703484036 != 1766514703484039 2025-12-23T18:31:43.863519Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65329, node 1 2025-12-23T18:31:43.934460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:43.934483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:43.934490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:43.934607Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:44.008245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28125 TClient is connected to server localhost:28125 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:31:44.498344Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122882336209252:2476][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7587122878041241435:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.498649Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514703870 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:31:44.522906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:31:44.523291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:31:44.525378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:44.526049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 2025-12-23T18:31:44.529082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766514704577, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:31:44.530337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2025-12-23T18:31:44.530421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-12-23T18:31:44.530833Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7587122878041241667:2245] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2025-12-23T18:31:44.531057Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122878041241115:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:44.531116Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122878041241118:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:44.531284Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122878041241112:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:44.531366Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7587122878041241667:2245] Ack update: ack to# [1:7587122878041241500:2149], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2025-12-23T18:31:44.531410Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122878041241854:2287][/Root] Path was updated to new version: owner# [1:7587122878041241841:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.531438Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122878041241612:2214][/Root] Path was updated to new version: owner# 
[1:7587122878041241435:2122], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.531555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-12-23T18:31:44.531753Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122878041241869:2288][/Root] Path was updated to new version: owner# [1:7587122878041241842:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:46.667135Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2025-12-23T18:31:46.670562Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/8ptz/004d4e/r3tmp/spilling-tmp-runner/node_1_fddb3962-b6b16879-a664980f-8d6b224c, actor: [1:7587122890926143876:2305] 2025-12-23T18:31:46.670798Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/8ptz/004d4e/r3tmp/spilling-tmp-runner 2025-12-23T18:31:46.678609Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122890926143892:2496][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7587122878041241435:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:46.678777Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122890926143893:2497][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7587122878041241435:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E1223 18:31:46.682822369 1145430 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:46.683293107 1145430 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1223 18:31:46.684847986 1145430 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:46.685012543 1145430 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:31:46.683576Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd67pg5a9gqf858k7wn404m7", Request has 18444977559002.868073s seconds to be completed 2025-12-23T18:31:46.689254Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122890926143914:2499][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7587122878041241435:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:46.689654Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create 
coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2025-12-23T18:31:46.691054Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd67pg5a9gqf858k7wn404m7", Created new session, sessionId: ydb://session/3?node_id=1&id=NzZkOTk2M2UtNGE4MTczZWEtYzE3ZTI0YmEtYTYwOGRkZjM=, workerId: [1:7587122890926143922:2327], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:31:46.691295Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd67pg5a9gqf858k7 ... e" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-12-23T18:34:20.657161Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123552772514650:2708] TxId: 281474976710707. Ctx: { TraceId: 01kd67v885fag6ps2wavja855d, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NjY1YjU1Y2QtYTMyZWVmNzItMjU0ZjkzZmYtNWY2NGMwMg==, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-12-23T18:34:20.663698Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710707. Ctx: { TraceId: 01kd67v885fag6ps2wavja855d, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NjY1YjU1Y2QtYTMyZWVmNzItMjU0ZjkzZmYtNWY2NGMwMg==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123552772514655:2716] 2025-12-23T18:34:20.664218Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710707. Ctx: { TraceId: 01kd67v885fag6ps2wavja855d, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NjY1YjU1Y2QtYTMyZWVmNzItMjU0ZjkzZmYtNWY2NGMwMg==, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7587123552772514656:2717] 2025-12-23T18:34:20.664863Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd67v885fag6ps2wavja855d", Forwarded response to sender actor, requestId: 50, sender: [9:7587123552772514611:2707], selfId: [9:7587123475463101161:2265], source: [9:7587123552772514612:2708] 2025-12-23T18:34:20.665538Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NjY1YjU1Y2QtYTMyZWVmNzItMjU0ZjkzZmYtNWY2NGMwMg==, workerId: [9:7587123552772514612:2708], local sessions count: 0 E1223 18:34:20.713118149 1177529 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:34:20.713320777 1177529 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:34:20.724149Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977558848.827505s seconds to be completed 2025-12-23T18:34:20.728439Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2025-12-23T18:34:20.729204Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=9&id=NzlmNDY0OGMtMWFiZTdlYTctNWU5ZjZmZDQtNjkwZThjYzU=, workerId: [9:7587123552772514662:2719], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:34:20.729655Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T18:34:20.730289Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=NzlmNDY0OGMtMWFiZTdlYTctNWU5ZjZmZDQtNjkwZThjYzU=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 52, targetId: [9:7587123552772514662:2719] 2025-12-23T18:34:20.730351Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 52 timeout: 300.000000s actor id: [9:7587123552772514664:3242] E1223 18:34:20.883432848 1179215 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:34:20.883634425 1179215 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2025-12-23T18:34:19.342542Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123551530261210:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:19.342620Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ba3/r3tmp/tmpr1s98m/pdisk_1.dat 2025-12-23T18:34:19.493037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:19.572190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:19.572333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:19.617239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:19.620899Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:19.622017Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123551530261184:2081] 1766514859341245 != 1766514859341248 2025-12-23T18:34:19.686270Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1326 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514859662 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:19.901456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:19.911052Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c81ada9f450] Connect to grpc://localhost:4711 2025-12-23T18:34:19.924229Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c81ada9f450] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-12-23T18:34:19.934585Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c81ada9f450] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4711: Failed to connect to remote host: Connection refused 2025-12-23T18:34:19.936040Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c81ada9f450] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-12-23T18:34:19.936561Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c81ada9f450] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4711: Failed to connect to remote host: Connection refused 2025-12-23T18:34:20.349534Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:34:20.937114Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c81ada9f450] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-12-23T18:34:20.940079Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c81ada9f450] Status 5 Not Found 2025-12-23T18:34:20.940399Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c81ada9f450] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-12-23T18:34:20.943070Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c81ada9f450] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [GOOD] >> 
DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::Defrag [GOOD] Test command err: 2025-12-23T18:33:34.965157Z :BS_INCRHUGE DEBUG: incrhuge_keeper.cpp:72: BlockSize# 8128 BlocksInChunk# 2304 BlocksInMinBlob# 65 MaxBlobsPerChunk# 35 BlocksInDataSection# 2303 BlocksInIndexSection# 1 2025-12-23T18:33:34.965330Z :BS_INCRHUGE INFO: incrhuge_keeper_recovery.cpp:152: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] starting ReadLog 2025-12-23T18:33:34.965853Z :BS_INCRHUGE INFO: incrhuge_keeper_recovery.cpp:161: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] finished ReadLog 2025-12-23T18:33:34.965914Z :BS_INCRHUGE DEBUG: incrhuge_keeper_recovery.cpp:200: [PDisk# 000000001 Recovery] ApplyReadLog Chunks# [] Deletes# [] Owners# {} CurrentSerNum# 0 NextLsn# 1 2025-12-23T18:33:34.965973Z :BS_INCRHUGE INFO: incrhuge_keeper_recovery.cpp:515: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] ready 2025-12-23T18:33:34.966011Z :TEST DEBUG: test_actor_concurrent.h:153: finished Init Reference# [] Enumerated# [] InFlightDeletes# [] 2025-12-23T18:33:34.966023Z :TEST DEBUG: test_actor_concurrent.h:209: ActionsTaken# 1 2025-12-23T18:33:34.966034Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 0 InFlightWritesSize# 0 2025-12-23T18:33:34.969116Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:811717:0:0] Lsn# 0 NumReq# 0 2025-12-23T18:33:34.969488Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 0 HandleWrite Lsn# 0 DataSize# 811717 WriteQueueSize# 1 WriteInProgressItemsSize# 0 2025-12-23T18:33:34.969535Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 0 2025-12-23T18:33:34.969563Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2025-12-23T18:33:34.969584Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:230: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem no free chunks 2025-12-23T18:33:34.975013Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 1 InFlightWritesSize# 1 2025-12-23T18:33:34.978073Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1745495:1:0] Lsn# 1 NumReq# 1 2025-12-23T18:33:34.978132Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 1 HandleWrite Lsn# 1 DataSize# 1745495 WriteQueueSize# 2 WriteInProgressItemsSize# 0 2025-12-23T18:33:34.978160Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 0 2025-12-23T18:33:34.978191Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2025-12-23T18:33:34.978212Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:230: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem no free chunks 2025-12-23T18:33:34.982019Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 2 InFlightWritesSize# 2 2025-12-23T18:33:34.982957Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:602037:2:0] Lsn# 2 NumReq# 2 2025-12-23T18:33:34.984166Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 3 InFlightWritesSize# 3 2025-12-23T18:33:34.984519Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 2 HandleWrite 
Lsn# 2 DataSize# 602037 WriteQueueSize# 3 WriteInProgressItemsSize# 0 2025-12-23T18:33:34.984569Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 3 WriteInProgressItemsSize# 0 2025-12-23T18:33:34.984588Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2025-12-23T18:33:34.984616Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:230: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem no free chunks 2025-12-23T18:33:34.987861Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:460: [PDisk# 000000001 Logger] ApplyLogChunkItem Lsn# 1 Status# OK 2025-12-23T18:33:34.987931Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 2 ChunkSerNum# 1000 2025-12-23T18:33:34.987968Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 3 ChunkSerNum# 1001 2025-12-23T18:33:34.987989Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 4 ChunkSerNum# 1002 2025-12-23T18:33:34.988006Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 5 ChunkSerNum# 1003 2025-12-23T18:33:34.988020Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 6 ChunkSerNum# 1004 2025-12-23T18:33:34.988045Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 7 ChunkSerNum# 1005 2025-12-23T18:33:34.988061Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 8 ChunkSerNum# 1006 2025-12-23T18:33:34.988718Z :BS_INCRHUGE DEBUG: incrhuge_keeper_alloc.cpp:64: [PDisk# 000000001 Allocator] ChunkIdx# 9 ChunkSerNum# 1007 2025-12-23T18:33:34.988761Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 3 WriteInProgressItemsSize# 0 2025-12-23T18:33:34.988782Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2025-12-23T18:33:34.989778Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem OffsetInBlocks# 0 IndexInsideChunk# 0 SizeInBlocks# 100 SizeInBytes# 812800 Offset# 0 Size# 812800 End# 812800 Id# 0000000000000000 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-12-23T18:33:34.989812Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 1 ProcessWriteItem entry 2025-12-23T18:33:34.990299Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 1 ProcessWriteItem OffsetInBlocks# 100 IndexInsideChunk# 1 SizeInBlocks# 215 SizeInBytes# 1747520 Offset# 812800 Size# 1747520 End# 2560320 Id# 0000000000000001 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-12-23T18:33:34.990325Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 2 ProcessWriteItem entry 2025-12-23T18:33:34.990504Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 2 ProcessWriteItem OffsetInBlocks# 315 IndexInsideChunk# 2 SizeInBlocks# 75 SizeInBytes# 609600 Offset# 2560320 Size# 609600 End# 3169920 Id# 0000000000000002 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-12-23T18:33:34.993856Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1287465:3:0] Lsn# 3 NumReq# 3 2025-12-23T18:33:34.996914Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 4 InFlightWritesSize# 4 2025-12-23T18:33:34.998045Z :BS_INCRHUGE DEBUG: 
incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 3 HandleWrite Lsn# 3 DataSize# 1287465 WriteQueueSize# 1 WriteInProgressItemsSize# 3 2025-12-23T18:33:34.998084Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 3 2025-12-23T18:33:34.998106Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 3 ProcessWriteItem entry 2025-12-23T18:33:34.998415Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 3 ProcessWriteItem OffsetInBlocks# 390 IndexInsideChunk# 3 SizeInBlocks# 159 SizeInBytes# 1292352 Offset# 3169920 Size# 1292352 End# 4462272 Id# 0000000000000003 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-12-23T18:33:35.000066Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1501676:4:0] Lsn# 4 NumReq# 4 2025-12-23T18:33:35.000114Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 4 HandleWrite Lsn# 4 DataSize# 1501676 WriteQueueSize# 1 WriteInProgressItemsSize# 4 2025-12-23T18:33:35.000135Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 4 2025-12-23T18:33:35.000151Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 4 ProcessWriteItem entry 2025-12-23T18:33:35.000535Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 4 ProcessWriteItem OffsetInBlocks# 549 IndexInsideChunk# 4 SizeInBlocks# 185 SizeInBytes# 1503680 Offset# 4462272 Size# 1503680 End# 5965952 Id# 0000000000000004 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-12-23T18:33:35.003778Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 5 InFlightWritesSize# 5 2025-12-23T18:33:35.005008Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:687721:5:0] Lsn# 5 NumReq# 5 2025-12-23T18:33:35.006480Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 6 InFlightWritesSize# 6 2025-12-23T18:33:35.007000Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 5 HandleWrite Lsn# 5 DataSize# 687721 WriteQueueSize# 1 WriteInProgressItemsSize# 5 2025-12-23T18:33:35.007026Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 5 2025-12-23T18:33:35.020041Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1957662:6:0] Lsn# 6 NumReq# 6 2025-12-23T18:33:35.022905Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 6 HandleWrite Lsn# 6 DataSize# 1957662 WriteQueueSize# 2 WriteInProgressItemsSize# 5 2025-12-23T18:33:35.022974Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 5 2025-12-23T18:33:35.024297Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 7 InFlightWritesSize# 7 2025-12-23T18:33:35.026949Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 0 ApplyBlobWrite Status# OK 2025-12-23T18:33:35.027329Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 4 2025-12-23T18:33:35.027355Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 5 ProcessWriteItem entry 2025-12-23T18:33:35.027627Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] 
QueryId# 5 ProcessWriteItem OffsetInBlocks# 734 IndexInsideChunk# 5 SizeInBlocks# 85 SizeInBytes# 690880 Offset# 5965952 Size# 690880 End# 6656832 Id# 0000000000000005 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-12-23T18:33:35.027781Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:1:1:0:1824284:7:0] Lsn# 7 NumReq# 7 2025-12-23T18:33:35.027885Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 7 HandleWrite Lsn# 7 DataSize# 1824284 WriteQueueSize# 2 WriteInProgressItemsSize# 5 2025-12-23T18:33:35.027921Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 2 WriteInProgressItemsSize# 5 2025-12-23T18:33:35.034188Z :TEST DEBUG: test_actor_concurrent.h:308: finished Write Id# 0000000000000000 LogoBlobId# [1:1:1:0:811717:0:0] Lsn# 0 2025-12-23T18:33:35.034251Z :TEST INFO: test_actor_concurrent.h:320: BytesWritten# 0 MB ElapsedTime# 0.118252s Speed# 0.00 MB/s 2025-12-23T18:33:35.034274Z :TEST DEBUG: test_actor_concurrent.h:209: ActionsTaken# 2 2025-12-23T18:33:35.034288Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 7 InFlightWritesSize# 7 2025-12-23T18:33:35.034313Z :TEST DEBUG: test_actor_concurrent.h:381: sent Delete Id# 0000000000000000 NumReq# 7 2025-12-23T18:33:35.034379Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:50: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 8 HandleDelete Ids# [0000000000000000] 2025-12-23T18:33:35.034452Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:544: [PDisk# 000000001 Logger] Log ... 481 HandleWrite Lsn# 1191 DataSize# 709646 WriteQueueSize# 19 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.114826Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 19 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.116212Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 44 InFlightWritesSize# 29 2025-12-23T18:34:38.119028Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:2093185:1192:0] Lsn# 1192 NumReq# 44 2025-12-23T18:34:38.119048Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 482 HandleWrite Lsn# 1192 DataSize# 2093185 WriteQueueSize# 20 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.119060Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 20 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.120326Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:648: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 795 Status# OK 2025-12-23T18:34:38.120347Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:686: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 795 Virtual# false 2025-12-23T18:34:38.120366Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:196: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 1165 finished Status# OK 2025-12-23T18:34:38.120380Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:249: [PDisk# 000000001 Deleter] deleting 0000000000000028 from lookup table 2025-12-23T18:34:38.120401Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:648: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 796 Status# OK 2025-12-23T18:34:38.120410Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:686: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 796 Virtual# false 2025-12-23T18:34:38.120418Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:196: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 1166 finished Status# OK 2025-12-23T18:34:38.120425Z :BS_INCRHUGE DEBUG: 
incrhuge_keeper_delete.cpp:249: [PDisk# 000000001 Deleter] deleting 0000000000000005 from lookup table 2025-12-23T18:34:38.120438Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:648: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 797 Status# OK 2025-12-23T18:34:38.120445Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:686: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 797 Virtual# false 2025-12-23T18:34:38.120554Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:196: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 1167 finished Status# OK 2025-12-23T18:34:38.120563Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:249: [PDisk# 000000001 Deleter] deleting 000000000000003a from lookup table 2025-12-23T18:34:38.120584Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:648: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 798 Status# OK 2025-12-23T18:34:38.120593Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:686: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 798 Virtual# false 2025-12-23T18:34:38.120601Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:196: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 1168 finished Status# OK 2025-12-23T18:34:38.120608Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:249: [PDisk# 000000001 Deleter] deleting 0000000000000022 from lookup table 2025-12-23T18:34:38.120619Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:326: [PDisk# 000000001 Deleter] sending chunk delete ChunkIdx# 24 2025-12-23T18:34:38.120654Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:648: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 799 Status# OK 2025-12-23T18:34:38.120661Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:686: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 799 Virtual# false 2025-12-23T18:34:38.120668Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:196: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 1169 finished Status# OK 2025-12-23T18:34:38.120674Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:249: [PDisk# 000000001 Deleter] deleting 0000000000000030 from lookup table 2025-12-23T18:34:38.120698Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:648: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 800 Status# OK 2025-12-23T18:34:38.120709Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:686: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 800 Virtual# false 2025-12-23T18:34:38.120718Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:196: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 1171 finished Status# OK 2025-12-23T18:34:38.120725Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:249: [PDisk# 000000001 Deleter] deleting 0000000000000010 from lookup table 2025-12-23T18:34:38.120736Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:648: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 801 Status# OK 2025-12-23T18:34:38.120747Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:686: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 801 Virtual# false 2025-12-23T18:34:38.120758Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:196: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 1172 finished Status# OK 2025-12-23T18:34:38.120769Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:249: [PDisk# 000000001 Deleter] deleting 0000000000000012 from lookup table 2025-12-23T18:34:38.120780Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:648: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 802 Status# OK 2025-12-23T18:34:38.120787Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:686: [PDisk# 000000001 Logger] 
ApplyLogDeleteItem Entrypoint# false Lsn# 802 Virtual# false 2025-12-23T18:34:38.120794Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:196: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 1173 finished Status# OK 2025-12-23T18:34:38.120800Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:249: [PDisk# 000000001 Deleter] deleting 0000000000000006 from lookup table 2025-12-23T18:34:38.120826Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:648: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 803 Status# OK 2025-12-23T18:34:38.120835Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:686: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 803 Virtual# false 2025-12-23T18:34:38.120843Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:196: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 1175 finished Status# OK 2025-12-23T18:34:38.120849Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:249: [PDisk# 000000001 Deleter] deleting 0000000000000038 from lookup table 2025-12-23T18:34:38.120862Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:648: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 804 Status# OK 2025-12-23T18:34:38.120869Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:686: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 804 Virtual# false 2025-12-23T18:34:38.120876Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:196: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 1182 finished Status# OK 2025-12-23T18:34:38.120888Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:249: [PDisk# 000000001 Deleter] deleting 0000000000000037 from lookup table 2025-12-23T18:34:38.120897Z :BS_INCRHUGE DEBUG: incrhuge_keeper_delete.cpp:326: [PDisk# 000000001 Deleter] sending chunk delete ChunkIdx# 25 2025-12-23T18:34:38.120922Z :BS_INCRHUGE DEBUG: incrhuge_keeper_log.cpp:686: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 805 Virtual# true 2025-12-23T18:34:38.123181Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 45 InFlightWritesSize# 30 2025-12-23T18:34:38.125955Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:2044453:1193:0] Lsn# 1193 NumReq# 45 2025-12-23T18:34:38.125969Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 483 HandleWrite Lsn# 1193 DataSize# 2044453 WriteQueueSize# 21 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.125981Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 21 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.127354Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 458 ApplyBlobWrite Status# OK 2025-12-23T18:34:38.127780Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 21 WriteInProgressItemsSize# 4 2025-12-23T18:34:38.127799Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 463 ProcessWriteItem entry 2025-12-23T18:34:38.128024Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 463 ProcessWriteItem OffsetInBlocks# 1852 IndexInsideChunk# 13 SizeInBlocks# 164 SizeInBytes# 1332992 Offset# 15053056 Size# 1332992 End# 16386048 Id# 0000000000000037 ChunkIdx# 26 ChunkSerNum# 1120 Defrag# false 2025-12-23T18:34:38.130142Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 46 InFlightWritesSize# 31 2025-12-23T18:34:38.132632Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:1815657:1194:0] Lsn# 1194 NumReq# 46 2025-12-23T18:34:38.132654Z 
:BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 484 HandleWrite Lsn# 1194 DataSize# 1815657 WriteQueueSize# 21 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.132667Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 21 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.136204Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 47 InFlightWritesSize# 32 2025-12-23T18:34:38.138470Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:1564457:1195:0] Lsn# 1195 NumReq# 47 2025-12-23T18:34:38.138495Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 485 HandleWrite Lsn# 1195 DataSize# 1564457 WriteQueueSize# 22 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.138509Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 22 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.139946Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:344: [PDisk# 000000001 Writer] QueryId# 459 ApplyBlobWrite Status# OK 2025-12-23T18:34:38.140188Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 22 WriteInProgressItemsSize# 4 2025-12-23T18:34:38.140205Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:192: [PDisk# 000000001 Writer] QueryId# 464 ProcessWriteItem entry 2025-12-23T18:34:38.140391Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:319: [PDisk# 000000001 Writer] QueryId# 464 ProcessWriteItem OffsetInBlocks# 2016 IndexInsideChunk# 14 SizeInBlocks# 135 SizeInBytes# 1097280 Offset# 16386048 Size# 1097280 End# 17483328 Id# 0000000000000038 ChunkIdx# 26 ChunkSerNum# 1120 Defrag# false 2025-12-23T18:34:38.141593Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 48 InFlightWritesSize# 33 2025-12-23T18:34:38.143910Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:1579253:1196:0] Lsn# 1196 NumReq# 48 2025-12-23T18:34:38.143917Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 486 HandleWrite Lsn# 1196 DataSize# 1579253 WriteQueueSize# 22 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.143929Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 22 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.147058Z :TEST DEBUG: test_actor_concurrent.h:213: GetNumRequestsInFlight# 49 InFlightWritesSize# 34 2025-12-23T18:34:38.147916Z :TEST DEBUG: test_actor_concurrent.h:295: sent Write LogoBlobId# [1:2:1:0:584806:1197:0] Lsn# 1197 NumReq# 49 2025-12-23T18:34:38.147958Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:130: [PDisk# 000000001 Writer] QueryId# 487 HandleWrite Lsn# 1197 DataSize# 584806 WriteQueueSize# 23 WriteInProgressItemsSize# 5 2025-12-23T18:34:38.147999Z :BS_INCRHUGE DEBUG: incrhuge_keeper_write.cpp:174: [PDisk# 000000001 Writer] WriteQueueSize# 23 WriteInProgressItemsSize# 5 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2025-12-23T18:34:16.446524Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123535072196338:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:16.446638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003baf/r3tmp/tmpGYHAWc/pdisk_1.dat 2025-12-23T18:34:16.681652Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:16.703707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:16.703871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:16.775373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:16.799814Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:16.801145Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123535072196311:2081] 1766514856444475 != 1766514856444478 2025-12-23T18:34:16.899766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514856827 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:17.141834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:34:17.148561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:34:17.164489Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d1bab79f450] Connect to grpc://localhost:24939 2025-12-23T18:34:17.180397Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d1bab79f450] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-12-23T18:34:17.192516Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d1bab79f450] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24939: Failed to connect to remote host: Connection refused 2025-12-23T18:34:17.194523Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d1bab79f450] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-12-23T18:34:17.195119Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d1bab79f450] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24939: Failed to connect to remote host: Connection refused 2025-12-23T18:34:17.464314Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:34:18.198405Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d1bab79f450] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-12-23T18:34:18.201729Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d1bab79f450] Status 5 Not Found 2025-12-23T18:34:18.202088Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d1bab79f450] Request ListFoldersRequest { id: "i_am_exists" } 2025-12-23T18:34:18.206954Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d1bab79f450] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2025-12-23T18:34:16.466345Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123537499175109:2083];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:16.467194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ba7/r3tmp/tmpNGZuCl/pdisk_1.dat 2025-12-23T18:34:16.713844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:16.721363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:16.721481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:16.728945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:16.830404Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:16.941210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61110 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514856890 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:17.122096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:17.137588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:34:19.884635Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123550634031854:2146];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:19.884763Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ba7/r3tmp/tmpM18h3Y/pdisk_1.dat 2025-12-23T18:34:19.899174Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:19.994001Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:20.008682Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:20.008783Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:20.034231Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:20.143927Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8654 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514860082 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:20.213421Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:58.480262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:58.480367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:58.480404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:58.480442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:58.480478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:58.480513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:58.480589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:58.480661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:58.481606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:58.481922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:58.576451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:58.576524Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:58.586826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:58.587118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:58.587285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:58.591188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:58.591416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:58.592293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:58.592547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:58.594431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:58.594616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:58.595731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:58.595798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:58.595955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:58.596001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:58.596046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:58.596192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:58.829903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.837694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.837930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.838033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.838113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.838189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.838267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.838453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.838582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:33:58.838682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.838770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.838865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.838996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.839089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.839163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
sole configs 2025-12-23T18:34:05.359104Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:06.973342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0428 2025-12-23T18:34:06.973447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0836 2025-12-23T18:34:07.010959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-12-23T18:34:07.011166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2025-12-23T18:34:07.011242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:34:07.011368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2025-12-23T18:34:07.011441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2025-12-23T18:34:07.011485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:34:07.011544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2025-12-23T18:34:07.023036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:34:09.933736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0117 2025-12-23T18:34:09.933833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0214 2025-12-23T18:34:09.980543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-12-23T18:34:09.980730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2025-12-23T18:34:09.980785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: 
TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:34:09.980892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2025-12-23T18:34:09.980945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2025-12-23T18:34:09.980976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:34:09.981015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2025-12-23T18:34:09.995005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:34:12.764629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0117 2025-12-23T18:34:12.764740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0214 2025-12-23T18:34:12.809269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-12-23T18:34:12.809446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2025-12-23T18:34:12.809501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:34:12.810802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2025-12-23T18:34:12.810874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2025-12-23T18:34:12.810916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:34:12.810971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: 
SplitByLoadNotEnabledForTable 2025-12-23T18:34:12.821442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:34:16.503216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0042 2025-12-23T18:34:16.503317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0061 2025-12-23T18:34:16.545779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-12-23T18:34:16.546018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2025-12-23T18:34:16.546091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:34:16.546220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2025-12-23T18:34:16.546267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2025-12-23T18:34:16.546296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:34:16.546370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2025-12-23T18:34:16.559002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:34:20.047406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7071: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-12-23T18:34:20.047556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:34:20.047797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-12-23T18:34:20.048066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:215: Run conditional erase, tabletId: 72075186233409547, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 60025000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 40 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 
BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2025-12-23T18:34:20.048594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7099: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:34:20.049265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:349: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-12-23T18:34:20.049318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:399: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:34:20.052902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:453: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-12-23T18:34:20.053074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:34:20.053126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:108: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.025000Z, at schemeshard: 72057594046678944 2025-12-23T18:34:20.053183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2025-12-23T18:34:12.849322Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123518989670569:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:12.849386Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003bbb/r3tmp/tmp1oh7Kx/pdisk_1.dat 2025-12-23T18:34:13.115128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:13.148675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:13.148798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:13.224693Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:13.227196Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123518989670544:2081] 1766514852846922 != 1766514852846925 2025-12-23T18:34:13.245388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:13.289340Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25567 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514853299 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:13.634610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:13.642153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:34:13.861892Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-12-23T18:34:13.825319Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123522225257689:2061];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:13.825381Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003bba/r3tmp/tmpOs25MI/pdisk_1.dat 2025-12-23T18:34:14.127761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:14.176865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:14.177100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:14.240738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:14.259867Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:14.298916Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123522225257669:2081] 1766514853823725 != 1766514853823728 
2025-12-23T18:34:14.403489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514854286 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:14.595300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:14.608669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:34:14.642821Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d14ba49f450]{trololo} Connect to grpc://localhost:18998 2025-12-23T18:34:14.644104Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d14ba49f450]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-12-23T18:34:14.665278Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d14ba49f450]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 16834, MsgBus: 2021 2025-12-23T18:31:43.530063Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122877909325218:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:43.530147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d4c/r3tmp/tmpNFrZ5K/pdisk_1.dat 2025-12-23T18:31:43.805063Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:43.808589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:43.808704Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:43.816562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:43.887568Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122877909325183:2081] 1766514703528239 != 1766514703528242 2025-12-23T18:31:43.914331Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16834, node 1 2025-12-23T18:31:44.003060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:44.003087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:44.003101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:44.003217Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:44.097361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2021 TClient is connected to server localhost:2021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:31:44.534200Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122882204293292:2474][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7587122877909325471:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.544014Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514703968 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:31:44.610716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:31:44.611159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:31:44.613364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:44.614619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:31:44.617287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766514704661, transactions count in step: 1, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:31:44.620216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2025-12-23T18:31:44.620273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-12-23T18:31:44.620599Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7587122877909325719:2252] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2025-12-23T18:31:44.621179Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122877909325151:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:44.621302Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122877909325154:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:44.621379Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122877909325157:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:44.621413Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122877909325774:2287][/Root] Path was updated to new version: owner# [1:7587122877909325767:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.621665Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122877909325628:2204][/Root] Path was updated to new version: owner# [1:7587122877909325471:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { 
Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.621778Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122877909325775:2288][/Root] Path was updated to new version: owner# [1:7587122877909325768:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.622157Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7587122877909325719:2252] Ack update: ack to# [1:7587122877909325550:2153], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2025-12-23T18:31:44.622542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-12-23T18:31:46.911497Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E1223 18:31:46.916197128 1145442 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:46.916697724 1145442 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1223 18:31:46.920630438 1145442 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:46.920791067 1145442 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:31:46.920629Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd67pg7x0ephd8j5nytqnwsv", Request has 18444977559002.631029s seconds to be completed 2025-12-23T18:31:46.922022Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/8ptz/004d4c/r3tmp/spilling-tmp-runner/node_1_c02c57c8-1b38d5cd-3e3b700d-6e49ae28, actor: [1:7587122890794227916:2305] 2025-12-23T18:31:46.922263Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/8ptz/004d4c/r3tmp/spilling-tmp-runner 2025-12-23T18:31:46.924765Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd67pg7x0ephd8j5nytqnwsv", Created new session, sessionId: ydb://session/3?node_id=1&id=NzFmNjhhMmMtNTE3NjI0MmEtNDYzYWY3MjYtM2NhNjgyNjc=, workerId: [1:7587122890794227948:2327], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:31:46.924818Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122890794227939:2494][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7587122877909325471:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:46.925006Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd67pg7x0ephd8j5nytqnwsv 2025-12-23T18:31:46.925072Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:31:46.925092Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 
2025-12-23T18:31:46.925108Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2025-12-23T18:31:46.925823Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2025-12-23T18:31:46.927007Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122890794227940:2495][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7587122877909325471:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12- ... Root, SessionId: ydb://session/3?node_id=9&id=OGQ2NWE5MjQtNzhiZGE2OTMtNjYwYmQ5ZTUtMjAzNTJkNTk=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123547358039591:2712] 2025-12-23T18:34:19.124849Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710705. Ctx: { TraceId: 01kd67v6wx16me6861wxz5ky4j, Database: /Root, SessionId: ydb://session/3?node_id=9&id=OGQ2NWE5MjQtNzhiZGE2OTMtNjYwYmQ5ZTUtMjAzNTJkNTk=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123547358039592:2713] 2025-12-23T18:34:19.125509Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 49, sender: [9:7587123543063072244:2698], selfId: [9:7587123470048626091:2252], source: [9:7587123543063072241:2697] 2025-12-23T18:34:19.126962Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123547358039598:2697] TxId: 281474976710706. Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=OGQ2NWE5MjQtNzhiZGE2OTMtNjYwYmQ5ZTUtMjAzNTJkNTk=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:34:19.127827Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=OGQ2NWE5MjQtNzhiZGE2OTMtNjYwYmQ5ZTUtMjAzNTJkNTk=, workerId: [9:7587123543063072241:2697], local sessions count: 1 Call ListSplits. 
selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-12-23T18:34:19.241198Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123547358039606:2708] TxId: 281474976710707. Ctx: { TraceId: 01kd67v6y9eqcg11a5nrrmxtwx, Database: /Root, SessionId: ydb://session/3?node_id=9&id=N2JkZGYxZjItYWZmNjJlZDAtZWIyOTc3NGUtYTczYmY4MTI=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-12-23T18:34:19.245765Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710707. Ctx: { TraceId: 01kd67v6y9eqcg11a5nrrmxtwx, Database: /Root, SessionId: ydb://session/3?node_id=9&id=N2JkZGYxZjItYWZmNjJlZDAtZWIyOTc3NGUtYTczYmY4MTI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123547358039610:2716] 2025-12-23T18:34:19.246224Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710707. Ctx: { TraceId: 01kd67v6y9eqcg11a5nrrmxtwx, Database: /Root, SessionId: ydb://session/3?node_id=9&id=N2JkZGYxZjItYWZmNjJlZDAtZWIyOTc3NGUtYTczYmY4MTI=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7587123547358039611:2717] 2025-12-23T18:34:19.246731Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd67v6y9eqcg11a5nrrmxtwx", Forwarded response to sender actor, requestId: 50, sender: [9:7587123543063072271:2707], selfId: [9:7587123470048626091:2252], source: [9:7587123543063072272:2708] 2025-12-23T18:34:19.247377Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=N2JkZGYxZjItYWZmNjJlZDAtZWIyOTc3NGUtYTczYmY4MTI=, workerId: [9:7587123543063072272:2708], local sessions count: 0 E1223 18:34:19.454335570 1178921 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:34:19.454559015 1178921 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// |92.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DataShardSnapshots::RepeatableReadAfterSplitRace >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ResetVsUpsertMissingColumnsTest [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:32:42.296283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:42.425021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:42.439612Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:32:42.440135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:32:42.440250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:32:42.788614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:42.788755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:42.887066Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514758915725 != 1766514758915729 2025-12-23T18:32:42.905633Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:42.954707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:43.059444Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c1d/r3tmp/tmplUEmCt/pdisk_1.dat 2025-12-23T18:32:43.384775Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:830:2725], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:43.384851Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:43.384887Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046644480 2025-12-23T18:32:43.385014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122432, Sender [1:828:2723], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-12-23T18:32:43.385047Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5258: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-12-23T18:32:43.546643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-12-23T18:32:43.546935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:43.547177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:32:43.547232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: 
[OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:32:43.547461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:32:43.547561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:43.547678Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:43.548461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:32:43.548616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:32:43.548675Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:43.548713Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-12-23T18:32:43.548898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:32:43.548947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:32:43.549022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:43.549086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:32:43.549128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:32:43.549165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:32:43.549284Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:43.549744Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:43.549785Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-12-23T18:32:43.549909Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:32:43.549946Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:32:43.549997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:43.550046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:32:43.550087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:32:43.550166Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:43.550524Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:43.550555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-12-23T18:32:43.550656Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:32:43.550685Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:32:43.550730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:43.550792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:43.550845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-12-23T18:32:43.550881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:43.550925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:32:43.554761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:32:43.555315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:43.555378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:32:43.555507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:32:43.555792Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2025-12-23T18:32:43.556220Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:836:2731], Recipient [1:399:2398]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:43.556273Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:43.556310Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046644480 2025-12-23T18:32:43.556402Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124996, Sender [1:828:2723], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-12-23T18:32:43.556437Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-12-23T18:32:43.556499Z node 1 :FLAT_TX_SCHEMESHARD DE ... 5186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:30.784419Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:34:30.806945Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:30.807011Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:30.807042Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:30.807076Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:30.807112Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:34:30.838129Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:30.838201Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:30.838233Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:30.838265Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:30.838295Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:34:30.858974Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:30.859030Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:30.859054Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:30.859079Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:30.859100Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:34:30.879740Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:30.879798Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:30.879821Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:30.879846Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:30.879867Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:34:30.900555Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [9:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:30.900628Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:30.900715Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [9:398:2397], Recipient [9:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:30.900746Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:30.932181Z node 9 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037893][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:34:30.932283Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:30.932310Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:30.932332Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:30.932357Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:30.932378Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:34:30.953896Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [9:1773:3365], Recipient [9:398:2397]: NKikimrSchemeOp.TDescribePath Path: "/Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table" Options { ShowPrivateTable: true } 2025-12-23T18:34:30.954052Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme Found incremental backup table at: /Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table 2025-12-23T18:34:30.956701Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [9:1775:3367], Recipient [9:398:2397]: NKikimrSchemeOp.TDescribePath Path: "/Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table" Options { ShowPrivateTable: true } 2025-12-23T18:34:30.956819Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme Found attribute: __incremental_backup = {} ... 
waiting for SysViewsRoster update finished 2025-12-23T18:34:35.625085Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:34:35.635564Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:34:35.640864Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:314:2357], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:34:35.641043Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:34:35.641123Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:34:35.974919Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:35.975148Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:36.002086Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:34:2081] 1766514871763106 != 1766514871763110 2025-12-23T18:34:36.006859Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:36.055722Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:36.126385Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c1d/r3tmp/tmpaZKYgp/pdisk_1.dat 2025-12-23T18:34:36.419916Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:36.436912Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:34:36.544698Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:36.828449Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:36.828565Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:36.829027Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:36.829883Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:36.829965Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:36.835512Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:34:36.962976Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:34:36.999417Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:33:58.236816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:58.236913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:58.236955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:58.236989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:58.237023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:58.237051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:58.237112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:58.237198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:58.238011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:58.238279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:58.459969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:58.460037Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:58.492765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:58.492964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: 
TTxUpgradeSchema.Execute 2025-12-23T18:33:58.493135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:58.519503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:58.519725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:58.520418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:58.520650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:58.530268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:58.530550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:58.531821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:58.531891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:58.532068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:58.532115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:58.532156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:58.532269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:58.865727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.878211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.878419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.878499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.878561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.878622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.878685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.878768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.878919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.879011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.879099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.879197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.879300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.879412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:58.879489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
:34:08.711236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:08.711356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:08.711430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:08.711512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:08.711583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:08.711688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:08.711756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:08.711817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:08.711873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:08.711986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:34:08.712032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:34:08.712137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:34:08.712172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:34:08.712207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:34:08.712256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:34:08.712300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-12-23T18:34:08.712377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3124:4344] message: TxId: 101 2025-12-23T18:34:08.712443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:34:08.712494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:34:08.712526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:34:08.713959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2025-12-23T18:34:08.717470Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:34:08.717543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:3125:4345] TestWaitNotification: OK eventTxId 101 2025-12-23T18:34:08.718109Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:34:08.718347Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 268us result status StatusSuccess 2025-12-23T18:34:08.719058Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false StorageId: "" DefaultValue { } 
ColumnFamilyId: 0 } KeyColumnNames: "key" NextColumnId: 4 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1766514848.719789 1176160 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TAlterColumnTable: 6:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-12-23T18:34:08.722976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:34:08.723174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: alter_table.cpp:283: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:34:08.723623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose 
status:StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2025-12-23T18:34:08.726054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:34:08.726283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 |92.4%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 21760, MsgBus: 24038 2025-12-23T18:31:42.823200Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122875925178259:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:42.823308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:31:42.884203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d4f/r3tmp/tmpbyxlOl/pdisk_1.dat 2025-12-23T18:31:43.188066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:43.188144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:43.240065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:43.244909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:43.279559Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21760, node 1 2025-12-23T18:31:43.355833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:43.355857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:43.355864Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:43.355971Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:43.470506Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24038 TClient is connected to server localhost:24038 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:31:43.819749Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122880220146326:2474][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7587122875925178513:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:43.857839Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514703331 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:43.874523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:31:43.874868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:31:43.876386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:43.876960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
2025-12-23T18:31:43.880270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766514703926, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:31:43.882583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2025-12-23T18:31:43.882586Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7587122880220146046:2249] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2025-12-23T18:31:43.882630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-12-23T18:31:43.882967Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122875925178189:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:43.882972Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122875925178186:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:43.883088Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122875925178192:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:43.883221Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7587122880220146046:2249] Ack update: ack to# [1:7587122875925178592:2156], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2025-12-23T18:31:43.883353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-12-23T18:31:43.883543Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122880220145998:2221][/Root] Path was updated to new version: owner# [1:7587122875925178513:2121], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:43.886830Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122880220146106:2287][/Root] Path was updated to new version: owner# [1:7587122880220146097:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:43.887113Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122880220146109:2288][/Root] Path was updated to new version: owner# [1:7587122880220146098:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 
72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:45.964526Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2025-12-23T18:31:45.966222Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122888810080952:2494][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7587122875925178513:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:45.970735Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/8ptz/004d4f/r3tmp/spilling-tmp-runner/node_1_d3b699a3-84d22ee7-1fe70d9c-a92ece75, actor: [1:7587122888810080960:2307] 2025-12-23T18:31:45.975913Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd67pfgx55yqap5qqyef3225", Request has 18444977559003.575738s seconds to be completed 2025-12-23T18:31:45.979397Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd67pfgx55yqap5qqyef3225", Created new session, sessionId: ydb://session/3?node_id=1&id=ZDA3ODRhOWMtZmRmYWQ1MS00MDQ1YTY0Yi0yZTFjOTljNQ==, workerId: [1:7587122888810080979:2325], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:31:45.979623Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd67pfgx55yqap5qqyef3225 2025-12-23T18:31:45.979689Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:31:45.979729Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:31:45.979755Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E1223 18:31:45.981293739 1145260 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:45.981473823 1145260 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:31:45.981404Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122888810080981:2496][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7587122875925178513:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:45.981487Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122888810080982:2497][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7587122875925178513:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E1223 18:31:45.983926805 1145260 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:45.984058052 1145260 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-2 ... 
"colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-12-23T18:34:32.158636Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123604716637902:2798] TxId: 281474976710718. Ctx: { TraceId: 01kd67vkn6at6taa3tb6mpf11q, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NmM4ODllNC05OTg2OTRiOC05ZTgyZTllOS1hN2JiNDk2ZA==, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:34:32.160908Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710718. Ctx: { TraceId: 01kd67vkn6at6taa3tb6mpf11q, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NmM4ODllNC05OTg2OTRiOC05ZTgyZTllOS1hN2JiNDk2ZA==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123604716637907:2813] 2025-12-23T18:34:32.161194Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710718. Ctx: { TraceId: 01kd67vkn6at6taa3tb6mpf11q, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NmM4ODllNC05OTg2OTRiOC05ZTgyZTllOS1hN2JiNDk2ZA==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123604716637908:2814] 2025-12-23T18:34:32.161910Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 62, sender: [9:7587123600421670558:2799], selfId: [9:7587123544587093335:2265], source: [9:7587123600421670556:2798] 2025-12-23T18:34:32.163255Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123604716637914:2798] TxId: 281474976710719. 
Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=NmM4ODllNC05OTg2OTRiOC05ZTgyZTllOS1hN2JiNDk2ZA==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:34:32.164044Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NmM4ODllNC05OTg2OTRiOC05ZTgyZTllOS1hN2JiNDk2ZA==, workerId: [9:7587123600421670556:2798], local sessions count: 1 2025-12-23T18:34:32.254934Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123604716637922:2809] TxId: 281474976710720. Ctx: { TraceId: 01kd67vknxee7g1x93ahgyg1pz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZWVjNjFkMzUtMzAzNjZmMWYtOWQ0ZmJiM2UtZmYyNWY1MTM=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING 
filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-12-23T18:34:32.259440Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710720. Ctx: { TraceId: 01kd67vknxee7g1x93ahgyg1pz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZWVjNjFkMzUtMzAzNjZmMWYtOWQ0ZmJiM2UtZmYyNWY1MTM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123604716637926:2817] 2025-12-23T18:34:32.259719Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710720. Ctx: { TraceId: 01kd67vknxee7g1x93ahgyg1pz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZWVjNjFkMzUtMzAzNjZmMWYtOWQ0ZmJiM2UtZmYyNWY1MTM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123604716637927:2818] 2025-12-23T18:34:32.260132Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd67vknxee7g1x93ahgyg1pz", Forwarded response to sender actor, requestId: 63, sender: [9:7587123600421670587:2808], selfId: [9:7587123544587093335:2265], source: [9:7587123604716637884:2809] 2025-12-23T18:34:32.260705Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=ZWVjNjFkMzUtMzAzNjZmMWYtOWQ0ZmJiM2UtZmYyNWY1MTM=, workerId: [9:7587123604716637884:2809], local sessions count: 0 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 13363, MsgBus: 24348 2025-12-23T18:31:41.198322Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122869753117879:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:41.198590Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d57/r3tmp/tmpaMQ0qv/pdisk_1.dat 2025-12-23T18:31:41.538724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.555205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.555307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.604911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:41.622935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122869753117839:2081] 1766514701194290 != 1766514701194293 2025-12-23T18:31:41.630202Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13363, node 1 2025-12-23T18:31:41.727707Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:41.727734Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:41.727740Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:41.727830Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:41.811955Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24348 TClient is connected to server localhost:24348 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-12-23T18:31:42.255197Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514701686 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:42.370128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:31:42.370504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:31:42.372394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:42.373038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
2025-12-23T18:31:42.376014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766514702421, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:31:42.377069Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7587122869753118362:2248] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2025-12-23T18:31:42.377093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2025-12-23T18:31:42.377128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-12-23T18:31:42.377262Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122869753117807:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.377303Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122869753117810:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.377374Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122869753117813:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.377532Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122869753118476:2287][/Root] Path was updated to new version: owner# [1:7587122869753118469:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.377574Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122869753118283:2207][/Root] Path was updated to new version: owner# [1:7587122869753118118:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.377645Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7587122869753118362:2248] Ack update: ack to# [1:7587122869753118184:2146], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2025-12-23T18:31:42.377806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-12-23T18:31:42.377824Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122869753118477:2288][/Root] Path was updated to new version: owner# [1:7587122869753118470:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 
72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.574416Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E1223 18:31:44.579625175 1144216 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:44.579805734 1144216 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:31:44.579185Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/8ptz/004d57/r3tmp/spilling-tmp-runner/node_1_2cf68a8-2d068c5e-75ac31a2-c19ced9a, actor: [1:7587122882638020567:2305] 2025-12-23T18:31:44.579412Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/8ptz/004d57/r3tmp/spilling-tmp-runner E1223 18:31:44.587667737 1144216 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:44.587783165 1144216 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:31:44.594252Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd67pe20bp06rpg3x1vqtn7f", Request has 18444977559004.957404s seconds to be completed 2025-12-23T18:31:44.595410Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122882638020590:2496][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7587122869753118118:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.596555Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122882638020589:2495][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7587122869753118118:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.596564Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122882638020591:2497][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7587122869753118118:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:44.597836Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2025-12-23T18:31:44.600040Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd67pe20bp06rpg3x1vqtn7f", Created new session, sessionId: ydb://session/3?node_id=1&id=MjY0MmU4OGItNTY5Yzg0ODktMzZiODA3ZTAtY2RiZWJkN2Q=, workerId: [1:7587122882638020613:2327], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:31:44.600291Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd67pe20bp06rpg3x1vqtn7f 2025-12-23T18:31:44.600404Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:31:44.600434Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:31:44.600456Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priori ... { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-12-23T18:34:14.630311Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123527155957793:2734] TxId: 281474976710711. Ctx: { TraceId: 01kd67v2atdt6wg4d87t45vnyw, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZjlkZjY1MjUtODgxMzAyMjMtZGVkY2UxNTUtODVhMjY4YWE=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-12-23T18:34:14.638703Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710711. Ctx: { TraceId: 01kd67v2atdt6wg4d87t45vnyw, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZjlkZjY1MjUtODgxMzAyMjMtZGVkY2UxNTUtODVhMjY4YWE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123527155957798:2742] 2025-12-23T18:34:14.639405Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710711. Ctx: { TraceId: 01kd67v2atdt6wg4d87t45vnyw, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZjlkZjY1MjUtODgxMzAyMjMtZGVkY2UxNTUtODVhMjY4YWE=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7587123527155957799:2743] 2025-12-23T18:34:14.640101Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd67v2atdt6wg4d87t45vnyw", Forwarded response to sender actor, requestId: 54, sender: [9:7587123527155957754:2733], selfId: [9:7587123436961642255:2197], source: [9:7587123527155957755:2734] 2025-12-23T18:34:14.641260Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=ZjlkZjY1MjUtODgxMzAyMjMtZGVkY2UxNTUtODVhMjY4YWE=, workerId: [9:7587123527155957755:2734], local sessions count: 0 2025-12-23T18:34:14.779936Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977558854.771718s seconds to be completed 2025-12-23T18:34:14.795531Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=9&id=NWRhMjhiM2UtYWMzYWUwY2EtYzU5YjgzYTgtNzk3YzE4Y2M=, workerId: [9:7587123527155957810:2747], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:34:14.796041Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T18:34:14.796763Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=NWRhMjhiM2UtYWMzYWUwY2EtYzU5YjgzYTgtNzk3YzE4Y2M=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 56, targetId: [9:7587123527155957810:2747] 2025-12-23T18:34:14.796813Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 56 timeout: 300.000000s actor id: [9:7587123527155957812:3274] |92.4%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::Bench_10Joins [GOOD] Test command err: Trying to start YDB, gRPC: 15939, MsgBus: 14636 2025-12-23T18:32:51.351651Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123170476317331:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:51.351727Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002d21/r3tmp/tmp0B0cYP/pdisk_1.dat 2025-12-23T18:32:51.658797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:32:51.715452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:51.715606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:51.788520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:51.820198Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15939, node 1 2025-12-23T18:32:51.862905Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:32:52.123282Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:32:52.123317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:32:52.123328Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:32:52.123453Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:32:52.370996Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:14636 TClient is connected to server localhost:14636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514771840 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:32:52.923807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:32:55.368801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123187656187347:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:55.368891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:55.369153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123187656187357:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:55.369180Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:55.759515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:32:56.354832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123170476317331:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:32:56.354921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:32:56.425141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7587123191951155132:2329];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:32:56.425142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:32:56.425423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:32:56.425680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:32:56.425812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:32:56.425915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:32:56.426007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:32:56.426097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:32:56.426192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:32:56.426284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037950;self_id=[1:7587123191951155132:2329];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:32:56.426295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:32:56.426736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:32:56.427869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:32:56.427879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7587123191951155132:2329];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:32:56.427990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7587123191951155132:2329];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:32:56.428019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:32:56.428087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7587123191951155132:2329];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:32:56.428129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;self_id=[1:7587123191951155139:2334];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:32:56.428179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7587123191951155132:2329];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:32:56.428265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7587123191951155132:2329];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:32:56.428356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7587123191951155132:2329];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:32:56.428450Z node 1 :TX_COLUMNSHARD WA ... 
pp:440: Table profiles were not loaded 2025-12-23T18:33:59.663069Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7587123465605976999:2081] 1766514839198425 != 1766514839198428 2025-12-23T18:33:59.689138Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:59.689284Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:59.692918Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:59.725877Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 3260, node 5 2025-12-23T18:33:59.979498Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:59.979524Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:59.979535Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:59.979656Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:34:00.154874Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:34:00.370900Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6473 TClient is connected to server localhost:6473 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514839859 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:01.192882Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:34:01.212399Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:34:04.206973Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587123465605977021:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:04.207064Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:34:05.310911Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587123491375781676:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:05.311028Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:05.313172Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587123491375781686:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:05.313254Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:05.637146Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:05.727287Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:05.814113Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:05.876739Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:05.928193Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:06.015997Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:06.079150Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:06.169552Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:06.245459Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:06.325057Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:06.423260Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587123495670749717:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:06.423389Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:06.423780Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587123495670749722:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:06.423844Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587123495670749723:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:06.424185Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:06.429401Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:34:06.451282Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587123495670749726:2408], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-12-23T18:34:06.540875Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587123495670749777:3026] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 51], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:34:08.034134Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:34:08.034176Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/incrhuge/ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 32522, MsgBus: 26024 2025-12-23T18:31:41.412934Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122871051579232:2189];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:41.414123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:31:41.438122Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d52/r3tmp/tmpRPOaeB/pdisk_1.dat 2025-12-23T18:31:41.749243Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122871051579081:2081] 1766514701400827 != 1766514701400830 2025-12-23T18:31:41.786169Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:41.792041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:41.792548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:41.792681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:41.823606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32522, node 1 2025-12-23T18:31:42.003587Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:42.003610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:42.003618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-12-23T18:31:42.003728Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:42.057628Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26024 2025-12-23T18:31:42.429399Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:26024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514701910 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:42.694703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:31:42.695169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:31:42.708065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:42.708845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:31:42.711156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766514702757, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:31:42.712244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2025-12-23T18:31:42.712309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-12-23T18:31:42.712520Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7587122871051579616:2254] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2025-12-23T18:31:42.712913Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122871051579049:2049] 
Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.713155Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122871051579729:2288][/Root] Path was updated to new version: owner# [1:7587122871051579715:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.713940Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122871051579052:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.714064Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122871051579055:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:42.714283Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122871051579584:2234][/Root] Path was updated to new version: owner# [1:7587122871051579370:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.714699Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122871051579721:2287][/Root] Path was updated to new version: owner# [1:7587122871051579714:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:42.715294Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7587122871051579616:2254] Ack update: ack to# [1:7587122871051579488:2184], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2025-12-23T18:31:42.715572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 waiting... 
2025-12-23T18:31:42.717959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:31:45.086472Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2025-12-23T18:31:45.088923Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122888231449110:2495][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7587122871051579370:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:45.089258Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/8ptz/004d52/r3tmp/spilling-tmp-runner/node_1_7ca75f7e-4392a5e5-504ec510-35a038f6, actor: [1:7587122888231449117:2307] 2025-12-23T18:31:45.089432Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/8ptz/004d52/r3tmp/spilling-tmp-runner 2025-12-23T18:31:45.094834Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd67pect5797xz10rbyq44q4", Request has 18444977559004.456898s seconds to be completed 2025-12-23T18:31:45.100821Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122888231449137:2497][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7587122871051579370:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:45.100907Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122888231449138:2498][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7587122871051579370:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E1223 18:31:45.106865039 1144517 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:45.107052476 1144517 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1223 18:31:45.109367119 1144517 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:45.109615109 1144517 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:31:45.111566Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd67pect5797xz10rbyq44q4", Created new session, sessionId: ydb://session/3?node_id=1&id=NGVmYjdjNmItMmZlNTRhYjItYjc2ZTE5ZTktM2NlN2M4ZA==, workerId: [1:7587122888231449154:2327], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:31:45.111858Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd67pect5797xz10rbyq44q4 2025-12-23T18:31:45.111946Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025 ... 
arison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-12-23T18:34:14.297404Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123527479468776:2727] TxId: 281474976710709. Ctx: { TraceId: 01kd67v20jb1nnpeeqzh2pdaxc, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YzdlNTZkNTktNzZlMmMyNmUtNmFlMTBkZTctYjZiZjc0Mzg=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 2025-12-23T18:34:14.307410Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710709. Ctx: { TraceId: 01kd67v20jb1nnpeeqzh2pdaxc, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YzdlNTZkNTktNzZlMmMyNmUtNmFlMTBkZTctYjZiZjc0Mzg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123527479468780:2733] 2025-12-23T18:34:14.308159Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710709. Ctx: { TraceId: 01kd67v20jb1nnpeeqzh2pdaxc, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YzdlNTZkNTktNzZlMmMyNmUtNmFlMTBkZTctYjZiZjc0Mzg=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7587123527479468781:2734] 2025-12-23T18:34:14.309027Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd67v20jb1nnpeeqzh2pdaxc", Forwarded response to sender actor, requestId: 52, sender: [9:7587123523184501455:2726], selfId: [9:7587123437285153322:2260], source: [9:7587123523184501456:2727] 2025-12-23T18:34:14.311289Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=YzdlNTZkNTktNzZlMmMyNmUtNmFlMTBkZTctYjZiZjc0Mzg=, workerId: [9:7587123523184501456:2727], local sessions count: 0 2025-12-23T18:34:14.353670Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977558855.197969s seconds to be completed 2025-12-23T18:34:14.356881Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=9&id=Y2JhNzVkMGUtM2MyNGVmNGYtYzlmY2RmMmItZDc0Njc5NmU=, workerId: [9:7587123527479468789:2737], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:34:14.357313Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T18:34:14.357697Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977558855.193939s seconds to be completed 2025-12-23T18:34:14.360795Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=9&id=N2M1ZTRjNjMtZGM1YzgyZDAtNDE1YjFmMDctZWI0ZTNmOTE=, workerId: [9:7587123527479468792:2740], database: /Root, longSession: 1, local sessions count: 2 2025-12-23T18:34:14.361108Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T18:34:14.361283Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2JhNzVkMGUtM2MyNGVmNGYtYzlmY2RmMmItZDc0Njc5NmU=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 55, targetId: [9:7587123527479468789:2737] 2025-12-23T18:34:14.361331Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 55 timeout: 300.000000s actor id: [9:7587123527479468793:3266] 2025-12-23T18:34:14.361631Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=N2M1ZTRjNjMtZGM1YzgyZDAtNDE1YjFmMDctZWI0ZTNmOTE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 56, targetId: [9:7587123527479468792:2740] 2025-12-23T18:34:14.361677Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 56 timeout: 300.000000s actor id: [9:7587123527479468795:3267] 2025-12-23T18:34:14.588622Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123527479468824:2737] TxId: 281474976710710. Ctx: { TraceId: 01kd67v2es2khed55d01w90hg8, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2JhNzVkMGUtM2MyNGVmNGYtYzlmY2RmMmItZDc0Njc5NmU=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:34:14.591932Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710710. 
Ctx: { TraceId: 01kd67v2es2khed55d01w90hg8, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2JhNzVkMGUtM2MyNGVmNGYtYzlmY2RmMmItZDc0Njc5NmU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123527479468829:2754] 2025-12-23T18:34:14.592476Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710710. Ctx: { TraceId: 01kd67v2es2khed55d01w90hg8, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2JhNzVkMGUtM2MyNGVmNGYtYzlmY2RmMmItZDc0Njc5NmU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123527479468828:2753] 2025-12-23T18:34:14.593423Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 55, sender: [9:7587123527479468791:2739], selfId: [9:7587123437285153322:2260], source: [9:7587123527479468789:2737] 2025-12-23T18:34:14.595020Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123527479468835:2737] TxId: 281474976710711. Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2JhNzVkMGUtM2MyNGVmNGYtYzlmY2RmMmItZDc0Njc5NmU=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:34:14.596054Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=Y2JhNzVkMGUtM2MyNGVmNGYtYzlmY2RmMmItZDc0Njc5NmU=, workerId: [9:7587123527479468789:2737], local sessions count: 1 2025-12-23T18:34:14.601995Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123527479468843:2740] TxId: 281474976710712. Ctx: { TraceId: 01kd67v2es5btngqebtvwnshyr, Database: /Root, SessionId: ydb://session/3?node_id=9&id=N2M1ZTRjNjMtZGM1YzgyZDAtNDE1YjFmMDctZWI0ZTNmOTE=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:34:14.606229Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710712. Ctx: { TraceId: 01kd67v2es5btngqebtvwnshyr, Database: /Root, SessionId: ydb://session/3?node_id=9&id=N2M1ZTRjNjMtZGM1YzgyZDAtNDE1YjFmMDctZWI0ZTNmOTE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123527479468847:2757] 2025-12-23T18:34:14.606676Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710712. Ctx: { TraceId: 01kd67v2es5btngqebtvwnshyr, Database: /Root, SessionId: ydb://session/3?node_id=9&id=N2M1ZTRjNjMtZGM1YzgyZDAtNDE1YjFmMDctZWI0ZTNmOTE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123527479468848:2758] 2025-12-23T18:34:14.607808Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 56, sender: [9:7587123527479468794:2741], selfId: [9:7587123437285153322:2260], source: [9:7587123527479468792:2740] 2025-12-23T18:34:14.609420Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123527479468854:2740] TxId: 281474976710713. Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=N2M1ZTRjNjMtZGM1YzgyZDAtNDE1YjFmMDctZWI0ZTNmOTE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. 
Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:34:14.609990Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=N2M1ZTRjNjMtZGM1YzgyZDAtNDE1YjFmMDctZWI0ZTNmOTE=, workerId: [9:7587123527479468792:2740], local sessions count: 0 >> DataShardSnapshots::DelayedWriteReadableAfterSplit |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 8817, MsgBus: 19580 2025-12-23T18:31:41.997215Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587122868878206981:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:31:41.997551Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d50/r3tmp/tmpZ0rH3l/pdisk_1.dat 2025-12-23T18:31:42.286875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:31:42.331869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:42.331965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:42.469511Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587122868878206866:2081] 1766514701965885 != 1766514701965888 2025-12-23T18:31:42.477851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:42.481487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8817, node 1 2025-12-23T18:31:42.644440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:31:42.703547Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:42.703570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:42.703585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:42.705734Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19580 2025-12-23T18:31:43.001587Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19580 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514702547 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:31:43.263124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:31:43.263550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:31:43.265355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:43.266079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root waiting... 
2025-12-23T18:31:43.268228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766514703317, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:31:43.269471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2025-12-23T18:31:43.269517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-12-23T18:31:43.269761Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7587122873173174685:2246] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2025-12-23T18:31:43.269933Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122868878206834:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:43.269975Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122868878206837:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:43.270032Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7587122868878206840:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2025-12-23T18:31:43.270206Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122873173174590:2195][/Root] Path was updated to new version: owner# [1:7587122873173174448:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:43.270232Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7587122873173174685:2246] Ack update: ack to# [1:7587122873173174504:2142], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2025-12-23T18:31:43.270233Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122873173174816:2288][/Root] Path was updated to new version: owner# [1:7587122873173174808:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:43.270403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-12-23T18:31:43.270545Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7587122873173174814:2287][/Root] Path was updated to new version: owner# [1:7587122873173174807:2282], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 
72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:45.448325Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2025-12-23T18:31:45.449433Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/8ptz/004d50/r3tmp/spilling-tmp-runner/node_1_4bef97be-adb6623-33ff5658-a3337d19, actor: [1:7587122886058076894:2305] 2025-12-23T18:31:45.449637Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/8ptz/004d50/r3tmp/spilling-tmp-runner E1223 18:31:45.455957732 1144923 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:45.456139886 1144923 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E1223 18:31:45.458509182 1144923 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:31:45.458621627 1144923 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:31:45.454855Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122886058076916:2497][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7587122873173174448:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:45.461007Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd67pext4my0c0vrm1sjnpff", Request has 18444977559004.090637s seconds to be completed 2025-12-23T18:31:45.462511Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122886058076925:2499][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7587122873173174448:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:45.462592Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587122886058076926:2500][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7587122873173174448:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:31:45.464495Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd67pext4my0c0vrm1sjnpff", Created new session, sessionId: ydb://session/3?node_id=1&id=NGVkMWI1NjYtY2Q5NDAyNTgtNTViMmUzZTEtZWQxYWM4NmU=, workerId: [1:7587122886058076940:2327], database: /Root, longSession: 1, local sessions count: 1 2025-12-23T18:31:45.464702Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd67pext4my0c0vrm1sjnpff 2025-12-23T18:31:45.465015Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:31:45.465052Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:31:45.465090Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2025-12-23T18:31:45.465276Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create sec ... : false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2025-12-23T18:34:18.033317Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123544170180628:2728] TxId: 281474976710709. Ctx: { TraceId: 01kd67v5qv45j73q8mk2q38zmr, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZmY2NmVhYjktZmMyN2FlYTgtNWZjY2U1NTQtZjMzNjI2ZjY=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 2, useFollowers: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column 
{ name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2025-12-23T18:34:18.067799Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977558851.483849s seconds to be completed 2025-12-23T18:34:18.073310Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=9&id=YzNmZjNlMzctNmE0N2MyNzktMjA5ODBmOTUtZDllZTQ2ODA=, workerId: [9:7587123544170180640:2736], database: /Root, longSession: 1, local sessions count: 2 2025-12-23T18:34:18.073700Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T18:34:18.074324Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=YzNmZjNlMzctNmE0N2MyNzktMjA5ODBmOTUtZDllZTQ2ODA=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 54, targetId: [9:7587123544170180640:2736] 2025-12-23T18:34:18.074409Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 54 timeout: 300.000000s actor id: [9:7587123544170180642:3266] 2025-12-23T18:34:18.088397Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710709. Ctx: { TraceId: 01kd67v5qv45j73q8mk2q38zmr, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZmY2NmVhYjktZmMyN2FlYTgtNWZjY2U1NTQtZjMzNjI2ZjY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123544170180632:2732] 2025-12-23T18:34:18.088510Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710709. Ctx: { TraceId: 01kd67v5qv45j73q8mk2q38zmr, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZmY2NmVhYjktZmMyN2FlYTgtNWZjY2U1NTQtZjMzNjI2ZjY=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123544170180633:2733] 2025-12-23T18:34:18.088926Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd67v5qv45j73q8mk2q38zmr", Forwarded response to sender actor, requestId: 52, sender: [9:7587123539875213312:2727], selfId: [9:7587123453975865155:2221], source: [9:7587123539875213313:2728] 2025-12-23T18:34:18.090206Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=ZmY2NmVhYjktZmMyN2FlYTgtNWZjY2U1NTQtZjMzNjI2ZjY=, workerId: [9:7587123539875213313:2728], local sessions count: 1 2025-12-23T18:34:18.265271Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123544170180666:2736] TxId: 281474976710710. Ctx: { TraceId: 01kd67v62t79j8a1yq2r5pdbev, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YzNmZjNlMzctNmE0N2MyNzktMjA5ODBmOTUtZDllZTQ2ODA=, PoolId: default, IsStreamingQuery: 0}. Total tasks: 2, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:34:18.267218Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710710. 
Ctx: { TraceId: 01kd67v62t79j8a1yq2r5pdbev, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YzNmZjNlMzctNmE0N2MyNzktMjA5ODBmOTUtZDllZTQ2ODA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123544170180670:2746] 2025-12-23T18:34:18.267658Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710710. Ctx: { TraceId: 01kd67v62t79j8a1yq2r5pdbev, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YzNmZjNlMzctNmE0N2MyNzktMjA5ODBmOTUtZDllZTQ2ODA=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7587123544170180672:2747] 2025-12-23T18:34:18.268646Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 54, sender: [9:7587123544170180641:2737], selfId: [9:7587123453975865155:2221], source: [9:7587123544170180640:2736] 2025-12-23T18:34:18.269839Z node 9 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [9:7587123544170180677:2736] TxId: 281474976710711. Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=9&id=YzNmZjNlMzctNmE0N2MyNzktMjA5ODBmOTUtZDllZTQ2ODA=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:34:18.270284Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=YzNmZjNlMzctNmE0N2MyNzktMjA5ODBmOTUtZDllZTQ2ODA=, workerId: [9:7587123544170180640:2736], local sessions count: 0 E1223 18:34:18.327210972 1176307 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:34:18.327416172 1176307 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:34:18.366555Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest |92.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |92.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |92.4%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink |92.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |92.4%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |92.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |92.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |92.4%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |92.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |92.4%| [TA] $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBoardSubscriber2DCTest::ReconnectReplica >> TBoardSubscriber2DCTest::DropByDisconnect >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink >> TBoardSubscriber2DCTest::NotAvailableByShutdown |92.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |92.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |92.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |92.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |92.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |92.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp >> TBoardSubscriberTest::ReconnectReplica |92.5%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |92.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> TOlap::CreateStore >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 >> TOlap::CreateTableWithNullableKeysNotAllowed >> TOlap::CreateStoreWithDirs |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 >> TOlapNaming::AlterColumnTableFailed >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |92.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |92.5%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |92.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitions >> TSchemeShardTopicSplitMergeTest::MargePartitions >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 |92.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithDuplicatePartition >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidThreeChildren |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] |92.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TOlap::CreateTableWithNullableKeysNotAllowed [GOOD] >> TOlap::CreateTableWithNullableKeys >> TOlap::CreateStoreWithDirs [GOOD] >> TOlap::CreateTable >> TOlap::CreateStore [GOOD] >> TOlap::CreateDropTable >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithDuplicatePartition [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlap >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidThreeChildren [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithWrongPartition >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsWithOverlap >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionTo >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToGapIndices >> TOlap::CreateTableWithNullableKeys [GOOD] >> TOlap::CustomDefaultPresets >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> TOlap::CreateTable [GOOD] >> TOlap::CreateTableTtl >> TOlap::CreateDropTable [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlap [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlap >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsWithOverlap [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsMixed >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithWrongPartition [GOOD] >> 
TSchemeShardTopicSplitMergeTest::Boot >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:233:2060] recipient: [1:227:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:233:2060] recipient: [1:227:2145] Leader for TabletID 72057594046678944 is [1:244:2156] sender: [1:245:2060] recipient: [1:227:2145] 2025-12-23T18:33:53.632095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:53.632208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:53.632271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:53.632332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:53.632386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:53.632418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:53.632509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:53.632626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:53.633609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:53.633962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:53.734812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:53.734890Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:53.746496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:53.747103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:53.747321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:53.760555Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:53.761062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:53.761861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:53.762224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:53.765974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:53.766198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:53.767412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:53.767484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:53.767688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:53.767766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:53.767822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:53.767964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:53.927864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.929117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.929285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.929372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.929514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.929599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.929672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.929763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.929873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.929959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.930090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: 
ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.930212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.930312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.930393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:53.930471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rts: 2/3 2025-12-23T18:34:46.630248Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:2 progress is 2/3 2025-12-23T18:34:46.630286Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-12-23T18:34:46.630329Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-12-23T18:34:46.630557Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:34:46.630603Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:34:46.630636Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 104:0 2025-12-23T18:34:46.630720Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:1233:3016] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-12-23T18:34:46.630867Z node 7 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [7:243:2156], Recipient [7:1233:3016]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-12-23T18:34:46.630904Z node 7 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-12-23T18:34:46.630936Z node 7 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 104 datashard 72075186233409549 state Ready 2025-12-23T18:34:46.630997Z node 7 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186233409549 Got TEvSchemaChangedResult from SS at 72075186233409549 2025-12-23T18:34:46.631297Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [7:243:2156], Recipient [7:243:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:34:46.631335Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:34:46.631393Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:34:46.631434Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:34:46.631502Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:34:46.631529Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2025-12-23T18:34:46.631557Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-12-23T18:34:46.631589Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2025-12-23T18:34:46.631617Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-12-23T18:34:46.631646Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 
2025-12-23T18:34:46.631709Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:986:2806] message: TxId: 104 2025-12-23T18:34:46.631775Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-12-23T18:34:46.631846Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:34:46.631888Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:34:46.632034Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 4 2025-12-23T18:34:46.632074Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2025-12-23T18:34:46.632099Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:1 2025-12-23T18:34:46.632131Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2025-12-23T18:34:46.632155Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2025-12-23T18:34:46.632175Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:2 2025-12-23T18:34:46.632223Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 3 2025-12-23T18:34:46.636166Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:34:46.636365Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:34:46.636463Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:986:2806] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2025-12-23T18:34:46.636622Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:34:46.636666Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1284:3053] 2025-12-23T18:34:46.636884Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [7:1286:3055], Recipient [7:243:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:34:46.636942Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:34:46.636976Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-12-23T18:34:46.640477Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122432, Sender [8:960:2104], Recipient [7:243:2156] 2025-12-23T18:34:46.640560Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5258: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-12-23T18:34:46.643788Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:34:46.644355Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:34:46.644444Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:34:46.644722Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:34:46.647257Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:34:46.647608Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2025-12-23T18:34:46.647677Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-12-23T18:34:46.648174Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:34:46.648228Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:34:46.648620Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [7:1359:3128], Recipient [7:243:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:34:46.648685Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:34:46.648727Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:34:46.648901Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received 
event# 271124996, Sender [7:986:2806], Recipient [7:243:2156]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2025-12-23T18:34:46.648949Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-12-23T18:34:46.649034Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:34:46.649144Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:34:46.649190Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1357:3126] 2025-12-23T18:34:46.649415Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [7:1359:3128], Recipient [7:243:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:34:46.649461Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:34:46.649497Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TOlap::CustomDefaultPresets [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToGapIndices [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToDifferentOrder >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlap [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionTo [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithManyPartition >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 >> TOlap::CreateTableTtl [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsMixed [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitInactivePartition >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CustomDefaultPresets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:43.482588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:43.482687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:43.482727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:43.482784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:43.482828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:43.482861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:43.482938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:43.483065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:43.483945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:43.484259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:43.563498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:43.563561Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:43.568928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:43.569164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:43.569301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:43.572034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:43.572218Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:43.572877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:43.573089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:43.575212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:43.575366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:43.576569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:43.576638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:43.576757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:43.576803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:43.576840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:43.576952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:43.718880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.723163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.723345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.723459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.723532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.723596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.723690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.723766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.723872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.723948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.724047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.724136Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.724212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.724297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.724388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
shToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 39 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2025-12-23T18:34:48.372483Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.372533Z node 3 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:461: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:34:48.372598Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:487: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-12-23T18:34:48.373369Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:34:48.373474Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:34:48.373513Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:34:48.373568Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:34:48.373615Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:34:48.374043Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:34:48.374108Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:34:48.374134Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:34:48.374160Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2025-12-23T18:34:48.374185Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:34:48.374248Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:34:48.381476Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-12-23T18:34:48.381588Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:39 msg type: 268697639 2025-12-23T18:34:48.381662Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2025-12-23T18:34:48.381973Z node 3 :HIVE INFO: tablet_helpers.cpp:1623: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2025-12-23T18:34:48.382209Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6411: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.382334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2025-12-23T18:34:48.383437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:34:48.383543Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:34:48.384639Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.396764Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6585: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2025-12-23T18:34:48.396838Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:34:48.396963Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:34:48.399228Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.399429Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.399476Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:34:48.399611Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:34:48.399652Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:34:48.399703Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:34:48.399741Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:34:48.399783Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:34:48.399866Z node 
3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:753:2729] message: TxId: 102 2025-12-23T18:34:48.399916Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:34:48.399996Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:34:48.400033Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:34:48.400182Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:34:48.402700Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:34:48.402774Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:817:2785] TestWaitNotification: OK eventTxId 102 2025-12-23T18:34:48.403340Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:34:48.403606Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 300us result status StatusSuccess 2025-12-23T18:34:48.404135Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 
MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 38 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:44.656272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:44.656363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:44.656403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:44.656438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:44.656495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:44.656527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, 
limit 10000 2025-12-23T18:34:44.656591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:44.656654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:44.657529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:44.657842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:44.831056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:44.831110Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.836231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:44.836421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:44.836542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:44.839157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:44.839341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:44.840108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:44.840358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:44.842132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:44.842315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:44.843471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:44.843526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:44.843658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:44.843700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:44.843750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:44.843898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:45.116926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.118099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.118200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.118273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.118333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.118414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.118486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.118549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.118620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.118709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.118814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.118903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.118969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.119052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:45.119131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
eshard: 72057594046678944, cookie: 281474976710672 2025-12-23T18:34:48.920959Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710672 2025-12-23T18:34:48.921042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [3:214:2214] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/streaming_queries' 2025-12-23T18:34:48.921088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [3:214:2214] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished 2025-12-23T18:34:48.927347Z node 3 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2025-12-23T18:34:48.937078Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:34:48.937286Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.937473Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:34:48.937519Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:34:48.937709Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:34:48.937769Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:48.939779Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:48.939948Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:34:48.940186Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.940257Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:34:48.940298Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:34:48.940336Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 
2025-12-23T18:34:48.942619Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.942683Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:34:48.942724Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:34:48.944183Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.944236Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.944284Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:34:48.944337Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:34:48.944470Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:34:48.945834Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:34:48.945968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:34:48.946262Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:48.946395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 12884904047 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:34:48.946465Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:34:48.946724Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:34:48.946811Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:34:48.946986Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-12-23T18:34:48.947054Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:34:48.948621Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:48.948683Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:48.948871Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:48.948921Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:34:48.949215Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.949278Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:34:48.949383Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:34:48.949422Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:34:48.949461Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:34:48.949495Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:34:48.949533Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:34:48.949592Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:34:48.949637Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:34:48.949669Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:34:48.949735Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:34:48.949774Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:34:48.949807Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:34:48.950464Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:34:48.950567Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:34:48.950602Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:34:48.950642Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:34:48.950686Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:34:48.950785Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:34:48.953314Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:34:48.953786Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:43.859221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:43.859306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:43.859345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:43.859370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:43.859397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:43.859418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:43.859455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:43.859534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-12-23T18:34:43.860175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:43.860408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:43.939739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:43.939803Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:43.952066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:43.952422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:43.952563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:43.956089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:43.956302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:43.957083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:43.957334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:43.959554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:43.959738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:43.961020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:43.961083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:43.961234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:43.961287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:43.961332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:43.961461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:44.132976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.133870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.133954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.134008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.134044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.134079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.134131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.134180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.134232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.134271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.134326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.134387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.134431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.134487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.134555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
, tablet: 72075186233409548, partId: 0 2025-12-23T18:34:48.711410Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2025-12-23T18:34:48.711500Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2025-12-23T18:34:48.711562Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.711613Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:34:48.711834Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2025-12-23T18:34:48.712021Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:34:48.712093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:48.715414Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.716371Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:48.716414Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:34:48.716560Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:34:48.716773Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:48.716813Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 104, path id: 38 2025-12-23T18:34:48.716854Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 104, path id: 39 2025-12-23T18:34:48.717825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.717878Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:34:48.718000Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:34:48.718041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:34:48.718089Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:34:48.718133Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:34:48.718176Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-12-23T18:34:48.718244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:34:48.718292Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:34:48.718334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:34:48.718513Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:34:48.718569Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-12-23T18:34:48.718609Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2025-12-23T18:34:48.718648Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2025-12-23T18:34:48.720062Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:34:48.720154Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:34:48.720189Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:34:48.720240Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:34:48.720295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:34:48.720821Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:34:48.720899Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:34:48.720930Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:34:48.720961Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-12-23T18:34:48.720992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:48.721063Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:34:48.725201Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:34:48.725316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:34:48.725587Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:34:48.725652Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:34:48.726045Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:34:48.726146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:34:48.726192Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:998:2917] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2025-12-23T18:34:48.730112Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:34:48.730373Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.730580Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2025-12-23T18:34:48.733142Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 
2025-12-23T18:34:48.733452Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-12-23T18:34:48.733819Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:34:48.733870Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:34:48.734278Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:34:48.734383Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:34:48.734424Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1005:2924] TestWaitNotification: OK eventTxId 105 >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_WriteEncoded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:43.504878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:43.504982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:43.505019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:43.505053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:43.505089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:43.505120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:43.505170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:43.505264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:43.506224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:43.506502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:43.613729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:43.613809Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:43.636736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:43.637075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:43.637234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:43.655622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:43.655857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:43.656638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:43.656862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:43.659432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:43.659624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:43.660933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:43.661018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:43.661180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:43.661238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:43.661282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:43.661419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:43.808660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:34:43.809682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.809812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.809908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.809976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.810038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.810134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.810208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.810292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.810364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.810449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.810568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.810640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.810717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.810860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 105 CreateStep: 5000042 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 360 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } Version: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 38 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-12-23T18:34:49.213082Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 3600000000 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } } ColumnShardCount: 1 } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:34:49.213464Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: 
TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:34:49.216336Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 38], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2025-12-23T18:34:49.216484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 0 2025-12-23T18:34:49.216541Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 106:0 type: TxCreateColumnTable target path: [OwnerId: 72057594046678944, LocalPathId: 43] source path: 2025-12-23T18:34:49.216861Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2025-12-23T18:34:49.217242Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:34:49.217331Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:747) 2025-12-23T18:34:49.217482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2025-12-23T18:34:49.217568Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2025-12-23T18:34:49.225201Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusAccepted TxId: 106 SchemeshardId: 72057594046678944 PathId: 43, at schemeshard: 72057594046678944 2025-12-23T18:34:49.225532Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/ 2025-12-23T18:34:49.225900Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:49.225956Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:34:49.226283Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2025-12-23T18:34:49.226422Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:49.226478Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:214:2214], at schemeshard: 72057594046678944, txId: 106, path id: 
38 2025-12-23T18:34:49.226547Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:214:2214], at schemeshard: 72057594046678944, txId: 106, path id: 43 2025-12-23T18:34:49.227092Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:34:49.227170Z node 3 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:237: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState at tabletId# 72057594046678944 2025-12-23T18:34:49.227397Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:323: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2025-12-23T18:34:49.228290Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-12-23T18:34:49.228430Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-12-23T18:34:49.228503Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-12-23T18:34:49.228557Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 10 2025-12-23T18:34:49.228624Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2025-12-23T18:34:49.229457Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2025-12-23T18:34:49.229577Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2025-12-23T18:34:49.229618Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-12-23T18:34:49.229653Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 1 2025-12-23T18:34:49.229684Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2025-12-23T18:34:49.229777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-12-23T18:34:49.248434Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 
275382272 2025-12-23T18:34:49.248656Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2025-12-23T18:34:49.250457Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;self_id=[3:717:2702];ev=NActors::IEventHandle;tablet_id=72075186233409546;tx_id=106;this=136696601801088;method=TTxController::StartProposeOnExecute;tx_info=106:TX_KIND_SCHEMA;min=5000043;max=18446744073709551615;plan=0;src=[3:129:2153];cookie=12:5;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:34:49.251150Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-12-23T18:34:49.251762Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:43.984358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:43.984431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:43.984462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:43.984486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:43.984535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:43.984587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:43.984638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:43.984699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:43.985301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:43.985529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:44.078262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:44.078333Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.107634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:44.107972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:44.108114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:44.121282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:44.121524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:44.122327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:44.122614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:44.125207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:44.125420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:44.126907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:44.126974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:44.127103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:44.127154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:44.127196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:44.127376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:44.348132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.349228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.349370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.349496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.349572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.349656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.349767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.349853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.349962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.350042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.350112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.350222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.350303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.350392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.350581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Step: 250 2025-12-23T18:34:49.584511Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2025-12-23T18:34:49.584586Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:34:49.584640Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:34:49.584941Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2025-12-23T18:34:49.585219Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:49.589168Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:49.589811Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:49.589873Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:34:49.590215Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:49.590262Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 105, path id: 39 2025-12-23T18:34:49.590365Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:49.590414Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2025-12-23T18:34:49.590532Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:34:49.590573Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:49.590620Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:34:49.590668Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:49.590716Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-12-23T18:34:49.590781Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:49.590830Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2025-12-23T18:34:49.590869Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:0 2025-12-23T18:34:49.591041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:34:49.591089Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-12-23T18:34:49.591128Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2025-12-23T18:34:49.595150Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:34:49.595282Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:34:49.595324Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:34:49.595379Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2025-12-23T18:34:49.595427Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:49.595535Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-12-23T18:34:49.595583Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:820:2786] 2025-12-23T18:34:49.599446Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:34:49.599594Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:34:49.599648Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1039:2952] TestWaitNotification: OK eventTxId 105 2025-12-23T18:34:49.600408Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:34:49.600705Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 321us result status StatusSuccess 2025-12-23T18:34:49.601757Z node 3 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" 
ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:44.211782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:44.211886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:44.211932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:44.211975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:44.212014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:44.212046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:44.212129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:44.212252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:44.213104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:44.213444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:44.314383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:44.314447Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.321073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:44.321291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:44.321418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:44.324658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:44.324833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:44.325617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:44.325851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:44.328020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:44.328217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:44.329456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:44.329518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:44.329681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:44.329735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:44.329782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:44.329913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:44.494508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.495621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.495742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.495823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.495881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.495948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.496033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.496128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.496200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.496276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.496352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.496445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.496533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.496616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.496700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-12-23T18:34:49.520915Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-12-23T18:34:49.539657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-12-23T18:34:49.539974Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:34:49.540023Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:34:49.540423Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:34:49.540467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-12-23T18:34:49.540508Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:34:49.601017Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 250, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:49.601171Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:34:49.601233Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 250, at tablet: 72057594046678944 2025-12-23T18:34:49.601288Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-12-23T18:34:49.623108Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-12-23T18:34:49.623321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2025-12-23T18:34:49.623405Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2025-12-23T18:34:49.623464Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:34:49.623517Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:34:49.623699Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2025-12-23T18:34:49.623906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:49.627727Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:49.628672Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:49.628742Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:34:49.629085Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:49.629134Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 105, path id: 39 2025-12-23T18:34:49.630116Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:49.630187Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2025-12-23T18:34:49.630308Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:34:49.630360Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:49.630404Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:34:49.630441Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:49.630485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-12-23T18:34:49.630537Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:49.630583Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2025-12-23T18:34:49.630649Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:0 2025-12-23T18:34:49.630840Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:34:49.630892Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-12-23T18:34:49.630933Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 
72057594046678944, LocalPathId: 39], 3 2025-12-23T18:34:49.632168Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:34:49.632290Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:34:49.632340Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:34:49.632389Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2025-12-23T18:34:49.632441Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:49.632550Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-12-23T18:34:49.632607Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:820:2786] 2025-12-23T18:34:49.635147Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:34:49.635265Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:34:49.635313Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1033:2951] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2025-12-23T18:34:49.639165Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:34:49.639437Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:34:49.639661Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2025-12-23T18:34:49.642369Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:34:49.642684Z node 3 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-12-23T18:34:49.643175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-12-23T18:34:49.643228Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-12-23T18:34:49.643733Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-12-23T18:34:49.643855Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:34:49.643899Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:1084:2986] TestWaitNotification: OK eventTxId 106 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToDifferentOrder [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidSingleChild >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitInactivePartition [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlapAndCreateRootLevelSibling ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:44.608395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:44.608496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:44.608536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:44.608571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 
2025-12-23T18:34:44.608607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:44.608638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:44.608687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:44.608767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:44.609370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:44.609596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:44.690796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:44.690853Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.697798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:44.698044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:44.698196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:44.702703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:44.702927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:44.703702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:44.703930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:44.706138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:44.706321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:44.707637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:44.707704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:44.707856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:44.707910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:44.707954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:44.708119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:44.878946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.880044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.880175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.880259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.880324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.880406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.880494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.880570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.880653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.880724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.880810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.880914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.880985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.881093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.881172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... chemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:50.037025Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:50.037071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:34:50.037378Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:50.037423Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 105, path id: 39 2025-12-23T18:34:50.043483Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:50.043573Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2025-12-23T18:34:50.043749Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:34:50.043800Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:50.043878Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:34:50.043924Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:50.043974Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-12-23T18:34:50.044039Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:50.044100Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2025-12-23T18:34:50.044148Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:0 2025-12-23T18:34:50.044361Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:34:50.044420Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-12-23T18:34:50.044472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2025-12-23T18:34:50.051602Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:34:50.051739Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:34:50.051788Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:34:50.051867Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2025-12-23T18:34:50.051934Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:50.052077Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-12-23T18:34:50.063748Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-12-23T18:34:50.064114Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:34:50.064175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:34:50.064652Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:34:50.064781Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:34:50.064835Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1077:2986] TestWaitNotification: OK eventTxId 105 2025-12-23T18:34:50.682813Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:34:50.683173Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 394us result status StatusSuccess 2025-12-23T18:34:50.683988Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } 
YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2025-12-23T18:34:50.688716Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:34:50.688990Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:34:50.689214Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944 2025-12-23T18:34:50.697662Z 
node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:34:50.698036Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-12-23T18:34:50.698546Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-12-23T18:34:50.698610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-12-23T18:34:50.699222Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-12-23T18:34:50.699369Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:34:50.699438Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:1091:3000] TestWaitNotification: OK eventTxId 106 >> TAccessServiceTest::Authenticate >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] Test command err: 2025-12-23T18:33:23.694008Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1766514803693972 2025-12-23T18:33:24.408870Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123311386985213:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:24.447560Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:24.609009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:33:24.615165Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:33:24.539609Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123313103699023:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:24.539657Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:24.725491Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:33:24.725905Z node 2 
:PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005a17/r3tmp/tmpPYc8qF/pdisk_1.dat 2025-12-23T18:33:25.180406Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:25.342848Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:25.406488Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:25.459078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:25.483187Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:25.546333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:25.546442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:25.565369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:25.565459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:25.653508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:25.685258Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:33:25.700772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:25.706950Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:25.736510Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123311386985177:2082] 1766514804336356 != 1766514804336359 2025-12-23T18:33:25.759849Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:25.761193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4408, node 1 2025-12-23T18:33:25.884782Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:26.179535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, 
will use file: /home/runner/.ya/build/build_root/8ptz/005a17/r3tmp/yandexmdEWU8.tmp 2025-12-23T18:33:26.179562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005a17/r3tmp/yandexmdEWU8.tmp 2025-12-23T18:33:26.179720Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005a17/r3tmp/yandexmdEWU8.tmp 2025-12-23T18:33:26.179798Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:33:26.322960Z INFO: TTestServer started on Port 17410 GrpcPort 4408 TClient is connected to server localhost:17410 PQClient connected to localhost:4408 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514805832 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:27.303210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:33:27.403954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-12-23T18:33:29.342929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123311386985213:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:29.355823Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:33:29.542877Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123313103699023:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:29.542970Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:33:32.163005Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123347463437714:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:32.163171Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:32.164563Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123347463437735:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:32.164616Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123347463437736:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:32.164809Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:32.192906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:33:32.234411Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123347463437739:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-12-23T18:33:32.335935Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123347463437767:2144] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:33:32.991358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:33.006163Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587123347463437774:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:3 ... ateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:47.911654Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:47.911679Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:47.911695Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:34:47.923128Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2025-12-23T18:34:47.940451Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:207: new Alter topic request 2025-12-23T18:34:47.942285Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:34:47.959204Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:34:47.960274Z node 9 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037895][rt3.dc1--test-topic-1766514887] updating configuration. Deleted partitions []. Added partitions [] 2025-12-23T18:34:47.964135Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:34:47.976849Z node 10 :PERSQUEUE DEBUG: partition.cpp:1289: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1766514888019, TxId 281474976715676 2025-12-23T18:34:47.976892Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:47.976919Z node 10 :PERSQUEUE DEBUG: partition.cpp:2416: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2025-12-23T18:34:47.976955Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:34:47.976971Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:47.976989Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:34:47.977004Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:34:47.977265Z node 10 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1766514888019, TxId 281474976715676 2025-12-23T18:34:47.977289Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:47.977309Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:34:47.977325Z node 10 
:PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:47.977360Z node 10 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2025-12-23T18:34:47.977535Z node 10 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037894][Partition][0][StateIdle] Schedule reply tx done 281474976715676 2025-12-23T18:34:47.977577Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:34:47.977596Z node 10 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037894][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:34:47.977619Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:34:47.977834Z node 10 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:34:47.983434Z node 10 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:34:47.983755Z node 10 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037894][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:34:47.983858Z node 10 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:34:47.983883Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:47.983901Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:47.983917Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:47.983937Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:47.983952Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:34:47.983986Z node 10 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:34:47.984366Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72075186224037894] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--test-topic-1766514887" Version: 3 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "test-topic-1766514887" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--test-topic-1766514887" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037894 } MetricsLevel: 4 MonitoringProjectId: "" 2025-12-23T18:34:47.984412Z node 10 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037894] disable metering: 
reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:34:47.984551Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:34:47.994777Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:47.994814Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:47.994831Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:47.994856Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:47.994872Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:34:47.995949Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:34:48.011165Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2025-12-23T18:34:48.015129Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:48.015168Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:48.015185Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:48.015210Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:48.015227Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:34:48.095178Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:48.095217Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:48.095235Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:48.095262Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:48.095279Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:34:48.115104Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:48.115142Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:48.115161Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:48.115190Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:48.115207Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] 
Try persist 2025-12-23T18:34:48.195546Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:48.195587Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:48.195602Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:48.195628Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:48.195641Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:34:48.215590Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:34:48.215625Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:48.215642Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:34:48.215666Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:34:48.215681Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |92.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-false >> TOlapNaming::AlterColumnTableFailed [GOOD] >> TOlapNaming::AlterColumnStoreOk >> Secret::Validation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:44.233679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:44.233779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:44.233839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:44.233879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:44.233933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:44.233966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: 
type TxSplitTablePartition, limit 10000 2025-12-23T18:34:44.234022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:44.234090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:44.235017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:44.235337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:44.332803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:44.332867Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.341378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:44.341624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:44.341753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:44.345007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:44.345194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:44.346028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:44.346249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:44.348276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:44.348468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:44.349710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:44.349771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:44.349922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:44.349979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:44.350024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:44.350159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:44.504675Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.505645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.505767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.505870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.505966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.506032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.506094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.506161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.506259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.506341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.506427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.506499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.506608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.506719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.506842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2-23T18:34:51.200357Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:34:51.200424Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 250, at tablet: 72057594046678944 2025-12-23T18:34:51.200486Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-12-23T18:34:51.217539Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-12-23T18:34:51.217751Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2025-12-23T18:34:51.217858Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2025-12-23T18:34:51.217930Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:34:51.218194Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:34:51.218388Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2025-12-23T18:34:51.218603Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:51.235884Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:51.236232Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:51.236295Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:34:51.236654Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:51.236711Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:210:2211], at schemeshard: 72057594046678944, txId: 105, path id: 39 2025-12-23T18:34:51.237307Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:51.237393Z node 4 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2025-12-23T18:34:51.237559Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:34:51.237607Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:51.237662Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:34:51.237701Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:51.237747Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-12-23T18:34:51.237799Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:51.237848Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2025-12-23T18:34:51.237885Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:0 2025-12-23T18:34:51.238057Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:34:51.238107Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-12-23T18:34:51.238159Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2025-12-23T18:34:51.239203Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:34:51.239338Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:34:51.239389Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:34:51.239464Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2025-12-23T18:34:51.239540Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:51.239656Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-12-23T18:34:51.239707Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:815:2782] 2025-12-23T18:34:51.251815Z 
node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:34:51.251947Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:34:51.252022Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1032:2951] TestWaitNotification: OK eventTxId 105 2025-12-23T18:34:51.252669Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:34:51.252951Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 344us result status StatusSuccess 2025-12-23T18:34:51.253757Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 4 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 4 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 4 NextPartitionId: 4 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active } 
BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:34:44.257283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:44.266896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:44.267002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:44.267056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:44.267096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:44.267125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:44.267213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:44.267290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:44.268186Z node 1 
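Aside on the key ranges in the describe result above (SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling): the printed bounds are 16-byte keys, so with 'U' = 0x55 and 'T' = 0x54 the layout works out as below. The bounds are verbatim from the dump; the hex expansion and the approximate fractions of the key space are added here only for readability and are simple arithmetic, not part of the log:

    Partition 0: ToBound   0x5555..5554 (printed as "UUUUUUUUUUUUUUUT")      -> first  ~1/3 of the key space
    Partition 1: FromBound 0x5555..5554, ToBound 0xAAAA..AAA9 ("\252..\251") -> middle ~1/3
    Partition 2: FromBound 0xAAAA..AAA9                                      -> last   ~1/3
    Partition 3: no KeyRange                                                 -> whole key space

Partition 3, which carries no KeyRange and no ParentPartitionIds, appears to be the newly created root-level sibling the test name refers to: it overlaps all three existing ranges rather than replacing any of them.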
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:44.268478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:44.413695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:44.413748Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.425019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:44.425216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:44.425396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:44.438063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:44.438315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:44.439007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:44.439212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:44.443215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:44.443394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:44.444712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:44.444773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:44.444924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:44.444970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:44.445013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:44.445113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:44.625980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.626955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:44.627938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-12-23T18:34:51.268812Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-12-23T18:34:51.270495Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-12-23T18:34:51.270753Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:34:51.270794Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:34:51.271127Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:34:51.271167Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-12-23T18:34:51.271199Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:34:51.320052Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 300, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:51.320211Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 300 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:34:51.320277Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 300, at tablet: 72057594046678944 2025-12-23T18:34:51.320329Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-12-23T18:34:51.351832Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-12-23T18:34:51.352000Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 300 2025-12-23T18:34:51.352095Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 300 2025-12-23T18:34:51.352154Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:34:51.352204Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:34:51.352377Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2025-12-23T18:34:51.352577Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:51.358619Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:51.359142Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:51.359201Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:34:51.359524Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:51.359577Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:210:2211], at schemeshard: 72057594046678944, txId: 105, path id: 39 2025-12-23T18:34:51.360506Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:51.360575Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2025-12-23T18:34:51.360695Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:34:51.360740Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:51.360802Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:34:51.360839Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:51.360887Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-12-23T18:34:51.360934Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:51.361051Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2025-12-23T18:34:51.361094Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:0 2025-12-23T18:34:51.361288Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:34:51.361341Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-12-23T18:34:51.361379Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 
72057594046678944, LocalPathId: 39], 3 2025-12-23T18:34:51.362717Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:34:51.362857Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:34:51.362912Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:34:51.362959Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2025-12-23T18:34:51.363009Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:51.363104Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-12-23T18:34:51.363154Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:831:2798] 2025-12-23T18:34:51.371962Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:34:51.372142Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:34:51.372194Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1048:2965] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2025-12-23T18:34:51.375928Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:34:51.376178Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:34:51.376373Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2025-12-23T18:34:51.378571Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:34:51.378881Z node 4 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-12-23T18:34:51.379240Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-12-23T18:34:51.379285Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-12-23T18:34:51.379754Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-12-23T18:34:51.379862Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:34:51.379926Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [4:1100:3002] TestWaitNotification: OK eventTxId 106 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndWait >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidSingleChild [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlapAndCreateRootLevelSibling [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 >> KqpScripting::StreamExecuteYqlScriptMixed >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:31:49.545186Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:31:49.681414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:31:49.694948Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:31:49.695452Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:31:49.695530Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:31:49.981399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:31:49.981546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:31:50.069176Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514706427591 != 1766514706427595 2025-12-23T18:31:50.084019Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:31:50.131373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:31:50.249163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003d02/r3tmp/tmpS759tu/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61231, node 1 TClient is connected to server localhost:19333 2025-12-23T18:31:50.822041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:31:50.823938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:31:50.823997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:31:50.824029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:31:50.825127Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:31:50.847428Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-12-23T18:32:02.983382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:954:2820], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:02.983608Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:02.984061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:964:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:02.984257Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-12-23T18:32:13.515322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:980:2835], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:13.515684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:13.516983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:985:2838], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:13.517197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:13.523593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:13.835622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1092:2914], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:13.835749Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:13.836194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1096:2918], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:13.836278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:13.836366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1098:2920], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:32:13.842105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:32:13.985353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1101:2923], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:32:14.452238Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1195:2988] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:32:15.234205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:32:15.941390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:16.641312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:17.387141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:32:17.972814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:32:19.046116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:32:19.368674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-12-23T18:32:19.440737Z node 1 :FLAT_TX_SCHEMESHARD ... p:440: Table profiles were not loaded REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: Execution, code: 1060
:1:48: Error: Executing ALTER OBJECT SECRET
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-12-23T18:32:59.088653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:00.355978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715716:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:02.337643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715725:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:33:02.837448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715728:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Execution, code: 1060
:1:42: Error: Executing CREATE OBJECT SECRET_ACCESS
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-12-23T18:33:29.571049Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=16; 2025-12-23T18:33:29.571450Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 16 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:33:29.571705Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 16 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:33:29.572287Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [1:3796:4844], Table: `//Root/.metadata/secrets/access` ([72057594046644480:49:1]), SessionActorId: [1:3701:4844]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3796:4844].{
: Error: Conflict with existing key., code: 2012 } 2025-12-23T18:33:29.572895Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:3789:4844], SessionActorId: [1:3701:4844], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:3701:4844]. 2025-12-23T18:33:29.573869Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=NzQzY2MwMTgtOWJhN2IwNDktMzg3Njc4ODctNGYxYzA2ZQ==, ActorId: [1:3701:4844], ActorState: ExecuteState, TraceId: 01kd67sph5dpher3mkfdsnd4hv, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3790:4844] from: [1:3789:4844] 2025-12-23T18:33:29.574112Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:3790:4844] TxId: 281474976715755. Ctx: { TraceId: 01kd67sph5dpher3mkfdsnd4hv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NzQzY2MwMTgtOWJhN2IwNDktMzg3Njc4ODctNGYxYzA2ZQ==, PoolId: default, IsStreamingQuery: 0}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-12-23T18:33:29.574658Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=NzQzY2MwMTgtOWJhN2IwNDktMzg3Njc4ODctNGYxYzA2ZQ==, ActorId: [1:3701:4844], ActorState: ExecuteState, TraceId: 01kd67sph5dpher3mkfdsnd4hv, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } 2025-12-23T18:33:29.603738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01kd67sp2n3d2yqf9k3se6wpft" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NzQzY2MwMTgtOWJhN2IwNDktMzg3Njc4ODctNGYxYzA2ZQ==" tx_control { tx_id: "01kd67sp2n3d2yqf9k3se6wpft" } query { yql_text: "--!syntax_v1\nDECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Execution, code: 1060
:1:29: Error: Executing DROP OBJECT SECRET
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-12-23T18:33:55.559590Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4297:5281], for# root@builtin, access# DescribeSchema 2025-12-23T18:33:55.559745Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4297:5281], for# root@builtin, access# DescribeSchema 2025-12-23T18:33:55.562107Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:4294:5278], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:33:55.565595Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=ZDZjOGQzYjEtNDZhZDg5ZTEtMWEzMjM0ZTUtZTYwMTQ3OGE=, ActorId: [1:4290:5275], ActorState: ExecuteState, TraceId: 01kd67tg2g0w67gg1gqagx9fy8, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/secrets/values]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-12-23T18:34:08.801778Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (51449FAE): Could not find correct token validator REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing UPSERT OBJECT SECRET
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidSingleChild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:47.310175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:47.310275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:47.310325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:47.310359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:47.310392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:47.310440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:47.310508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:47.310610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:47.311473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:47.311764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:47.413132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:47.413191Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:47.426167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:47.426381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:47.426513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:47.438776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:47.438974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:47.439688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:47.439890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:47.442413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:47.442599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:47.443817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:47.443880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:47.444019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:47.444069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:47.444139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:47.444283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:47.584631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.585525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.585626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.585693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.585862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.585901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.585951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.586009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.586048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.586108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.586162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.586219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.586265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.586333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.586446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
12-23T18:34:53.267599Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2025-12-23T18:34:53.267674Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2025-12-23T18:34:53.267747Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:34:53.267796Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:34:53.267981Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2025-12-23T18:34:53.268162Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:34:53.268233Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:53.281814Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:34:53.282410Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:53.282450Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:34:53.282585Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:34:53.282831Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:53.282871Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 104, path id: 38 2025-12-23T18:34:53.282933Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 104, path id: 39 2025-12-23T18:34:53.283626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:34:53.283677Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:34:53.283784Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: 
Part operation is done id#104:0 progress is 1/1 2025-12-23T18:34:53.283825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:34:53.283880Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:34:53.283925Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:34:53.283972Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-12-23T18:34:53.284027Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:34:53.284080Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:34:53.284115Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:34:53.284255Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:34:53.284300Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-12-23T18:34:53.284337Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2025-12-23T18:34:53.284369Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2025-12-23T18:34:53.295682Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:34:53.295789Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:34:53.295834Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:34:53.295876Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:34:53.295920Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:34:53.296465Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:34:53.296555Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 
PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:34:53.296586Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:34:53.296612Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-12-23T18:34:53.296642Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:53.296717Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:34:53.311691Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:34:53.311864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:34:53.312119Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:34:53.312167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:34:53.312596Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:34:53.312690Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:34:53.312728Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:984:2913] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "\177" ChildPartitionIds: 1 } TestModificationResults wait txId: 105 2025-12-23T18:34:53.317826Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "\177" ChildPartitionIds: 1 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:34:53.318142Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:53.318382Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid number of child partitions: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:53.325699Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid number of child partitions: 1" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 
2025-12-23T18:34:53.325974Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid number of child partitions: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-12-23T18:34:53.326310Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:34:53.326356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:34:53.326808Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:34:53.326923Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:34:53.326966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:991:2920] TestWaitNotification: OK eventTxId 105 |92.5%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-true >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink >> TOlapNaming::AlterColumnStoreOk [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlapAndCreateRootLevelSibling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:34:47.229072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:47.229183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:47.229234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:47.229275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:47.229330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:47.229363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:47.229433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:47.229546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:47.230519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:47.230890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:47.330375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:47.330460Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:47.345607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:47.345833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:47.346083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:47.359725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:47.359974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:47.360806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:47.361041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:47.366993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:47.367218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:47.368767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:47.368843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:47.369060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:47.369115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:47.369166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:47.369290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:47.512487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.516322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.516496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.516545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.516593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.516642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.516724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.516789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.516858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.516905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.516956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.517032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.517098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:47.517157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:34:47.517231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... _SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 200 2025-12-23T18:34:53.582688Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:34:53.582779Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:34:53.582987Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2025-12-23T18:34:53.583180Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:34:53.583246Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:53.597138Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:34:53.598231Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:53.598279Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:34:53.598450Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:34:53.598658Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:53.598694Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2210], at schemeshard: 72057594046678944, txId: 104, path id: 38 2025-12-23T18:34:53.598758Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2210], at schemeshard: 72057594046678944, txId: 104, path id: 39 2025-12-23T18:34:53.599252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:34:53.599307Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 
ProgressState 2025-12-23T18:34:53.599423Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:34:53.599467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:34:53.599511Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:34:53.599546Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:34:53.599589Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-12-23T18:34:53.599638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:34:53.599684Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:34:53.599719Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:34:53.599884Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:34:53.599929Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-12-23T18:34:53.599967Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2025-12-23T18:34:53.600000Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2025-12-23T18:34:53.610716Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:34:53.610856Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:34:53.610903Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:34:53.610954Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:34:53.611001Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:34:53.612307Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:34:53.612416Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:34:53.612455Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:34:53.612483Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-12-23T18:34:53.612515Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:53.612600Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:34:53.625087Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:34:53.625236Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:34:53.625520Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:34:53.625570Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:34:53.626012Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:34:53.628328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:34:53.628402Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:1005:2924] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "W" ChildPartitionIds: 1 ChildPartitionIds: 2 CreateRootLevelSibling: true } TestModificationResults wait txId: 105 2025-12-23T18:34:53.632465Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "W" ChildPartitionIds: 1 ChildPartitionIds: 2 CreateRootLevelSibling: true } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:34:53.632738Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:53.632991Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Split with prescribed partition ids: Attempt to reserve partition id (1) that is less than the first availiable id (3), at schemeshard: 
72057594046678944 2025-12-23T18:34:53.636281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Split with prescribed partition ids: Attempt to reserve partition id (1) that is less than the first availiable id (3)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:34:53.636591Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split with prescribed partition ids: Attempt to reserve partition id (1) that is less than the first availiable id (3), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-12-23T18:34:53.636930Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:34:53.636978Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:34:53.637400Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:34:53.637541Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:34:53.637583Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1012:2931] TestWaitNotification: OK eventTxId 105 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> KqpYql::InsertCV+useSink >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false >> KqpYql::Discard+DiscardSelectIsOn >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false >> 
TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp >> KqpYql::InsertCV-useSink >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:43.585704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:43.585858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:43.585900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:43.585937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:43.585980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:43.586011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:43.586089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:43.586199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:43.587153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:43.587444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:43.683301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:43.683393Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:43.699551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:43.699890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:43.700048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:43.711660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:43.711904Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:43.712703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:43.712923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:43.716308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:43.716512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:43.717847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:43.717923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:43.718080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:43.718136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:43.718183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:43.718320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:43.874857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.875769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.875899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.876006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.876072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.876133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.876225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.876297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.876363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.876446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.876558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.876647Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.876721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.876799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.876898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
hemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 129 2025-12-23T18:34:54.769701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:34:54.769771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:34:54.771136Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 UpsertColumns { Id: 3 Name: "comment" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:34:54.771262Z node 2 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=5000040;tx_id=103;;switch_optimizer=0;switch_accessors=1; FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000040 2025-12-23T18:34:54.773230Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:54.773279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:54.773484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:34:54.773675Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:54.773717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-12-23T18:34:54.773771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2025-12-23T18:34:54.774172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.774243Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:305: TAlterOlapStore TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:34:54.774310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: alter_store.cpp:332: TAlterOlapStore TProposedWaitParts operationId# 103:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-12-23T18:34:54.774398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:34:54.775343Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, 
cookie: 103 2025-12-23T18:34:54.775456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:34:54.775502Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:34:54.775548Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-12-23T18:34:54.775597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:34:54.776704Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:34:54.776782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:34:54.776813Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:34:54.776849Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2025-12-23T18:34:54.776881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:34:54.784475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-12-23T18:34:54.784752Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:54.784790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:34:54.788201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:34:54.788584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:34:54.788634Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:54.788684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 
2025-12-23T18:34:54.789251Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:34:54.789339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:34:54.789376Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:34:54.789410Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 5 2025-12-23T18:34:54.789447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:34:54.789586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-12-23T18:34:54.800051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:34:54.814316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6585: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 103 2025-12-23T18:34:54.814427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 0 2025-12-23T18:34:54.814578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 103 2025-12-23T18:34:54.814642Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 FAKE_COORDINATOR: Erasing txId 103 2025-12-23T18:34:54.820376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.820613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.820681Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:34:54.820802Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:34:54.820844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:34:54.820888Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:34:54.820923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:34:54.820960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready 
parts: 1/1, is published: true 2025-12-23T18:34:54.821048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:745:2721] message: TxId: 103 2025-12-23T18:34:54.821101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:34:54.821141Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:34:54.821174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:34:54.821335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:34:54.826008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:34:54.826091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:848:2816] TestWaitNotification: OK eventTxId 103 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> TAccessServiceTest::Authenticate [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |92.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink >> IncrementalBackup::IncrementalBackupMultipleIndexes [GOOD] >> IncrementalBackup::IndexDataVerificationIncrementalRestore >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst >> KqpPragma::ResetPerQuery >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 
2025-12-23T18:34:52.344300Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123692951582413:2137];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:52.344362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:34:52.419918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003b9d/r3tmp/tmpVPVhGs/pdisk_1.dat 2025-12-23T18:34:52.835837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:52.835975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:52.921640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:52.925183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:52.926697Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:52.929871Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123692951582314:2081] 1766514892318180 != 1766514892318183 2025-12-23T18:34:53.118842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11751 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:34:53.375240Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514893010 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:34:53.422093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:53.428969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:34:53.564651Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cd989f8f4d0] Connect to grpc://localhost:17761 2025-12-23T18:34:53.575501Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cd989f8f4d0] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2025-12-23T18:34:53.629236Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cd989f8f4d0] Status 7 Permission Denied 2025-12-23T18:34:53.631379Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cd989f8f4d0] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2025-12-23T18:34:53.652029Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cd989f8f4d0] Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> KqpYql::InsertCVList-useSink >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst |92.5%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |92.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false |92.5%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::Drop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:50.598025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:50.598123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:50.598168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:50.598217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:50.598270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:50.598307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:50.598369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:50.598469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:50.599411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:50.599703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:50.702767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:50.702847Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:50.723645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:50.723881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:50.723989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:50.733926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-12-23T18:34:50.734113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:50.734784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:50.735036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:50.738422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:50.738625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:50.739988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:50.740068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:50.740220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:50.740276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:50.740356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:50.740531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:50.911792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.913011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.913148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.913260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.913328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.913418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.913510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.913624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.913708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.913782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.913872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:34:50.913988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.914065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.914156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:50.914245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
an, step: 1250, at tablet: 72057594046678944 2025-12-23T18:34:58.467744Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-12-23T18:34:58.468296Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:58.478031Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-12-23T18:34:58.478202Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 1250 2025-12-23T18:34:58.478288Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 1250 2025-12-23T18:34:58.478359Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:34:58.478404Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:34:58.478596Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2025-12-23T18:34:58.478906Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:58.482318Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:58.483952Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:58.484005Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:34:58.484302Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:58.484344Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2210], at schemeshard: 72057594046678944, txId: 105, path id: 39 2025-12-23T18:34:58.484880Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:34:58.484932Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2025-12-23T18:34:58.485066Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 
2025-12-23T18:34:58.485110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:58.485157Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:34:58.485208Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:58.485258Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-12-23T18:34:58.485308Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:34:58.485355Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2025-12-23T18:34:58.485391Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:0 2025-12-23T18:34:58.485578Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:34:58.485629Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-12-23T18:34:58.485667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2025-12-23T18:34:58.486634Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:34:58.486728Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:34:58.486997Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:34:58.487054Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2025-12-23T18:34:58.487101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:34:58.487204Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-12-23T18:34:58.491611Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-12-23T18:34:58.491916Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:34:58.491964Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:34:58.492440Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:34:58.492555Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:34:58.492605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1090:2997] TestWaitNotification: OK eventTxId 105 2025-12-23T18:34:58.985588Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:34:58.985902Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 334us result status StatusSuccess 2025-12-23T18:34:58.986778Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Verify partition 0 >>>>> Verify partition 1 >>>>> Verify partition 2 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> KqpScripting::ScanQuery >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 |92.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true |92.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:43.469497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:43.469623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:43.469667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:43.469705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:43.469751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:43.469831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:43.469892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:43.470031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:43.471033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:43.471351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:43.561274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:43.561344Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:43.571306Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:43.571556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:43.571720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:43.580909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:43.581119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:43.581961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:43.582161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:43.584489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:43.584674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:43.585918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:43.585994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:43.586166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:43.586220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:43.586268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:43.586440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:43.782484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.783529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.783657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.783733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.783805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.783867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.783961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.784056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.784164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.784262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:34:43.784346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.784438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.784518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.784615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:43.784702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2025-12-23T18:35:00.496485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-12-23T18:35:00.496532Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-12-23T18:35:00.503779Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:35:00.503867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:35:00.504087Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:35:00.504116Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-12-23T18:35:00.506041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 2025-12-23T18:35:00.506106Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-12-23T18:35:00.507798Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:21 2025-12-23T18:35:00.507852Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-12-23T18:35:00.508464Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:19 2025-12-23T18:35:00.508503Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-12-23T18:35:00.508604Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:17 2025-12-23T18:35:00.508630Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-12-23T18:35:00.516277Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:15 2025-12-23T18:35:00.516366Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-12-23T18:35:00.516529Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:13 2025-12-23T18:35:00.516558Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-12-23T18:35:00.516648Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:11 2025-12-23T18:35:00.516694Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-12-23T18:35:00.516765Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:9 2025-12-23T18:35:00.516794Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-12-23T18:35:00.523859Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:36 2025-12-23T18:35:00.523944Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-12-23T18:35:00.524044Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:38 2025-12-23T18:35:00.524072Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-12-23T18:35:00.524388Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:40 2025-12-23T18:35:00.524424Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2025-12-23T18:35:00.524707Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:32 2025-12-23T18:35:00.524738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-12-23T18:35:00.524821Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:34 2025-12-23T18:35:00.524848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-12-23T18:35:00.524964Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:28 2025-12-23T18:35:00.524991Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-12-23T18:35:00.525061Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:30 2025-12-23T18:35:00.525088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-12-23T18:35:00.525155Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:26 2025-12-23T18:35:00.525181Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-12-23T18:35:00.525231Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2025-12-23T18:35:00.525257Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-12-23T18:35:00.541264Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:57 2025-12-23T18:35:00.541356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2025-12-23T18:35:00.541512Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:53 
2025-12-23T18:35:00.541542Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2025-12-23T18:35:00.541605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:55 2025-12-23T18:35:00.541630Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2025-12-23T18:35:00.541698Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:49 2025-12-23T18:35:00.541726Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2025-12-23T18:35:00.541780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:47 2025-12-23T18:35:00.541809Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2025-12-23T18:35:00.541870Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:51 2025-12-23T18:35:00.541895Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2025-12-23T18:35:00.541966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:45 2025-12-23T18:35:00.541994Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2025-12-23T18:35:00.542047Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:43 2025-12-23T18:35:00.542073Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2025-12-23T18:35:00.552489Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:41 2025-12-23T18:35:00.552653Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2025-12-23T18:35:00.552987Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 109 2025-12-23T18:35:00.554437Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:00.554889Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 376us result status StatusPathDoesNotExist 2025-12-23T18:35:00.555136Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t 
been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:35:00.556121Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 42 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-12-23T18:35:00.556253Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 42 took 148us result status StatusPathDoesNotExist 2025-12-23T18:35:00.556353Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpYql::RefSelect |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |92.5%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup >> KqpYql::ColumnNameConflict |92.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |92.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |92.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |92.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_EnableAlterDatabase >> KqpYql::InsertCV+useSink [GOOD] >> KqpYql::FromBytes >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:53.231650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:53.231751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:53.231796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:53.231837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:53.231928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:53.231982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:53.232049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:53.232148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:53.233042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:53.233344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:53.431244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:53.431323Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:53.437754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:53.437988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:53.438178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:53.446449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:53.446694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:53.447493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:53.447819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:53.450245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:53.450440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:53.451816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:53.451890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:53.452030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:53.452083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:53.452133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:53.452319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:53.811122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.812385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.812535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.812657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.812825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.812931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.813041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.813123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.813227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:34:53.813319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.813397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.813476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.813573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.813693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:53.813811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:35:04.773063Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 134 2025-12-23T18:35:04.774319Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:35:04.774976Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:35:04.783045Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:35:04.783155Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:35:04.783312Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 134 -> 135 2025-12-23T18:35:04.783631Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:35:04.783708Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:35:04.799131Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:04.799185Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:04.799352Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:35:04.799523Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:04.799564Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-12-23T18:35:04.799605Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-12-23T18:35:04.799896Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:35:04.799949Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 102:0 ProgressState 2025-12-23T18:35:04.800001Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 135 -> 240 2025-12-23T18:35:04.801023Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, 
cookie: 102 2025-12-23T18:35:04.801117Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:35:04.801147Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:35:04.801181Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-12-23T18:35:04.801223Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:35:04.806865Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:35:04.806979Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:35:04.807015Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:35:04.807052Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-12-23T18:35:04.807087Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:35:04.807174Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:35:04.815648Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:35:04.815734Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:35:04.815937Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:35:04.815993Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:35:04.816047Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:35:04.816124Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:35:04.816178Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:35:04.816235Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 
2025-12-23T18:35:04.816281Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:35:04.816324Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:35:04.816425Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:35:04.817156Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:35:04.817226Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:35:04.817316Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:35:04.817633Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:35:04.817691Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:35:04.817777Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:35:04.822907Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:35:04.824764Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:35:04.834037Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:35:04.834195Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:35:04.834506Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:35:04.834572Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:35:04.835178Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:35:04.835313Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:35:04.835369Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:348:2337] TestWaitNotification: OK eventTxId 102 2025-12-23T18:35:04.836084Z node 7 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:04.836366Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 318us result status StatusPathDoesNotExist 2025-12-23T18:35:04.846632Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_EnableAlterDatabase [GOOD] >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true >> KqpYql::BinaryJsonOffsetBound >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:32:24.273886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:24.408122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:24.419545Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:32:24.420051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:32:24.420140Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:32:24.759987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:24.760176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:24.864202Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514739229129 != 1766514739229133 2025-12-23T18:32:24.890792Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:24.944194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:25.054052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c25/r3tmp/tmp8l7VtS/pdisk_1.dat 2025-12-23T18:32:25.393154Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:830:2725], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:25.393226Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:25.393261Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046644480 2025-12-23T18:32:25.393399Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122432, Sender [1:828:2723], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-12-23T18:32:25.393434Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5258: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-12-23T18:32:25.522380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-12-23T18:32:25.522653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:25.522892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:32:25.522956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: 
[OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:32:25.523198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:32:25.523416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:25.523540Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:25.524332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:32:25.524500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:32:25.524553Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:25.524590Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-12-23T18:32:25.524781Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:32:25.524820Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:32:25.524894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:25.524953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:32:25.524997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:32:25.525034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:32:25.525160Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:25.525690Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:25.525739Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-12-23T18:32:25.525873Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:32:25.525910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:32:25.525969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:25.526040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:32:25.526081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:32:25.526222Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:25.526644Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:25.526684Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-12-23T18:32:25.526843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:32:25.526881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:32:25.526934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:25.526984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:25.527039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-12-23T18:32:25.527071Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:25.527114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:32:25.530855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:32:25.531551Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:25.531619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:32:25.531761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:32:25.532097Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2025-12-23T18:32:25.532508Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:836:2731], Recipient [1:399:2398]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:25.532549Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:25.532583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046644480 2025-12-23T18:32:25.532656Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124996, Sender [1:828:2723], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-12-23T18:32:25.532678Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-12-23T18:32:25.532728Z node 1 :FLAT_TX_SCHEMESHARD DE ... ck for operation: 281474976710672 2025-12-23T18:34:56.084205Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 281474976710672 2025-12-23T18:34:56.199045Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:56.199131Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:56.199238Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [10:399:2398], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:56.199273Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:56.407180Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:56.407267Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:56.407374Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [10:399:2398], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:56.407413Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:56.527716Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037911, for tableId 43, but no stats yet 2025-12-23T18:34:56.528015Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037913, for tableId 49, but no stats yet 2025-12-23T18:34:56.528183Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037912, for tableId 40, but no stats yet 2025-12-23T18:34:56.528338Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037910, for tableId 46, but no stats yet 2025-12-23T18:34:56.626890Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:399:2398]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:56.626975Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:56.627096Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [10:399:2398], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:56.627137Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:56.719580Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037916, for tableId 54, but no stats yet 2025-12-23T18:34:56.719842Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037917, for tableId 51, but no stats yet 2025-12-23T18:34:56.720019Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037918, for tableId 53, but no stats yet 2025-12-23T18:34:56.720193Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037919, for tableId 56, but no stats yet 2025-12-23T18:34:56.752140Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435093, Sender [0:0:0], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvConsoleConfigsTimeout 2025-12-23T18:34:56.752218Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:34:56.752300Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:56.882832Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:56.882915Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:56.883016Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [10:399:2398], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:56.883052Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:56.907254Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037888 state 5 2025-12-23T18:34:56.907467Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037889 state 5 2025-12-23T18:34:57.005190Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037890 state 5 2025-12-23T18:34:57.005345Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037891 state 5 2025-12-23T18:34:57.107772Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:57.107858Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:57.107962Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [10:399:2398], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:57.107998Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:57.302726Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:57.302828Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:57.302936Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [10:399:2398], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:57.302971Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:57.331191Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:34:57.342448Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:34:57.342622Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037903 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:34:57.342708Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037902 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:34:57.503159Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:57.503248Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:57.503359Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [10:399:2398], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:57.503400Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:57.643268Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037911 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:34:57.643453Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037913 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:34:57.643548Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037912 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:34:57.643629Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037910 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-12-23T18:34:57.727138Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:57.727222Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:57.727318Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [10:399:2398], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:57.727355Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:57.916988Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:57.917079Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:34:57.917188Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [10:399:2398], Recipient [10:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:34:57.917228Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 3 } items { uint32_value: 300 } } { items { uint32_value: 1 } items { uint32_value: 1000 } }, { items { uint32_value: 2 } items { uint32_value: 2000 } }, { items { uint32_value: 3 } items { uint32_value: 3000 } } { items { uint32_value: 3 } } { items { uint32_value: 3 } } { items { uint64_value: 3 } } { items { uint64_value: 3 } } { items { uint32_value: 300 } items { uint32_value: 3 } } { items { uint32_value: 3000 } items { uint32_value: 3 } } |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest >> KqpYql::Discard+DiscardSelectIsOn [GOOD] >> KqpYql::Discard-DiscardSelectIsOn >> KqpScripting::StreamExecuteYqlScriptMixed [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |92.5%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:54.086720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:54.086839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:54.086899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:54.086947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:54.086988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:54.087029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:54.087094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:54.087220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:54.088052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:54.088335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:54.179524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:54.179587Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:54.185454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:54.185703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:54.185841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:54.188817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:54.189024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:54.189776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:54.189986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:54.191859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:54.192019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:54.193184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:54.193251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:54.193361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:54.193421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:54.193460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:54.193624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.200439Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:34:54.348725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: 
"MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:34:54.348965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.349157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:34:54.349201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:34:54.349420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:34:54.349483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:54.351639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:54.351854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:34:54.352043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.352110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:34:54.352142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:34:54.352172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:34:54.354133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.354191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:34:54.354251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:34:54.355920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.355972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.356041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at 
tablet# 72057594046678944 2025-12-23T18:34:54.356092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:34:54.359844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:34:54.361642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:34:54.361813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:34:54.362882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:54.363032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:34:54.363079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:34:54.363340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:34:54.363385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:34:54.363551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:34:54.363629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:34:54.365529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:54.365570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
6545 cookie: 0:108 msg type: 269090816 2025-12-23T18:35:08.950111Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000041 2025-12-23T18:35:08.950811Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:08.950951Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 30064773231 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:08.951012Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-12-23T18:35:08.951230Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 128 -> 240 2025-12-23T18:35:08.951299Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-12-23T18:35:08.951501Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-12-23T18:35:08.951573Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:35:08.951641Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:425: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2025-12-23T18:35:08.952353Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-12-23T18:35:08.953586Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-12-23T18:35:08.959685Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:08.959736Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:08.959964Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2025-12-23T18:35:08.960078Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:08.960122Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at 
schemeshard: 72057594046678944, txId: 108, path id: 1 2025-12-23T18:35:08.960161Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 108, path id: 41 FAKE_COORDINATOR: Erasing txId 108 2025-12-23T18:35:08.960576Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:35:08.960624Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2025-12-23T18:35:08.960757Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2025-12-23T18:35:08.960801Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-12-23T18:35:08.960849Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2025-12-23T18:35:08.960894Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-12-23T18:35:08.960967Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2025-12-23T18:35:08.961017Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-12-23T18:35:08.961061Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2025-12-23T18:35:08.961102Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 108:0 2025-12-23T18:35:08.961185Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2025-12-23T18:35:08.961229Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 108, publications: 2, subscribers: 0 2025-12-23T18:35:08.961273Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-12-23T18:35:08.961306Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 41], 3 2025-12-23T18:35:08.962079Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:35:08.962170Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:35:08.962213Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 108 2025-12-23T18:35:08.962263Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 14 2025-12-23T18:35:08.962313Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2025-12-23T18:35:08.963177Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:35:08.963265Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:35:08.963294Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2025-12-23T18:35:08.963325Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 3 2025-12-23T18:35:08.963357Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:35:08.963436Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2025-12-23T18:35:08.973255Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-12-23T18:35:08.973499Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-12-23T18:35:08.973779Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-12-23T18:35:08.973829Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-12-23T18:35:08.974297Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-12-23T18:35:08.974398Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-12-23T18:35:08.974447Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:888:2858] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2025-12-23T18:35:08.977852Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "extSubdomain" } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:35:08.978098Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_extsubdomain.cpp:58: TCreateExtSubDomain Propose, path/MyRoot/extSubdomain, opId: 109:0, at schemeshard: 72057594046678944 2025-12-23T18:35:08.987177Z 
node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/extSubdomain', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:35:08.989610Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/extSubdomain\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges)" TxId: 109 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 108, at schemeshard: 72057594046678944 2025-12-23T18:35:08.989925Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/extSubdomain', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), operation: CREATE DATABASE, path: /MyRoot/extSubdomain TestModificationResult got TxId: 109, wait until txId: 109 |92.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |92.5%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> KqpYql::TestUuidDefaultColumn >> KqpYql::InsertCV-useSink [GOOD] >> KqpYql::InsertCVList+useSink >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] >> KqpYql::InsertCVList-useSink [GOOD] >> KqpYql::InsertIgnore >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:54.151016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2025-12-23T18:34:54.151143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:54.151197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:54.151270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:54.151328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:54.151366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:54.151436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:54.151511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:54.152405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:54.152735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:54.255659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:54.255722Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:54.262647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:54.262939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:54.263134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:54.266895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:54.267120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:54.267939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:54.268219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:54.270588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:54.270830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:54.272205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:54.272277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:54.272410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:54.272488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:54.272548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:54.272713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.288384Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:34:54.477925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:34:54.478197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.478418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:34:54.478462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:34:54.478722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:34:54.478810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:54.483799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:54.484047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:34:54.484293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.484362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:34:54.484402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2025-12-23T18:34:54.484439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:34:54.486623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.486692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:34:54.486776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:34:54.488779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.488871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.488943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:34:54.488999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:34:54.493226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:34:54.499449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:34:54.499707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:34:54.501039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:54.501188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:34:54.501246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:34:54.501536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:34:54.501601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:34:54.501813Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:34:54.501911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:34:54.506193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:54.506246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 86348Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-12-23T18:35:09.389701Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046678944 2025-12-23T18:35:09.389846Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:35:09.389914Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:35:09.389946Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:35:09.389975Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:35:09.390214Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:35:09.390267Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:35:09.390425Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:35:09.390472Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:35:09.390528Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:35:09.390577Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:35:09.390627Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-12-23T18:35:09.390691Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:35:09.390760Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:35:09.390805Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:35:09.391018Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 
2025-12-23T18:35:09.392541Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:35:09.393038Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:35:09.393245Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-12-23T18:35:09.393484Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:35:09.393819Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 2025-12-23T18:35:09.394423Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-12-23T18:35:09.395363Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:09.395639Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-12-23T18:35:09.396354Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2025-12-23T18:35:09.396949Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:35:09.397883Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 2025-12-23T18:35:09.400615Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409549 Forgetting tablet 72075186233409547 2025-12-23T18:35:09.402612Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:09.415593Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: 
Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:35:09.415958Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:35:09.416982Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:35:09.417060Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:35:09.417238Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:35:09.420063Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:35:09.420136Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:35:09.420247Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:35:09.423172Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:35:09.423248Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-12-23T18:35:09.423385Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:35:09.423415Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:35:09.423468Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:35:09.423495Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-12-23T18:35:09.425543Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:35:09.425618Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:35:09.425859Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:35:09.425949Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:35:09.426225Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for 
txId 103: send EvNotifyTxCompletion 2025-12-23T18:35:09.426280Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:35:09.426819Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:35:09.426949Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:35:09.427000Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:575:2517] TestWaitNotification: OK eventTxId 103 2025-12-23T18:35:09.427614Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:09.427870Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 299us result status StatusPathDoesNotExist 2025-12-23T18:35:09.428092Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:237:2060] recipient: [1:232:2147] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:237:2060] recipient: [1:232:2147] Leader for TabletID 72057594046678944 is [1:254:2161] sender: [1:255:2060] recipient: [1:232:2147] 2025-12-23T18:33:55.060772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:55.060869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:55.060954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:55.061002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:55.061041Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:55.061072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:55.061144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:55.061230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:55.062232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:55.062615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:55.159580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:55.159651Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:55.171468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:55.171634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:55.171789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:55.191788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:55.192016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:55.192847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:55.193067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:55.195339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:55.195553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:55.196845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:55.196924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:55.197096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:55.197157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:55.197211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:55.197363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:55.351764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.352834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.352957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.353025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.353098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.353196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.353269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.353352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.353423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.353518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.353852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.354014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.354097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.354191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:55.354257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:334:2220] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... tModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-12-23T18:35:09.547183Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-12-23T18:35:09.547237Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-12-23T18:35:09.547623Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [7:1077:2897], Recipient [7:242:2155]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:35:09.547685Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:35:09.547724Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:35:09.547878Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124996, Sender [7:976:2796], Recipient [7:242:2155]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-12-23T18:35:09.547912Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-12-23T18:35:09.547996Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-12-23T18:35:09.548110Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:35:09.548157Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:1075:2895] 2025-12-23T18:35:09.548347Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [7:1077:2897], Recipient [7:242:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:35:09.548386Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:35:09.548427Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2025-12-23T18:35:09.548905Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122432, Sender [8:952:2104], Recipient [7:242:2155] 2025-12-23T18:35:09.548960Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5258: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-12-23T18:35:09.552174Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 952 RawX2: 34359740472 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:35:09.552516Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2025-12-23T18:35:09.552661Z node 7 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:35:09.552906Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:35:09.555531Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:09.555919Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-12-23T18:35:09.556000Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-12-23T18:35:09.556456Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-12-23T18:35:09.556516Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-12-23T18:35:09.556995Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [7:1083:2903], Recipient [7:242:2155]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:35:09.557060Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:35:09.557104Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:35:09.557222Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124996, Sender [7:976:2796], Recipient [7:242:2155]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2025-12-23T18:35:09.557261Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-12-23T18:35:09.557354Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-12-23T18:35:09.557500Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-12-23T18:35:09.557556Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:1081:2901] 2025-12-23T18:35:09.557766Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [7:1083:2903], Recipient [7:242:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 
2025-12-23T18:35:09.557811Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:35:09.557858Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-12-23T18:35:09.558328Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122432, Sender [8:952:2104], Recipient [7:242:2155] 2025-12-23T18:35:09.558387Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5258: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-12-23T18:35:09.561800Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 952 RawX2: 34359740472 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:35:09.562181Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:35:09.562267Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2025-12-23T18:35:09.562538Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:35:09.565085Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:09.565430Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. 
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-12-23T18:35:09.565502Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-12-23T18:35:09.565933Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-12-23T18:35:09.565987Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-12-23T18:35:09.566373Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [7:1089:2909], Recipient [7:242:2155]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:35:09.566428Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:35:09.566479Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:35:09.566616Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124996, Sender [7:976:2796], Recipient [7:242:2155]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2025-12-23T18:35:09.566650Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-12-23T18:35:09.566765Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-12-23T18:35:09.566895Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-12-23T18:35:09.566943Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:1087:2907] 2025-12-23T18:35:09.567143Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [7:1089:2909], Recipient [7:242:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:35:09.567185Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:35:09.567229Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:51.638302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:51.638463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:51.638513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:51.638557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:51.638595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:51.638626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:51.638692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:51.638846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:51.639705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:51.640010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:51.728525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:51.728588Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:51.735272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:51.735502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:51.735673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:51.738845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:51.739074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:51.739776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:51.740044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:51.742185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:51.742353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:51.743648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:51.743717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:51.743873Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:51.743918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:51.743963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:51.744105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:51.904657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.905684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.905849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.905940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.906012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.906077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.906146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.906278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.906398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.906476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.906539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.906631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.906699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.908190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:51.908308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... nId 103:0, ProgressState, NeedSyncHive: 0 2025-12-23T18:35:09.149128Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 240 -> 240 2025-12-23T18:35:09.150275Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:35:09.150412Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:35:09.150469Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:35:09.150521Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-12-23T18:35:09.150584Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-12-23T18:35:09.150705Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-12-23T18:35:09.163722Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6221: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409546 Generation: 3 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-12-23T18:35:09.163863Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:35:09.163986Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 3, ActorId:[8:504:2452], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, 
actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:35:09.164120Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-12-23T18:35:09.164163Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-12-23T18:35:09.164339Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-12-23T18:35:09.164379Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:553:2490], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-12-23T18:35:09.165654Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409546, cookie: 0 2025-12-23T18:35:09.168581Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:35:09.168652Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:35:09.168820Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:35:09.168895Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:35:09.168953Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:35:09.169001Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:35:09.169058Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-12-23T18:35:09.169116Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:35:09.169165Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:35:09.169211Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:35:09.169329Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-12-23T18:35:09.170345Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:35:09.170438Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:35:09.172407Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: 
tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:35:09.172468Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:35:09.173054Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:35:09.173191Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:35:09.173242Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:597:2530] TestWaitNotification: OK eventTxId 103 2025-12-23T18:35:09.174009Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:09.175158Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 1.19ms result status StatusSuccess 2025-12-23T18:35:09.175695Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 5 ShardsInside: 3 ShardsLimit: 7 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 5 MaxChildrenInDir: 3 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 7 MaxShardsInPath: 3 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:09.176555Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-12-23T18:35:09.176774Z node 8 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 228us result status StatusSuccess 2025-12-23T18:35:09.177227Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10 ShardsInside: 3 ShardsLimit: 10 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } SchemeLimits { MaxDepth: 32 MaxPaths: 10 MaxChildrenInDir: 10 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 10 MaxShardsInPath: 10 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true >> KqpScripting::ScriptValidate |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |92.6%| [LD] {RESULT} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 >> KqpYql::RefSelect [GOOD] >> KqpYql::ScriptUdf >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] Test command err: 
... waiting for SysViewsRoster update finished 2025-12-23T18:33:33.552520Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:33:33.855517Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:33:33.867178Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:33:33.867549Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:33:33.867627Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:34.235019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:34.235183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:34.357256Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514808552344 != 1766514808552348 2025-12-23T18:33:34.374484Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:34.424023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:34.557971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005cc9/r3tmp/tmp96AkVx/pdisk_1.dat 2025-12-23T18:33:34.794668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:33:34.868946Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=ZTNiYmFmNTItNDM2MWQzY2MtMjNlOTk2NGUtNTUzNTcyNDA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZTNiYmFmNTItNDM2MWQzY2MtMjNlOTk2NGUtNTUzNTcyNDA= (tmp dir name: a748712f-4341-0605-2508-43baae348aa2) 2025-12-23T18:33:34.869733Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=ZTNiYmFmNTItNDM2MWQzY2MtMjNlOTk2NGUtNTUzNTcyNDA=, ActorId: [1:843:2738], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:33:34.870553Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=1&id=ZTNiYmFmNTItNDM2MWQzY2MtMjNlOTk2NGUtNTUzNTcyNDA=, ActorId: [1:843:2738], ActorState: ReadyState, TraceId: 01kd67svwpfxt6nt5q523pyyvv, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpcActor: [0:0:0] database: databaseId: /Root pool id: default 2025-12-23T18:33:35.273449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:846:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:35.273683Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:35.274163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:864:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:35.274277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:35.294627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:35.356820Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:885:2766], Recipient [1:893:2772]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:33:35.357886Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:885:2766], Recipient [1:893:2772]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:33:35.358212Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:893:2772] 2025-12-23T18:33:35.358486Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:33:35.427192Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:885:2766], Recipient [1:893:2772]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:33:35.428085Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:33:35.428224Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:33:35.437773Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:33:35.437916Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:33:35.437985Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:33:35.438508Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:33:35.438703Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:33:35.438829Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:909:2772] in generation 1 2025-12-23T18:33:35.439406Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:33:35.531158Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:33:35.531402Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:33:35.531521Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:911:2782] 2025-12-23T18:33:35.531559Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:33:35.531594Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:33:35.531631Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:33:35.531861Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:893:2772], Recipient [1:893:2772]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:33:35.531910Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:33:35.532323Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:33:35.532427Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:33:35.532496Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:33:35.532534Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:33:35.532574Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:33:35.532614Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:33:35.532666Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:33:35.532708Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:33:35.532751Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:33:35.532892Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:896:2773], Recipient [1:893:2772]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:33:35.532934Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:33:35.532993Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:892:2771], serverId# [1:896:2773], sessionId# [0:0:0] 2025-12-23T18:33:35.533384Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:896:2773] 2025-12-23T18:33:35.533434Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:33:35.533538Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:33:35.533824Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:33:35.533893Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:33:35.533997Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:33:35.534059Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:33:35.534103Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:33:35.534141Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 720751862240378 ... 
5:10.116423Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:35:10.116485Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:35:10.116646Z node 13 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2025-12-23T18:35:10.116727Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2025-12-23T18:35:10.116768Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-12-23T18:35:10.116799Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2025-12-23T18:35:10.116825Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:35:10.116851Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:35:10.116887Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v600/18446744073709551615 ImmediateWriteEdgeReplied# v600/18446744073709551615 2025-12-23T18:35:10.116940Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715666] at 72075186224037888 2025-12-23T18:35:10.116973Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-12-23T18:35:10.116998Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:35:10.117020Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit BlockFailPoint 2025-12-23T18:35:10.117044Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit BlockFailPoint 2025-12-23T18:35:10.117070Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-12-23T18:35:10.117095Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BlockFailPoint 2025-12-23T18:35:10.117116Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-12-23T18:35:10.117140Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2025-12-23T18:35:10.117209Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:269: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2025-12-23T18:35:10.117343Z node 13 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 
72057594046644480 PathId: 38 HasWrites: false 2025-12-23T18:35:10.117458Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:521: add locks to result: 0 2025-12-23T18:35:10.117540Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-12-23T18:35:10.117570Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-12-23T18:35:10.117596Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:35:10.117621Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-12-23T18:35:10.117667Z node 13 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-12-23T18:35:10.117766Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2025-12-23T18:35:10.117795Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:35:10.117822Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:35:10.117851Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:35:10.117896Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-12-23T18:35:10.117924Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:35:10.117950Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2025-12-23T18:35:10.118006Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:35:10.118041Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-12-23T18:35:10.118079Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:35:10.119539Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [13:69:2116], Recipient [13:1107:2924]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 13 Status: STATUS_NOT_FOUND 2025-12-23T18:35:10.520681Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1173:2968], Recipient [13:1107:2924]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-12-23T18:35:10.520925Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at 
tablet# 72075186224037888, FollowerId 0 2025-12-23T18:35:10.521018Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v600/18446744073709551615 ImmediateWriteEdgeReplied# v600/18446744073709551615 2025-12-23T18:35:10.521092Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v600/18446744073709551615 2025-12-23T18:35:10.521191Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-12-23T18:35:10.521333Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:35:10.521419Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:35:10.521491Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:35:10.521547Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:35:10.521610Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2025-12-23T18:35:10.521671Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:35:10.521700Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:35:10.521726Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:35:10.521753Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:35:10.521892Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-12-23T18:35:10.522219Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[13:1173:2968], 0} after executionsCount# 1 2025-12-23T18:35:10.522300Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[13:1173:2968], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:35:10.522413Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[13:1173:2968], 0} finished in read 2025-12-23T18:35:10.522511Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:35:10.522541Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:35:10.522567Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:35:10.522594Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:35:10.522648Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2025-12-23T18:35:10.522671Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:35:10.522704Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2025-12-23T18:35:10.523168Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:35:10.523341Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:35:10.524970Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1173:2968], Recipient [13:1107:2924]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:35:10.525052Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } >> KqpYql::JsonNumberPrecision |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 >> KqpYql::TableConcat >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> KqpYql::UpdatePk >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 >> KqpYql::ColumnNameConflict [GOOD] >> KqpYql::ColumnTypeMismatch >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true >> KqpYql::FromBytes [GOOD] >> KqpScripting::ScanQuery [GOOD] >> KqpScripting::ScriptingCreateAndAlterTableTest >> KqpScripting::ScanQueryDisable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:54.491017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:54.491114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:54.491182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, 
StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:54.491235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:54.491325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:54.491352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:54.491412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:54.491500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:54.492321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:54.492592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:54.587124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:54.587198Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:54.593385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:54.593658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:54.593805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:54.597487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:54.597729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:54.598475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:54.598679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:54.601484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:54.601676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:54.602969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:54.603048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:54.603208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:54.603256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not 
a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:54.603298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:54.603440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.774417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.775432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.775589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.775671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.775757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.775829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.775892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: 
EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.775969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.776075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.776139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.776208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.776284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.776350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.776452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.776542Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... State leave, operationId 103:0, at tablet# 72057594046678944 2025-12-23T18:35:15.185922Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-12-23T18:35:15.186139Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:35:15.188320Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-12-23T18:35:15.188531Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2025-12-23T18:35:15.189019Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:15.189216Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 34359740527 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:15.189304Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-12-23T18:35:15.189741Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2025-12-23T18:35:15.189849Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-12-23T18:35:15.190059Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2025-12-23T18:35:15.190304Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:588: Send TEvUpdateTenantSchemeShard, to actor: [8:806:2774], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 2025-12-23T18:35:15.193292Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6261: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2025-12-23T18:35:15.193510Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2025-12-23T18:35:15.193779Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:601: Cannot publish paths for unknown operation id#0 FAKE_COORDINATOR: Erasing txId 103 2025-12-23T18:35:15.194472Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:15.194551Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:35:15.194867Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:15.194957Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2025-12-23T18:35:15.195725Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:35:15.195823Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-12-23T18:35:15.195882Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 240 -> 240 2025-12-23T18:35:15.197193Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:35:15.197364Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:35:15.197432Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:35:15.197498Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2025-12-23T18:35:15.197577Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2025-12-23T18:35:15.197721Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-12-23T18:35:15.202108Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6221: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 38 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 
18446744073709551615 2025-12-23T18:35:15.202259Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:35:15.202398Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:806:2774], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:35:15.202668Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-12-23T18:35:15.202712Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-12-23T18:35:15.202919Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-12-23T18:35:15.202962Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:897:2840], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-12-23T18:35:15.204851Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72075186234409546, cookie: 0 2025-12-23T18:35:15.205315Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:35:15.205400Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:35:15.205608Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:35:15.205668Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:35:15.205830Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:35:15.205894Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:35:15.205965Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-12-23T18:35:15.206036Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:35:15.206103Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:35:15.206167Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:35:15.206284Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2025-12-23T18:35:15.207207Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:35:15.207308Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-12-23T18:35:15.209933Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:35:15.210013Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:35:15.210585Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:35:15.210726Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:35:15.210803Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:1021:2962] TestWaitNotification: OK eventTxId 103 >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |92.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> KqpYql::FlexibleTypes >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::AnsiIn >> 
KqpYql::TestUuidDefaultColumn [GOOD] >> BasicUsage::TWriteSession_WriteEncoded [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:54.506359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:54.506459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:54.506497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:54.506545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:54.506603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:54.506631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:54.506684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:54.506843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:54.507705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:54.508022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:54.600507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:54.600570Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:54.605761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:54.605908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:54.606007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:54.608868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:54.609061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:54.609857Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:54.610125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:54.612119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:54.612299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:54.613520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:54.613595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:54.613740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:54.613786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:54.613833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:54.613959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:54.764847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.765879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.766012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.766095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:34:54.766186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.766253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.766319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.766384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.766477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.766580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.766665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.766755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.766819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.766892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:54.766971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-12-23T18:35:16.568442Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 3 -> 128 2025-12-23T18:35:16.576532Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:35:16.576891Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:35:16.576973Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:35:16.577049Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2025-12-23T18:35:16.577127Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-12-23T18:35:16.577345Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:35:16.585517Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 
0:102 msg type: 269090816 2025-12-23T18:35:16.585743Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2025-12-23T18:35:16.591350Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:16.591568Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 34359740528 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:16.591653Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-12-23T18:35:16.592093Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2025-12-23T18:35:16.592180Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-12-23T18:35:16.592370Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2025-12-23T18:35:16.592514Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:767:2740], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:35:16.601244Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:16.601325Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:35:16.601632Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:16.601702Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:213:2213], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:35:16.602276Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 
2025-12-23T18:35:16.602363Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2025-12-23T18:35:16.602421Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2025-12-23T18:35:16.603683Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:35:16.603841Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:35:16.603895Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:35:16.603953Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2025-12-23T18:35:16.604014Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2025-12-23T18:35:16.604135Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:35:16.624566Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:35:16.624649Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:35:16.627553Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:35:16.627630Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:35:16.627703Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:35:16.627757Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:35:16.627819Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:35:16.627935Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:709:2698] message: TxId: 102 2025-12-23T18:35:16.628014Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:35:16.628080Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:35:16.628137Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:35:16.628452Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 38] was 6 2025-12-23T18:35:16.637197Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:35:16.648706Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:35:16.648802Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:959:2900] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-12-23T18:35:16.658090Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:35:16.658359Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2025-12-23T18:35:16.658426Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2025-12-23T18:35:16.674439Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 103:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-12-23T18:35:16.674603Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-12-23T18:35:16.686679Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:16.687082Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::FromBytes [GOOD] Test command err: Trying to start YDB, gRPC: 21351, MsgBus: 7248 2025-12-23T18:34:56.024272Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123704292603002:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:56.024310Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005eee/r3tmp/tmpd8YbUe/pdisk_1.dat 2025-12-23T18:34:56.443051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:56.503892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:56.504014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:56.580773Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:56.600791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:56.610428Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 21351, node 1 2025-12-23T18:34:56.843508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:56.843544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:56.843552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:56.843638Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:34:57.058958Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7248 TClient is connected to server localhost:7248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514896650 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-12-23T18:34:57.979908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:58.026781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:58.265090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:58.530432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:58.635897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:01.027003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123704292603002:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:01.027072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:01.659245Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123730062408611:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:01.659362Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:01.659917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123730062408621:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:01.659957Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:02.147452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:02.230938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:02.292357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:02.342301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:02.425455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:02.491405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:02.579687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:02.636440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:02.802531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123734357376800:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:02.802610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:02.802918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123734357376805:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:02.802957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123734357376806:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:02.803184Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:02.807364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, ... 4037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:06.304717Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:06.320415Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27532, node 2 2025-12-23T18:35:06.463002Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:06.615529Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:06.615555Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:06.615562Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:06.615641Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11250 2025-12-23T18:35:07.109449Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:11250 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514906373 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:07.752140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:35:07.775271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:35:07.788978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:07.988276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:08.367589Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:08.511660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:11.110865Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123749349618779:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:11.110939Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:11.775972Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123770824457093:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:11.776059Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:11.776331Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123770824457103:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:11.776363Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:11.913708Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.167432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.277859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.390504Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.471967Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.584865Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.677384Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.755776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.869069Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123775119425278:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:12.869133Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:12.869386Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123775119425283:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:12.869429Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123775119425284:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:12.869488Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:12.872640Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:12.889882Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123775119425287:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:35:12.968026Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123775119425340:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |92.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> KqpYql::Discard-DiscardSelectIsOn [GOOD] >> KqpYql::InsertCVList+useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] Test command err: Trying to start YDB, gRPC: 27215, MsgBus: 28247 2025-12-23T18:34:54.576462Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123701444804672:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:54.576943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:34:54.632367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005efc/r3tmp/tmpamhRrO/pdisk_1.dat 2025-12-23T18:34:54.972282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:54.972409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:54.974957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:54.976609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:55.168948Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27215, node 1 2025-12-23T18:34:55.214890Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:34:55.431561Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:55.431597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2025-12-23T18:34:55.431606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:55.431706Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:34:55.585977Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28247 TClient is connected to server localhost:28247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514895208 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:56.359802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:56.378006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:56.566175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:56.813512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:56.918832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:34:59.508495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123722919642966:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:59.508597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:59.510308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123722919642976:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:59.510383Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:59.578913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123701444804672:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:59.579013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:00.105587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:00.161030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:00.249836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:00.351855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:00.408431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:00.498843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:00.607517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:00.777768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:01.019235Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123731509578452:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:01.019342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:01.019907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123731509578457:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:01.019951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123731509578458:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:01.019984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you d ... .cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:09.030962Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3154 TClient is connected to server localhost:3154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514908837 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:09.522438Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:09.531782Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:09.561802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:09.582625Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:09.647862Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:09.827729Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:09.945211Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:12.574586Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123776215194072:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:12.574872Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:12.582209Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123776215194082:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:12.582305Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:12.582812Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123776215194084:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:12.582868Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:12.635534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.717401Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.789766Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.877529Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.972389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:13.078763Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:13.192025Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:13.356696Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:13.555369Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123780510162257:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.555527Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.555922Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123780510162262:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.555959Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123780510162263:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.555991Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.561275Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:13.566851Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123759035323145:2176];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:13.566934Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:13.628739Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123780510162266:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:13.817704Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123780510162320:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:35:17.135378Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514917160, txId: 281474976710673] shutting down 2025-12-23T18:35:17.514032Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514917538, txId: 281474976710675] shutting down |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> KqpScripting::ScanQueryInvalid >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce |92.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] Test command err: Trying to start YDB, gRPC: 22857, MsgBus: 28936 2025-12-23T18:35:11.038921Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123773652556655:2148];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:11.039417Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ee0/r3tmp/tmpKAwTOP/pdisk_1.dat 2025-12-23T18:35:11.562863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:11.579776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:11.579875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:11.725743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:11.753081Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:11.830865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 22857, node 1 
2025-12-23T18:35:12.135021Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:12.285419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:12.285440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:12.285457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:12.285533Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28936 TClient is connected to server localhost:28936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514911826 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:13.378453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:13.388036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:16.038906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123773652556655:2148];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:16.038993Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:17.191664Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123799422361209:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:17.191776Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:17.192204Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123799422361218:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:17.192271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:18.280715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:18.540898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123803717328616:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:18.541001Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:18.541515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123803717328621:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:18.541552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123803717328622:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:18.541585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:18.546500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:18.556268Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123803717328625:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:35:18.627236Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123803717328676:2610] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] Test command err: 2025-12-23T18:33:25.530848Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1766514805530805 2025-12-23T18:33:26.195360Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123320905639062:2089];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:26.195495Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:26.379510Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005994/r3tmp/tmpuueynC/pdisk_1.dat 2025-12-23T18:33:26.403576Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123320380169774:2173];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:26.403633Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:26.483535Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:33:26.998873Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:27.096459Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:27.172056Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:27.247345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:27.247446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:27.247598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:27.247634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:27.409659Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 
2 Cookie 2 2025-12-23T18:33:27.433052Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:27.432123Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:27.438607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:27.441149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:27.460215Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:27.610227Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:27.611803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 31385, node 1 2025-12-23T18:33:28.106469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005994/r3tmp/yandexkxRTJP.tmp 2025-12-23T18:33:28.110828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005994/r3tmp/yandexkxRTJP.tmp 2025-12-23T18:33:28.147256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005994/r3tmp/yandexkxRTJP.tmp 2025-12-23T18:33:28.147413Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:33:28.186189Z INFO: TTestServer started on Port 26501 GrpcPort 31385 TClient is connected to server localhost:26501 PQClient connected to localhost:31385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514807701 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:29.244591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 
2025-12-23T18:33:31.199014Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123320905639062:2089];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:31.199094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:33:31.405289Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123320380169774:2173];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:31.405362Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:33:34.514096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123355265378743:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:34.516263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:34.524462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123355265378767:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:34.524696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:34.531244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123355265378774:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:34.535353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:33:34.582257Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123355265378776:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-12-23T18:33:34.664088Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123355265378868:2989] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:33:34.906429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:34.921173Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587123355265378878:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:33:34.923203Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=Y2U4ZWM0OWUtODljY2VlYy1iYmNmYTQ0MS1lOThhNWViZg==, ActorId: [1:7587123355265378741:2334], ActorState: ExecuteState, TraceId: 01kd67svhe98x9tk1f4qkdjn7d, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:33:34.925470Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } messa ... a-38552de4-2bd55823] [null] Abort session to cluster 2025-12-23T18:35:16.382379Z :NOTICE: [/Root] [/Root] [b5bd5d08-d527e41a-38552de4-2bd55823] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:35:16.382628Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fc0ebf3e-b36a6fee-3a9aa1a8-9459acb4_0] Write session: close. Timeout = 0 ms 2025-12-23T18:35:16.382675Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fc0ebf3e-b36a6fee-3a9aa1a8-9459acb4_0] Write session will now close 2025-12-23T18:35:16.382757Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fc0ebf3e-b36a6fee-3a9aa1a8-9459acb4_0] Write session: aborting 2025-12-23T18:35:16.383233Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fc0ebf3e-b36a6fee-3a9aa1a8-9459acb4_0] Write session: gracefully shut down, all writes complete 2025-12-23T18:35:16.383284Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|fc0ebf3e-b36a6fee-3a9aa1a8-9459acb4_0] Write session: destroy 2025-12-23T18:35:16.378292Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_11_1_17285607540668244863_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 98 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 1 WriteTimestampMS: 1766514908857 CreateTimestampMS: 1766514908773 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 91 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 2 WriteTimestampMS: 1766514911154 CreateTimestampMS: 1766514908773 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 98 bytes ..." SourceId: "\000test-message-group-id" SeqNo: 3 WriteTimestampMS: 1766514911297 CreateTimestampMS: 1766514908773 UncompressedSize: 7 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 71 bytes ..." 
SourceId: "\000test-message-group-id" SeqNo: 4 WriteTimestampMS: 1766514911297 CreateTimestampMS: 1766514908773 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551388 RealReadOffset: 3 WaitQuotaTimeMs: 0 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2025-12-23T18:35:16.378620Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_11_1_17285607540668244863_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2025-12-23T18:35:16.378681Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_11_1_17285607540668244863_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid ae519498-6cdb9680-1549a521-9914be5c has messages 1 2025-12-23T18:35:16.378881Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_11_1_17285607540668244863_v1 read done: guid# ae519498-6cdb9680-1549a521-9914be5c, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 549 2025-12-23T18:35:16.378921Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_11_1_17285607540668244863_v1 response to read: guid# ae519498-6cdb9680-1549a521-9914be5c 2025-12-23T18:35:16.379230Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_11_1_17285607540668244863_v1 Process answer. Aval parts: 0 2025-12-23T18:35:16.391315Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_17285607540668244863_v1 grpc read done: success# 1, data# { read { } } 2025-12-23T18:35:16.391412Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_11_1_17285607540668244863_v1 grpc closed 2025-12-23T18:35:16.391483Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_11_1_17285607540668244863_v1 is DEAD 2025-12-23T18:35:16.392711Z node 11 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group-id|fc0ebf3e-b36a6fee-3a9aa1a8-9459acb4_0 grpc read done: success: 0 data: 2025-12-23T18:35:16.392744Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: test-message-group-id|fc0ebf3e-b36a6fee-3a9aa1a8-9459acb4_0 grpc read failed 2025-12-23T18:35:16.392783Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: test-message-group-id|fc0ebf3e-b36a6fee-3a9aa1a8-9459acb4_0 grpc closed 2025-12-23T18:35:16.392804Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group-id|fc0ebf3e-b36a6fee-3a9aa1a8-9459acb4_0 is DEAD 2025-12-23T18:35:16.393930Z node 11 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:35:16.394321Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [11:7587123792687629232:2536] disconnected. 
2025-12-23T18:35:16.394353Z node 11 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [11:7587123792687629232:2536] disconnected; active server actors: 1 2025-12-23T18:35:16.394375Z node 11 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [11:7587123792687629232:2536] client user disconnected session shared/user_11_1_17285607540668244863_v1 2025-12-23T18:35:16.395034Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [11:7587123754032923284:2490] destroyed 2025-12-23T18:35:16.395090Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037892] Destroy direct read session shared/user_11_1_17285607540668244863_v1 2025-12-23T18:35:16.395126Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [11:7587123792687629235:2539] destroyed 2025-12-23T18:35:16.395171Z node 12 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2025-12-23T18:35:16.395224Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:35:16.395248Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:16.395270Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:16.395302Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:16.395326Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:35:16.395404Z node 12 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_11_1_17285607540668244863_v1 2025-12-23T18:35:16.404041Z node 12 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:35:16.434924Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:35:16.434987Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:16.435022Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:16.435062Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:16.435086Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:35:16.534897Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:35:16.534943Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:16.534974Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:16.534999Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:16.535018Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:35:16.642910Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:35:16.642966Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:16.642992Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:16.643018Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:16.643040Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:35:16.743300Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:35:16.743358Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:16.743382Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:16.743429Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:16.743450Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:35:17.577385Z node 11 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1177: ActorId: [11:7587123796982596567:2534] TxId: 281474976715681. Ctx: { TraceId: 01kd67wyxc0fhdn8zr2gbrzxd8, Database: /Root, SessionId: ydb://session/3?node_id=11&id=YjgzMzYwYjctYWFlNjQ4ZGItMzIzYmI5MC1kYjFlOTEwNQ==, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 12 2025-12-23T18:35:17.577594Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [11:7587123796982596571:2534], TxId: 281474976715681, task: 3. Ctx: { TraceId : 01kd67wyxc0fhdn8zr2gbrzxd8. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=11&id=YjgzMzYwYjctYWFlNjQ4ZGItMzIzYmI5MC1kYjFlOTEwNQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [11:7587123796982596567:2534], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |92.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: Trying to start YDB, gRPC: 15693, MsgBus: 61224 2025-12-23T18:34:59.160356Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123723304979649:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:59.160435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ee6/r3tmp/tmp1loWDM/pdisk_1.dat 2025-12-23T18:34:59.575216Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:59.585303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:59.585437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:59.678556Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:59.685847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15693, node 1 2025-12-23T18:34:59.747453Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:34:59.815418Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:59.815442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:59.815450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:59.815546Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:61224 2025-12-23T18:35:00.191079Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:61224 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514899737 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:00.690430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:00.706307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:35:00.727101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:00.917837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:01.178653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:01.271133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:04.160794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123723304979649:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:04.160892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:04.598322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123744779817952:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.598458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.608307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123744779817962:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.608410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:05.057013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.128353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.186413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.244605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.316786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.414174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.566912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.666703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.904178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123749074786140:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:05.904273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:05.904764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123749074786145:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:05.904812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123749074786146:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:05.905138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18 ... te: Disconnected -> Connecting 2025-12-23T18:35:10.363617Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31826, node 2 2025-12-23T18:35:10.450683Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:10.479448Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:10.479473Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:10.479480Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:10.479556Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5690 TClient is connected to server localhost:5690 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514910412 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:11.033540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:11.040479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:35:11.053726Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:11.218953Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:11.225356Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:11.622329Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:11.775495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.338628Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123784626185633:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.338747Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.339227Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123784626185643:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.339265Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.450184Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.509180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.583516Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.637305Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.693520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.791281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.962045Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:15.025615Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:15.152614Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123788921153807:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.152711Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.153081Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123788921153812:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.153122Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123788921153813:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.153217Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.157838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:15.179977Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-12-23T18:35:15.180222Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123788921153816:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:15.207093Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123767446314683:2143];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:15.207161Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:15.254241Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123788921153868:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |92.6%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut >> TDatabaseResolverTests::PostgreSQL >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 64349, MsgBus: 20499 2025-12-23T18:34:57.189920Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123713032033207:2193];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:57.189986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ee9/r3tmp/tmpnboa76/pdisk_1.dat 2025-12-23T18:34:57.885443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:57.886943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:57.931821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:57.978883Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123713032033038:2081] 1766514897136727 != 1766514897136730 2025-12-23T18:34:57.991055Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:58.009465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64349, node 1 2025-12-23T18:34:58.146000Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:34:58.247377Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:34:58.311860Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:58.311890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:58.311896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:58.311978Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20499 TClient is connected to server localhost:20499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514898064 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:59.212008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:59.219647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:34:59.229538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:59.485757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:59.778309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:34:59.866316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:02.190992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123713032033207:2193];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:02.191075Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:04.140811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123743096806007:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.140937Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.142243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123743096806017:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.142310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.846196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.911238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.981583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.073917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.153363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.217753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.284720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.414495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:05.661254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123747391774214:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:05.661346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:05.661707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123747391774219:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:05.661746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123747391774220:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:05.661779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [W ... osed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:11.971256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:11.994443Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:12.297864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:12.416084Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:15.016461Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123790074022703:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.016570Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.017028Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123790074022713:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.017085Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.155943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:15.219550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:15.280656Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:15.336139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:15.398834Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:15.496418Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:15.612024Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:15.689664Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:15.918445Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123768599184394:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:15.918846Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:15.922513Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123790074023584:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.922602Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.923405Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123790074023589:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.923457Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123790074023590:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.923495Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.928493Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:15.961581Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123790074023593:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:35:16.027887Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123794368990944:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:35:18.675226Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-12-23T18:35:18.675491Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:35:18.675682Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:35:18.675911Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [2:7587123802958925896:2540], Table: `/Root/Test` ([72057594046644480:45:1]), SessionActorId: [2:7587123802958925880:2540]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[2:7587123802958925896:2540].{
: Error: Conflict with existing key., code: 2012 } 2025-12-23T18:35:18.676020Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [2:7587123802958925889:2540], SessionActorId: [2:7587123802958925880:2540], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[2:7587123802958925880:2540]. 2025-12-23T18:35:18.676339Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=2&id=NjI5YWRmMTMtMjI3NmU3NzMtZjQ2NTkyMmQtYzg0ZWJmNjE=, ActorId: [2:7587123802958925880:2540], ActorState: ExecuteState, TraceId: 01kd67x1425fe7mr4tvjygwahj, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7587123802958925890:2540] from: [2:7587123802958925889:2540] 2025-12-23T18:35:18.676443Z node 2 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [2:7587123802958925890:2540] TxId: 281474976715673. Ctx: { TraceId: 01kd67x1425fe7mr4tvjygwahj, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NjI5YWRmMTMtMjI3NmU3NzMtZjQ2NTkyMmQtYzg0ZWJmNjE=, PoolId: default, IsStreamingQuery: 0}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-12-23T18:35:18.676729Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=NjI5YWRmMTMtMjI3NmU3NzMtZjQ2NTkyMmQtYzg0ZWJmNjE=, ActorId: [2:7587123802958925880:2540], ActorState: ExecuteState, TraceId: 01kd67x1425fe7mr4tvjygwahj, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `/Root/Test`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } }
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard-DiscardSelectIsOn [GOOD] Test command err: Trying to start YDB, gRPC: 12708, MsgBus: 8434 2025-12-23T18:34:56.330667Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123709894917496:2195];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:56.330960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005eec/r3tmp/tmpmBOpH6/pdisk_1.dat 2025-12-23T18:34:56.714942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:56.760578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:56.760672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:56.942226Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:56.957709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:56.962137Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 12708, node 1 2025-12-23T18:34:57.228592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:57.228615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:57.228627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:57.228703Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:34:57.335341Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:8434 TClient is connected to server localhost:8434 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514897014 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:58.430708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:58.447452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:34:58.465684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:58.641860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:58.831690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:58.927549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:01.330903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123709894917496:2195];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:01.330992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:03.165811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123739959690268:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:03.165932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:03.167113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123739959690278:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:03.167174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.066764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.105943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.148544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.194466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.234581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.271551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.326266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.413787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.602112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123744254658477:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.602182Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.602486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123744254658482:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.602516Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123744254658483:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.602539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35 ... 35:08.688633Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17209, node 2 2025-12-23T18:35:08.854393Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:08.875333Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:08.875356Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:08.875362Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:08.875478Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26058 TClient is connected to server localhost:26058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514908739 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:09.352597Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:09.387456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:09.456229Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:09.530064Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:09.691489Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:09.795014Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:13.527994Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123780463652355:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.528092Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.528401Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123780463652365:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.528462Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.745488Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:13.812443Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:13.920718Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:13.995106Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.067764Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.219657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.357122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.505704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.701714Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123784758620551:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.701832Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.702186Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123784758620557:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.702223Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123784758620556:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.702253Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.715653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:14.754656Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123784758620560:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:35:14.850507Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123784758620614:3787] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:35:18.815444Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587123801938490134:2542], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:13: Error: DISCARD not supported in YDB queries, code: 2008 2025-12-23T18:35:18.819211Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=ZDFjODRjMy01N2I2Y2JlZi1iMWYyZDY2Zi00YWQ4MjI4ZA==, ActorId: [2:7587123801938490127:2538], ActorState: ExecuteState, TraceId: 01kd67x19r064f688sg8wvzfx6, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 13 } message: "DISCARD not supported in YDB queries" end_position { row: 2 column: 13 } issue_code: 2008 severity: 1 } }, remove tx with tx_id: |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink >> TDatabaseResolverTests::DataStreams_Dedicated >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2025-12-23T18:35:23.061653Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest >> TContinuousBackupTests::Basic >> KqpYql::InsertIgnore [GOOD] >> TContinuousBackupTests::TakeIncrementalBackup >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::ScriptStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2025-12-23T18:35:23.798108Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. 
|92.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest >> KqpYql::ScriptUdf [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 >> TKesusTest::TestSessionDetach |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeout >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:34:47.905191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:34:47.905301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:47.905347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:34:47.905389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:34:47.905430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:34:47.905458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:34:47.905526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:34:47.905640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:34:47.906481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:34:47.906880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:34:48.007224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:34:48.007297Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:48.017956Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:34:48.018193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:34:48.018346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:34:48.021967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:34:48.022168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:34:48.022997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:34:48.023226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:34:48.025201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:48.025395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:34:48.026617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:34:48.026679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:34:48.026828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:34:48.026907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:34:48.026975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:34:48.027125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:34:48.220706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.221804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.221948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.222019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.222089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.222185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.222254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.222347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.222425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.222532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:34:48.222621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.222702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.222835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.222896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:34:48.222950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
6 2025-12-23T18:35:23.010214Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:1170:3075], at schemeshard: 72075186233409546, txId: 116, path id: 1 2025-12-23T18:35:23.010284Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:1170:3075], at schemeshard: 72075186233409546, txId: 116, path id: 39 2025-12-23T18:35:23.011285Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-12-23T18:35:23.011398Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2025-12-23T18:35:23.011840Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2025-12-23T18:35:23.012742Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 19 PathOwnerId: 72075186233409546, cookie: 116 2025-12-23T18:35:23.012879Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 19 PathOwnerId: 72075186233409546, cookie: 116 2025-12-23T18:35:23.012965Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-12-23T18:35:23.013033Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 19 2025-12-23T18:35:23.013113Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 14 2025-12-23T18:35:23.014026Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 39 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-12-23T18:35:23.014106Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 39 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-12-23T18:35:23.014138Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-12-23T18:35:23.014172Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 39], version: 1 2025-12-23T18:35:23.014208Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 39] was 4 2025-12-23T18:35:23.014290Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2025-12-23T18:35:23.020093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2025-12-23T18:35:23.020315Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2025-12-23T18:35:23.020395Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-12-23T18:35:23.021008Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2025-12-23T18:35:23.021346Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2025-12-23T18:35:23.021549Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6306: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-12-23T18:35:23.021618Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-12-23T18:35:23.021795Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-12-23T18:35:23.021876Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2025-12-23T18:35:23.022002Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-12-23T18:35:23.022161Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 116:0 2 -> 3 2025-12-23T18:35:23.033629Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 
116 2025-12-23T18:35:23.033921Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-12-23T18:35:23.039856Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2025-12-23T18:35:23.040211Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-12-23T18:35:23.040300Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:219: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2025-12-23T18:35:23.040440Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:239: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2025-12-23T18:35:23.040987Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:255: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 1116 RawX2: 30064774104 } TxBody: "\n\342\004\n\007Table11\020\'\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\366\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001b\031\010\000J\025\n\005\n\003hdd\022\005\n\003hdd\032\005\n\003hdd\212\001&\010\000\022\004\010\001\020\000\022\004\010\002\020\001\022\004\010\003\020\001\032\004\010\001\020\000\032\004\010\002\020\001\032\004\010\003\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\':\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2025-12-23T18:35:23.046307Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 2025-12-23T18:35:23.046516Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 
72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2025-12-23T18:35:23.131464Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-12-23T18:35:23.137488Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-12-23T18:35:23.137870Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TKesusTest::TestAcquireWaiterDowngrade ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertIgnore [GOOD] Test command err: Trying to start YDB, gRPC: 16911, MsgBus: 32161 2025-12-23T18:34:59.499854Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123719892442290:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:59.499883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ee7/r3tmp/tmpTDGFDm/pdisk_1.dat 2025-12-23T18:35:00.055159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:00.180093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:00.180188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:00.340915Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:00.350868Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123719892442267:2081] 1766514899474306 != 1766514899474309 2025-12-23T18:35:00.359242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:00.366876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 16911, node 1 2025-12-23T18:35:00.567816Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:00.715302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:00.715326Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:00.715334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:00.715424Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32161 TClient is connected to server localhost:32161 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514900416 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:02.049799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:02.083806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:35:02.097731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:02.490115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:02.996357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:03.171907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:04.506863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123719892442290:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:04.506979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:06.430786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123749957215231:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:06.430889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:06.434871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123749957215240:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:06.434973Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:07.020614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:07.137028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:07.192773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:07.236409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:07.272662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:07.329727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:07.401121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:07.458292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:07.582952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123754252183414:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:07.583024Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:07.583230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123754252183419:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:07.583273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123754252183420:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:07.583524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [W ... 0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16828, node 2 2025-12-23T18:35:12.260589Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:12.537160Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:12.537184Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:12.537192Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:12.537268Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:35:12.681382Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:27296 TClient is connected to server localhost:27296 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514912281 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:13.873456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:13.883428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:13.901951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:14.002144Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:14.260729Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:14.428675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:16.674255Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123772656692730:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:16.674314Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:19.333474Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123807016432930:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:19.333573Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:19.333977Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123807016432940:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:19.334130Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:19.423586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:19.472858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:19.524428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:19.598421Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:19.696001Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:19.787099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:19.897557Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:19.996708Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.156144Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123811311401122:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:20.156270Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:20.156846Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123811311401127:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:20.156886Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123811311401128:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:20.156922Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:20.161656Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:20.184480Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-12-23T18:35:20.185222Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123811311401131:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:20.248801Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123811311401182:3786] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr. |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 >> KqpYql::TableConcat [GOOD] >> KqpYql::SelectNoAsciiValue |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |92.6%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionDestroy >> KqpYql::JsonNumberPrecision [GOOD] >> KqpYql::JsonCast >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ScriptUdf [GOOD] Test command err: Trying to start YDB, gRPC: 27146, MsgBus: 29853 2025-12-23T18:35:04.238098Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123744257079018:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:04.238235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:04.274080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ee3/r3tmp/tmpsOJc41/pdisk_1.dat 2025-12-23T18:35:04.624971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:04.745656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:04.745928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:04.752660Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:04.754980Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123744257078903:2081] 1766514904219647 != 1766514904219650 2025-12-23T18:35:04.769590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27146, node 1 2025-12-23T18:35:04.861435Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:05.015528Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:05.015547Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:05.015558Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:05.015639Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:35:05.254651Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:29853 TClient is connected to server localhost:29853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514904819 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:06.518077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:06.555247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:06.875964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:07.257835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:07.444098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:09.239030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123744257079018:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:09.239140Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:09.830130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123765731917262:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:09.830256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:09.831959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123765731917272:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:09.832032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:10.287385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:10.364334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:10.445173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:10.489975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:10.563243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:10.599616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:10.639095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:10.688482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:10.778489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123770026885448:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:10.778560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:10.779105Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123770026885454:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:10.779140Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:10.779179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadServ ... 8:35:14.499899Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65410, node 2 2025-12-23T18:35:14.675508Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:14.675534Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:14.675542Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:14.675615Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:35:14.809954Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21437 2025-12-23T18:35:15.100939Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21437 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514914549 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:15.548112Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:15.577825Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:15.703437Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:16.045536Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:16.206639Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:19.123455Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123785086312714:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:19.123523Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:19.576348Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123806561151020:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:19.576450Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:19.576777Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123806561151030:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:19.576815Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:19.780551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:19.852505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:19.928034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:19.997163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.128706Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.228035Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.326235Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.423907Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.668523Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123810856119205:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:20.668626Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:20.670334Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123810856119210:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:20.670413Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123810856119211:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:20.670460Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:20.677131Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:20.710888Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123810856119214:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:20.788513Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123810856119265:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda
:10:20: Error: At function: Apply
:8:28: Error: At function: ScriptUdf
:8:28: Error: Module not loaded for script type: Python3 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSessionStealing >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 >> TContinuousBackupTests::Basic [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> GroupWriteTest::WriteHardRateDispatcher [GOOD] >> KqpYql::ColumnTypeMismatch [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |92.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpYql::UpdatePk [GOOD] >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2025-12-23T18:35:25.944554Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:35:25.944702Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:35:25.967789Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:35:25.967930Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:35:26.019409Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:35:26.020029Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=9157838348346826285, session=0, seqNo=0) 2025-12-23T18:35:26.020240Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:35:26.039407Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=9157838348346826285, 
session=1) 2025-12-23T18:35:26.041126Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:136:2161], cookie=15697995966122457563, session=2) 2025-12-23T18:35:26.041221Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:136:2161], cookie=15697995966122457563) 2025-12-23T18:35:26.041866Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:136:2161], cookie=4484072128923018358 2025-12-23T18:35:26.042696Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=9718214997013449930, session=1, seqNo=0) 2025-12-23T18:35:26.063385Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=9718214997013449930, session=1) 2025-12-23T18:35:26.063784Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:35:26.063935Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:35:26.064060Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:35:26.064275Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:136:2161], cookie=5752407742304606910, session=1) 2025-12-23T18:35:26.074978Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-12-23T18:35:26.075056Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-12-23T18:35:26.075106Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-12-23T18:35:26.091740Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2025-12-23T18:35:26.091840Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:136:2161], cookie=5752407742304606910) 2025-12-23T18:35:26.091888Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-12-23T18:35:26.798330Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:35:26.798466Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:35:26.818568Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:35:26.818680Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:35:26.844741Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:35:26.845116Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[2:135:2159], cookie=8422000340874527316, path="") 2025-12-23T18:35:26.860257Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[2:135:2159], cookie=8422000340874527316, status=SUCCESS) 2025-12-23T18:35:26.860954Z node 2 
:KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:144:2166], cookie=111, session=0, seqNo=0) 2025-12-23T18:35:26.861097Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:35:26.861310Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[2:144:2166], cookie=15621876874145851030, session=1) 2025-12-23T18:35:26.871915Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-12-23T18:35:26.871988Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-12-23T18:35:26.883972Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:144:2166], cookie=111, session=1) 2025-12-23T18:35:26.884058Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[2:144:2166], cookie=15621876874145851030) 2025-12-23T18:35:26.884092Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-12-23T18:35:27.427262Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:35:27.427378Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:35:27.458922Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:35:27.459428Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:35:27.499987Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:35:27.500582Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=11521008415037294207, session=0, seqNo=0) 2025-12-23T18:35:27.500741Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:35:27.513540Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=11521008415037294207, session=1) 2025-12-23T18:35:27.514271Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:136:2161], cookie=4380495609861577199, session=1) 2025-12-23T18:35:27.514374Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-12-23T18:35:27.526782Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:136:2161], cookie=4380495609861577199) 2025-12-23T18:35:27.527410Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:154:2176], cookie=13818081905317844070) 2025-12-23T18:35:27.527487Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:154:2176], cookie=13818081905317844070) 2025-12-23T18:35:27.527918Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:157:2179], cookie=2847490347506144182, session=0, seqNo=0) 2025-12-23T18:35:27.528008Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:35:27.542190Z node 3 :KESUS_TABLET DEBUG: 
tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:157:2179], cookie=2847490347506144182, session=2) 2025-12-23T18:35:27.543371Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:136:2161], cookie=14650415330100474457, session=2) 2025-12-23T18:35:27.543476Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 2 2025-12-23T18:35:27.555512Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:136:2161], cookie=14650415330100474457) 2025-12-23T18:35:28.097844Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:35:28.097928Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:35:28.111582Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:35:28.111731Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:35:28.137451Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:35:28.138329Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:134:2159], cookie=12345, session=0, seqNo=0) 2025-12-23T18:35:28.138473Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:35:28.167544Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:134:2159], cookie=12345, session=1) 2025-12-23T18:35:28.168323Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:141:2164], cookie=23456, session=1, seqNo=0) 2025-12-23T18:35:28.180432Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:141:2164], cookie=23456, session=1) 2025-12-23T18:35:28.571330Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:35:28.571447Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:35:28.599479Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:35:28.599625Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:35:28.636523Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:35:28.637408Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=12345, session=0, seqNo=0) 2025-12-23T18:35:28.637563Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:35:28.649971Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=12345, session=1) 2025-12-23T18:35:28.650830Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:144:2166], cookie=23456, session=1, seqNo=0) 2025-12-23T18:35:28.664722Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:144:2166], cookie=23456, session=1) |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:35:24.755324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:24.755437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:24.755480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:24.755515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:24.755552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:24.755583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:24.755635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:24.755721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:24.756585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:24.756824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:24.868124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:24.868194Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:24.877394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:35:24.877637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:24.877824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:24.890383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:24.890600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:24.891320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:24.891537Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:24.900913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:24.901255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:24.902556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:24.902623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:24.902790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:24.902839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:24.902936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:24.903092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:25.098139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.099719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.099891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.099997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.100063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.100145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.100212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.100292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.100381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.100470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.100529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.100616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: 
ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.100701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.100787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.100896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000041 OrderId: 104 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1042 } } CommitVersion { Step: 5000041 TxId: 104 } 2025-12-23T18:35:27.457643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-12-23T18:35:27.457839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000041 OrderId: 104 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1042 } } CommitVersion { Step: 5000041 TxId: 104 } 2025-12-23T18:35:27.457984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000041 OrderId: 104 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1042 } } CommitVersion { Step: 5000041 TxId: 104 } debug: NTableState::TProposedWaitParts operationId# 104:0 FAKE_COORDINATOR: Erasing txId 104 2025-12-23T18:35:27.459544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-12-23T18:35:27.459603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: 
TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-12-23T18:35:27.459779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-12-23T18:35:27.459870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:35:27.459969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-12-23T18:35:27.460074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:27.460114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:35:27.460165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:35:27.460213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2025-12-23T18:35:27.462129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:35:27.467301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:35:27.467744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:35:27.467800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:35:27.467942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2025-12-23T18:35:27.467980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-12-23T18:35:27.468040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2025-12-23T18:35:27.468076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-12-23T18:35:27.468110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-12-23T18:35:27.468215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2715] message: TxId: 104 2025-12-23T18:35:27.468278Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-12-23T18:35:27.468339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:35:27.468374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:35:27.468516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:35:27.468560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2025-12-23T18:35:27.468589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:1 2025-12-23T18:35:27.468627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:35:27.468651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2025-12-23T18:35:27.468673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:2 2025-12-23T18:35:27.468744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:35:27.469159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:35:27.469225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2025-12-23T18:35:27.469345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:35:27.469404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:35:27.469445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:35:27.477427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:35:27.477500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1086:2999] 2025-12-23T18:35:27.477813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-12-23T18:35:27.478347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 
72057594046678944 2025-12-23T18:35:27.478602Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 268us result status StatusPathDoesNotExist 2025-12-23T18:35:27.478822Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:35:27.479367Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:35:27.479610Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 206us result status StatusPathDoesNotExist 2025-12-23T18:35:27.479781Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> IncrementalBackup::IndexDataVerificationIncrementalRestore [GOOD] >> TKesusTest::TestAllocatesResources [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot >> KqpYql::FlexibleTypes [GOOD] >> KqpYql::EvaluateIf >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD] Test command err: Trying to start YDB, gRPC: 9856, MsgBus: 3653 2025-12-23T18:35:05.964974Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123748506542360:2217];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:05.965086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:06.029510Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ee2/r3tmp/tmpyQyTWE/pdisk_1.dat 2025-12-23T18:35:06.605750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:06.605836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:06.748426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:06.752872Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:06.824103Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:06.827558Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123748506542158:2081] 1766514905889507 != 1766514905889510 TServer::EnableGrpc on GrpcPort 9856, node 1 2025-12-23T18:35:06.962878Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:06.991002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:07.069371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:07.069395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:07.069411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:07.069484Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3653 TClient is connected to server localhost:3653 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514906828 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:08.135356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:08.167335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:08.409649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:08.723075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:08.839349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:10.963058Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123748506542360:2217];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:10.963127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:11.779669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123774276347835:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:11.779777Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:11.780184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123774276347845:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:11.780280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:12.625297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.685819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.731693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.782197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.823511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.910021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:12.957607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:13.012452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:13.111654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123782866283319:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.111753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.112094Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123782866283324:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.112133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123782866283325:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:13.112163Z node 1 :KQP_WORKLOAD_SERVICE ... iveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514917874 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:18.914678Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:18.933657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:19.066063Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:19.584780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:19.737590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:22.171506Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123798312628655:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:22.171566Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:23.239264Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123824082434278:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.239356Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.243428Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123824082434288:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.243508Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.418604Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.476767Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.531310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.602178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.700099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.772644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.839716Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.911490Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.046932Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123828377402460:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.047065Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.047338Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123828377402465:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.047378Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123828377402466:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.047662Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.051781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:24.089311Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123828377402469:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:24.177890Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123828377402520:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:35:26.622040Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587123836967337457:2544], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 2025-12-23T18:35:26.623501Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=MmI2NjdjMTctY2YwMjJmNmEtMmMwNjA0ZTctYjNmZmQ0MTk=, ActorId: [2:7587123836967337446:2538], ActorState: ExecuteState, TraceId: 01kd67x8xj7n99r0jy2rbh9nqk, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 26 } message: "At function: KiWriteTable!" end_position { row: 5 column: 26 } severity: 1 issues { position { row: 6 column: 27 } message: "Failed to convert type: Struct<\'Key\':Uint64,\'Value\':Uint64> to Struct<\'Key\':Uint64?,\'Value\':String?>" end_position { row: 6 column: 27 } severity: 1 issues { position { row: 6 column: 27 } message: "Failed to convert \'Value\': Uint64 to Optional" end_position { row: 6 column: 27 } severity: 1 } } issues { position { row: 6 column: 27 } message: "Failed to convert input columns types to scheme types" end_position { row: 6 column: 27 } issue_code: 2031 severity: 1 } } }, remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> ColumnBuildTest::Locking_Failed |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |92.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query >> KqpCompileFallback::FallbackWithScanQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2025-12-23T18:35:26.499120Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:35:26.499253Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:35:26.529168Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:35:26.529335Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:35:26.579930Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:35:26.580607Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=4205442085292383341, session=0, seqNo=0) 2025-12-23T18:35:26.580817Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:35:26.599571Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=4205442085292383341, session=1) 2025-12-23T18:35:26.599996Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=4239626951409566566, session=0, seqNo=0) 2025-12-23T18:35:26.600172Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:35:26.615758Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=4239626951409566566, session=2) 2025-12-23T18:35:26.616137Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=1) 2025-12-23T18:35:26.616360Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:35:26.616481Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:35:26.635090Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2025-12-23T18:35:26.635505Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:35:26.635891Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=333, session=2, semaphore="Lock1" count=1) 2025-12-23T18:35:26.636004Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: 
[72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2025-12-23T18:35:26.648822Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2025-12-23T18:35:26.648924Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=333) 2025-12-23T18:35:26.649655Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:153:2175], cookie=4391234938072964947, name="Lock1") 2025-12-23T18:35:26.649790Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:153:2175], cookie=4391234938072964947) 2025-12-23T18:35:27.186137Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:35:27.186262Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:35:27.205890Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:35:27.206044Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:35:27.240081Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:35:27.240778Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:135:2159], cookie=2951295907895761010, session=0, seqNo=0) 2025-12-23T18:35:27.240997Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:35:27.253889Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:135:2159], cookie=2951295907895761010, session=1) 2025-12-23T18:35:27.254296Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:135:2159], cookie=13312936900494504169, session=0, seqNo=0) 2025-12-23T18:35:27.254438Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:35:27.267845Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:135:2159], cookie=13312936900494504169, session=2) 2025-12-23T18:35:27.268177Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:135:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:35:27.268296Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:35:27.268368Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:35:27.280854Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:135:2159], cookie=111) 2025-12-23T18:35:27.281240Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:135:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-12-23T18:35:27.281615Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:135:2159], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 
2025-12-23T18:35:27.299560Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:135:2159], cookie=222) 2025-12-23T18:35:27.299648Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:135:2159], cookie=333) 2025-12-23T18:35:27.300233Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:151:2173], cookie=1148351866239109512, name="Lock1") 2025-12-23T18:35:27.300323Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:151:2173], cookie=1148351866239109512) 2025-12-23T18:35:27.300805Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:154:2176], cookie=16168122934710976793, name="Lock1") 2025-12-23T18:35:27.300880Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:154:2176], cookie=16168122934710976793) 2025-12-23T18:35:27.857025Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:35:27.857145Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:35:27.877298Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:35:27.877697Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:35:27.919263Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:35:27.919854Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=8366596798952424728, session=0, seqNo=0) 2025-12-23T18:35:27.920023Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:35:27.932664Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=8366596798952424728, session=1) 2025-12-23T18:35:27.933050Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=499136956955500604, session=0, seqNo=0) 2025-12-23T18:35:27.933189Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:35:27.949618Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=499136956955500604, session=2) 2025-12-23T18:35:27.950411Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:35:27.950568Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:35:27.950660Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:35:27.963749Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=111) 2025-12-23T18:35:27.964111Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] 
TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=222, session=2, semaphore="Lock1" count=1) 2025-12-23T18:35:27.964421Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=333, session=2, semaphore="Lock1" count=1) 2025-12-23T18:35:27.964494Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-12-23T18:35:27.979738Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=222) 2025-12-23T18:35:27.979828Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=333) 2025-12-23T18:35:27.980398Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:156:2178], cookie=13133596559516550418, name="Lock1") 2025-12-23T18:35:27.980495Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:156:2178], cookie=13133596559516550418) 2025-12-23T18:35:27.981063Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:159:2181], cookie=7051201266863349123, name="Lock1") 2025-12-23T18:35:27.981134Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:159:2181], cookie=7051201266863349123) 2025-12-23T18:35:27.996373Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:35:27.996498Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:35:27.997006Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:35:27.997723Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:35:28.043542Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:35:28.043782Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:35:28.044286Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:198:2211], cookie=14819557553722912567, name="Lock1") 2025-12-23T18:35:28.044392Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:198:2211], cookie=14819557553722912567) 2025-12-23T18:35:28.044966Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:206:2218], cookie=15716406082741025968, name="Lock1") 2025-12-23T18:35:28.045046Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:206:2218], cookie=15716406082741025968) 2025-12-23T18:35:28.540769Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:35:28.540887Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:35:28.556135Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:35:28.556266Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 
2025-12-23T18:35:28.582229Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:35:28.582683Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:134:2159], cookie=7355129599010813240, session=0, seqNo=0) 2025-12-23T18:35:28.582838Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:35:28.608895Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:134:2159], cookie=7355129599010813240, session=1) 2025-12-23T18:35:28.609336Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:134:2159], cookie=12227593110317816968, session=0, seqNo=0) 2025-12-23T18:35:28.609486Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:35:28.624075Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:134:2159], cookie=12227593110317816968, session=2) 2025-12-23T18:35:28.624448Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:134:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:35:28.624616Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:35:28.624723Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:35:28.642309Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:134:2159], cookie=111) 2025-12-23T18:35:28.642685Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:134:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-12-23T18:35:28.643149Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:134:2159], cookie=333, name="Lock1") 2025-12-23T18:35:28.643266Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-12-23T18:35:28.655525Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:134:2159], cookie=222) 2025-12-23T18:35:28.655613Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:134:2159], cookie=333) 2025-12-23T18:35:29.246974Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:35:29.247102Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:35:29.270847Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:35:29.270996Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:35:29.316068Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:35:29.324887Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=7043706041067468328, path="/Root", config={ MaxUnitsPerSecond: 100 }) 
2025-12-23T18:35:29.325213Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-12-23T18:35:29.343068Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=7043706041067468328) 2025-12-23T18:35:29.343775Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=2632141385514705292, path="/Root/Res", config={ }) 2025-12-23T18:35:29.344054Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-12-23T18:35:29.359608Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=2632141385514705292) 2025-12-23T18:35:29.361573Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 3946570507159421771. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-12-23T18:35:29.361661Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=3946570507159421771) 2025-12-23T18:35:29.362252Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:151:2173]. Cookie: 15587523692968693489. Data: { } 2025-12-23T18:35:29.362310Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:151:2173], cookie=15587523692968693489) 2025-12-23T18:35:29.404507Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-12-23T18:35:29.459320Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-12-23T18:35:29.495131Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-12-23T18:35:29.543725Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-12-23T18:35:29.588225Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdatePk [GOOD] Test command err: Trying to start YDB, gRPC: 16650, MsgBus: 6920 2025-12-23T18:35:16.448900Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123793415993186:2148];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:16.448946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:16.533172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005edd/r3tmp/tmpG7wuBT/pdisk_1.dat 2025-12-23T18:35:17.374871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:17.378091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:17.378182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:17.608401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:17.679848Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:17.746901Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123793415993063:2081] 1766514916374334 != 1766514916374337 2025-12-23T18:35:17.766981Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:17.785540Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 16650, node 1 2025-12-23T18:35:18.071197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:18.071218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:18.071226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:18.071302Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6920 TClient is connected to server localhost:6920 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514917706 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:19.090264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:19.103292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:19.121898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:19.425401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:19.855141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:19.995316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:21.454521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123793415993186:2148];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:21.454595Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:23.364404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123823480766040:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.364505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.365357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123823480766050:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.365412Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.796402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.874878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.921721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.969466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.060379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.152835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.229482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.339213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.467349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123827775734227:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.467442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.467823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123827775734232:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.467881Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123827775734233:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.467989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.473291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:24.490244Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123827775734236:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:24.549604Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123827775734287:3790] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:3:20: Warning: At lambda, At function: AsStruct, At tuple
:4:31: Warning: At function: +
:4:31: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
:5:27: Error: At function: KiUpdateTable!
:5:27: Error: Cannot update primary key column: Group >> KqpScripting::ScanQueryDisable [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptingCreateAndAlterTableTest [GOOD] >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] >> KqpScripting::SecondaryIndexes >> KqpYql::AnsiIn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] Test command err: RandomSeed# 16197911723369480074 2025-12-23T18:30:12.073918Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-12-23T18:30:12.100046Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-12-23T18:30:12.100117Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-12-23T18:30:12.102888Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-12-23T18:30:12.118297Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:30:12.121309Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-12-23T18:35:23.626346Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-12-23T18:35:23.626469Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-12-23T18:35:23.895139Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::ScanQueryTruncate >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:35:25.224401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:25.224514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:25.224557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:25.224596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:25.224635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:25.224665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:25.224717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:25.224804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:25.225664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:25.225985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:25.331282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:25.331348Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:25.337428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:35:25.337676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:25.337840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:25.346599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:25.346835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:25.347599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:25.347808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:25.355877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:25.356111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:25.357394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:25.357480Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:25.357637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:25.357685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:25.357801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:25.357945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:25.638228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.639310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.639476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.639556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.639638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.639723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } 
TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.639782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.639866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.639943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.640015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.640092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.640190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.640272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.640357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:25.640466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... l shard schema changes has been received, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2025-12-23T18:35:28.694200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710757:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:35:28.694280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710757:0 129 -> 240 2025-12-23T18:35:28.703793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2025-12-23T18:35:28.704429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2025-12-23T18:35:28.704786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2025-12-23T18:35:28.704845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710757:0 ProgressState 2025-12-23T18:35:28.704969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2025-12-23T18:35:28.705018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2025-12-23T18:35:28.705065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2025-12-23T18:35:28.705118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2025-12-23T18:35:28.705158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710757, ready parts: 3/3, is published: true 2025-12-23T18:35:28.705254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1370:3190] message: TxId: 281474976710757 2025-12-23T18:35:28.705308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2025-12-23T18:35:28.705351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:0 2025-12-23T18:35:28.705381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 
281474976710757:0 2025-12-23T18:35:28.705499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:35:28.705538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:1 2025-12-23T18:35:28.705568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710757:1 2025-12-23T18:35:28.705612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:35:28.705658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:2 2025-12-23T18:35:28.705683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710757:2 2025-12-23T18:35:28.705742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:35:28.706147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:35:28.706671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2025-12-23T18:35:28.706858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:35:28.706920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:35:28.706958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:35:28.713158Z node 1 :CONTINUOUS_BACKUP ERROR: schemeshard_backup_incremental__progress.cpp:189: TIncrementalBackup::TTxProgress: Incremental backup with id# 0 not found 2025-12-23T18:35:28.713675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:35:30.903541Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:35:30.903909Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 318us result status StatusPathDoesNotExist 2025-12-23T18:35:30.904128Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t 
been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:35:30.904717Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:35:30.904923Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 218us result status StatusPathDoesNotExist 2025-12-23T18:35:30.905074Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:35:30.905664Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:35:30.905890Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 222us result status StatusSuccess 2025-12-23T18:35:30.906365Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" 
Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> SlowTopicAutopartitioning::CDC_Write |92.6%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 >> ColumnBuildTest::Initiating_Failed >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> DataShardSnapshots::ShardRestartLockBasic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::IndexDataVerificationIncrementalRestore [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:32:42.253489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:42.393405Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:42.409470Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:32:42.409802Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:32:42.409882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:32:42.734791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:42.734933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:42.859387Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514759190559 != 1766514759190563 2025-12-23T18:32:42.881403Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:42.932082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:43.048406Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c1c/r3tmp/tmpbCFQem/pdisk_1.dat 2025-12-23T18:32:43.379009Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:830:2725], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:43.379102Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:43.379142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046644480 2025-12-23T18:32:43.379282Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122432, Sender [1:828:2723], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-12-23T18:32:43.379316Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5258: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-12-23T18:32:43.551273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-12-23T18:32:43.551507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:43.551772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:32:43.551829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: 
[OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:32:43.552041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:32:43.552124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:43.552239Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:43.552986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:32:43.553140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:32:43.553193Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:43.553232Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-12-23T18:32:43.553418Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:32:43.553460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:32:43.553540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:43.553599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:32:43.553639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:32:43.553671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:32:43.553783Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:43.557070Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:43.557128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-12-23T18:32:43.557293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:32:43.557340Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:32:43.557420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:43.557470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:32:43.557530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:32:43.557620Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:43.558030Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:43.558076Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-12-23T18:32:43.558198Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:32:43.558228Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:32:43.558282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:43.558317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:43.558365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-12-23T18:32:43.558401Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:43.558442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:32:43.567682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:32:43.568354Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:43.568416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:32:43.568580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:32:43.568883Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2025-12-23T18:32:43.569322Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:836:2731], Recipient [1:399:2398]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:43.569372Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:43.569410Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046644480 2025-12-23T18:32:43.569517Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124996, Sender [1:828:2723], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-12-23T18:32:43.569550Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-12-23T18:32:43.569617Z node 1 :FLAT_TX_SCHEMESHARD DE ... 0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:23.963424Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:23.963519Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:23.963554Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:23.986733Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435096, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressIncrementalRestore 2025-12-23T18:35:23.986840Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5415: StateWork, processing event TEvPrivate::TEvProgressIncrementalRestore 2025-12-23T18:35:23.986881Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:444: [IncrementalRestore] Handle(TEvProgressIncrementalRestore) operationId: 281474976715670 tablet: 72057594046644480 2025-12-23T18:35:23.986969Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 281474976715670 tablet: 72057594046644480 2025-12-23T18:35:23.987017Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:55: [IncrementalRestore] Incremental restore already in state 3, skipping progress check for operation: 281474976715670 2025-12-23T18:35:23.987086Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 281474976715670 2025-12-23T18:35:24.114006Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037896, for tableId 18, but no stats yet 2025-12-23T18:35:24.114389Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037897, for tableId 20, but no stats yet 2025-12-23T18:35:24.114592Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037895, for tableId 18, but no stats yet 2025-12-23T18:35:24.239148Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:24.239246Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: 
StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:24.239349Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:24.239390Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:24.416719Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:24.416842Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:24.416982Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:24.417027Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:24.452590Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037898, for tableId 31, but no stats yet 2025-12-23T18:35:24.452835Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037903, for tableId 28, but no stats yet 2025-12-23T18:35:24.629258Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:24.629354Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:24.629468Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:24.629502Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:24.641446Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037904, for tableId 35, but no stats yet 2025-12-23T18:35:24.641734Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037905, for tableId 33, but no stats yet 2025-12-23T18:35:24.641893Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037906, for tableId 33, but no stats yet 2025-12-23T18:35:24.745616Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435093, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvPrivate::TEvConsoleConfigsTimeout 2025-12-23T18:35:24.745695Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:35:24.745780Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:24.900613Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender 
[0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:24.900719Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:24.900841Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:24.900882Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:24.925183Z node 11 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037888 state 5 2025-12-23T18:35:24.925401Z node 11 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037889 state 5 2025-12-23T18:35:25.111513Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:25.111616Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:25.113075Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:25.113128Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:25.255025Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:35:25.256323Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:35:25.256425Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:35:25.338939Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:25.339033Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:25.339141Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:25.339179Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:25.504842Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:25.504942Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:25.505059Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:25.505102Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:25.595240Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037898 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:35:25.606843Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037903 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:35:25.789290Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:25.789384Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:25.789500Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:25.789542Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime { items { uint32_value: 2 } items { text_value: "Bob" } items { uint32_value: 31 } }, { items { uint32_value: 12 } items { text_value: "David" } items { uint32_value: 41 } }, { items { uint32_value: 13 } items { text_value: "Frank" } items { uint32_value: 45 } } { items { uint32_value: 28 } items { uint32_value: 3 } }, { items { uint32_value: 31 } items { uint32_value: 2 } }, { items { uint32_value: 41 } items { uint32_value: 12 } }, { items { uint32_value: 45 } items { uint32_value: 13 } } { items { uint64_value: 4 } } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest |92.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |92.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |92.6%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.6%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD] Test command err: Trying to start YDB, gRPC: 26318, MsgBus: 20684 2025-12-23T18:35:02.959316Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123733220497188:2161];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:02.959519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:03.020231Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ee4/r3tmp/tmpT95413/pdisk_1.dat 2025-12-23T18:35:03.110459Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:35:03.803319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:03.803406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:03.920135Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:03.966294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:03.997949Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:04.010855Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123733220497064:2081] 1766514902922319 != 1766514902922322 2025-12-23T18:35:04.012920Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26318, node 1 2025-12-23T18:35:04.232495Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:04.256961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:04.256992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:04.257004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:04.257129Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20684 TClient is connected to server localhost:20684 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514904028 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:05.670321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:05.701880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:05.886592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:06.133650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:06.235007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:07.959659Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123733220497188:2161];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:07.959718Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:08.986242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123758990302726:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:08.986345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:08.988977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123758990302736:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:08.989055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:09.679200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:09.720002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:09.785590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:09.842203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:09.906277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:09.990063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:10.085505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:10.194704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:10.338205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123767580238211:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:10.338298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:10.338820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123767580238216:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:10.338861Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:758712376758023 ... : configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587123796473811166:2081] 1766514917472005 != 1766514917472008 2025-12-23T18:35:17.821008Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:17.821104Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:17.872888Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22545, node 2 2025-12-23T18:35:18.153813Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:18.227418Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:18.227438Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:18.227596Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:18.227682Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5362 2025-12-23T18:35:18.562983Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5362 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514917958 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:18.885792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:35:18.893643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:35:18.910210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:19.042146Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:19.382074Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:19.507453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.230478Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123822243616822:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.230572Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.231027Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123822243616832:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.231069Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.395781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.448955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.496039Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.543895Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.610148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.716921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.782880Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.852597Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.961048Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123822243617703:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.961188Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.961407Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123822243617708:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.964411Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123822243617733:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.964516Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.966475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:23.990863Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123822243617710:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:35:24.089963Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123826538585061:3780] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:35:28.506442Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514928514, txId: 281474976715673] shutting down >> TPQTestSlow::TestWriteVeryBigMessage >> ColumnBuildTest::AlterMainTable_Failed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::AnsiIn [GOOD] Test command err: Trying to start YDB, gRPC: 6960, MsgBus: 9516 2025-12-23T18:35:07.986832Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123755240188351:2171];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:07.986883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ee1/r3tmp/tmpi1oCkF/pdisk_1.dat 2025-12-23T18:35:08.718217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:08.725521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:08.725640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:08.814698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:08.914860Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123755240188208:2081] 1766514907966152 != 1766514907966155 2025-12-23T18:35:08.919879Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6960, node 1 2025-12-23T18:35:09.076064Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:09.149514Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:09.183334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:09.183359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:09.183366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:09.183457Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9516 TClient is connected to 
server localhost:9516 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514908928 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:10.608339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:10.623413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:10.634969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:10.921876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:11.237037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:11.384444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:12.986949Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123755240188351:2171];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:12.990681Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:14.021090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123785304961163:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.021216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.021728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123785304961173:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.021793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:14.496441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.541846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.601009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.702999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.754634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.838876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:14.914470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:15.099827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:15.234293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123789599929349:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.234409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.235356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123789599929354:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.235360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123789599929355:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:15.235421Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [Worklo ... 594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:20.115938Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:20.129952Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61291, node 2 2025-12-23T18:35:20.436090Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:20.479326Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:20.479346Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:20.479353Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:20.479429Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:35:20.642884Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:9347 TClient is connected to server localhost:9347 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514920177 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:21.454403Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:35:21.470409Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:21.482621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:21.603476Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:21.966951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:22.126819Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:24.633797Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123807500848558:2195];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:24.633860Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:25.695833Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123833270654034:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:25.695927Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:25.696223Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123833270654044:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:25.696259Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:25.905372Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:25.983369Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:26.066838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:26.143535Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:26.210275Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:26.324906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:26.445770Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:26.630135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:26.968201Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123837565622226:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:26.968321Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:26.968804Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123837565622231:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:26.968859Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123837565622232:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:26.968995Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:26.973714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:27.014395Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123837565622235:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:27.110062Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123841860589585:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 >> ColumnBuildTest::DoNotRestoreDeletedRows >> ColumnBuildTest::Cancelling >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] >> ColumnBuildTest::ValidDefaultValue |92.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |92.6%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 >> KqpYql::SelectNoAsciiValue [GOOD] >> ColumnBuildTest::Locking_Failed [GOOD] >> ColumnBuildTest::Locking_DisableFlag >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 >> TestKinesisHttpProxy::DifferentContentTypes >> TestYmqHttpProxy::TestCreateQueue >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:33:33.558035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:33:33.890266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:33:33.905302Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:33:33.905725Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:33:33.905802Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:34.327746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:34.327898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:34.436557Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514809499273 != 1766514809499277 2025-12-23T18:33:34.455023Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:34.511240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:34.611144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005cc3/r3tmp/tmpRQVlYS/pdisk_1.dat 2025-12-23T18:33:35.142946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:33:35.168346Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:35.288702Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:33:35.288782Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:33:35.288933Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:33:35.496281Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:33:35.496477Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:33:35.497234Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:33:35.497347Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:33:35.497768Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:33:35.497960Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:33:35.498141Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:33:35.498465Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:33:35.500493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:35.501849Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:33:35.501947Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:33:35.535820Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:33:35.537019Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:33:35.537389Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:33:35.537748Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:33:35.600094Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:33:35.612148Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:33:35.612312Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:33:35.614321Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:33:35.614459Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:33:35.614523Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:33:35.614949Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:33:35.615091Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:33:35.615189Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:33:35.615661Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:33:35.717421Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:33:35.717636Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:33:35.717768Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:33:35.717820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:33:35.717866Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:33:35.717898Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:33:35.718128Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:33:35.718200Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:33:35.718434Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:33:35.718512Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:33:35.718947Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:33:35.718999Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:33:35.719077Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:33:35.719116Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:33:35.719154Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:33:35.719188Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:33:35.719248Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:33:35.719360Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:33:35.719402Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:33:35.719446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:33:35.719612Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:33:35.719655Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:33:35.719785Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:33:35.720096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:33:35.720160Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2 ... 075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-12-23T18:35:33.500585Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-12-23T18:35:33.500629Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-12-23T18:35:33.500712Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [13:969:2824], Recipient [13:885:2764]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-12-23T18:35:33.500744Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-12-23T18:35:33.500772Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-12-23T18:35:33.500823Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-12-23T18:35:33.501022Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:714: Complete [4001 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1174:2961], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:35:33.501381Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [13:885:2764], Recipient [13:969:2824]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-12-23T18:35:33.501415Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-12-23T18:35:33.501450Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-12-23T18:35:33.501502Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 
2025-12-23T18:35:33.501610Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:714: Complete [4001 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1174:2961], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:35:33.502058Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:35:33.502363Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 3436 } LocksBrokenAsBreaker: 0 } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 } } } CommitVersion { Step: 4001 TxId: 281474976715663 } 2025-12-23T18:35:33.503394Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1232 } LocksBrokenAsBreaker: 0 } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-2" WriteRows: 1 WriteBytes: 8 } } } CommitVersion { Step: 4001 TxId: 281474976715663 } 2025-12-23T18:35:33.506056Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-12-23T18:35:33.506247Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [13:885:2764], Recipient [13:969:2824]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-12-23T18:35:33.506304Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:35:33.506389Z node 13 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2025-12-23T18:35:33.514588Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-12-23T18:35:33.517662Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [13:969:2824], Recipient [13:885:2764]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-12-23T18:35:33.517788Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:35:33.517873Z node 13 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-12-23T18:35:33.845656Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:67:2114] Handle TEvExecuteKqpTransaction 2025-12-23T18:35:33.845786Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:67:2114] TxId# 281474976715667 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: 
FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2025-12-23T18:35:33.851650Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1286:3069], Recipient [13:885:2764]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-12-23T18:35:33.851907Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:35:33.852021Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4001/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v5000/18446744073709551615 ImmediateWriteEdge# v5001/0 ImmediateWriteEdgeReplied# v5001/0 2025-12-23T18:35:33.852112Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v5001/18446744073709551615 2025-12-23T18:35:33.852233Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-12-23T18:35:33.852418Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:35:33.852516Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:35:33.852593Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:35:33.852662Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:35:33.852739Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2025-12-23T18:35:33.852816Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:35:33.852851Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:35:33.852879Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:35:33.852910Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:35:33.853090Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-12-23T18:35:33.853661Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[13:1286:3069], 0} after executionsCount# 1 2025-12-23T18:35:33.853781Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[13:1286:3069], 0} sends rowCount# 2, bytes# 96, quota rows left# 999, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:35:33.853925Z node 13 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[13:1286:3069], 0} finished in read 2025-12-23T18:35:33.854047Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:35:33.854084Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:35:33.854115Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:35:33.854148Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:35:33.854211Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:35:33.854239Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:35:33.854284Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2025-12-23T18:35:33.854372Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:35:33.854599Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:35:33.855941Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1286:3069], Recipient [13:885:2764]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:35:33.856039Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD] Test command err: Trying to start YDB, gRPC: 8148, MsgBus: 20217 2025-12-23T18:35:15.905212Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123790717041252:2252];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:15.931602Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:15.969524Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005edc/r3tmp/tmpnetmKe/pdisk_1.dat 2025-12-23T18:35:16.492828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:16.506941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-12-23T18:35:16.507068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:16.678930Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123790717041037:2081] 1766514915865093 != 1766514915865096 2025-12-23T18:35:16.688904Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:16.694347Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8148, node 1 2025-12-23T18:35:16.777213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:16.846995Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:17.028459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:17.028482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:17.028489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:17.028566Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20217 TClient is connected to server localhost:20217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514916761 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:18.191463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:35:18.197706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:18.203769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:18.398301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:18.611583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:18.729018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:20.906936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123790717041252:2252];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:20.907039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:21.857001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123816486846695:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:21.857103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:21.866897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123816486846704:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:21.867018Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:22.369154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:22.416761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:22.471087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:22.514711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:22.551619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:22.593971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:22.681320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:22.761475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:22.838904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123820781814877:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:22.838991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:22.839496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123820781814882:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:22.839561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123820781814883:2490], DatabaseId: /Root, PoolId: default ... is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:26.996064Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:26.996071Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:26.996157Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22282 TClient is connected to server localhost:22282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514926967 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:35:27.508038Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:35:27.528313Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:27.536945Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:27.641562Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:27.774890Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:27.844152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:27.937628Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:30.408929Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123853918460133:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:30.408999Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:30.409362Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123853918460143:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:30.409414Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:30.495683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:30.541555Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:30.587071Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:30.639059Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:30.689148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:30.751859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:30.810982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:30.889050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:31.078870Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123858213428311:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:31.078942Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:31.079350Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123858213428316:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:31.079391Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123858213428317:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:31.079419Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:31.083829Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:31.104061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-12-23T18:35:31.104300Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123858213428320:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:31.189587Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123858213428371:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:35:31.780368Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123836738589263:2212];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:31.780913Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:34.093991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:34.817885Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514934842, txId: 281474976710675] shutting down >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> TestYmqHttpProxy::TestSendMessage >> ColumnBuildTest::AlterMainTable_Failed [GOOD] >> ColumnBuildTest::AlterMainTable_DisableFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest [GOOD] Test command err: 2025-12-23T18:35:31.669359Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2025-12-23T18:35:31.824218Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:35:31.824327Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:35:31.824408Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:31.824506Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:182:2057] recipient: [1:14:2061] 2025-12-23T18:35:31.882727Z node 1 :PERSQUEUE DEBUG: 
pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:181:2193], now have 1 active actors on pipe 2025-12-23T18:35:31.888667Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:35:31.960179Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2025-12-23T18:35:31.960411Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:31.962191Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2025-12-23T18:35:31.962390Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:35:31.962497Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:35:31.963304Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:35:31.963764Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2025-12-23T18:35:31.966667Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:35:31.966758Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2025-12-23T18:35:31.966812Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:188:2142] 2025-12-23T18:35:31.966885Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:35:31.968332Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:35:31.969761Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:35:31.969824Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:35:31.969878Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:35:31.969930Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:35:31.969961Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:31.970010Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:35:31.970083Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-12-23T18:35:31.970132Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:35:31.970181Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:35:31.970225Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:35:31.970272Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:35:31.970487Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:35:31.970524Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-12-23T18:35:31.970605Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:35:31.970872Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:35:31.971131Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:189:2142] 2025-12-23T18:35:31.973066Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:35:31.973117Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2025-12-23T18:35:31.973152Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:189:2142] 2025-12-23T18:35:31.973195Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:35:31.974363Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:35:31.975385Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2025-12-23T18:35:31.975431Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:35:31.975462Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:35:31.975528Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:35:31.975557Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:31.975608Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:35:31.975661Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2025-12-23T18:35:31.975690Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2025-12-23T18:35:31.975717Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:35:31.975742Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2025-12-23T18:35:31.975774Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2025-12-23T18:35:31.975933Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:35:31.975995Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user __ydb_compaction_consumer readTimeStamp for offset 0 
initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-12-23T18:35:31.976062Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:35:31.976282Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:35:31.976509Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:35:31.976739Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:35:31.976883Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937 ... ot data from cache. Partition 0 offset 6 partno 0 count 1 parts_count 10 source 1 size 5243650 accessed 2 times before, last time 1970-01-01T00:00:00.000000Z 2025-12-23T18:35:37.237778Z node 3 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 17. All 2 blobs are from cache. 2025-12-23T18:35:37.237847Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' 2025-12-23T18:35:37.237881Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 6 partno 0 count 1 parts 10 suffix '0' 2025-12-23T18:35:37.237970Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2025-12-23T18:35:37.238084Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238129Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238157Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238183Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238209Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 1 size 635356 from pos 0 cbcount 2 2025-12-23T18:35:37.238239Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238266Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238297Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238324Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238349Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238375Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238401Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 
cbcount 1 2025-12-23T18:35:37.238426Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238453Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238480Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 1 size 123358 from pos 0 cbcount 1 2025-12-23T18:35:37.238537Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238564Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238590Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238616Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238641Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238668Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238694Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238717Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238766Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238794Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:37.238817Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 1 size 123358 from pos 0 cbcount 1 2025-12-23T18:35:37.238949Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2025-12-23T18:35:37.238983Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2025-12-23T18:35:37.239018Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 4:6 isTruncatedBlob 0 2025-12-23T18:35:37.245256Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 res.GetOffset() 4 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 1 2025-12-23T18:35:37.266235Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 res.GetOffset() 5 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 
2025-12-23T18:35:37.267637Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 isTruncatedBlob 0 hasNonZeroParts 1 isMiddlePartOfMessage 0 2025-12-23T18:35:37.268788Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000000_00000_0000000001_00016 2025-12-23T18:35:37.268889Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000001_00006_0000000002_00014 2025-12-23T18:35:37.268961Z node 3 :PERSQUEUE DEBUG: partition.cpp:4522: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2025-12-23T18:35:37.269233Z node 3 :PERSQUEUE DEBUG: partition.cpp:4530: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2025-12-23T18:35:37.269289Z node 3 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:35:37.269405Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 1 size 283 2025-12-23T18:35:37.269491Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 4 partNo 6 count 2 size 271 2025-12-23T18:35:37.269527Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000000_00000_0000000001_00016(+) to d0000000000_00000000000000000000_00000_0000000001_00016(+) 2025-12-23T18:35:37.269558Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000001_00006_0000000002_00014(+) to d0000000000_00000000000000000001_00006_0000000002_00014(+) 2025-12-23T18:35:37.280049Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 0 count 1 actorID [3:138:2142] 2025-12-23T18:35:37.280149Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 1 count 2 actorID [3:138:2142] 2025-12-23T18:35:37.280187Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 3 count 1 size 3072032 actorID [3:138:2142] is actual 1 2025-12-23T18:35:37.280259Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 3 count 1 size 283 actorID [3:138:2142] 2025-12-23T18:35:37.280289Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 4 count 2 size 7415109 actorID [3:138:2142] is actual 1 2025-12-23T18:35:37.280340Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 4 count 2 size 271 actorID [3:138:2142] 2025-12-23T18:35:37.280439Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 16 suffix '0' size 283 2025-12-23T18:35:37.280489Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 1 partno 6 count 2 parts 14 suffix '0' size 271 2025-12-23T18:35:37.280527Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. 
Tablet '72057594037927937' partition 0 offset 3 partno 0 count 1 parts 16 suffix '0' size 3072032 2025-12-23T18:35:37.291144Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' size 7415109 2025-12-23T18:35:37.306030Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 1 parts 16 suffix '0' size 283 2025-12-23T18:35:37.306134Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' size 271 2025-12-23T18:35:37.306494Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:35:37.306541Z node 3 :PERSQUEUE DEBUG: partition.cpp:2161: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2025-12-23T18:35:37.306582Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response Write 3 done Got compacter offset = -1 2025-12-23T18:35:37.317146Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [3:315:2302], now have 1 active actors on pipe 2025-12-23T18:35:37.317287Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:35:37.317329Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-12-23T18:35:37.317447Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 6 for user __ydb_compaction_consumer 2025-12-23T18:35:37.317727Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [3:317:2304], now have 1 active actors on pipe Got start offset = 3 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest >> ColumnBuildTest::Initiating_Failed [GOOD] >> ColumnBuildTest::Initiating_DisableFlag >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase >> KqpYql::JsonCast [GOOD] >> KqpYql::EvaluateIf [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 >> ColumnBuildTest::Locking_DisableFlag [GOOD] >> KqpScripting::ScriptStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::BrokenCompressedData [GOOD] Test command err: 2025-12-23T18:33:23.846894Z :ReadSession INFO: Random seed for debugging is 1766514803846861 2025-12-23T18:33:24.213366Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123314548820296:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:24.227251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:24.323875Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123312052253217:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:24.323954Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:24.339061Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0059a1/r3tmp/tmp8ZDlPu/pdisk_1.dat 2025-12-23T18:33:24.383460Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:33:24.652465Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:24.691643Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:24.782429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:24.782534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:24.785490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:24.785560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:24.821032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:24.832125Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:33:24.847170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:24.882656Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:24.904339Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:24.906361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 28334, node 1 2025-12-23T18:33:25.124323Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0059a1/r3tmp/yandex7M3wcr.tmp 2025-12-23T18:33:25.124352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0059a1/r3tmp/yandex7M3wcr.tmp 2025-12-23T18:33:25.126067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0059a1/r3tmp/yandex7M3wcr.tmp 2025-12-23T18:33:25.126185Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:33:25.254882Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:25.331155Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:25.358948Z INFO: TTestServer started on Port 14302 GrpcPort 28334 TClient is connected to server localhost:14302 PQClient connected to localhost:28334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514804964 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:26.005804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-12-23T18:33:29.215071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123314548820296:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:29.215153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:33:29.326925Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123312052253217:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:29.327011Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:33:30.066251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123340318625440:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:30.066387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:30.066714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123340318625454:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:30.066785Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:30.066883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123340318625452:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:30.073233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:33:30.165776Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123340318625456:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-12-23T18:33:30.272442Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123340318625548:3018] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:33:30.731831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:30.761762Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587123340318625558:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:33:30.767350Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MTYzODlhY2EtNDczNzRlMzctZWEwZGZiMjgtNGQ2NGY3MWE=, ActorId: [1:7587123340318625437:2330], ActorState: ExecuteState, TraceId: 01kd67sq5z11570a056w2c9186, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:33:30.770053Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path ... kie 1 consumer shared/user session shared/user_13_1_3255574617664139034_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset 3 2025-12-23T18:35:35.516764Z node 13 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_13_1_3255574617664139034_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid cc8fe455-2ae39984-195539c5-27beacd2 has messages 1 2025-12-23T18:35:35.516895Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_13_1_3255574617664139034_v1 read done: guid# cc8fe455-2ae39984-195539c5-27beacd2, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2025-12-23T18:35:35.516926Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_13_1_3255574617664139034_v1 response to read: guid# cc8fe455-2ae39984-195539c5-27beacd2 2025-12-23T18:35:35.517165Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_13_1_3255574617664139034_v1 Process answer. 
Aval parts: 0 2025-12-23T18:35:35.518170Z :DEBUG: [/Root] [/Root] [58694847-413bcf67-27c311cf-e082f822] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:35:35.518623Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_13_1_3255574617664139034_v1 grpc read done: success# 1, data# { read { } } 2025-12-23T18:35:35.518784Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_13_1_3255574617664139034_v1 got read request: guid# 6bc019ea-3ca35192-c787e7b1-96fc0ec0 2025-12-23T18:35:35.521142Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-12-23T18:35:35.526895Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2025-12-23T18:35:35.526992Z :DEBUG: [/Root] [/Root] [58694847-413bcf67-27c311cf-e082f822] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-12-23T18:35:34.350000Z WriteTime: 2025-12-23T18:35:34.354000Z Ip: "ipv6:[::1]:53174" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:53174" } } } } 2025-12-23T18:35:35.527198Z :INFO: [/Root] [/Root] [58694847-413bcf67-27c311cf-e082f822] Closing read session. Close timeout: 3.000000s 2025-12-23T18:35:35.527246Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-12-23T18:35:35.527302Z :INFO: [/Root] [/Root] [58694847-413bcf67-27c311cf-e082f822] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1671 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:35:35.527920Z :INFO: [/Root] [/Root] [58694847-413bcf67-27c311cf-e082f822] Closing read session. Close timeout: 0.000000s 2025-12-23T18:35:35.527971Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-12-23T18:35:35.528020Z :INFO: [/Root] [/Root] [58694847-413bcf67-27c311cf-e082f822] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1672 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:35:35.528151Z :NOTICE: [/Root] [/Root] [58694847-413bcf67-27c311cf-e082f822] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:35:35.538401Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_13_1_3255574617664139034_v1 grpc read done: success# 0, data# { } 2025-12-23T18:35:35.538428Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_13_1_3255574617664139034_v1 grpc read failed 2025-12-23T18:35:35.538455Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_13_1_3255574617664139034_v1 grpc closed 2025-12-23T18:35:35.538491Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_13_1_3255574617664139034_v1 is DEAD 2025-12-23T18:35:35.540879Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037892] Destroy direct read session shared/user_13_1_3255574617664139034_v1 2025-12-23T18:35:35.540927Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [13:7587123866326015241:2519] destroyed 2025-12-23T18:35:35.540970Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_13_1_3255574617664139034_v1 2025-12-23T18:35:35.548057Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [13:7587123866326015238:2516] disconnected. 2025-12-23T18:35:35.548107Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [13:7587123866326015238:2516] disconnected; active server actors: 1 2025-12-23T18:35:35.548135Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [13:7587123866326015238:2516] client user disconnected session shared/user_13_1_3255574617664139034_v1 2025-12-23T18:35:35.601032Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:35:35.601082Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:35.601108Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:35.601136Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:35.601156Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:35:35.701607Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:35:35.701657Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:35.701679Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:35.701704Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:35.701722Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:35:35.803992Z node 14 :PERSQUEUE DEBUG: 
partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:35:35.804039Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:35.804060Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:35.804086Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:35.804108Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:35:35.907536Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:35:35.907587Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:35.907614Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:35.907646Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:35:35.907665Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:35:38.317446Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:35:38.317506Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:35:38.317560Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:35:38.331066Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:35:38.331732Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:35:38.332049Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:35:38.332617Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:35:38.334197Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:35:38.339273Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-12-23T18:35:38.339552Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-12-23T18:35:38.339901Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:35:38.339993Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:35:38.340045Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-12-23T18:35:38.340128Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 57 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } |92.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> TestKinesisHttpProxy::MissingAction >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateIf [GOOD] Test command err: Trying to start YDB, gRPC: 23944, MsgBus: 25076 2025-12-23T18:35:19.532576Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123807622163459:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:19.532670Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005eda/r3tmp/tmpsjyZ1L/pdisk_1.dat 2025-12-23T18:35:20.127347Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:20.172701Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:20.172800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:20.315448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:20.344045Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:20.362869Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123807622163230:2081] 1766514919473717 != 1766514919473720 2025-12-23T18:35:20.422829Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 23944, node 1 2025-12-23T18:35:20.544434Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:20.687510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:20.687541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:20.687548Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:20.687640Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25076 TClient is connected to server localhost:25076 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514920464 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:21.718146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:21.758721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:21.915705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:22.182870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:22.276198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:24.024560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123829097001591:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.024676Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.024981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123829097001601:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.025058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.473966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.534943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123807622163459:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:24.535010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:24.538143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.626314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.681867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.744828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.829164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.891702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.978203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:25.102555Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123833391969772:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:25.102656Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:25.102860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123833391969777:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:25.102941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123833391969779:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:25.102984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... 7594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:30.536811Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:30.605289Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9021, node 2 2025-12-23T18:35:30.730998Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:30.987402Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:30.987423Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:30.987429Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:30.987496Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:35:31.218900Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3707 TClient is connected to server localhost:3707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514930677 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:31.770286Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:31.788894Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:31.813276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:31.924400Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:32.135840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:32.240003Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:35.214998Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123853144053675:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:35.215071Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:35.790903Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123874618891816:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:35.791020Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:35.791394Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123874618891826:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:35.791446Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:35.907346Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:35.979619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:36.074637Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:36.129639Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:36.189030Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:36.285609Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:36.360503Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:36.440093Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:36.617494Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123878913859992:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:36.617625Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:36.622998Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123878913859998:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:36.623188Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123878913859997:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:36.623667Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:36.627926Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:36.646958Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123878913860001:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:36.711169Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123878913860057:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TestYmqHttpProxy::TestGetQueueUrl >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Locking_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:35:31.459833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:31.459949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:31.459988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:31.460023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:31.460060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:31.460093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:31.460150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:31.460217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:31.461114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:31.461450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:31.564755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:31.564832Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:31.572780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:35:31.573047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:31.573216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:31.576789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:31.577022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:31.578103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:31.578365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:31.584549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:31.584783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:31.586116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:31.586189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:31.586353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:31.586407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:31.586538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:31.586696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:31.957919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.959088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.959229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.959317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.959386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.959452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.959515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.959617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.959690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.959749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.959803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.959900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.959993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.960077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:31.960161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2025-12-23T18:35:40.132162Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2025-12-23T18:35:40.132201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2025-12-23T18:35:40.132249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2025-12-23T18:35:40.132308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1556:3424] message: TxId: 281474976730758 2025-12-23T18:35:40.132360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2025-12-23T18:35:40.132393Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2025-12-23T18:35:40.132419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976730758:0 2025-12-23T18:35:40.132477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2025-12-23T18:35:40.141640Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2025-12-23T18:35:40.141719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976730758 2025-12-23T18:35:40.141788Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730758 2025-12-23T18:35:40.141931Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2025-12-23T18:35:40.152576Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 
2025-12-23T18:35:40.152734Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:35:40.152799Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2025-12-23T18:35:40.159600Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2025-12-23T18:35:40.159777Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:35:40.159856Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2025-12-23T18:35:40.160024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:35:40.160104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter 
[2:1747:3602] TestWaitNotification: OK eventTxId 105 2025-12-23T18:35:40.160882Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2025-12-23T18:35:40.161285Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2025-12-23T18:35:40.162337Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:35:40.162557Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 244us result status StatusSuccess 2025-12-23T18:35:40.163401Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 
0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonCast [GOOD] Test command err: Trying to start YDB, gRPC: 1505, MsgBus: 3063 2025-12-23T18:35:15.334339Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123791543770420:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:15.334401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ede/r3tmp/tmp1bG6hl/pdisk_1.dat 2025-12-23T18:35:16.256105Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:16.260370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:16.260507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:16.404501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:16.529893Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:16.665858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:16.666273Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123791543770395:2081] 1766514915331806 != 1766514915331809 2025-12-23T18:35:16.676364Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: 
Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1505, node 1 2025-12-23T18:35:17.034047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:17.034088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:17.034097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:17.034284Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3063 TClient is connected to server localhost:3063 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514916677 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:18.244836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:18.302092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:18.548934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:18.855942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:18.994544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:20.335093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123791543770420:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:20.335202Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:22.421554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123821608543354:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:22.421658Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:22.422491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123821608543364:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:22.422546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:22.821176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:22.863898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:22.919046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:22.960195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.050365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.108434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.170289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.262832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.410094Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123825903511540:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.410178Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.410660Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123825903511545:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.410697Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123825903511546:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.411012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025 ... 889053Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:27.918576Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:27.918660Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:27.928752Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:27.946497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26971, node 2 2025-12-23T18:35:28.336255Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:28.336278Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:28.336285Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:28.336381Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:35:28.465113Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21631 2025-12-23T18:35:28.673164Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21631 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514927996 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:29.187586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:35:29.195744Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:29.213708Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:29.332938Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:29.550681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:29.780864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:34.124021Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123870214117920:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:34.124096Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:34.124353Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123870214117930:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:34.124387Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:34.269624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:34.348197Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:34.418639Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:34.505683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:34.572454Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:34.677177Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:34.855352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:35.034189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:35.252822Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123874509086123:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:35.252978Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:35.254181Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123874509086128:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:35.254310Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123874509086129:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:35.254393Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:35.259027Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:35.274061Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123874509086132:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:35.361780Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123874509086183:3782] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [[#]] |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |92.7%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] Test command err: Trying to start YDB, gRPC: 26481, MsgBus: 14105 2025-12-23T18:35:12.585071Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123777436589174:2194];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:12.585305Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005edf/r3tmp/tmpWLDtvD/pdisk_1.dat 2025-12-23T18:35:13.294121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:13.363251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:13.363390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:13.550714Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123777436588990:2081] 1766514912498179 != 1766514912498182 2025-12-23T18:35:13.598818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:13.603016Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:13.612466Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:13.630857Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 26481, node 1 2025-12-23T18:35:14.131468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:14.131500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2025-12-23T18:35:14.131507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:14.131595Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14105 TClient is connected to server localhost:14105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514913688 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:15.726079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:15.756574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:16.163096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:16.559847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:16.770250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
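
Editor's note (not part of the log): the dump above is dominated by repeated WARN entries from a handful of components (KQP_WORKLOAD_SERVICE, FLAT_TX_SCHEMESHARD, NET_CLASSIFIER). A minimal triage sketch follows, assuming the log has been saved locally as plain text; the file name `ya_test.log` and the output format are illustrative assumptions, and the script is editorial, not YDB tooling.

```python
# Hedged sketch: tally WARN/ERROR entries per component in a saved copy of
# this build log, so the dominant noise sources stand out. Handles the
# flattened formatting above, where several entries share one physical line.
import re
from collections import Counter

# Matches the "node <N> :<COMPONENT> <SEVERITY>:" fragment of each entry.
ENTRY = re.compile(r"node \d+ :(\w+) (WARN|ERROR):")

def tally(path: str) -> Counter:
    counts = Counter()
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            for component, severity in ENTRY.findall(line):
                counts[(component, severity)] += 1
    return counts

if __name__ == "__main__":
    for (component, severity), n in tally("ya_test.log").most_common(10):
        print(f"{component:25s} {severity:5s} {n}")
```
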
2025-12-23T18:35:17.588025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123777436589174:2194];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:17.588118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:19.896708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123807501361977:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:19.896821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:19.897772Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123807501361987:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:19.897827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:20.441122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.489762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.532539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.576991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.646682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.737370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.837870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:20.955967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:21.146003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123816091297462:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:21.146133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:21.148604Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123816091297467:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:21.148666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123816091297468:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:21.148699Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... 25-12-23T18:35:25.959529Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:35:26.089694Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12512 TClient is connected to server localhost:12512 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514925609 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:35:27.131651Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:35:27.139659Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:27.155495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:27.330989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
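
Editor's note (not part of the log): the NOT_FOUND warnings about the `default` resource pool and the "propose itself is undo unsafe" schemeshard warnings recur throughout test setup and, judging by the later "path exist, request accepts it" message, resolve on their own once the pool is created. A hedged filtering sketch follows; the choice of "benign" patterns and the file name are assumptions made for illustration, not rules taken from YDB.

```python
# Hedged sketch: split the flattened log back into timestamped entries and
# drop the patterns treated here as benign setup noise, so the remaining
# WARN/ERROR entries (e.g. client timeouts, "Client lost") are easier to read.
import re

BENIGN = (
    "Resource pool default not found or you don't have access permissions",
    "Failed to fetch pool default",
    "Failed to fetch pool info",
    "propose itself is undo unsafe",
)
# Each entry starts with an ISO timestamp followed by "node <N> :".
ENTRY_START = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node \d+ :")

def interesting(path: str):
    with open(path, encoding="utf-8", errors="replace") as fh:
        text = fh.read()
    starts = [m.start() for m in ENTRY_START.finditer(text)] + [len(text)]
    for begin, end in zip(starts, starts[1:]):
        entry = text[begin:end].strip()
        if ("WARN:" in entry or "ERROR:" in entry) and not any(p in entry for p in BENIGN):
            yield entry

if __name__ == "__main__":
    for entry in interesting("ya_test.log"):
        print(entry)
```
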
2025-12-23T18:35:27.747162Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:27.990842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:29.927050Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123829238494254:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:29.927135Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:31.702320Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123859303267186:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:31.702437Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:31.702882Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123859303267196:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:31.702947Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:31.836960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:31.877664Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:31.921985Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:31.984559Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:32.039254Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:32.119348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:32.184303Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:32.283457Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:32.420823Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123863598235366:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:32.420937Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:32.421327Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123863598235372:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:32.421381Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:32.421443Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123863598235371:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:32.426139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:32.451590Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123863598235375:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:32.525705Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123863598235426:3783] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:35:35.995905Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:37.165054Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514937110, txId: 281474976710676] shutting down 2025-12-23T18:35:38.376737Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514938377, txId: 281474976710680] shutting down 2025-12-23T18:35:38.855018Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514938843, txId: 281474976710684] shutting down >> DataShardSnapshots::ShardRestartLockBasic [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTable |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpCompileFallback::FallbackWithScanQuery [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 >> ColumnBuildTest::AlterMainTable_DisableFlag [GOOD] >> ColumnBuildTest::Initiating_DisableFlag [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 >> KqpScripting::ScanQueryTruncate [GOOD] >> TStreamingQueryTest::DropStreamingQueryTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 13389, MsgBus: 10030 2025-12-23T18:34:59.411101Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123720470859789:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:59.411136Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:34:59.501314Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ee8/r3tmp/tmpXKa37X/pdisk_1.dat 2025-12-23T18:34:59.941636Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, 
path: Root/.metadata/script_executions 2025-12-23T18:34:59.961592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:59.961712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:00.052208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:00.080348Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:00.083165Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123720470859766:2081] 1766514899407337 != 1766514899407340 TServer::EnableGrpc on GrpcPort 13389, node 1 2025-12-23T18:35:00.411532Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:00.442061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:00.493328Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:00.493351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:00.493358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:00.493437Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10030 TClient is connected to server localhost:10030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514900164 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:01.755142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:35:01.765128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:35:01.790987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:02.133134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:02.420592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:02.622429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:04.412347Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123720470859789:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:04.412436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:05.771758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123746240665447:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:05.771872Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:05.772277Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123746240665457:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:05.772358Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:06.245007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:06.293642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:06.344103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:06.391136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:06.442129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:06.535162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:06.604479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:06.699433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:06.843127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123750535633635:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:06.843237Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:06.843625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123750535633640:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:06.843672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123750535633641:2492], DatabaseId: /Root, PoolId: defau ... 3?node_id=2&id=NjNmZTY3MjItNDljNjE3NTYtMzMxZjNhYmMtZDZjOWY2MzY=, ActorId: [2:7587123877176332030:2849], ActorState: ExecuteState, TraceId: 01kd67xh9k68n3qn0v6znp0gdp, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 142ms exceeded" severity: 1 }{ message: "Cancelling after 141ms in ExecuteState" severity: 1 } 2025-12-23T18:35:35.399502Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=NjdiZGY4MDQtM2ZjMjJkYjItODA3MDc4YzAtYjI3ZTg3ZDE=, ActorId: [2:7587123877176332044:2855], ActorState: ExecuteState, TraceId: 01kd67xheh5q33enszvv2rzp7d, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 145ms exceeded" severity: 1 }{ message: "Cancelling after 146ms in ExecuteState" severity: 1 } 2025-12-23T18:35:35.549080Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514935556, txId: 281474976715674] shutting down 2025-12-23T18:35:35.780267Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=ZTQxMjA0ZDEtYmU5ZjY3ZTYtYTNkZTVkYTUtNTYyZGFhNTY=, ActorId: [2:7587123877176332167:2879], ActorState: ExecuteState, TraceId: 01kd67xhrv42nd1tmnb3eba7a7, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 151ms exceeded" severity: 1 }{ message: "Cancelling after 196ms in ExecuteState" severity: 1 } 2025-12-23T18:35:35.964415Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 154ms, session id ydb://session/3?node_id=2&id=Mzc1ZGUzZTUtM2ExNjRhOGItOGQ1NzFkOWItZDI5YzRiNjc= } 2025-12-23T18:35:36.090505Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514936109, txId: 281474976715676] shutting down 2025-12-23T18:35:36.397506Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 160ms, session id ydb://session/3?node_id=2&id=Nzg1YzcxODctOGY1OWM1ODYtMTNjOTM1YjgtN2MyMGI5NTI= } 2025-12-23T18:35:36.462687Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=MTNkMWU3NDgtYzZhOTdiZDAtMjAyMjUzOTItODUzMmM4ZQ==, ActorId: [2:7587123881471299632:2923], ActorState: ExecuteState, TraceId: 01kd67xjf60xnrdnwscgh9tbx3, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 163ms exceeded" severity: 1 }{ message: "Cancelling after 160ms in ExecuteState" severity: 1 } 2025-12-23T18:35:36.484805Z node 2 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [2:7587123881471299671:2928] TxId: 281474976715679. Ctx: { TraceId: 01kd67xjf60xnrdnwscgh9tbx3, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ODJjZjFjYmQtMjRkOTgwMTctNGIzYzhiNi1kZGEwN2MzNg==, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Client lost } 2025-12-23T18:35:36.485207Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=ODJjZjFjYmQtMjRkOTgwMTctNGIzYzhiNi1kZGEwN2MzNg==, ActorId: [2:7587123881471299642:2928], ActorState: ExecuteState, TraceId: 01kd67xjf60xnrdnwscgh9tbx3, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Client lost" severity: 1 } 2025-12-23T18:35:36.485935Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514936480, txId: 281474976715678] shutting down 2025-12-23T18:35:36.486119Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [2:7587123881471299676:2931], TxId: 281474976715679, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67xjf60xnrdnwscgh9tbx3. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=ODJjZjFjYmQtMjRkOTgwMTctNGIzYzhiNi1kZGEwN2MzNg==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [2:7587123881471299671:2928], status: ABORTED, reason: {
: Error: Terminate execution } 2025-12-23T18:35:36.486480Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [2:7587123881471299679:2934], TxId: 281474976715679, task: 5. Ctx: { CheckpointId : . TraceId : 01kd67xjf60xnrdnwscgh9tbx3. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=ODJjZjFjYmQtMjRkOTgwMTctNGIzYzhiNi1kZGEwN2MzNg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [2:7587123881471299671:2928], status: ABORTED, reason: {
: Error: Terminate execution } 2025-12-23T18:35:36.634899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:35:36.634926Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:36.681661Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=M2ZjNzg3NTktOGEyYTBkNmUtZWM3NjM5OGItZTkxYWIzODA=, ActorId: [2:7587123881471299737:2942], ActorState: ExecuteState, TraceId: 01kd67xjngfcr2jqwd3f3r4vjp, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 166ms exceeded" severity: 1 }{ message: "Cancelling after 170ms in ExecuteState" severity: 1 } 2025-12-23T18:35:37.002869Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 169ms, session id ydb://session/3?node_id=2&id=ODE4Mzk2MmEtNzA0MWZhYTAtZDc5ZTQ1MWYtZWMyY2JmMzA= } 2025-12-23T18:35:37.076017Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=MzdkZTViODYtNDYzODEwNmUtYWYzYzZlOGEtY2VlMTdkZDE=, ActorId: [2:7587123881471299810:2971], ActorState: ExecuteState, TraceId: 01kd67xk21f1h8bkz2pv6pwjzn, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 172ms exceeded" severity: 1 }{ message: "Cancelling after 171ms in ExecuteState" severity: 1 } 2025-12-23T18:35:37.347092Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 175ms, session id ydb://session/3?node_id=2&id=ZDdmOWQ1MzAtNDFiYmQ0YmItZTQyN2EwYi1iOWY1YTE1MA== } 2025-12-23T18:35:37.542524Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 178ms, session id ydb://session/3?node_id=2&id=N2I5OTFjMWEtMWJlZDdjNjMtODA3MDZjOTItYTJmMDZkMmQ= } 2025-12-23T18:35:37.677366Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=MWQ3NjZlYzQtMjIzZjlkMTgtMWU2NmRkMDItM2EyZmI5NjI=, ActorId: [2:7587123885766267158:2992], ActorState: ExecuteState, TraceId: 01kd67xkmh6nq8tsyfr6te755w, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 181ms exceeded" severity: 1 }{ message: "Cancelling after 178ms in ExecuteState" severity: 1 } 2025-12-23T18:35:37.879264Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 184ms, session id ydb://session/3?node_id=2&id=YjQ5NWE0NDctZjMxMDlhNGItMjk3MTQ4ZjctOThhY2M3M2Q= } 2025-12-23T18:35:38.079457Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=NTY1NjZkNWItYmUyMzUzOGMtYWEyZWJlM2QtZDE1Yjk3ODA=, ActorId: [2:7587123885766267249:3010], ActorState: ExecuteState, TraceId: 01kd67xm101t88sztwbfckm0gp, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 187ms exceeded" severity: 1 }{ message: "Cancelling after 188ms in ExecuteState" severity: 1 } 2025-12-23T18:35:38.283529Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=ZmZiMDlkZS0xOTIyMWIwNi05YTZiMjNjYS1iM2I4YjA3Mw==, ActorId: [2:7587123890061234578:3020], ActorState: ExecuteState, TraceId: 01kd67xm771ca93agdv1683zts, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 190ms exceeded" severity: 1 }{ message: "Cancelling after 191ms in ExecuteState" severity: 1 } 2025-12-23T18:35:38.495062Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=NmYzMjI4ODktZjQwOTZjOTQtYWI1ZTBiMzktNDBmZmU4M2U=, ActorId: [2:7587123890061234599:3029], ActorState: ExecuteState, TraceId: 01kd67xmdr2wv15jk8wbkskzdz, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 193ms exceeded" severity: 1 }{ message: "Cancelling after 192ms in ExecuteState" severity: 1 } 2025-12-23T18:35:38.612333Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514938643, txId: 281474976715682] shutting down 2025-12-23T18:35:38.845148Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 199ms, session id ydb://session/3?node_id=2&id=ZGJkZTRhNjgtOGZlYTc5YmQtY2Y1YjA5YjUtZDNhZWU2N2Q= } 2025-12-23T18:35:39.114994Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 202ms, session id ydb://session/3?node_id=2&id=NzkzM2JmMWYtNjJiYTYxM2YtMjZmYTYyMDYtNTZmZDZkMTY= } 2025-12-23T18:35:39.367561Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 205ms, session id ydb://session/3?node_id=2&id=YWYzNDI5YzAtNWQ4ODFkMDAtZmYwM2IzOGItNWFjNDMzMjk= } 2025-12-23T18:35:39.496891Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=NDU2MWUyNjctYTM0NTAzNjktNTQ3NGQwNDgtZmE1ZWY4M2Y=, ActorId: [2:7587123894356202088:3083], ActorState: ExecuteState, TraceId: 01kd67xnct5c80q90k5tkvefzy, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 208ms exceeded" severity: 1 }{ message: "Cancelling after 202ms in ExecuteState" severity: 1 } 2025-12-23T18:35:39.729097Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=NjgwOGQ4MC1lYmY0ZTRiZC1kZmQ5YzczYi1jYzU5YTkwOA==, ActorId: [2:7587123894356202140:3089], ActorState: ExecuteState, TraceId: 01kd67xnksenbw6m4wpsqrp4ev, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 211ms exceeded" severity: 1 }{ message: "Cancelling after 212ms in ExecuteState" severity: 1 } 2025-12-23T18:35:39.967514Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=NjQ3N2RjZGItY2Y4YWY4LTM2NjIwYTI1LTc5MzRmNmQy, ActorId: [2:7587123894356202165:3100], ActorState: ExecuteState, TraceId: 01kd67xnv3bcz96x8db25ge9z5, Create QueryResponse for error on request, msg: , status: TIMEOUT, issues: { message: "Request timeout 214ms exceeded" severity: 1 }{ message: "Cancelling after 214ms in ExecuteState" severity: 1 } 2025-12-23T18:35:40.186140Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514940204, txId: 281474976715685] shutting down >> ColumnBuildTest::ValidDefaultValue [GOOD] >> ColumnBuildTest::Unlocking_Failed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 63012, MsgBus: 3148 2025-12-23T18:34:56.154682Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123709851242803:2258];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:56.166481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:34:56.198305Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ef1/r3tmp/tmp3CXGzD/pdisk_1.dat 2025-12-23T18:34:56.750838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:34:56.853206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:56.858678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:56.959646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2025-12-23T18:34:57.098871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:34:57.100043Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123709851242560:2081] 1766514896053955 != 1766514896053958 2025-12-23T18:34:57.133626Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:57.153503Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TServer::EnableGrpc on GrpcPort 63012, node 1 2025-12-23T18:34:57.467501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:57.467528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:57.467534Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:57.467631Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3148 TClient is connected to server localhost:3148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514897133 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:58.621968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:58.653775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:58.870760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:34:59.078444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:59.165041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:01.156926Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123709851242803:2258];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:01.157022Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:03.250293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123739916015525:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:03.250416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:03.250840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123739916015535:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:03.250888Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:03.696436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:03.806614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:03.881522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:03.912374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:03.941976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:03.992189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.045118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.116319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.238919Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123744210983725:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.239085Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.239467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123744210983730:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.239505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123744210983731:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:04.239759Z node 1 :KQP_WORKLOAD_SERVI ... our snapshot: [step: 1766514935668, txId: 281474976710715] shutting down 2025-12-23T18:35:35.682673Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514935668, txId: 281474976710716] shutting down 2025-12-23T18:35:35.795785Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 145ms, session id ydb://session/3?node_id=2&id=ODdkZWU0YWQtOTBjOGQyNGItNGZhODVkOWQtYWQ2OWJlYWU= } 2025-12-23T18:35:35.841866Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 148ms, session id ydb://session/3?node_id=2&id=NGE5ZjJkZDAtYzdjMzg4MjYtMmE1Mzg0YmMtMTlmOTllNGQ= } 2025-12-23T18:35:36.010934Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 151ms, session id ydb://session/3?node_id=2&id=MjIzYzYwMmEtZTViZDM5ZDgtMTFjMmM4MDktZjgxNmQ1Yzg= } 2025-12-23T18:35:36.113121Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514936109, txId: 281474976710721] shutting down 2025-12-23T18:35:36.116474Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514936109, txId: 281474976710720] shutting down 2025-12-23T18:35:36.122499Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514936116, txId: 281474976710719] shutting down 2025-12-23T18:35:36.234484Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 154ms, session id ydb://session/3?node_id=2&id=ZjcxZDMxMWQtZDM0Nzc1ODEtNjZmN2Y2YjUtOGM1ZDBmNDk= } 2025-12-23T18:35:36.421335Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 157ms, session id ydb://session/3?node_id=2&id=MTQyYmYxNDMtM2YxMTRhNDUtZWQ2MTY2MDQtOTU0ZDcxOWQ= } 2025-12-23T18:35:36.469843Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514936326, txId: 281474976710725] shutting down 2025-12-23T18:35:36.473361Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514936326, txId: 281474976710726] shutting down 2025-12-23T18:35:36.657141Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 160ms, session id ydb://session/3?node_id=2&id=OWE1YjI1YzctM2IyYmQ3MWUtNzg1NWE2NDItNjQ5ZGQwZGI= } 2025-12-23T18:35:36.670576Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 163ms, session id ydb://session/3?node_id=2&id=Mjg4ZTczZWItMTdmMjllZGQtZmZiODljOS02ODg0NTk1YQ== } 2025-12-23T18:35:36.813219Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514936809, txId: 281474976710729] shutting down 2025-12-23T18:35:36.817407Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514936809, txId: 281474976710730] shutting down 2025-12-23T18:35:36.846115Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 166ms, session id ydb://session/3?node_id=2&id=N2JjNjMxZmUtZjE4YjM2OWQtZTBiMWUxNDQtMzdkZWMwZmQ= } 2025-12-23T18:35:36.985460Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:35:36.985491Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:36.988928Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514936977, txId: 281474976710733] shutting down 2025-12-23T18:35:37.023138Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 169ms, session id ydb://session/3?node_id=2&id=OWM3MmQyMWUtZTEwMmU0M2YtOTFhZjdlYWYtYjExMjRlYzE= } 2025-12-23T18:35:37.210050Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 172ms, session id ydb://session/3?node_id=2&id=Yjc2Yjg0YjctYmFhZTc2ODktZDM4Y2NmNjgtZGE0YTcwMTE= } 2025-12-23T18:35:37.425997Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 175ms, session id ydb://session/3?node_id=2&id=NDkxMmRjYTktYjA3MDM4MzctZjhjZjY5ODItYWE5MDQyYjY= } 2025-12-23T18:35:37.449232Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514937376, txId: 281474976710736] shutting down 2025-12-23T18:35:37.450817Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514937376, txId: 281474976710735] shutting down 2025-12-23T18:35:37.634241Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 178ms, session id ydb://session/3?node_id=2&id=YTlkZjVjZDItNzZhN2UyMTMtZDVhNDAzMDMtZTUwMDk4M2Q= } 2025-12-23T18:35:37.706399Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514937712, txId: 281474976710739] shutting down 2025-12-23T18:35:37.718244Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514937712, txId: 281474976710740] shutting down 2025-12-23T18:35:37.778465Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 181ms, session id ydb://session/3?node_id=2&id=Nzk2MDVhOGQtYjE4MmYxMmMtYmU4NjUyYzAtMjBmYTFhMTg= } 2025-12-23T18:35:37.935085Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514937936, txId: 281474976710743] shutting down 2025-12-23T18:35:37.936380Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514937936, txId: 281474976710744] shutting down 2025-12-23T18:35:37.983406Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 184ms, session id ydb://session/3?node_id=2&id=ZGZhYTZiMjItYjYwMWRiMzAtY2E5MmFlMWItM2UyMDk1YmQ= } 2025-12-23T18:35:38.118983Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 187ms, session id ydb://session/3?node_id=2&id=MjY1ODE3ZWQtY2EzZjAyYWMtNzFjZjMxNTItZGZlMWY5 } 2025-12-23T18:35:38.243269Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514938258, txId: 281474976710747] shutting down 2025-12-23T18:35:38.246308Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514938258, txId: 281474976710748] shutting down 2025-12-23T18:35:38.510643Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 193ms, session id ydb://session/3?node_id=2&id=MzZjZTg1MzMtMzM4NmIwNmUtYWY0MjUxYjQtM2YxNmRjOWI= } 2025-12-23T18:35:38.526355Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514938538, txId: 281474976710751] shutting down 2025-12-23T18:35:38.677646Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 196ms, session id ydb://session/3?node_id=2&id=ZjE3MzQyNGMtZDlmZjZkN2MtMzEyZjExZjUtNDcxNDkyNjM= } 2025-12-23T18:35:38.739912Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514938755, txId: 281474976710753] shutting down 2025-12-23T18:35:38.893680Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 199ms, session id ydb://session/3?node_id=2&id=ZGVhNjdmMTktYmIwMTdhYmYtMWQ1OTRiNTQtNzRlZjQ3NGI= } 2025-12-23T18:35:38.971649Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514938993, txId: 281474976710755] shutting down 2025-12-23T18:35:39.074163Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 202ms, session id ydb://session/3?node_id=2&id=OTZhMGI4ZTYtNGE0ZmQ3MDQtMTdjNGUxNTMtNmUwNjAyN2Q= } 2025-12-23T18:35:39.236604Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514939245, txId: 281474976710757] shutting down 2025-12-23T18:35:39.240610Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514939245, txId: 281474976710758] shutting down 2025-12-23T18:35:39.504514Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 208ms, session id ydb://session/3?node_id=2&id=ZWQzNTYyZTUtMTYzODZmMjktMTFmODE2MDktNDI1NTE3NjE= } 2025-12-23T18:35:39.561092Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514939581, txId: 281474976710761] shutting down 2025-12-23T18:35:39.718843Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 211ms, session id ydb://session/3?node_id=2&id=MThmMzBjMDYtOGMzODQ0OWMtOGViNTNjOWMtNzM5NGYwZDI= } 2025-12-23T18:35:39.871351Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514939875, txId: 281474976710763] shutting down 2025-12-23T18:35:39.879057Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514939875, txId: 281474976710764] shutting down |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> ColumnBuildTest::Cancelling [GOOD] >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlterMainTable_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:35:34.637265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:34.637355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:34.637403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:34.637436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:34.637469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:34.637496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:34.637635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:34.637721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:34.638525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:34.638821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:34.740851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:34.740914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:34.755333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: 
TxInitSchema.Complete 2025-12-23T18:35:34.755588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:34.755823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:34.769122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:34.769385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:34.770107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:34.770344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:34.775525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:34.775744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:34.777143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:34.777229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:34.777413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:34.777480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:34.777536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:34.777671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:35.050413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.054450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.054630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.054717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.055139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.055210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.055292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.055409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.055513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.055580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.055653Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.055734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.055833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.055947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.056039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
57 ready parts: 1/1 2025-12-23T18:35:43.441019Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2025-12-23T18:35:43.441053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2025-12-23T18:35:43.441092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2025-12-23T18:35:43.441186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1698:3564] message: TxId: 281474976730757 2025-12-23T18:35:43.441256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2025-12-23T18:35:43.441300Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2025-12-23T18:35:43.441338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976730757:0 2025-12-23T18:35:43.441414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2025-12-23T18:35:43.456109Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2025-12-23T18:35:43.456208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976730757 2025-12-23T18:35:43.456298Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730757 2025-12-23T18:35:43.456451Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2025-12-23T18:35:43.464898Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2025-12-23T18:35:43.465051Z node 2 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:35:43.465108Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2025-12-23T18:35:43.468193Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2025-12-23T18:35:43.468352Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:35:43.468434Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2025-12-23T18:35:43.468619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:35:43.468667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1753:3608] TestWaitNotification: OK eventTxId 105 
2025-12-23T18:35:43.469467Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2025-12-23T18:35:43.469806Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2025-12-23T18:35:43.475061Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:35:43.475398Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 376us result status StatusSuccess 2025-12-23T18:35:43.475957Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 >> TStreamingQueryTest::ParallelCreateSameStreamingQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Initiating_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:35:33.891211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:33.891317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:33.891364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:33.891403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:33.891435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:33.891460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:33.891511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-12-23T18:35:33.891598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:33.892440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:33.892725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:34.008443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:34.008511Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:34.040276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:35:34.040432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:34.040622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:34.074887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:34.075139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:34.075867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:34.076108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:34.083370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:34.083564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:34.084961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:34.085029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:34.085188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:34.085269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:34.085321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:34.085451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:34.235395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.236979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.237136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.237236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.237323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.237382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.237447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.237548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.237640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.237710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.237782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.237858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.237954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.238048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:34.238134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1474976730758 ready parts: 1/1 2025-12-23T18:35:43.396141Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2025-12-23T18:35:43.396169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2025-12-23T18:35:43.396200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2025-12-23T18:35:43.396272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1752:3618] message: TxId: 281474976730758 2025-12-23T18:35:43.396315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2025-12-23T18:35:43.396344Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2025-12-23T18:35:43.396373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976730758:0 2025-12-23T18:35:43.396455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2025-12-23T18:35:43.399833Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2025-12-23T18:35:43.399901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976730758 2025-12-23T18:35:43.399966Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730758 2025-12-23T18:35:43.400066Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2025-12-23T18:35:43.403072Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2025-12-23T18:35:43.403233Z node 2 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:35:43.403291Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2025-12-23T18:35:43.411636Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2025-12-23T18:35:43.411825Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:35:43.411905Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2025-12-23T18:35:43.412098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:35:43.412148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter 
[2:1812:3666] TestWaitNotification: OK eventTxId 105 2025-12-23T18:35:43.412804Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2025-12-23T18:35:43.413114Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2025-12-23T18:35:43.414031Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:35:43.414241Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 232us result status StatusSuccess 2025-12-23T18:35:43.414665Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 30127, MsgBus: 26656 2025-12-23T18:35:31.062051Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123858016308689:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:31.062223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:31.185359Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004be4/r3tmp/tmptT7vHP/pdisk_1.dat 2025-12-23T18:35:31.880955Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:31.884358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:31.884445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:31.948045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:32.063005Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123858016308458:2081] 1766514931016296 != 1766514931016299 2025-12-23T18:35:32.063116Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:32.088769Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 
30127, node 1 2025-12-23T18:35:32.110969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:32.307511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:32.307530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:32.307547Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:32.307627Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26656 TClient is connected to server localhost:26656 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514932063 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:33.277871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:33.306263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:33.327052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:33.679887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:33.986753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:34.090216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:36.066865Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123858016308689:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:36.066950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:37.577798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123883786114132:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:37.577917Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:37.579219Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123883786114141:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:37.579290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.385411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.451276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.489573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.530466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.583756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.654242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.723694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.791964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:39.001205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123888081082322:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:39.001284Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:39.001671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123888081082327:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:39.001712Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123888081082328:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:39.002044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:39.004985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:39.039604Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123888081082331:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:39.126364Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123892376049683:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:35:42.391443Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:35:42.391569Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007CC8C642C5F8 2025-12-23T18:35:42.391615Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7587123905260951902:2544], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n ", keepInCache: 1, split: 0{ TraceId: 01kd67xrdg44qmebaf2sg8fvrx, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg3ZDc3YTAtZjI5NzczNmMtNWExYTY4NmQtMTQ1MWU2Mjg=, PoolId: default, IsStreamingQuery: 0} 2025-12-23T18:35:42.391772Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:35:42.391834Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7587123905260951902:2544], queueSize: 1 2025-12-23T18:35:42.392433Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n 2025-12-23T18:35:42.392481Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7587123905260951902:2544], compileActor: [1:7587123905260951911:2550] 2025-12-23T18:35:42.392531Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2025-12-23T18:35:42.392569Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7587123905260951911:2550], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n ", startTime: 2025-12-23T18:35:42.392518Z 2025-12-23T18:35:42.682896Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7587123905260951911:2550]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1766514942","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\\n ","query_type":"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (0, +∞)\"],\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (0, +∞)\"],\"type\":\"Scan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (0, +∞)\"],\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"24ec43cc-fb0dd9ae-76cceb64-3583e","version":"1.0"} 2025-12-23T18:35:42.683411Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7587123905260951911:2550], duration: 0.290862s 2025-12-23T18:35:42.683439Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7587123905260951911:2550], owner: [1:7587123883786114091:2386], status: SUCCESS, issues: , uid: 24ec43cc-fb0dd9ae-76cceb64-3583e 2025-12-23T18:35:42.685519Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7587123905260951902:2544], status: SUCCESS, compileActor: [1:7587123905260951911:2550] 2025-12-23T18:35:42.685693Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:35:42.685795Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7587123905260951902:2544], queryUid: 24ec43cc-fb0dd9ae-76cceb64-3583e, status:SUCCESS |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest 
|92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |92.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> TStreamingQueryTest::ParallelCreateStreamingQuery >> ColumnBuildTest::DoNotRestoreDeletedRows [GOOD] >> ColumnBuildTest::DisabledAndEnabledFlag >> KqpScripting::SecondaryIndexes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 28386, MsgBus: 65271 2025-12-23T18:35:21.911096Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123815246833978:2132];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:21.912629Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:21.957471Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ed9/r3tmp/tmpsOYXop/pdisk_1.dat 2025-12-23T18:35:22.531396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:22.531485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:22.535666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:22.540341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28386, node 1 2025-12-23T18:35:22.826593Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123815246833885:2081] 1766514921817926 != 1766514921817929 2025-12-23T18:35:22.978105Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:22.988737Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:22.991158Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:23.058178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:23.058210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:23.058225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-12-23T18:35:23.058366Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:65271 TClient is connected to server localhost:65271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514923054 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:24.146054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:24.159024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:24.174201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:24.444830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:24.723601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:24.816150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:26.902953Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123815246833978:2132];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:26.903030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:27.607670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123841016639558:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:27.607774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:27.611015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123841016639568:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:27.611119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:28.270319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:28.327020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:28.408482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:28.464326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:28.518233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:28.599382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:28.659691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:28.743561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:28.853976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123845311607742:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:28.854061Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:28.855158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123845311607748:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:28.855205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123845311607747:2489], DatabaseId: /Root, PoolId: defau ... > Connecting 2025-12-23T18:35:32.923451Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26516, node 2 2025-12-23T18:35:33.107401Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:33.107424Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:33.107431Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:33.107513Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:35:33.146181Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4073 2025-12-23T18:35:33.650990Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:4073 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514932973 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:33.776441Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:33.794400Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:33.809660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:33.962959Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:34.246683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:34.351908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:37.586851Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123861355965199:2128];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:37.586926Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:38.003498Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123882830803468:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.003575Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.004048Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123887125770774:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.004093Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.181448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.233728Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.292208Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.352591Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.439227Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.499746Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.570603Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.675788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.823426Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123887125771644:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.823562Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.823945Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123887125771649:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.823987Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123887125771650:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.824019Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.829810Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:38.884782Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123887125771653:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:38.946252Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123887125771706:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:35:43.059633Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766514943074, txId: 281474976710673] shutting down |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |92.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> TStreamingQueryTest::AlterStreamingQuery >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 >> TStreamingQueryTest::DropStreamingQueryTwice [GOOD] >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |92.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ydb_stress_tool |92.7%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> TStreamingQueryTest::ParallelCreateSameStreamingQuery [GOOD] >> TestYmqHttpProxy::TestCreateQueue [GOOD] >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest [GOOD] >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery [GOOD] >> TStreamingQueryTest::ParallelCreateStreamingQuery [GOOD] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists [GOOD] >> TestKinesisHttpProxy::GoodRequestPutRecords ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 20109, MsgBus: 7995 2025-12-23T18:35:17.743447Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123796593365514:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:17.744051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005edb/r3tmp/tmpThGi4a/pdisk_1.dat 
2025-12-23T18:35:18.218982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:18.247390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:18.247492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:18.316513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:18.486864Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123796593365469:2081] 1766514917657241 != 1766514917657244 2025-12-23T18:35:18.500185Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:18.518658Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 20109, node 1 2025-12-23T18:35:18.795353Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:18.867357Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:18.867378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:18.867384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:18.867487Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7995 TClient is connected to server localhost:7995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514918560 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:20.106396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:35:20.112626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:20.208020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:20.494601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:20.835615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:21.018490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:22.730145Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123796593365514:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:22.730215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:23.143988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123822363171128:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.144151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.145967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123822363171138:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.146051Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:23.652300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.693722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.737783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.786838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.835557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.919390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:23.995750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.077445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:24.235684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123826658139310:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.235791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.236168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123826658139315:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.236201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123826658139316:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:24.236527Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [Work ... _CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28975 2025-12-23T18:35:32.835701Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28975 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514932245 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:33.069052Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:33.083363Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:35:33.098672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:33.212501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:33.447241Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:33.569728Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:36.791517Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123860006096963:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:36.791589Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:37.491778Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123885775902574:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:37.492201Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:37.495119Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123885775902584:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:37.495210Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:37.564657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:37.613256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:37.648676Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:37.699572Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:37.751046Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:37.805486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:37.866116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:37.960194Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:38.073961Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123890070870748:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.074063Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.074306Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123890070870753:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.074384Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123890070870754:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.074507Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.078620Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:38.097159Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123890070870757:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:35:38.186710Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123890070870810:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:35:41.283626Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:41.349028Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:41.431571Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> Yq_1::DescribeConnection >> Yq_1::Basic >> Yq_1::ModifyConnections >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName >> Yq_1::ListConnections >> TestYmqHttpProxy::TestSendMessage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelCreateSameStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:35:46.208404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:46.208503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:46.208549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:46.208586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:46.208626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:46.208651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:46.208705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:46.208797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:46.209693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:46.210009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:46.301279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:46.301360Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:46.308429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:35:46.308732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:46.308973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:46.314106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:46.314332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:46.315178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:46.315473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:46.318048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:46.318251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:46.319697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:46.319775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:46.319933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:46.319982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:46.320025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:46.320167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:46.480762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.481884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.482964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
escribe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:47.509932Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 221us result status StatusSuccess 2025-12-23T18:35:47.510260Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:47.510798Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:47.510966Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 248us result status StatusSuccess 2025-12-23T18:35:47.511290Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 101 2025-12-23T18:35:47.515610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:35:47.515681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-12-23T18:35:47.515797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:35:47.515819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-12-23T18:35:47.515864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:35:47.515886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:35:47.516453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:35:47.516717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:35:47.516757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:707:2697] 2025-12-23T18:35:47.516976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:35:47.517049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:35:47.517097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:35:47.517146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:707:2697] 2025-12-23T18:35:47.517277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:35:47.517320Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:707:2697] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-12-23T18:35:47.517841Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:47.518042Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 233us result status StatusSuccess 2025-12-23T18:35:47.518363Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2025-12-23T18:35:47.521800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "NilNoviSubLuna" } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:35:47.522087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 104:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery FailOnExist: false CreateStreamingQuery { Name: "NilNoviSubLuna" } 2025-12-23T18:35:47.522169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] TCreateStreamingQuery Propose: opId# 104:0, path# /MyRoot/NilNoviSubLuna 
2025-12-23T18:35:47.522289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:35:47.525177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-12-23T18:35:47.525416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), operation: CREATE STREAMING QUERY, path: NilNoviSubLuna TestModificationResult got TxId: 104, wait until txId: 104 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> TStreamingQueryTest::AlterStreamingQuery [GOOD] >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict >> ColumnBuildTest::Unlocking_Failed [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [GOOD] >> Yq_1::Basic_Null >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest [GOOD] Test command err: 2025-12-23T18:35:33.207279Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2025-12-23T18:35:33.366200Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:35:33.366285Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:35:33.366347Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:33.366422Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:182:2057] 
recipient: [1:14:2061] 2025-12-23T18:35:33.452366Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:181:2193], now have 1 active actors on pipe 2025-12-23T18:35:33.452523Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:35:33.504249Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2025-12-23T18:35:33.504464Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:33.506074Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2025-12-23T18:35:33.506266Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:35:33.506351Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:35:33.506871Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:35:33.507268Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2025-12-23T18:35:33.509954Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:35:33.510025Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2025-12-23T18:35:33.510075Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:188:2142] 2025-12-23T18:35:33.510126Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:35:33.519659Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:35:33.521154Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:35:33.521238Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:35:33.521291Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:35:33.521344Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:35:33.521380Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:33.521427Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:35:33.521498Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-12-23T18:35:33.521552Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:35:33.521611Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:35:33.521650Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:35:33.521690Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:35:33.521896Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:35:33.521939Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-12-23T18:35:33.522009Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:35:33.522247Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:35:33.522512Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:189:2142] 2025-12-23T18:35:33.531113Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:35:33.531186Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2025-12-23T18:35:33.531224Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:189:2142] 2025-12-23T18:35:33.531264Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:35:33.532533Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:35:33.533639Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2025-12-23T18:35:33.533680Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:35:33.533710Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:35:33.533765Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:35:33.533795Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:35:33.533846Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:35:33.533896Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2025-12-23T18:35:33.533927Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2025-12-23T18:35:33.533953Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:35:33.533980Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2025-12-23T18:35:33.534013Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2025-12-23T18:35:33.534167Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:35:33.534206Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user __ydb_compaction_consumer readTimeStamp for offset 0 
initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-12-23T18:35:33.534273Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:35:33.534485Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:35:33.534712Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:35:33.534960Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:35:33.535098Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937 ... wer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.463612Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.463642Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.463674Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.463702Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.463737Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.463770Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 493288 from pos 0 cbcount 1 2025-12-23T18:35:47.463942Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 0 parts 16 suffix '0' 2025-12-23T18:35:47.463996Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 3 partno 16 count 0 parts 16 suffix '0' 2025-12-23T18:35:47.464040Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72057594037927937' partition 0 offset 3 partno 32 count 1 parts 8 suffix '0' 2025-12-23T18:35:47.464166Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2025-12-23T18:35:47.464208Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2025-12-23T18:35:47.464254Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 3:0 isTruncatedBlob 1 2025-12-23T18:35:47.492335Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 3:0 isTruncatedBlob 1 hasNonZeroParts 1 isMiddlePartOfMessage 0 2025-12-23T18:35:47.492495Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:350: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Send EvRead (Compact state) from offset: 3:16 2025-12-23T18:35:47.492913Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 28 Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer offset 3 partno 16 count 4294967295 size 4294967295 endOffset 4 max time lag 0ms effective offset 3 2025-12-23T18:35:47.493424Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 28 added 2 blobs, size 12781161 count 1 last offset 3, current partition end offset: 4 2025-12-23T18:35:47.493477Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 28. Send blob request. 2025-12-23T18:35:47.493584Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 16 count 0 parts_count 16 source 0 size 8191635 accessed 1 times before, last time 1970-01-01T00:00:00.000000Z 2025-12-23T18:35:47.493636Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 32 count 1 parts_count 8 source 1 size 4589526 accessed 4 times before, last time 1970-01-01T00:00:00.000000Z 2025-12-23T18:35:47.493692Z node 3 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 28. All 2 blobs are from cache. 2025-12-23T18:35:47.493770Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 3 partno 16 count 0 parts 16 suffix '0' 2025-12-23T18:35:47.493816Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72057594037927937' partition 0 offset 3 partno 32 count 1 parts 8 suffix '0' 2025-12-23T18:35:47.493894Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2025-12-23T18:35:47.494039Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 0 count 0 size 8191611 from pos 0 cbcount 16 2025-12-23T18:35:47.494192Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.494229Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.494262Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.494296Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.494332Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.494365Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.494397Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.494432Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-12-23T18:35:47.494465Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 493288 from pos 0 cbcount 1 2025-12-23T18:35:47.494587Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2025-12-23T18:35:47.494625Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2025-12-23T18:35:47.494667Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 3:16 isTruncatedBlob 1 2025-12-23T18:35:47.509992Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 3:16 isTruncatedBlob 1 hasNonZeroParts 1 isMiddlePartOfMessage 1 2025-12-23T18:35:47.538560Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000000_00032_0000000001_00015 2025-12-23T18:35:47.538723Z node 3 :PERSQUEUE DEBUG: partition.cpp:4522: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2025-12-23T18:35:47.539001Z node 3 :PERSQUEUE DEBUG: partition.cpp:4530: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2025-12-23T18:35:47.539055Z node 3 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:35:47.539177Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 0 size 271 2025-12-23T18:35:47.539256Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 32 count 1 size 187 2025-12-23T18:35:47.539339Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 0 size 8191590 2025-12-23T18:35:47.539371Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000002_00016_0000000000_00016(+) to d0000000000_00000000000000000002_00016_0000000000_00016(+) 2025-12-23T18:35:47.539400Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000000_00032_0000000001_00015(+) to d0000000000_00000000000000000000_00032_0000000001_00015(+) 2025-12-23T18:35:47.575621Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 0 size 271 actorID [3:139:2142] 2025-12-23T18:35:47.575700Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 2 count 1 size 4589526 actorID [3:139:2142] is actual 1 2025-12-23T18:35:47.575798Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 1 size 187 actorID [3:139:2142] 2025-12-23T18:35:47.575836Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 3 count 0 size 8191635 actorID [3:139:2142] is actual 1 2025-12-23T18:35:47.575903Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 3 count 0 size 8191590 actorID [3:139:2142] 2025-12-23T18:35:47.576017Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:35:47.576067Z node 3 :PERSQUEUE DEBUG: partition.cpp:2161: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2025-12-23T18:35:47.576110Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response 2025-12-23T18:35:47.576559Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 2 partno 32 count 1 parts 8 suffix '0' size 4589526 2025-12-23T18:35:47.578452Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 0 parts 16 suffix '0' size 8191635 2025-12-23T18:35:47.588246Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 2 partno 0 count 0 parts 16 suffix '0' size 271 2025-12-23T18:35:47.588349Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 2 partno 32 count 1 parts 8 suffix '0' size 187 2025-12-23T18:35:47.588396Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. 
Tablet '72057594037927937' partition 0 offset 3 partno 0 count 0 parts 16 suffix '0' size 8191590 2025-12-23T18:35:47.600343Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [3:400:2375], now have 1 active actors on pipe 2025-12-23T18:35:47.600517Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:35:47.600568Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-12-23T18:35:47.600702Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 3 for user __ydb_compaction_consumer 2025-12-23T18:35:47.601087Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [3:402:2377], now have 1 active actors on pipe Got start offset = 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:35:46.169084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:46.169204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:46.169246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:46.169284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:46.169327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:46.169356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:46.169412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:46.169517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:46.170406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:46.170722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:46.264970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 
2025-12-23T18:35:46.265066Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:46.271505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:35:46.271762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:46.271919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:46.275277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:46.275493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:46.276247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:46.276497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:46.278635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:46.278868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:46.280334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:46.280417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:46.280586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:46.280637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:46.280683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:46.280832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:46.453495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.458458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.458669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.458766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.458837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.458903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.458998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.459088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.459175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.459240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.459322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.459398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.459493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.459577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.459663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard: 72057594046678944 2025-12-23T18:35:47.821126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-12-23T18:35:47.821267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:35:47.822299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:35:47.822407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:35:47.822462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:35:47.822523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-12-23T18:35:47.822581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-12-23T18:35:47.824281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 2 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:35:47.824381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 2 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:35:47.824412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:35:47.824457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 2 2025-12-23T18:35:47.824514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:35:47.824606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-12-23T18:35:47.829003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2025-12-23T18:35:47.829209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000040 
FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000040 2025-12-23T18:35:47.831449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:35:47.831756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:35:47.832100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:47.832228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:47.832289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 105:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000040, at schemeshard: 72057594046678944 2025-12-23T18:35:47.832429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2025-12-23T18:35:47.832602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:35:47.832714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 FAKE_COORDINATOR: Erasing txId 105 2025-12-23T18:35:47.835561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:47.835623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:47.835848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2025-12-23T18:35:47.836003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:47.836048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:851:2809], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-12-23T18:35:47.836093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:851:2809], at schemeshard: 72057594046678944, txId: 105, path id: 40 2025-12-23T18:35:47.836431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:35:47.836487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2025-12-23T18:35:47.836596Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:35:47.836640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:35:47.836707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:35:47.836748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:35:47.836793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-12-23T18:35:47.836842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:35:47.836881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2025-12-23T18:35:47.836914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:0 2025-12-23T18:35:47.836987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:35:47.837030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-12-23T18:35:47.837068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-12-23T18:35:47.837097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2025-12-23T18:35:47.837837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:35:47.837951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:35:47.837989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:35:47.838028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-12-23T18:35:47.838075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-12-23T18:35:47.839763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:35:47.839848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:35:47.839917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:35:47.839949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2025-12-23T18:35:47.840014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:35:47.840100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-12-23T18:35:47.845544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:35:47.845964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelCreateStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:35:46.422712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:46.422829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:46.422886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:46.422923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:46.422966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:46.422997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:46.423054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:46.423161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:46.424087Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:46.424399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:46.515864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:46.515945Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:46.522008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:35:46.522262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:46.522403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:46.525837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:46.526045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:46.526799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:46.527029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:46.529045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:46.529244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:46.530579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:46.530652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:46.530815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:46.530859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:46.530911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:46.531048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:46.707253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.708346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.708528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.708616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.708689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.708757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.708852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.708948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.709055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour 
} } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.709140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.709230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.709306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.709399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.709486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:46.709590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
th "/MyRoot/DirA/MyStreamingQuery1" took 244us result status StatusSuccess 2025-12-23T18:35:47.846536Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery1" PathDescription { Self { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery1" Properties { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:47.847362Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:47.847545Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery2" took 202us result status StatusSuccess 2025-12-23T18:35:47.847859Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery2" PathDescription { Self { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 
PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery2" Properties { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:47.848546Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:47.848711Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 169us result status StatusSuccess 2025-12-23T18:35:47.849203Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:47.849768Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:47.849980Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery1" took 183us result status StatusSuccess 2025-12-23T18:35:47.850259Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery1" PathDescription { Self { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery1" Properties { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:47.850914Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:47.851112Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery2" took 199us result status StatusSuccess 2025-12-23T18:35:47.851403Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery2" PathDescription { Self { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery2" Properties { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:35:45.510907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:45.511015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:45.511069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:45.511106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:45.511143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:45.511168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:45.511224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:45.511306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:45.512131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:45.512437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:45.620170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:45.620248Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:45.635739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:35:45.636011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:45.636163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:45.640971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:45.641214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:45.642114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:45.642374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:45.644567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:45.644763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:45.646070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:45.646128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:45.646268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:45.646314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:45.646363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:45.646535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:45.809440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:35:45.810467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.810596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.810668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.810732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.810826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.810887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.810977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.811045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.811113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.811191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.811293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.811399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.811495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:45.811598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:35:48.112150Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:35:48.113534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:35:48.113591Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:35:48.113622Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:35:48.114815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:35:48.114886Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:35:48.114936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:35:48.114988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:35:48.115105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:35:48.118212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:35:48.118348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:35:48.118753Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:48.118902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:48.118966Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:35:48.119147Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:35:48.119183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-12-23T18:35:48.119471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:35:48.119528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:35:48.121023Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:48.121073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:48.121270Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:48.121304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:35:48.121629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:35:48.121669Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:35:48.121751Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:35:48.121776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:35:48.121804Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:35:48.121829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:35:48.121857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:35:48.121889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:35:48.121916Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:35:48.121947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:35:48.122015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:35:48.122054Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:35:48.122090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:35:48.122613Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:35:48.122711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:35:48.122776Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:35:48.122828Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:35:48.122882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:35:48.122959Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:35:48.126243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:35:48.126563Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:35:48.126927Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:671:2661] Bootstrap 2025-12-23T18:35:48.127687Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:671:2661] Become StateWork (SchemeCache [2:676:2666]) 2025-12-23T18:35:48.129496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropStreamingQuery Drop { Name: "MyStreamingQuery" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:35:48.129611Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_streaming_query.cpp:182: [72057594046678944] TDropStreamingQuery Propose: opId# 101:0, path# /MyRoot/MyStreamingQuery 2025-12-23T18:35:48.129691Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-12-23T18:35:48.130481Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:671:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:35:48.134603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:48.134847Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check 
failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: DROP STREAMING QUERY, path: MyStreamingQuery 2025-12-23T18:35:48.135487Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:35:48.135710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:35:48.135751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:35:48.136101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:35:48.136199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:35:48.136237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:686:2676] TestWaitNotification: OK eventTxId 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:33:24.203964Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:33:24.337014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:33:24.349712Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:33:24.350139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:33:24.350230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:24.701522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:24.701693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:24.867894Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514800922628 != 1766514800922632 2025-12-23T18:33:24.888159Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:24.940170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:25.065661Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ccb/r3tmp/tmpGtVUFE/pdisk_1.dat 2025-12-23T18:33:25.456774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:33:25.476325Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:25.600075Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:33:25.600384Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:33:25.600539Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:33:25.792802Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:33:25.792952Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:33:25.793643Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:33:25.793778Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:33:25.794133Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:33:25.794295Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:33:25.794479Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:33:25.794953Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:33:25.796845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:25.798087Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:33:25.798164Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:33:25.861442Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:33:25.862568Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:33:25.867082Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:33:25.867460Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:33:25.970122Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:33:25.974209Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:33:25.974411Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:33:25.976322Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:33:25.976424Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:33:25.976496Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:33:25.976886Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:33:25.977092Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:33:25.977228Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:33:25.977813Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:33:26.038718Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:33:26.039429Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:33:26.039578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:33:26.039663Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:33:26.039702Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:33:26.039741Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:33:26.040021Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:33:26.040075Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:33:26.040545Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:33:26.040691Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:33:26.041177Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:33:26.041226Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:33:26.041272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:33:26.041314Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:33:26.041349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:33:26.041380Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:33:26.041435Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:33:26.041528Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:33:26.041565Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:33:26.041625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:33:26.041716Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:33:26.041767Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:33:26.041882Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:33:26.042164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:33:26.042242Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2 ... node 13 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2025-12-23T18:35:43.923189Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:1264:3067]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T18:35:45.248690Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:67:2114] Handle TEvExecuteKqpTransaction 2025-12-23T18:35:45.248814Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:67:2114] TxId# 281474976715671 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-12-23T18:35:45.254112Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1846:3544], Recipient [13:993:2848]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-12-23T18:35:45.254370Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-12-23T18:35:45.254469Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037889 CompleteEdge# v9001/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T18:35:45.254561Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037889 changed HEAD read to non-repeatable v10000/18446744073709551615 2025-12-23T18:35:45.254685Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-12-23T18:35:45.254935Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2025-12-23T18:35:45.255024Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-12-23T18:35:45.255109Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-12-23T18:35:45.255176Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-12-23T18:35:45.255249Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2025-12-23T18:35:45.255325Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2025-12-23T18:35:45.255359Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-12-23T18:35:45.255386Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-12-23T18:35:45.255415Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-12-23T18:35:45.255606Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-12-23T18:35:45.256121Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[13:1846:3544], 0} after executionsCount# 1 2025-12-23T18:35:45.256250Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[13:1846:3544], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-12-23T18:35:45.256418Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[13:1846:3544], 0} finished in read 2025-12-23T18:35:45.256602Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2025-12-23T18:35:45.256639Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-12-23T18:35:45.256673Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-12-23T18:35:45.256732Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-12-23T18:35:45.256795Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2025-12-23T18:35:45.256824Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-12-23T18:35:45.256873Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2025-12-23T18:35:45.256960Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-12-23T18:35:45.257190Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-12-23T18:35:45.258873Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1846:3544], Recipient [13:993:2848]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:35:45.258974Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 21 } } 2025-12-23T18:35:46.035938Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:67:2114] Handle TEvExecuteKqpTransaction 2025-12-23T18:35:46.036071Z node 13 
:TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:67:2114] TxId# 281474976715672 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-12-23T18:35:46.044796Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1878:3570], Recipient [13:1264:3067]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-12-23T18:35:46.045356Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-12-23T18:35:46.045543Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037891 CompleteEdge# v9001/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T18:35:46.045786Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037891 changed HEAD read to non-repeatable v10000/18446744073709551615 2025-12-23T18:35:46.046020Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit CheckRead 2025-12-23T18:35:46.046354Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2025-12-23T18:35:46.046541Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit CheckRead 2025-12-23T18:35:46.046632Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-12-23T18:35:46.046705Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit BuildAndWaitDependencies 2025-12-23T18:35:46.046801Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037891 2025-12-23T18:35:46.046891Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2025-12-23T18:35:46.046926Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-12-23T18:35:46.046954Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit ExecuteRead 2025-12-23T18:35:46.046982Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit ExecuteRead 2025-12-23T18:35:46.047924Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037891 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-12-23T18:35:46.048539Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037891 Complete read# {[13:1878:3570], 0} after executionsCount# 1 2025-12-23T18:35:46.048675Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037891 read iterator# {[13:1878:3570], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, 
hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-12-23T18:35:46.048851Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037891 read iterator# {[13:1878:3570], 0} finished in read 2025-12-23T18:35:46.048991Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2025-12-23T18:35:46.049027Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit ExecuteRead 2025-12-23T18:35:46.049086Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit CompletedOperations 2025-12-23T18:35:46.049140Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit CompletedOperations 2025-12-23T18:35:46.049214Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2025-12-23T18:35:46.049246Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit CompletedOperations 2025-12-23T18:35:46.049286Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037891 has finished 2025-12-23T18:35:46.049381Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-12-23T18:35:46.049607Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 2025-12-23T18:35:46.050807Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1878:3570], Recipient [13:1264:3067]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:35:46.050919Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037891 ReadCancel: { ReadId: 0 } { items { uint32_value: 10 } items { uint32_value: 110 } }, { items { uint32_value: 20 } items { uint32_value: 210 } } |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> TestYmqHttpProxy::TestReceiveMessage >> PrivateApi::PingTask >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 >> TestKinesisHttpProxy::MissingAction [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink >> IncrementalBackup::ComplexBackupSequenceWithDataVerification [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification >> ColumnBuildTest::DisabledAndEnabledFlag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Unlocking_Failed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:35:35.724217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, 
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:35.724311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:35.724360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:35.724398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:35.724438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:35.724462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:35.724531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:35.724634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:35.725469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:35.725754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:35.925173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:35.925243Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:35.946043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:35:35.946264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:35.946483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:35.987523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:35.987780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:35.988531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:35.988756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:35.997624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:35.997806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:35.999226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-12-23T18:35:35.999299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:35.999453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:35.999505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:35.999551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:35.999711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:36.321839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.323163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.323305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.323388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.323459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.323537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.323622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.323690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.323810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.323883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.323959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.324036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.324110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.324196Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:36.324281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 8.651859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976725761, at schemeshard: 72075186233409549 2025-12-23T18:35:48.651929Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-12-23T18:35:48.651969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976725761 2025-12-23T18:35:48.652013Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976725761 2025-12-23T18:35:48.652107Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1544:3415], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725761 2025-12-23T18:35:48.653721Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejection_Unlocking 2025-12-23T18:35:48.653837Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejection_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, 
LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1544:3415], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:35:48.653891Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2025-12-23T18:35:48.655563Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejected 2025-12-23T18:35:48.655689Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejected TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1544:3415], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:35:48.655728Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-12-23T18:35:48.655847Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:35:48.655892Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:1560:3431] TestWaitNotification: OK eventTxId 106 2025-12-23T18:35:48.656600Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-12-23T18:35:48.656987Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 Issues { message: "At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/ServerLessDB\' (id: [OwnerId: 72075186233409549, LocalPathId: 1])" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 Issues { message: "At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/ServerLessDB\' (id: [OwnerId: 72075186233409549, LocalPathId: 1])" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2025-12-23T18:35:48.657959Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:35:48.658194Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 260us result status StatusSuccess 2025-12-23T18:35:48.658639Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 
MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict [GOOD] >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> Yq_1::CreateQuery_With_Idempotency >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag [GOOD] >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> Yq_1::DeleteConnections >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::DisabledAndEnabledFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:35:35.186889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:35.186997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:35.187053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:35.187090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:35.187126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:35.187157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:35.187209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:35.187294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:35.188198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 
2025-12-23T18:35:35.188515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:35.321556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:35.321613Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:35.337498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:35:35.337713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:35.337934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:35.355808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:35.356064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:35.356804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:35.357046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:35.361612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:35.361831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:35.363152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:35.363216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:35.363388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:35.363457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:35.363502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:35.363619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:35.573046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.574158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { 
Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.574320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.574422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.583179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.583360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.583442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.583546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.583665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.583747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.583825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.583929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.584014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.584123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.584217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
18Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2025-12-23T18:35:49.915653Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730761:0 progress is 1/1 2025-12-23T18:35:49.915677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2025-12-23T18:35:49.915707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730761, ready parts: 1/1, is published: true 2025-12-23T18:35:49.915775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1555:3422] message: TxId: 281474976730761 2025-12-23T18:35:49.915817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2025-12-23T18:35:49.915849Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730761:0 2025-12-23T18:35:49.915875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976730761:0 2025-12-23T18:35:49.915949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2025-12-23T18:35:49.918588Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976730761 2025-12-23T18:35:49.918677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976730761 2025-12-23T18:35:49.918763Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730761 2025-12-23T18:35:49.918880Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1746:3600], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730761 2025-12-23T18:35:49.921196Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2025-12-23T18:35:49.921353Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1746:3600], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:35:49.921421Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-12-23T18:35:49.923716Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2025-12-23T18:35:49.923884Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1746:3600], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:35:49.923939Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2025-12-23T18:35:49.924100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:35:49.924168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1762:3616] TestWaitNotification: OK eventTxId 105 2025-12-23T18:35:49.924880Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2025-12-23T18:35:49.925311Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: 
TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2025-12-23T18:35:49.926197Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:35:49.926456Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 286us result status StatusSuccess 2025-12-23T18:35:49.926974Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 
MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:35:47.528921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:47.529014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:47.529058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:47.529093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:47.529154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:47.529207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:47.529272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:47.529338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:47.530199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:47.530500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:47.682505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:47.682599Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:47.709068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: 
TxInitSchema.Complete 2025-12-23T18:35:47.709294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:47.709517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:47.759254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:47.759495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:47.760282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:47.760526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:47.770269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:47.770488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:47.772354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:47.772428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:47.772576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:47.772641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:47.772760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:47.772931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:47.967788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.968955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.969122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.969252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.969330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.969398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.969484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.969552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.969651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.969719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.969793Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.969887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.969970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.970068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:47.970172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:35:50.671633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2025-12-23T18:35:50.672102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:35:50.672163Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:35:50.672277Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:35:50.672318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:35:50.672362Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:35:50.672399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:35:50.672444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:35:50.672494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:35:50.672532Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:35:50.672568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:35:50.672678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:35:50.672721Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-12-23T18:35:50.672763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-12-23T18:35:50.672792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-12-23T18:35:50.673511Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:35:50.673625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:35:50.673672Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:35:50.673715Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-12-23T18:35:50.673785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:35:50.674918Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:35:50.675009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:35:50.675057Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:35:50.675088Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:35:50.675139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:35:50.675219Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:35:50.678379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:35:50.679538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:35:50.679781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:35:50.679826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:35:50.680213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:35:50.680330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:35:50.680372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:703:2693] TestWaitNotification: OK eventTxId 101 2025-12-23T18:35:50.680895Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:50.681123Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/UniqueName" took 352us result status StatusSuccess 2025-12-23T18:35:50.681527Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-12-23T18:35:50.685042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterStreamingQuery CreateStreamingQuery { Name: "UniqueName" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:35:50.685264Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_streaming_query.cpp:208: [72057594046678944] TAlterStreamingQuery Propose: opId# 102:0, path# /MyRoot/UniqueName 2025-12-23T18:35:50.685414Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery, at schemeshard: 72057594046678944 2025-12-23T18:35:50.688388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 
72057594046678944 2025-12-23T18:35:50.688635Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery, operation: ALTER STREAMING QUERY, path: UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:35:50.688965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:35:50.689012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:35:50.689405Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:35:50.689513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:35:50.689561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:711:2701] TestWaitNotification: OK eventTxId 102 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 >> TestYmqHttpProxy::TestSendMessageFifoQueue >> TableWriter::Restore [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 >> Yq_1::CreateConnection_With_Existing_Name |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |92.7%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:35:35.259821Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:35:35.259954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:35.260006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:35:35.260044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:35:35.260082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:35:35.260113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:35:35.260168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:35.260256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:35.261176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:35.261500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:35.358323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:35.358369Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:35.392155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:35:35.392364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:35.392619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:35.423510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:35.423758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:35.424518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:35.424743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:35.435511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:35.435712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:35.437084Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:35.437152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:35.437351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:35.437420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:35.437474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:35.437610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:35.722460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.723529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.723665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.723758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.723827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.723887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.723951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.724038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.724121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.724193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.724272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.724349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.724429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.724507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:35.724591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... UG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976730758:0, at schemeshard: 72075186233409549 2025-12-23T18:35:50.848134Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976730758:0 ProgressState 2025-12-23T18:35:50.848233Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2025-12-23T18:35:50.848265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2025-12-23T18:35:50.848305Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2025-12-23T18:35:50.848332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2025-12-23T18:35:50.848365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2025-12-23T18:35:50.848456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2461:4220] message: TxId: 281474976730758 2025-12-23T18:35:50.848506Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2025-12-23T18:35:50.848540Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2025-12-23T18:35:50.848569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976730758:0 2025-12-23T18:35:50.848652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 12 2025-12-23T18:35:50.853322Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2025-12-23T18:35:50.853420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976730758 2025-12-23T18:35:50.853495Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976730758 2025-12-23T18:35:50.853616Z 
node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2025-12-23T18:35:50.864049Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking 2025-12-23T18:35:50.864253Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:35:50.864335Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-12-23T18:35:50.867152Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled 2025-12-23T18:35:50.867327Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , 
SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:35:50.867368Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-12-23T18:35:50.867539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:35:50.867588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:2529:4277] TestWaitNotification: OK eventTxId 106 2025-12-23T18:35:50.868325Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-12-23T18:35:50.868624Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2025-12-23T18:35:50.869551Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:35:50.869828Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 296us result status StatusSuccess 2025-12-23T18:35:50.870320Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 |92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout |92.7%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] 
{BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> BSCStopPDisk::PDiskStop >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [GOOD] >> DataShardSnapshots::BrokenLockChangesDontLeak >> TestKinesisHttpProxy::DoubleCreateStream >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> TestKinesisHttpProxy::TestRequestWithIAM >> BSCStopPDisk::PDiskStop [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCStopPDisk::PDiskStop [GOOD] Test command err: RandomSeed# 14361167085002438973 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:232:2060] recipient: [1:226:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:232:2060] recipient: [1:226:2145] Leader for TabletID 72057594046678944 is [1:243:2156] sender: [1:244:2060] recipient: [1:226:2145] 2025-12-23T18:33:53.779786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:53.779901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:53.779947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:53.779987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:53.780024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:53.780051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:53.780116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:53.780258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:53.781072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:53.781367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:53.903451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:33:53.903529Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:53.923335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:53.923599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:53.923780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:53.929751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:53.930022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:53.930733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-12-23T18:33:53.931035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:53.933073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:53.933289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:53.934558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:53.934619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:53.934806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:53.934858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:53.934901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:53.935036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:54.141405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.142437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.142565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.142638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.142707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.142798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.142875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.143004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.143099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.143185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.143281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.143346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.143441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.143530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:54.143612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:327:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... rd_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:54.007360Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:54.007451Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [7:242:2155], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:54.007480Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:54.426359Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:54.426426Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:54.426502Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [7:242:2155], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:54.426540Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:54.855540Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:54.855608Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:54.855670Z node 
7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [7:242:2155], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:54.855701Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:55.268083Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:55.268160Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:55.268255Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [7:242:2155], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:55.268284Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:55.687278Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:55.687348Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:55.687413Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [7:242:2155], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:55.687442Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:56.083185Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:56.083258Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:56.083321Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [7:242:2155], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:56.083350Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:56.492670Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:56.492741Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:56.492805Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [7:242:2155], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:56.492831Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, 
processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:56.959036Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:56.959125Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:35:56.959226Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [7:242:2155], Recipient [7:242:2155]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:56.959266Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:35:57.070849Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [7:1489:3239], Recipient [7:242:2155]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-12-23T18:35:57.070941Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:35:57.071074Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:35:57.071303Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 221us result status StatusPathDoesNotExist 2025-12-23T18:35:57.071473Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:35:57.076102Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [7:1490:3240], Recipient [7:242:2155]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-12-23T18:35:57.076187Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:35:57.076316Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false 
ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:35:57.076554Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 221us result status StatusPathDoesNotExist 2025-12-23T18:35:57.076735Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:35:57.077258Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [7:1491:3241], Recipient [7:242:2155]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 2025-12-23T18:35:57.077312Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:35:57.077411Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:35:57.077578Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 165us result status StatusPathDoesNotExist 2025-12-23T18:35:57.077751Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs >> TestYmqHttpProxy::TestReceiveMessage [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41
|92.7%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log}
|92.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log}
>> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD]
>> TestYmqHttpProxy::TestReceiveMessageWithAttributes
>> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD]
>> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD]
>> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD]
>> TestYmqHttpProxy::TestGetQueueUrlWithIAM
|92.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut
|92.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log}
|92.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut
|92.7%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut
>> BsControllerConfig::Basic
>> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11
>> BsControllerConfig::PDiskCreate
>> TestYmqHttpProxy::TestSendMessageWithAttributes
>> BsControllerConfig::ExtendByCreatingSeparateBox
>> BsControllerConfig::ManyPDisksRestarts
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145]
Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145]
2025-12-23T18:35:56.564364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-12-23T18:35:56.564465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-12-23T18:35:56.564504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-12-23T18:35:56.564539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration
2025-12-23T18:35:56.564585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-12-23T18:35:56.564619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-12-23T18:35:56.564672Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:35:56.564751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:35:56.565568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:35:56.565838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:35:56.711540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:35:56.711621Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:56.717654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:35:56.717913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:35:56.718045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:35:56.721024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:35:56.721247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:35:56.721898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:35:56.722094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:35:56.724028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:56.724186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:35:56.725323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:35:56.725374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:35:56.725500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:35:56.725540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:35:56.725581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:35:56.725715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:35:56.906126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.907148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.907300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.907375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.907438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.907495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.907594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.907675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.907767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.907851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.907918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.908029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.908166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.908245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:35:56.908316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
oup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:58.518505Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:933:2855] connected; active server actors: 1 2025-12-23T18:35:58.550365Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:35:58.550642Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 305us result status StatusSuccess 2025-12-23T18:35:58.551502Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:35:59.491043Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-12-23T18:35:59.491216Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 1 2025-12-23T18:35:59.492513Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 2 DataSize: 16975298 UsedReserveSize: 0 2025-12-23T18:35:59.492672Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-12-23T18:35:59.493101Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2025-12-23T18:35:59.493234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2025-12-23T18:35:59.511716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:36:00.008895Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-12-23T18:36:00.008991Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-12-23T18:36:00.009949Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2025-12-23T18:36:00.010011Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2025-12-23T18:36:00.010270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2025-12-23T18:36:00.039010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:36:00.562812Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-12-23T18:36:00.562908Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-12-23T18:36:00.563956Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2025-12-23T18:36:00.564022Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-12-23T18:36:00.564204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2025-12-23T18:36:00.590867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:36:00.638813Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:36:00.639191Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 508us result status StatusSuccess 2025-12-23T18:36:00.639908Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:36:00.641085Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:1007:2920] connected; active server actors: 1 2025-12-23T18:36:00.674135Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186233409547][Topic1] BALANCER INIT DONE for Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2025-12-23T18:36:00.675866Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-12-23T18:36:00.678571Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2025-12-23T18:36:00.706838Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:1051:2954] connected; active server actors: 1 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> BsControllerConfig::AddDriveSerial >> BsControllerConfig::SelectAllGroups >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 |92.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |92.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:33:34.036165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:33:34.156441Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:33:34.168087Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:33:34.168521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:33:34.168612Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:34.503903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:34.504089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:34.630321Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514808740090 != 1766514808740094 2025-12-23T18:33:34.655988Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:34.708345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:34.825624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005cc5/r3tmp/tmpneCTGy/pdisk_1.dat 2025-12-23T18:33:35.191710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:33:35.215787Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:35.335040Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:33:35.335127Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:33:35.335272Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:33:35.465737Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:33:35.465841Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:33:35.466593Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:33:35.466700Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:33:35.467173Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:33:35.467463Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:33:35.467643Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:33:35.467969Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:33:35.470094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:35.471380Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:33:35.471471Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:33:35.512295Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:33:35.513499Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:33:35.513848Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:33:35.514213Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:33:35.629466Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:33:35.630342Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:33:35.630508Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:33:35.640738Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:33:35.640901Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:33:35.640973Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:33:35.641450Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:33:35.641633Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:33:35.641752Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:33:35.655382Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:33:35.776492Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:33:35.776745Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:33:35.776883Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:33:35.776944Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:33:35.777009Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:33:35.777052Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:33:35.777377Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:33:35.777436Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:33:35.777832Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:33:35.777953Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:33:35.778522Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:33:35.778577Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:33:35.778676Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:33:35.778731Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:33:35.778796Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:33:35.778832Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:33:35.778904Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:33:35.779009Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:33:35.779054Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:33:35.779100Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:33:35.779240Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:33:35.779283Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:33:35.779420Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:33:35.779735Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:33:35.779815Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2 ... 12-23T18:35:59.306712Z node 13 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:1695: ActorId: [13:1186:2876] TxId: 281474976715665. Ctx: { TraceId: 01kd67y8qv7j7y2s36ktxggpme, Database: , SessionId: ydb://session/3?node_id=13&id=N2MwMDEzZmYtYTRlMmM3YmEtZDAwMjNlNzktOTQzNWEyYmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ExecuteDatashardTransaction traceId.verbosity: 0 2025-12-23T18:35:59.306908Z node 13 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [13:1186:2876] TxId: 281474976715665. Ctx: { TraceId: 01kd67y8qv7j7y2s36ktxggpme, Database: , SessionId: ydb://session/3?node_id=13&id=N2MwMDEzZmYtYTRlMmM3YmEtZDAwMjNlNzktOTQzNWEyYmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: 1, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:35:59.306977Z node 13 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:143: ActorId: [13:1186:2876] TxId: 281474976715665. Ctx: { TraceId: 01kd67y8qv7j7y2s36ktxggpme, Database: , SessionId: ydb://session/3?node_id=13&id=N2MwMDEzZmYtYTRlMmM3YmEtZDAwMjNlNzktOTQzNWEyYmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-12-23T18:35:59.307024Z node 13 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [13:1186:2876] TxId: 281474976715665. Ctx: { TraceId: 01kd67y8qv7j7y2s36ktxggpme, Database: , SessionId: ydb://session/3?node_id=13&id=N2MwMDEzZmYtYTRlMmM3YmEtZDAwMjNlNzktOTQzNWEyYmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-12-23T18:35:59.307067Z node 13 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:2227: ActorId: [13:1186:2876] TxId: 281474976715665. Ctx: { TraceId: 01kd67y8qv7j7y2s36ktxggpme, Database: , SessionId: ydb://session/3?node_id=13&id=N2MwMDEzZmYtYTRlMmM3YmEtZDAwMjNlNzktOTQzNWEyYmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-12-23T18:35:59.307337Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [13:1186:2876], Recipient [13:1155:2960]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1186 RawX2: 55834577724 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010&8\001 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-12-23T18:35:59.307381Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:35:59.307516Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [13:1155:2960], Recipient [13:1155:2960]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:35:59.307552Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:35:59.307626Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:35:59.307810Z node 13 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2025-12-23T18:35:59.307924Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-12-23T18:35:59.307977Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-12-23T18:35:59.308009Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-12-23T18:35:59.308041Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:35:59.308071Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:35:59.308117Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-12-23T18:35:59.308170Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715665] at 72075186224037888 2025-12-23T18:35:59.308205Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-12-23T18:35:59.308232Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:35:59.308257Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit BlockFailPoint 2025-12-23T18:35:59.308288Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit BlockFailPoint 2025-12-23T18:35:59.308316Z node 13 :TX_DATASHARD 
TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-12-23T18:35:59.308345Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BlockFailPoint 2025-12-23T18:35:59.308370Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-12-23T18:35:59.308396Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2025-12-23T18:35:59.308478Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:269: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2025-12-23T18:35:59.308623Z node 13 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true 2025-12-23T18:35:59.308741Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:521: add locks to result: 0 2025-12-23T18:35:59.308834Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-12-23T18:35:59.308867Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-12-23T18:35:59.308894Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:35:59.308922Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-12-23T18:35:59.308996Z node 13 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-12-23T18:35:59.309128Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-12-23T18:35:59.309163Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:35:59.309190Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:35:59.309223Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:35:59.309276Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-12-23T18:35:59.309303Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:35:59.309333Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-12-23T18:35:59.309404Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:35:59.309440Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for 
[0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-12-23T18:35:59.309480Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:35:59.309668Z node 13 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:1351: ActorId: [13:1186:2876] TxId: 281474976715665. Ctx: { TraceId: 01kd67y8qv7j7y2s36ktxggpme, Database: , SessionId: ydb://session/3?node_id=13&id=N2MwMDEzZmYtYTRlMmM3YmEtZDAwMjNlNzktOTQzNWEyYmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-12-23T18:35:59.309842Z node 13 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [13:1186:2876] TxId: 281474976715665. Ctx: { TraceId: 01kd67y8qv7j7y2s36ktxggpme, Database: , SessionId: ydb://session/3?node_id=13&id=N2MwMDEzZmYtYTRlMmM3YmEtZDAwMjNlNzktOTQzNWEyYmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:35:59.309957Z node 13 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [13:1186:2876] TxId: 281474976715665. Ctx: { TraceId: 01kd67y8qv7j7y2s36ktxggpme, Database: , SessionId: ydb://session/3?node_id=13&id=N2MwMDEzZmYtYTRlMmM3YmEtZDAwMjNlNzktOTQzNWEyYmI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-12-23T18:35:59.310119Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=13&id=N2MwMDEzZmYtYTRlMmM3YmEtZDAwMjNlNzktOTQzNWEyYmI=, ActorId: [13:1042:2876], ActorState: CleanupState, TraceId: 01kd67y8qv7j7y2s36ktxggpme, EndCleanup, isFinal: 0 2025-12-23T18:35:59.310347Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=13&id=N2MwMDEzZmYtYTRlMmM3YmEtZDAwMjNlNzktOTQzNWEyYmI=, ActorId: [13:1042:2876], ActorState: CleanupState, TraceId: 01kd67y8qv7j7y2s36ktxggpme, Sent query response back to proxy, proxyRequestId: 8, proxyId: [13:65:2112] 2025-12-23T18:35:59.607821Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [13:1195:2986], Recipient [13:1155:2960]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:35:59.607975Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:35:59.608078Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [13:1194:2985], serverId# [13:1195:2986], sessionId# [0:0:0] 2025-12-23T18:35:59.608363Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553224, Sender [13:827:2722], Recipient [13:1155:2960]: NKikimr::TEvDataShard::TEvGetOpenTxs |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries >> BsControllerConfig::SelectAllGroups [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 [GOOD] >> BsControllerConfig::SoleCommandErrorWhenCombined >> BsControllerConfig::PDiskCreate [GOOD] >> BsControllerConfig::SoleCommandRollback >> BsControllerConfig::Basic [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36
>> BsControllerConfig::ReassignGroupDisk
>> BsControllerConfig::SoleCommandErrorWhenCombined [GOOD]
>> BsControllerConfig::CommandRollbackWhenAlone
>> TestKinesisHttpProxy::DoubleCreateStream [GOOD]
|92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut
|92.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries
|92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut
|92.8%| [LD] {RESULT} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36
>> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD]
>> BsControllerConfig::SoleCommandRollback [GOOD]
>> BsControllerConfig::UnsupportedCommandError
>> BsControllerConfig::CommandRollbackWhenAlone [GOOD]
>> BsControllerConfig::AddDriveSerial [GOOD]
>> BsControllerConfig::AddDriveSerialMassive
>> BsControllerConfig::MergeIntersectingBoxes
>> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD]
>> PrivateApi::PingTask [GOOD]
>> TestKinesisHttpProxy::GoodRequestGetRecords
>> BsControllerConfig::UnsupportedCommandError [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [FAIL]
>> PrivateApi::GetTask
>> TestKinesisHttpProxy::TestRequestWithIAM [GOOD]
>> TestYmqHttpProxy::TestCreateQueueWithAllAttributes
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 [GOOD]
>> TestKinesisHttpProxy::TestRequestNoAuthorization
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42
>> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SoleCommandErrorWhenCombined [GOOD]
Test command err:
2025-12-23T18:36:02.917376Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot
2025-12-23T18:36:02.923320Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored
2025-12-23T18:36:02.923799Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started
2025-12-23T18:36:02.925844Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-12-23T18:36:02.926319Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig
2025-12-23T18:36:02.926574Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo
2025-12-23T18:36:02.926611Z node 1 :BS_CONTROLLER DEBUG:
{BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:02.934996Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:02.972910Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:02.973101Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:02.973301Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:02.973450Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:02.973549Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:02.973612Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:03.236778Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2212} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.194541s 2025-12-23T18:36:03.236925Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.194722s Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:26:2057] recipient: [11:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:26:2057] recipient: [11:24:2068] Leader for TabletID 72057594037932033 is [11:28:2070] sender: [11:29:2057] recipient: [11:24:2068] 2025-12-23T18:36:04.786956Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:04.787968Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:04.788252Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:04.788695Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:04.789884Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:04.795418Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:04.795470Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:04.795658Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:04.810335Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:04.810475Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:04.810603Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:04.810718Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:04.810849Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:04.810917Z node 11 
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:28:2070] sender: [11:49:2057] recipient: [11:11:2058] 2025-12-23T18:36:04.822881Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:04.823029Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:04.848147Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:04.848295Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:04.848380Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:04.848448Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:04.848567Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:04.848630Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:04.848667Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:04.848712Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:04.861568Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:04.861702Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:04.873526Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:04.873670Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:04.875007Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:04.875063Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:04.875281Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:04.875322Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:04.876009Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { 
DefineHostConfig { } } Command { EnableSelfHeal { } } Command { QueryBaseConfig { } } } Status { Success: true } Status { ErrorDescription: "command must be sole" } ErrorDescription: "command must be sole" Leader for TabletID 72057594037932033 is [0:0:0] sender: [12:26:2057] recipient: [12:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [12:26:2057] recipient: [12:23:2068] Leader for TabletID 72057594037932033 is [12:28:2070] sender: [12:29:2057] recipient: [12:23:2068] 2025-12-23T18:36:05.186213Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:05.195442Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:05.195780Z node 12 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:05.197095Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:05.197609Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:05.197825Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:05.197856Z node 12 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:05.198036Z node 12 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:05.215962Z node 12 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:05.216124Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:05.216245Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:05.216359Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:05.216457Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:05.216527Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [12:28:2070] sender: [12:49:2057] recipient: [12:11:2058] 2025-12-23T18:36:05.232596Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:05.232787Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:05.259287Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:05.259425Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:05.259523Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:05.259613Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:05.259729Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:05.259774Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:05.259813Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:05.259865Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:05.270582Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:05.270761Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:05.287273Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:05.287401Z node 12 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:05.292469Z node 12 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:05.292553Z node 12 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:05.292762Z node 12 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:05.292811Z node 12 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:05.293570Z node 12 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { } } Command { EnableSelfHeal { } } Command { QueryBaseConfig { } } } Status { Success: true } Status { ErrorDescription: "command must be sole" } ErrorDescription: "command must be sole" |92.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::UnsupportedCommandError [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:26:2057] recipient: [1:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:26:2057] recipient: [1:23:2068] Leader for TabletID 72057594037932033 is [1:28:2070] sender: [1:29:2057] recipient: [1:23:2068] 2025-12-23T18:36:05.599876Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:05.601472Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:05.601908Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:05.605473Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:05.607674Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:05.607918Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:05.607964Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:05.608163Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:05.628183Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:05.628389Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:05.628594Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:05.628720Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:05.628892Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:05.629016Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:28:2070] sender: [1:49:2057] recipient: [1:11:2058] 2025-12-23T18:36:05.640661Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:05.640904Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:05.662328Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:05.662478Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:05.662551Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:05.662625Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:05.662775Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:05.662887Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:05.662925Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:05.662973Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:05.673903Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:05.674060Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:05.684916Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:05.685083Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:05.686438Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:05.686484Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:05.686687Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:05.686803Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:05.708467Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { UpdateSettings { PDiskSpaceColorBorder: CYAN } } } Status { Success: true } Success: true 2025-12-23T18:36:05.720972Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { UpdateSettings { PDiskSpaceColorBorder: YELLOW } } Rollback: true } Status { Success: true } ErrorDescription: "transaction rollback" RollbackSuccess: true 2025-12-23T18:36:05.744447Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } Status { Success: true BaseConfig { Node { NodeId: 1 PhysicalLocation: "\n\001\000\000\000\024\001\000\000\000\036\001\000\000\000(\001\000\000\000" HostKey { Fqdn: "::1" IcPort: 12001 } Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Settings { DefaultMaxSlots: 16 EnableSelfHeal: true EnableDonorMode: true ScrubPeriodicitySeconds: 2592000 PDiskSpaceMarginPromille: 150 GroupReserveMin: 0 GroupReservePartPPM: 0 MaxScrubbedDisksAtOnce: 4294967295 PDiskSpaceColorBorder: CYAN EnableGroupLayoutSanitizer: false AllowMultipleRealmsOccupation: true UseSelfHealLocalPolicy: false 
TryToRelocateBrokenDisksLocallyFirst: false } } } Success: true ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [2:26:2057] recipient: [2:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [2:26:2057] recipient: [2:23:2068] Leader for TabletID 72057594037932033 is [2:28:2070] sender: [2:29:2057] recipient: [2:23:2068] 2025-12-23T18:36:05.863872Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:05.864745Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:05.865024Z node 2 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:05.866153Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:05.866560Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:05.875188Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:05.875252Z node 2 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:05.875459Z node 2 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:05.895388Z node 2 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:05.895545Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:05.895685Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:05.895803Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:05.895904Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:05.895968Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [2:28:2070] sender: [2:49:2057] recipient: [2:11:2058] 2025-12-23T18:36:05.907450Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:05.907611Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:05.934442Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:05.934572Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:05.934657Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:05.934765Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:05.934928Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:05.935002Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:05.935051Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:05.935107Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:05.947541Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:05.947677Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:05.959332Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEna ... 16 EnableSelfHeal: true EnableDonorMode: true ScrubPeriodicitySeconds: 2592000 PDiskSpaceMarginPromille: 150 GroupReserveMin: 0 GroupReservePartPPM: 0 MaxScrubbedDisksAtOnce: 4294967295 PDiskSpaceColorBorder: CYAN EnableGroupLayoutSanitizer: false AllowMultipleRealmsOccupation: true UseSelfHealLocalPolicy: false TryToRelocateBrokenDisksLocallyFirst: false } } } Success: true ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [3:26:2057] recipient: [3:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [3:26:2057] recipient: [3:23:2068] Leader for TabletID 72057594037932033 is [3:28:2070] sender: [3:29:2057] recipient: [3:23:2068] 2025-12-23T18:36:06.190654Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:06.191481Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:06.191709Z node 3 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:06.192709Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:06.193076Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:06.193201Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:06.193232Z node 3 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:06.193384Z node 3 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:06.202319Z node 3 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:06.202445Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:06.202592Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx 
IncompatibleData# false 2025-12-23T18:36:06.202764Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:06.202881Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:06.202962Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [3:28:2070] sender: [3:49:2057] recipient: [3:11:2058] 2025-12-23T18:36:06.215282Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:06.215458Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:06.237420Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:06.237549Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:06.237623Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:06.237707Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:06.237846Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:06.237912Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:06.237955Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:06.238007Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:06.248857Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:06.248999Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:06.259870Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:06.259993Z node 3 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:06.261353Z node 3 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:06.261399Z node 3 :BS_CONTROLLER DEBUG: 
{BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:06.261605Z node 3 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:06.261653Z node 3 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:06.262334Z node 3 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { } } Status { ErrorDescription: "unsupported command 0" } ErrorDescription: "unsupported command 0" Leader for TabletID 72057594037932033 is [0:0:0] sender: [4:26:2057] recipient: [4:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [4:26:2057] recipient: [4:24:2068] Leader for TabletID 72057594037932033 is [4:28:2070] sender: [4:29:2057] recipient: [4:24:2068] 2025-12-23T18:36:06.498180Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:06.499989Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:06.500269Z node 4 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:06.501540Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:06.502046Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:06.502159Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:06.502185Z node 4 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:06.502361Z node 4 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:06.512380Z node 4 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:06.512562Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:06.512707Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:06.512820Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:06.512934Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:06.513036Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [4:28:2070] sender: [4:49:2057] recipient: [4:11:2058] 2025-12-23T18:36:06.525845Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:06.526005Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:06.549735Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:06.549889Z node 4 :BS_CONTROLLER 
DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:06.549999Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:06.550163Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:06.550292Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:06.550349Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:06.550403Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:06.550476Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:06.561352Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:06.561519Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:06.574712Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:06.574878Z node 4 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:06.576403Z node 4 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:06.576469Z node 4 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:06.576657Z node 4 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:06.576696Z node 4 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:06.577280Z node 4 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { } } Status { ErrorDescription: "unsupported command 0" } ErrorDescription: "unsupported command 0" |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::CommandRollbackWhenAlone [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:211:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:211:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:211:2077] 2025-12-23T18:36:01.812154Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:01.813408Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# 
NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:01.813799Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:01.815679Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:01.816122Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:01.816360Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:01.816396Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:01.816614Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:01.832493Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:01.832661Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:01.832876Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:01.833050Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:01.833148Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:01.833220Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:247:2066] recipient: [1:20:2067] 2025-12-23T18:36:01.847408Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:01.847553Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:01.875133Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:01.875298Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:01.875381Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:01.875457Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:01.875568Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:01.875635Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:01.875679Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:01.875746Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:01.887300Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:01.887438Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:01.899362Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:01.899531Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:01.901008Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:01.901076Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:01.901288Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:01.901353Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:01.918580Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:201:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:201:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:201:2077] 2025-12-23T18:36:03.602256Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:03.619414Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:03.619746Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:03.636049Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:03.636518Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:03.636649Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:03.636690Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:03.636898Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:03.652045Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:03.652186Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:03.652310Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:03.652427Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:03.652553Z node 11 
:BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:03.652633Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:247:2066] recipient: [11:20:2067] 2025-12-23T18:36:03.666460Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:03.666656Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:03.695307Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:03.695458Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:03.695543Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:03.695631Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:03.695761Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:03.695861Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:03.695918Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:03.695968Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:03.707249Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:03.707390Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:03.719342Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:03.719469Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:03.720810Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:03.720862Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:03.721087Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:03.721136Z node 11 :BS_CONTROLLER DEBUG: 
{BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:03.721762Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:26:2057] recipient: [21:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:26:2057] recipient: [21:24:2068] Leader for TabletID 72057594037932033 is [21:28:2070] sender: [21:29:2057] recipient: [21:24:2068] 2025-12-23T18:36:05.706646Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:05.713782Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:05.714089Z node 21 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:05.715292Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:05.715815Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:05.715936Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:05.715964Z node 21 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:05.716135Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:05.738542Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:05.738693Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:05.742991Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:05.743239Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:05.743369Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:05.743485Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [21:28:2070] sender: [21:49:2057] recipient: [21:11:2058] 2025-12-23T18:36:05.755423Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:05.755608Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:05.779308Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:05.779438Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:05.779531Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:05.779613Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:05.779737Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:05.779796Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:05.779846Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:05.779914Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:05.791341Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:05.791487Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:05.802157Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:05.802267Z node 21 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:05.803592Z node 21 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:05.803644Z node 21 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:05.803818Z node 21 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:05.803856Z node 21 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:05.805702Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenAlone" } } Rollback: true } Status { Success: true } ErrorDescription: "transaction rollback" RollbackSuccess: true Leader for TabletID 72057594037932033 is [0:0:0] sender: [22:26:2057] recipient: [22:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [22:26:2057] recipient: [22:24:2068] Leader for TabletID 72057594037932033 is [22:28:2070] sender: [22:29:2057] recipient: [22:24:2068] 2025-12-23T18:36:06.027991Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:06.029006Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:06.029287Z node 22 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:06.029909Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:06.031152Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 
268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:06.031386Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:06.031434Z node 22 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:06.031629Z node 22 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:06.041302Z node 22 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:06.041454Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:06.041598Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:06.041732Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:06.041829Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:06.041894Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [22:28:2070] sender: [22:49:2057] recipient: [22:11:2058] 2025-12-23T18:36:06.053240Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:06.053421Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:06.074874Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:06.075006Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:06.075097Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:06.075387Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:06.075545Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:06.075630Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:06.075669Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:06.075731Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:06.086455Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:06.086603Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:06.097430Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:06.097563Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:06.098985Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:06.099042Z node 22 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:06.099239Z node 22 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:06.099295Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:06.100078Z node 22 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenAlone" } } Rollback: true } Status { Success: true } ErrorDescription: "transaction rollback" RollbackSuccess: true |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:33:33.785174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:33:34.052505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:33:34.065028Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:33:34.065483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:33:34.065571Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:34.424616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:34.424756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:34.516983Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514809200549 != 1766514809200553 2025-12-23T18:33:34.539394Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:34.596318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:34.719593Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005cc6/r3tmp/tmplkYlUs/pdisk_1.dat 2025-12-23T18:33:35.067518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:33:35.083066Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:35.204995Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:33:35.205089Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:33:35.205252Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:33:35.371756Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:33:35.371882Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:33:35.372623Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:33:35.372735Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:33:35.373150Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:33:35.373337Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:33:35.373489Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:33:35.373802Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:33:35.376921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:35.378301Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:33:35.378379Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:33:35.498457Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:33:35.499711Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:33:35.500086Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:33:35.500448Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:33:35.547295Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:33:35.548201Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:33:35.548344Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:33:35.550458Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:33:35.550574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:33:35.550641Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:33:35.551101Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:33:35.551229Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:33:35.551295Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:33:35.551667Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:33:35.666440Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:33:35.666689Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:33:35.671012Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:33:35.671101Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:33:35.671167Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:33:35.671215Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:33:35.671563Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:33:35.671628Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:33:35.671961Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:33:35.672077Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:33:35.672590Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:33:35.672663Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:33:35.672740Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:33:35.672795Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:33:35.672840Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:33:35.672877Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:33:35.672946Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:33:35.673093Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:33:35.673141Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:33:35.673188Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:33:35.673344Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:33:35.673390Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:33:35.673548Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:33:35.673837Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:33:35.673904Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2 ... : [14:1068:2877] 2025-12-23T18:36:04.731717Z node 14 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [14:1201:2877] TxId: 281474976715665. Ctx: { TraceId: 01kd67ye2e6xtc73encn8qn7xp, Database: , SessionId: ydb://session/3?node_id=14&id=NTFjODVjY2MtNzc0NDA0NTctYmMxOTU4NDUtMzBhOWFjMTg=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. UNAVAILABLE: {
: Error: Wrong shard state. Table `/Root/table`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } 2025-12-23T18:36:04.732363Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=14&id=NTFjODVjY2MtNzc0NDA0NTctYmMxOTU4NDUtMzBhOWFjMTg=, ActorId: [14:1047:2877], ActorState: ExecuteState, TraceId: 01kd67ye2e6xtc73encn8qn7xp, Create QueryResponse for error on request, msg: , status: UNAVAILABLE, issues: { message: "Wrong shard state. Table `/Root/table`." issue_code: 2005 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state)" issue_code: 2029 severity: 1 } } 2025-12-23T18:36:04.733574Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [14:1068:2877], Recipient [14:893:2772]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } Op: Rollback } 2025-12-23T18:36:04.733625Z node 14 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2025-12-23T18:36:04.733751Z node 14 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2025-12-23T18:36:04.733800Z node 14 :TX_DATASHARD NOTICE: datashard.cpp:3149: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) ... waiting for blocked lock status ... blocking NKikimr::NLongTxService::TEvLongTxService::TEvLockStatus from LONG_TX_SERVICE to TX_DATASHARD_ACTOR cookie 0 ... 
waiting for blocked lock status (done) 2025-12-23T18:36:04.736397Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 65543, Sender [14:828:2723], Recipient [14:893:2772]: NActors::TEvents::TEvPoison 2025-12-23T18:36:04.737251Z node 14 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 72075186224037888 2025-12-23T18:36:04.737357Z node 14 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-12-23T18:36:04.767977Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [14:1206:3007], Recipient [14:1208:3008]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:04.777488Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [14:1206:3007], Recipient [14:1208:3008]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:04.777756Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828684, Sender [14:1206:3007], Recipient [14:1208:3008]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:04.782978Z node 14 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [14:1208:3008] 2025-12-23T18:36:04.783435Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:36:04.791209Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:36:04.793255Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:36:04.797194Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:36:04.797350Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:36:04.797474Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:36:04.798214Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:36:04.798628Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:36:04.798735Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:36:04.798867Z node 14 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state PreOffline tabletId 72075186224037888 2025-12-23T18:36:04.799080Z node 14 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 1 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-12-23T18:36:04.799176Z node 14 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:36:04.799382Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [14:1222:3015] 2025-12-23T18:36:04.799454Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:36:04.799537Z node 14 :TX_DATASHARD INFO: datashard.cpp:1292: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2025-12-23T18:36:04.799618Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2025-12-23T18:36:04.800049Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [14:1208:3008], Recipient [14:1208:3008]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:36:04.800115Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:36:04.800669Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435075, Sender [14:1208:3008], Recipient [14:1208:3008]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressResendReadSet 2025-12-23T18:36:04.800723Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3181: StateWork, processing event TEvPrivate::TEvProgressResendReadSet 2025-12-23T18:36:04.801273Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [14:26:2073], Recipient [14:1208:3008]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 700} 2025-12-23T18:36:04.801331Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-12-23T18:36:04.801417Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 700 2025-12-23T18:36:04.801500Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:36:04.802426Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [14:69:2116], Recipient [14:1208:3008]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 14 Status: STATUS_NOT_FOUND 2025-12-23T18:36:04.802952Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:36:04.803042Z node 14 :TX_DATASHARD INFO: datashard__progress_tx.cpp:21: Progress tx at non-ready tablet 72075186224037888 state 5 2025-12-23T18:36:04.803128Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:36:04.803678Z node 14 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2025-12-23T18:36:04.803778Z node 14 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-12-23T18:36:04.803867Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-12-23T18:36:04.805024Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [14:1208:3008], Recipient [14:1109:2921]: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-12-23T18:36:04.805090Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-12-23T18:36:04.805182Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-12-23T18:36:04.805344Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 
TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-12-23T18:36:04.805448Z node 14 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 600:281474976715663 at 72075186224037889 2025-12-23T18:36:04.805545Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-12-23T18:36:04.805659Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037889 {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-12-23T18:36:04.805862Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [14:26:2073], Recipient [14:1208:3008]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 500 NextReadStep# 700 ReadStep# 700 } 2025-12-23T18:36:04.805923Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-12-23T18:36:04.806110Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 500 next step 700 2025-12-23T18:36:04.806370Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [14:1109:2921], Recipient [14:1208:3008]: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-12-23T18:36:04.806422Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:36:04.806509Z node 14 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-12-23T18:36:04.806635Z node 14 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-12-23T18:36:04.980001Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD] >> TestYmqHttpProxy::TestGetQueueAttributes >> BsControllerConfig::ReassignGroupDisk [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] >> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 >> BsControllerConfig::AddDriveSerialMassive [GOOD] >> TestKinesisHttpProxy::ListShards >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:211:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:211:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:211:2077] 2025-12-23T18:36:01.784561Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:01.791229Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:01.791669Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:01.800218Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:01.800700Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:01.800965Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:01.801009Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:01.801234Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:01.820575Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:01.820746Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:01.820926Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:01.821044Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:01.821133Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:01.821191Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:248:2066] recipient: [1:20:2067] 2025-12-23T18:36:01.833108Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:01.833229Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:01.867327Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:01.867486Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:01.867562Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} 
Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:01.867634Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:01.867742Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:01.867842Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:01.867896Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:01.867953Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:01.881194Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:01.881337Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:01.892187Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:01.892349Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:01.893755Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:01.893803Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:01.894027Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:01.894087Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:01.912223Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2025-12-23T18:36:01.912933Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-12-23T18:36:01.913002Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 
2025-12-23T18:36:01.913058Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-12-23T18:36:01.913094Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-12-23T18:36:01.913127Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-12-23T18:36:01.913164Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-12-23T18:36:01.913189Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-12-23T18:36:01.913216Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-12-23T18:36:01.913255Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-12-23T18:36:01.913279Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-12-23T18:36:01.913301Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-12-23T18:36:01.913323Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-12-23T18:36:01.913345Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-12-23T18:36:01.913384Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-12-23T18:36:01.913421Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-12-23T18:36:01.913450Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-12-23T18:36:01.913473Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-12-23T18:36:01.913505Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-12-23T18:36:01.913530Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-12-23T18:36:01.913554Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-12-23T18:36:01.913578Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-12-23T18:36:01.913614Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-12-23T18:36:01.913639Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-12-23T18:36:01.913660Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-12-23T18:36:01.913700Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-12-23T18:36:01.913742Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-12-23T18:36:01.913771Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} 
Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-12-23T18:36:01.913794Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-12-23T18:36:01.913817Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-12-23T18:36:01.913844Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:201:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:201:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:201:2077] 2025-12-23T18:36:03.851755Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:03.852772Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:03.853049Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:03.854489Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:03.854984Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:03.855114Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNo ... Port: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-12-23T18:36:05.762929Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2025-12-23T18:36:05.762971Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2025-12-23T18:36:05.762994Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2025-12-23T18:36:05.763017Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 24:1000 Path# /dev/disk 2025-12-23T18:36:05.763042Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 25:1000 Path# /dev/disk 2025-12-23T18:36:05.763069Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 26:1000 Path# /dev/disk 2025-12-23T18:36:05.763091Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 27:1000 Path# /dev/disk 2025-12-23T18:36:05.763129Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 28:1000 Path# /dev/disk 2025-12-23T18:36:05.763161Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} 
Create new pdisk PDiskId# 29:1000 Path# /dev/disk 2025-12-23T18:36:05.763197Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 30:1000 Path# /dev/disk 2025-12-23T18:36:05.763221Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 31:1000 Path# /dev/disk 2025-12-23T18:36:05.763242Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 32:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-12-23T18:36:05.801438Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [33:290:2068] recipient: [33:267:2079] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [33:290:2068] recipient: [33:267:2079] Leader for TabletID 72057594037932033 is [33:292:2081] sender: [33:293:2068] recipient: [33:267:2079] 2025-12-23T18:36:07.724842Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:07.725861Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:07.726117Z node 33 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:07.730718Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:07.731204Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:07.731558Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:07.731583Z node 33 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:07.731827Z node 33 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:07.742899Z node 33 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:07.743077Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:07.743203Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:07.743366Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:07.743451Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:07.743511Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [33:292:2081] sender: [33:314:2068] recipient: [33:22:2069] 2025-12-23T18:36:07.757907Z node 33 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:07.758098Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:07.785382Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:07.785594Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:07.785688Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:07.785771Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:07.785913Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:07.785987Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:07.786031Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:07.786079Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:07.796813Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:07.796959Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:07.811741Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:07.811883Z node 33 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:07.813185Z node 33 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:07.813229Z node 33 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:07.813456Z node 33 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:07.813542Z node 33 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:07.814437Z node 33 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" 
IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-12-23T18:36:07.815871Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 33:1000 Path# /dev/disk 2025-12-23T18:36:07.815921Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 34:1000 Path# /dev/disk 2025-12-23T18:36:07.815943Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 35:1000 Path# /dev/disk 2025-12-23T18:36:07.815994Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 36:1000 Path# /dev/disk 2025-12-23T18:36:07.816041Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 37:1000 Path# /dev/disk 2025-12-23T18:36:07.816069Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 38:1000 Path# /dev/disk 2025-12-23T18:36:07.816091Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 39:1000 Path# /dev/disk 2025-12-23T18:36:07.816111Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 40:1000 Path# /dev/disk 2025-12-23T18:36:07.816132Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 41:1000 Path# /dev/disk 2025-12-23T18:36:07.816161Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 42:1000 Path# /dev/disk 2025-12-23T18:36:07.816186Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 43:1000 Path# /dev/disk 2025-12-23T18:36:07.816207Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 44:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-12-23T18:36:07.842336Z node 33 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" ConfigTxSeqNo: 1 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> TestYmqHttpProxy::TestSetQueueAttributes >> BsControllerConfig::DeleteStoragePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:211:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:211:2077] 
Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:211:2077] 2025-12-23T18:36:02.658474Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:02.659795Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:02.660177Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:02.662195Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:02.662623Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:02.662890Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:02.662931Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:02.663148Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:02.673024Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:02.673160Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:02.673318Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:02.673425Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:02.673511Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:02.673565Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:247:2066] recipient: [1:20:2067] 2025-12-23T18:36:02.685922Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:02.686070Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:02.711358Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:02.711519Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:02.711618Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:02.711695Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:02.711816Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:02.711913Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:02.711955Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:02.712044Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:02.723025Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:02.723182Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:02.735377Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:02.735545Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:02.737078Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:02.737127Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:02.737353Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:02.737411Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:02.760332Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-12-23T18:36:02.764172Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-12-23T18:36:02.764913Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:201:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:201:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:201:2077] 2025-12-23T18:36:04.347652Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:04.348575Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:04.348797Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:04.350291Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:04.350771Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:04.350898Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 
131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:04.350923Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:04.351200Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:04.361810Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:04.361956Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:04.362101Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:04.362229Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:04.362344Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:04.362412Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:247:2066] recipient: [11:20:2067] 2025-12-23T18:36:04.373853Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:04.373998Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:04.403295Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:04.403445Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:04.403556Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:04.403662Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:04.403787Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:04.403867Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:04.403915Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:04.403978Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:04.415333Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:04.415475Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx 
from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:04.426468Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:04.426610Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:04.428124Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:04.428195Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:04.428415Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:04.428471Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:04.429175Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-12-23T18:36:04.430285Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# ... ommand { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-12-23T18:36:06.572965Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-12-23T18:36:06.573727Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-12-23T18:36:06.574318Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-12-23T18:36:06.575004Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-12-23T18:36:06.575756Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-12-23T18:36:06.576553Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-12-23T18:36:06.577256Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-12-23T18:36:06.577950Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-12-23T18:36:06.578733Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-12-23T18:36:06.579515Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-12-23T18:36:06.580246Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-12-23T18:36:06.581015Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-12-23T18:36:06.581697Z node 21 
:BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-12-23T18:36:06.582403Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:224:2066] recipient: [31:204:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:224:2066] recipient: [31:204:2077] Leader for TabletID 72057594037932033 is [31:226:2079] sender: [31:227:2066] recipient: [31:204:2077] 2025-12-23T18:36:08.480188Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:08.481251Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:08.481525Z node 31 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:08.482861Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:08.483365Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:08.483622Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:08.483650Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:08.483843Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:08.492387Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:08.492521Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:08.492607Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:08.492702Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:08.492778Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:08.492875Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:226:2079] sender: [31:247:2066] recipient: [31:20:2067] 2025-12-23T18:36:08.504170Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:08.504334Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:08.530633Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:08.530802Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 
2025-12-23T18:36:08.530877Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:08.531069Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:08.531269Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:08.531342Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:08.531383Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:08.531437Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:08.542174Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:08.542315Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:08.558786Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:08.558917Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:08.560269Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:08.560318Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:08.560538Z node 31 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:08.560602Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:08.561269Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2025-12-23T18:36:08.562255Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2025-12-23T18:36:08.567023Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2025-12-23T18:36:08.568048Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2025-12-23T18:36:08.568702Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2025-12-23T18:36:08.569398Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-12-23T18:36:08.570085Z node 31 
:BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-12-23T18:36:08.570776Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-12-23T18:36:08.571536Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-12-23T18:36:08.572160Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-12-23T18:36:08.573042Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-12-23T18:36:08.573995Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-12-23T18:36:08.574694Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-12-23T18:36:08.575605Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-12-23T18:36:08.576350Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-12-23T18:36:08.577130Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-12-23T18:36:08.577930Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-12-23T18:36:08.578735Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-12-23T18:36:08.579471Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-12-23T18:36:08.580281Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:33:34.819780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:33:34.953966Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:33:34.967925Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:33:34.968294Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:33:34.968371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:35.324221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:35.324384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:35.555965Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514810617014 != 1766514810617018 2025-12-23T18:33:35.610287Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:35.712163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:35.849945Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005cbd/r3tmp/tmpJabWJx/pdisk_1.dat 2025-12-23T18:33:36.335426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:33:36.355631Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:36.481121Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:33:36.481193Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:33:36.481319Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:33:36.715906Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:33:36.716029Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:33:36.716689Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:33:36.716808Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:33:36.717121Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:33:36.717261Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:33:36.717459Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:33:36.717760Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:33:36.723872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:36.725265Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:33:36.725354Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:33:36.784755Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:33:36.785907Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:33:36.786253Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:33:36.786502Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:33:36.912734Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:33:36.913559Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:33:36.913684Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:33:36.916062Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:33:36.916166Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:33:36.916231Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:33:36.916599Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:33:36.916809Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:33:36.916899Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:33:36.917410Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:33:37.049853Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:33:37.050070Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:33:37.050209Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:33:37.050246Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:33:37.050285Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:33:37.050318Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:33:37.050584Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:33:37.050637Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:33:37.063164Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:33:37.063353Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:33:37.063834Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:33:37.063884Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:33:37.063923Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:33:37.063963Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:33:37.064016Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:33:37.064058Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:33:37.064118Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:33:37.064233Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:33:37.064273Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:33:37.064331Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:33:37.064433Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:33:37.064484Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:33:37.064599Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:33:37.064880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:33:37.064934Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2 ... [0:5] at 72075186224037888 executing on unit FinishProposeWrite 2025-12-23T18:36:07.398685Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:36:07.398758Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:36:07.398824Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T18:36:07.398878Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:36:07.398927Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2025-12-23T18:36:07.399033Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-12-23T18:36:07.399126Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:36:07.399208Z node 16 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_LOCKS_BROKEN 2025-12-23T18:36:07.399389Z node 16 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-12-23T18:36:07.399509Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:36:07.399828Z node 16 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [16:1109:2873], Table: `/Root/table` ([72057594046644480:38:1]), SessionActorId: [16:1044:2873]Got LOCKS BROKEN for table `/Root/table`. ShardID=72075186224037888, Sink=[16:1109:2873].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2025-12-23T18:36:07.400046Z node 16 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [16:1102:2873], SessionActorId: [16:1044:2873], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[16:1044:2873]. 2025-12-23T18:36:07.400518Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=16&id=NTA3NTlmNTYtYzc1NzA5ODktOWVkMWVkMjQtMTk0NWYzNzY=, ActorId: [16:1044:2873], ActorState: ExecuteState, TraceId: 01kd67yg9s9r5w8n8ahd2726rv, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [16:1103:2873] from: [16:1102:2873] 2025-12-23T18:36:07.400741Z node 16 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [16:1103:2873] TxId: 281474976710663. Ctx: { TraceId: 01kd67yg9s9r5w8n8ahd2726rv, Database: , SessionId: ydb://session/3?node_id=16&id=NTA3NTlmNTYtYzc1NzA5ODktOWVkMWVkMjQtMTk0NWYzNzY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } 2025-12-23T18:36:07.401359Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=16&id=NTA3NTlmNTYtYzc1NzA5ODktOWVkMWVkMjQtMTk0NWYzNzY=, ActorId: [16:1044:2873], ActorState: ExecuteState, TraceId: 01kd67yg9s9r5w8n8ahd2726rv, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/table`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } } 2025-12-23T18:36:07.402819Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [16:1102:2873], Recipient [16:893:2771]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976710661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2025-12-23T18:36:07.402863Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2025-12-23T18:36:07.403004Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [16:893:2771], Recipient [16:893:2771]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:36:07.403044Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-12-23T18:36:07.403116Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2025-12-23T18:36:07.403255Z node 16 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976710661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2025-12-23T18:36:07.403377Z node 16 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976710661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2025-12-23T18:36:07.403482Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-12-23T18:36:07.403529Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:36:07.403558Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-12-23T18:36:07.403590Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:36:07.403642Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:36:07.403708Z node 16 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v501/0 ImmediateWriteEdgeReplied# v501/0 2025-12-23T18:36:07.403779Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2025-12-23T18:36:07.403814Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:36:07.403859Z node 16 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:36:07.403887Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2025-12-23T18:36:07.403912Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2025-12-23T18:36:07.403975Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:36:07.404004Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2025-12-23T18:36:07.404028Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-12-23T18:36:07.404053Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-12-23T18:36:07.404120Z node 16 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2025-12-23T18:36:07.404247Z node 16 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976710661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2025-12-23T18:36:07.404324Z node 16 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:6] at 72075186224037888 2025-12-23T18:36:07.404425Z node 16 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2025-12-23T18:36:07.404524Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:36:07.404570Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-12-23T18:36:07.404635Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-12-23T18:36:07.404695Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:36:07.404733Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-12-23T18:36:07.404760Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-12-23T18:36:07.404787Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:36:07.404814Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:36:07.404863Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2025-12-23T18:36:07.404888Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:36:07.404911Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2025-12-23T18:36:07.404969Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 
72075186224037888 2025-12-23T18:36:07.405013Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:36:07.405048Z node 16 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-12-23T18:36:07.405116Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:36:07.407336Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [16:69:2116], Recipient [16:893:2771]: NKikimrLongTxService.TEvLockStatus LockId: 281474976710661 LockNode: 16 Status: STATUS_NOT_FOUND 2025-12-23T18:36:07.412075Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [16:1117:2923], Recipient [16:893:2771]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:36:07.412198Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:36:07.412290Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [16:1116:2922], serverId# [16:1117:2923], sessionId# [0:0:0] 2025-12-23T18:36:07.412557Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553224, Sender [16:827:2722], Recipient [16:893:2771]: NKikimr::TEvDataShard::TEvGetOpenTxs |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> BsControllerConfig::OverlayMap >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 >> BsControllerConfig::OverlayMap [GOOD] >> BsControllerConfig::OverlayMapCrossReferences >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |92.8%| [LD] {RESULT} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] Test command err: 2025-12-23T18:35:26.435406Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:35:26.435656Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:35:26.516272Z node 1 :KESUS_TABLET 
DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:35:26.516434Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:35:26.577159Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:35:26.577822Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=11289041574400576500, session=0, seqNo=0) 2025-12-23T18:35:26.578010Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:35:26.591235Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=11289041574400576500, session=1) 2025-12-23T18:35:26.591615Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=3529166647815671274, session=0, seqNo=0) 2025-12-23T18:35:26.591782Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:35:26.605014Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=3529166647815671274, session=2) 2025-12-23T18:35:26.605680Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:148:2170], cookie=4571007039208183258, name="Sem1", limit=1) 2025-12-23T18:35:26.605856Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-12-23T18:35:26.620926Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:148:2170], cookie=4571007039208183258) 2025-12-23T18:35:26.621322Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2025-12-23T18:35:26.621557Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-12-23T18:35:26.621816Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2025-12-23T18:35:26.643485Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2025-12-23T18:35:26.643595Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2025-12-23T18:35:26.644331Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:156:2178], cookie=9512440280189291775, name="Sem1") 2025-12-23T18:35:26.644487Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:156:2178], cookie=9512440280189291775) 2025-12-23T18:35:26.645045Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:159:2181], cookie=11121286014457545406, name="Sem1") 2025-12-23T18:35:26.645137Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:159:2181], cookie=11121286014457545406) 
2025-12-23T18:35:27.180893Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:27.203328Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:27.619143Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:27.638397Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:28.067098Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:28.087278Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:28.507455Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:28.526896Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:28.989586Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:29.005611Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:29.425433Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:29.450868Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:29.871168Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:29.891480Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:30.295049Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:30.312612Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:30.735088Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:30.759053Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:31.236859Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:31.253201Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:31.690555Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:31.707856Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:32.119351Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:32.142134Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:32.565361Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:32.587519Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:33.012485Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:33.031759Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:33.505811Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:33.523343Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:33.919073Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:33.941802Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:34.331101Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:34.347762Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:34.782059Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:34.801546Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:35.220442Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:35.238070Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:35.695126Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:35.715411Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:36.105223Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:36.122946Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:36.535270Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:36.553498Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:36.940798Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:36.956653Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:37.316422Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:37.330143Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:37.747865Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:37.763636Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:38.159343Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:38.174344Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:38.593762Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:38.608630Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:39.009973Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:39.023660Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:39.439123Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:39.457313Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:39.892445Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] 
TTxSelfCheck::Execute 2025-12-23T18:35:39.917232Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:40.351233Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:40.367484Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:40.796959Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:40.823035Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:41.239331Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:41.255563Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:35:41.640681Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:35:41.655667Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::C ... G: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:06.937297Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:07.309033Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:07.328071Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:07.710273Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:07.731713Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:08.219841Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:08.243696Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:08.649823Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:08.662493Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:09.051064Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:09.072152Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:09.467190Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:09.488570Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:09.895190Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:09.915649Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:10.304788Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:10.319860Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:10.705317Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:10.724165Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:11.113355Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: 
[72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:11.132494Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:11.535516Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:11.555785Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:11.961081Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:36:11.973433Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:36:12.399180Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-12-23T18:36:12.399296Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-12-23T18:36:12.399358Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-12-23T18:36:12.412937Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-12-23T18:36:12.427703Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:453:2413], cookie=5849471612942766267, name="Sem1") 2025-12-23T18:36:12.427841Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:453:2413], cookie=5849471612942766267) 2025-12-23T18:36:13.076765Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:36:13.076900Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:36:13.101622Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:36:13.101766Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:36:13.148453Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:36:13.149081Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=12369734606347381953, session=0, seqNo=0) 2025-12-23T18:36:13.149254Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:36:13.161826Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=12369734606347381953, session=1) 2025-12-23T18:36:13.162239Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=14281712027779613340, session=0, seqNo=0) 2025-12-23T18:36:13.162392Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:36:13.175425Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=14281712027779613340, session=2) 2025-12-23T18:36:13.175799Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=13850065247092102639, session=0, seqNo=0) 2025-12-23T18:36:13.175968Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2025-12-23T18:36:13.195327Z node 5 
:KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=13850065247092102639, session=3) 2025-12-23T18:36:13.195983Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:150:2172], cookie=13652970146862555953, name="Sem1", limit=3) 2025-12-23T18:36:13.196147Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-12-23T18:36:13.208510Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:150:2172], cookie=13652970146862555953) 2025-12-23T18:36:13.208864Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=111, session=1, semaphore="Sem1" count=2) 2025-12-23T18:36:13.209058Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-12-23T18:36:13.209311Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2025-12-23T18:36:13.209406Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-12-23T18:36:13.209504Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=333, session=3, semaphore="Sem1" count=1) 2025-12-23T18:36:13.221477Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=111) 2025-12-23T18:36:13.221578Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=222) 2025-12-23T18:36:13.221615Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=333) 2025-12-23T18:36:13.222298Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:158:2180], cookie=12881562020055160879, name="Sem1") 2025-12-23T18:36:13.222416Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:158:2180], cookie=12881562020055160879) 2025-12-23T18:36:13.222949Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:161:2183], cookie=2909993744451074575, name="Sem1") 2025-12-23T18:36:13.223042Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:161:2183], cookie=2909993744451074575) 2025-12-23T18:36:13.223315Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=444, session=1, semaphore="Sem1" count=1) 2025-12-23T18:36:13.223446Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-12-23T18:36:13.235827Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=444) 2025-12-23T18:36:13.236531Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] 
TTxSemaphoreDescribe::Execute (sender=[5:166:2188], cookie=17753348991489219874, name="Sem1") 2025-12-23T18:36:13.236635Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:166:2188], cookie=17753348991489219874) 2025-12-23T18:36:13.237134Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:169:2191], cookie=1649998018472009161, name="Sem1") 2025-12-23T18:36:13.237210Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:169:2191], cookie=1649998018472009161) 2025-12-23T18:36:13.263653Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:36:13.263774Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:36:13.264347Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:36:13.265065Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:36:13.325049Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:36:13.325291Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-12-23T18:36:13.325388Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-12-23T18:36:13.325424Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-12-23T18:36:13.325838Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:208:2221], cookie=7520181833718110044, name="Sem1") 2025-12-23T18:36:13.325948Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:208:2221], cookie=7520181833718110044) 2025-12-23T18:36:13.326567Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:217:2229], cookie=383777542873297731, name="Sem1") 2025-12-23T18:36:13.326664Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:217:2229], cookie=383777542873297731) |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 >> TestYmqHttpProxy::BillingRecordsForJsonApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize 
[GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:36:12.664325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:36:12.664453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:12.664493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:36:12.664528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:36:12.664570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:36:12.664598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:36:12.664677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:12.664787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:36:12.665668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:36:12.665982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:36:12.781604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:36:12.781682Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:12.787934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:36:12.788183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:36:12.788328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:36:12.791353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:36:12.791563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:36:12.792321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:12.792544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:36:12.794586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:12.794808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:12.796007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:12.796073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:12.796230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:36:12.796281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:12.796324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:12.796459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:36:13.050724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.051916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.052072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.052172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.052254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.052317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.052385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.052502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.052613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.052698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.052775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.052853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.052976Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.053142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:13.053228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... shard: 72057594046678944 2025-12-23T18:36:14.752730Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2025-12-23T18:36:14.752798Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-12-23T18:36:14.753450Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:36:14.755833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:36:14.755875Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:36:14.756024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:36:14.756501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:36:14.756534Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:36:14.763618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:36:14.763725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:36:14.763924Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877763, Sender [1:1311:3160], Recipient [1:686:2673]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [1:1311:3160] ServerId: [1:1312:3161] } 
2025-12-23T18:36:14.763969Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-12-23T18:36:14.763995Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6176: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:36:14.764440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:36:14.764484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:36:14.765012Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:1327:3176], Recipient [1:686:2673]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:36:14.765058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:36:14.765093Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:36:14.765193Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124996, Sender [1:910:2844], Recipient [1:686:2673]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2025-12-23T18:36:14.765242Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-12-23T18:36:14.765306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:36:14.765398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:36:14.765439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1325:3174] 2025-12-23T18:36:14.765626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [1:1327:3176], Recipient [1:686:2673]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:36:14.765666Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:36:14.765711Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-12-23T18:36:14.766308Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:1328:3177], Recipient [1:686:2673]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-12-23T18:36:14.766358Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:36:14.766464Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:36:14.766658Z node 1 
:SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 198us result status StatusSuccess 2025-12-23T18:36:14.767151Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:36:14.768016Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271188001, Sender [1:1329:3178], Recipient [1:686:2673]: NKikimrPQ.TEvPeriodicTopicStats PathId: 40 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2025-12-23T18:36:14.768084Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-12-23T18:36:14.768133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 40] DataSize 151 UsedReserveSize 151 2025-12-23T18:36:14.768226Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:128: Will execute TTxStoreStats, queue# 1 2025-12-23T18:36:14.768365Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:1330:3179], Recipient [1:686:2673]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-12-23T18:36:14.768405Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, 
processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:36:14.768821Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:36:14.775620Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 206us result status StatusSuccess 2025-12-23T18:36:14.776167Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 >> TestKinesisHttpProxy::TestUnauthorizedPutRecords >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] |92.8%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol
|92.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol
|92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol
|92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots
|92.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots
|92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots
>> KqpWorkloadServiceDistributed::TestDistributedQueue
>> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD]
>> BsControllerConfig::OverlayMapCrossReferences [GOOD]
>> TestYmqHttpProxy::TestGetQueueAttributes [GOOD]
>> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31
>> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD]
>> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143]
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143]
2025-12-23T18:36:14.870287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-12-23T18:36:14.870368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-12-23T18:36:14.870431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-12-23T18:36:14.870480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration
2025-12-23T18:36:14.870515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-12-23T18:36:14.870552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-12-23T18:36:14.870635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-12-23T18:36:14.870720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-12-23T18:36:14.871604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# ,
AvailableExternalDataSources# 2025-12-23T18:36:14.871943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:36:15.008835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:36:15.008917Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:15.028309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:36:15.028550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:36:15.028722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:36:15.042799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:36:15.043041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:36:15.043732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:15.043944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:36:15.048251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:15.048434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:15.049890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:15.049963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:15.050178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:36:15.050237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:15.050283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:15.050399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:36:15.234654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.235775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.235942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.236067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.236146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.236212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.236280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.236355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.236461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.236553Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.236654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.236731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.236819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.236991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.237079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
8:36:17.032449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.032595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.032641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.032743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.032951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.033034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.033730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.034007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.034126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.034187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.034342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.034400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.034445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:17.034778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:36:17.055700Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:36:17.055858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:17.059643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435083, Sender [1:935:2865], Recipient [1:935:2865]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-12-23T18:36:17.059704Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-12-23T18:36:17.067472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:17.067561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:17.067646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:36:17.067701Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:17.067775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:17.067817Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:36:17.086375Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 274399233, Sender [1:971:2865], Recipient [1:935:2865]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-12-23T18:36:17.086431Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5420: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-12-23T18:36:17.086489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:935:2865] sender: [1:993:2058] recipient: [1:15:2062] 2025-12-23T18:36:17.147732Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:992:2910], Recipient [1:935:2865]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-12-23T18:36:17.147799Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:36:17.147938Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:36:17.148178Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 232us result status StatusSuccess 2025-12-23T18:36:17.148844Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:36:17.149643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271188001, Sender [1:994:2911], Recipient [1:935:2865]: NKikimrPQ.TEvPeriodicTopicStats PathId: 38 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2025-12-23T18:36:17.149703Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-12-23T18:36:17.149741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 19 UsedReserveSize 7 2025-12-23T18:36:17.149800Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:128: Will execute TTxStoreStats, queue# 1 2025-12-23T18:36:17.149875Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:141: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-12-23T18:36:17.150124Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:995:2912], Recipient [1:935:2865]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-12-23T18:36:17.150165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:36:17.150316Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:36:17.150545Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 271us result status StatusSuccess 2025-12-23T18:36:17.151055Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 
WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> Yq_1::ListConnections [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable >> TestYmqHttpProxy::TestDeleteQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:36:04.523614Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:36:04.646623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:36:04.659222Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:36:04.659662Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:36:04.659747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:36:04.979263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:36:04.979408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:36:05.077765Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514959693797 != 1766514959693801 2025-12-23T18:36:05.102535Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:05.156453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:36:05.301941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0037b5/r3tmp/tmpN2iMbn/pdisk_1.dat 2025-12-23T18:36:05.747961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:36:05.748496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:36:05.751233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:36:05.752381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:36:05.754429Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-12-23T18:36:05.754515Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:399:2398] Proxy marker# C1 2025-12-23T18:36:05.767229Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:36:05.793663Z node 1 :HIVE DEBUG: hive_impl.cpp:2502: HIVE#72057594037968897 ProcessTabletBalancer [(72057594046644480:1,0)] MaxUsage=0.000000000 on #1 MinUsage=0.000000000 on #1 Scatter=0.000000000 2025-12-23T18:36:05.793830Z node 1 :HIVE DEBUG: hive_impl.cpp:406: HIVE#72057594037968897 Handle BalancerOut 2025-12-23T18:36:05.871254Z node 1 
:TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2025-12-23T18:36:05.871390Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-12-23T18:36:05.871715Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2025-12-23T18:36:05.872230Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2025-12-23T18:36:05.872296Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:399:2398] Proxy 2025-12-23T18:36:05.872859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:36:05.874835Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-12-23T18:36:05.874980Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-12-23T18:36:05.875026Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-12-23T18:36:05.875090Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2025-12-23T18:36:05.875435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:36:05.875523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-12-23T18:36:05.876719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-12-23T18:36:05.879952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:36:05.881613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:36:05.881689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:05.882615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-12-23T18:36:05.886720Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-12-23T18:36:05.898942Z node 1 :HIVE DEBUG: 
tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-12-23T18:36:05.899080Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-12-23T18:36:05.899367Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-12-23T18:36:05.899438Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:449: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,38): {} 2025-12-23T18:36:05.899507Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-12-23T18:36:05.899692Z node 1 :HIVE DEBUG: hive_impl.cpp:2888: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-12-23T18:36:05.900378Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-12-23T18:36:05.900571Z node 1 :HIVE DEBUG: hive_impl.cpp:1108: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2025-12-23T18:36:05.901317Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-12-23T18:36:05.901745Z node 1 :HIVE DEBUG: hive_impl.cpp:461: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2025-12-23T18:36:05.901875Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136585236303776}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-12-23T18:36:05.901971Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136585236303776}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-12-23T18:36:05.902161Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136585236303776}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-12-23T18:36:05.902266Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2025-12-23T18:36:05.902332Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Stopped -> Booting 2025-12-23T18:36:05.902386Z node 1 :HIVE DEBUG: hive_impl.cpp:370: HIVE#72057594037968897 ProcessBootQueue (1) 2025-12-23T18:36:05.902595Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 
THive::TTxProcessBootQueue()::Execute 2025-12-23T18:36:05.902645Z node 1 :HIVE DEBUG: hive_impl.cpp:250: HIVE#72057594037968897 Handle ProcessBootQueue (size: 1) 2025-12-23T18:36:05.902706Z node 1 :HIVE DEBUG: hive_impl.cpp:1254: HIVE#72057594037968897 [FBN] Finding best node for tablet DataShard.72075186224037888.Leader.0 2025-12-23T18:36:05.906768Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1) 2025-12-23T18:36:05.907029Z node 1 :HIVE DEBUG: hive_impl.cpp:330: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-12-23T18:36:05.907210Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (7207 ... at tablet# 72075186224037889 2025-12-23T18:36:16.329718Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_table.cpp:493: TDropTable Propose, path: /Root/table-2, pathId: 0, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-12-23T18:36:16.329955Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715665:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:36:16.336127Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: /Root/table-2 2025-12-23T18:36:16.344026Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:36:16.344190Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715665 ssId 72057594046644480 seqNo 2:4 2025-12-23T18:36:16.344272Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715665 at tablet 72075186224037889 2025-12-23T18:36:16.345541Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:36:16.345894Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:36:16.362157Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [2:327:2368] NKikimrLocal.TEvStopTablet TabletId: 72075186224037888 FollowerId: 0 Generation: 1,0x10040206 [2:398:2397] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1 Actions: NKikimr::TTabletReqBlockBlobStorage} 2025-12-23T18:36:16.362514Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-12-23T18:36:16.364047Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2025-12-23T18:36:16.364202Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-12-23T18:36:16.368729Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037888 OK) 2025-12-23T18:36:16.368869Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 
THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2025-12-23T18:36:16.369262Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:36:16.369375Z node 2 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-12-23T18:36:16.369427Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-12-23T18:36:16.369495Z node 2 :HIVE DEBUG: hive_impl.cpp:925: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2025-12-23T18:36:16.369972Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2025-12-23T18:36:16.383634Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:36:16.385365Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2025-12-23T18:36:16.385438Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 281474976715665 step# 4500 Status# 16 SEND to# [2:398:2397] Proxy marker# C1 2025-12-23T18:36:16.396447Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2025-12-23T18:36:16.483327Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 281474976715665 has been planned 2025-12-23T18:36:16.483458Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2025-12-23T18:36:16.483503Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2025-12-23T18:36:16.483814Z node 2 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 5000 in 0.500000s at 4.950000s 2025-12-23T18:36:16.484309Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 4500, txid# 281474976715665 marker# C2 2025-12-23T18:36:16.484403Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 281474976715665 stepId# 4500 Status# 17 SEND EvProposeTransactionStatus to# [2:398:2397] Proxy 2025-12-23T18:36:16.484874Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 4500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:36:16.485540Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715665 at step 4500 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 4500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-12-23T18:36:16.485596Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:36:16.485885Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:36:16.485935Z node 2 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:36:16.485987Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [4500:281474976715665] in PlanQueue unit at 72075186224037889 2025-12-23T18:36:16.486190Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 4500:281474976715665 keys extracted: 0 2025-12-23T18:36:16.486318Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:36:16.486571Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:36:16.486661Z node 2 :TX_DATASHARD INFO: drop_table_unit.cpp:72: Trying to DROP TABLE at 72075186224037889 2025-12-23T18:36:16.487350Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:36:16.489711Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 4500} 2025-12-23T18:36:16.489791Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:36:16.490094Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-12-23T18:36:16.490228Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-12-23T18:36:16.490293Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-12-23T18:36:16.490341Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2025-12-23T18:36:16.490392Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 acknowledged 2025-12-23T18:36:16.490652Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:36:16.490725Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:36:16.495127Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2025-12-23T18:36:16.495269Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:36:16.500162Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2025-12-23T18:36:16.504469Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2025-12-23T18:36:16.504574Z node 2 :TX_DATASHARD DEBUG: 
datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-12-23T18:36:16.505291Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715665:0 2025-12-23T18:36:16.505699Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:36:16.535273Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-12-23T18:36:16.535608Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-12-23T18:36:16.537522Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:36:16.538214Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-12-23T18:36:16.538591Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186224037889 2025-12-23T18:36:16.538640Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-12-23T18:36:16.538736Z node 2 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-12-23T18:36:16.538909Z node 2 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-12-23T18:36:16.539033Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minstep/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> TestKinesisHttpProxy::ListShards [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-12-23T18:33:24.846857Z :WriteAndReadSomeMessagesWithSyncCompression INFO: Random seed for debugging is 1766514804846811 2025-12-23T18:33:25.484620Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587123317429441520:2238];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:25.484695Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:25.538428Z node 1 :PQ_READ_PROXY DEBUG: 
caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00599d/r3tmp/tmpQc9dvP/pdisk_1.dat 2025-12-23T18:33:25.625545Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:33:26.209726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:26.209794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:26.242985Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:26.487116Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:26.491046Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:26.499556Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:26.503138Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:26.503357Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:26.567465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:26.567648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:26.575791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:26.575865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:26.696133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:26.795998Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:26.799831Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:33:26.802973Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123319619565582:2082] 1766514805361127 != 1766514805361130 2025-12-23T18:33:26.832724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7752, node 1 2025-12-23T18:33:27.333957Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:27.347413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:27.379465Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/00599d/r3tmp/yandexNSsPC5.tmp 2025-12-23T18:33:27.379486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/00599d/r3tmp/yandexNSsPC5.tmp 2025-12-23T18:33:27.379703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/00599d/r3tmp/yandexNSsPC5.tmp 2025-12-23T18:33:27.379786Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:33:27.545199Z INFO: TTestServer started on Port 29504 GrpcPort 7752 TClient is connected to server localhost:29504 PQClient connected to localhost:7752 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514806966 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:28.423990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-12-23T18:33:30.495834Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123317429441520:2238];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:30.496192Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:33:33.435774Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123351789180065:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:33.436043Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:33.436559Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123351789180077:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:33.436619Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123351789180078:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:33.437058Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:33.446523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:33:33.498761Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123351789180081:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-12-23T18:33:33.615177Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123351789180113:2147] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:33:34.124887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:33:34.125575Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587123351789180120:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:33:34.126409Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=ZjNhZWFhMWEtYjZjZTA3ZTgtOGI5YWMzNDUtNDE0YjQ3YTY=, ActorId: [2:7587123351789180063:2306], ActorState: ExecuteState, TraceId: 01kd67stfm7nnkjvn425fd8zzx, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:33:34.128854Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: ... 2025-12-23T18:36:13.619624Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-12-23T18:36:13.619663Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-12-23T18:36:13.619751Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-12-23T18:36:13.620167Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|ababea1f-3ae3b8c7-eb39e313-d45695b9_0 2025-12-23T18:36:13.623508Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1766514973623 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:36:13.623665Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|ababea1f-3ae3b8c7-eb39e313-d45695b9_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-12-23T18:36:13.623894Z :INFO: [] MessageGroupId [src] SessionId [src|ababea1f-3ae3b8c7-eb39e313-d45695b9_0] Write session: close. 
Timeout = 0 ms 2025-12-23T18:36:13.623947Z :INFO: [] MessageGroupId [src] SessionId [src|ababea1f-3ae3b8c7-eb39e313-d45695b9_0] Write session will now close 2025-12-23T18:36:13.623996Z :DEBUG: [] MessageGroupId [src] SessionId [src|ababea1f-3ae3b8c7-eb39e313-d45695b9_0] Write session: aborting 2025-12-23T18:36:13.624469Z :INFO: [] MessageGroupId [src] SessionId [src|ababea1f-3ae3b8c7-eb39e313-d45695b9_0] Write session: gracefully shut down, all writes complete 2025-12-23T18:36:13.624518Z :DEBUG: [] MessageGroupId [src] SessionId [src|ababea1f-3ae3b8c7-eb39e313-d45695b9_0] Write session: destroy 2025-12-23T18:36:13.635076Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|ababea1f-3ae3b8c7-eb39e313-d45695b9_0 grpc read done: success: 0 data: 2025-12-23T18:36:13.635111Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|ababea1f-3ae3b8c7-eb39e313-d45695b9_0 grpc read failed 2025-12-23T18:36:13.635153Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|ababea1f-3ae3b8c7-eb39e313-d45695b9_0 grpc closed 2025-12-23T18:36:13.635178Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|ababea1f-3ae3b8c7-eb39e313-d45695b9_0 is DEAD 2025-12-23T18:36:13.635864Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:36:13.646891Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [15:7587124040287802737:2465] destroyed 2025-12-23T18:36:13.646967Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2025-12-23T18:36:13.647007Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:13.647032Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:13.647057Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:13.647086Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:13.647109Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:36:13.707582Z :INFO: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Starting read session 2025-12-23T18:36:13.707640Z :DEBUG: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Starting cluster discovery 2025-12-23T18:36:13.707908Z :INFO: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3179: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:3179
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:3179. " 2025-12-23T18:36:13.707952Z :DEBUG: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Restart cluster discovery in 0.006612s 2025-12-23T18:36:13.714912Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:13.714965Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:13.714993Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:13.715024Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:13.715048Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:36:13.722824Z :DEBUG: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Starting cluster discovery 2025-12-23T18:36:13.723240Z :INFO: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3179: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:3179
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:3179. " 2025-12-23T18:36:13.723300Z :DEBUG: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Restart cluster discovery in 0.016782s 2025-12-23T18:36:13.742911Z :DEBUG: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Starting cluster discovery 2025-12-23T18:36:13.743286Z :INFO: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3179: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:3179
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:3179. " 2025-12-23T18:36:13.743318Z :DEBUG: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Restart cluster discovery in 0.031644s 2025-12-23T18:36:13.778890Z :DEBUG: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Starting cluster discovery 2025-12-23T18:36:13.779221Z :NOTICE: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3179: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:3179
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:3179. " } 2025-12-23T18:36:13.779417Z :NOTICE: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:3179: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:3179
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:3179. " } 2025-12-23T18:36:13.779549Z :INFO: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Closing read session. Close timeout: 0.000000s 2025-12-23T18:36:13.779663Z :NOTICE: [/Root] [/Root] [81b434cd-84a22398-6c1f5e72-63d0d230] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:36:13.814979Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:13.815014Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:13.815035Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:13.815059Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:13.815075Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:36:13.919377Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:13.919431Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:13.919454Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:13.919492Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:13.919512Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:36:14.022950Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:14.022990Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:14.023013Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:14.023033Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:14.023048Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:36:14.095449Z node 15 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1177: ActorId: [15:7587124044582770059:2476] TxId: 281474976710674. Ctx: { TraceId: 01kd67yppxdjs8v72ch8yyrzds, Database: /Root, SessionId: ydb://session/3?node_id=15&id=ZTUxNmM2ZTktNjY2MWY0MTgtN2Y2ZGQyZDItZDZkZDBiNjk=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 16 2025-12-23T18:36:14.095653Z node 15 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [15:7587124044582770063:2476], TxId: 281474976710674, task: 3. Ctx: { TraceId : 01kd67yppxdjs8v72ch8yyrzds. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=15&id=ZTUxNmM2ZTktNjY2MWY0MTgtN2Y2ZGQyZDItZDZkZDBiNjk=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [15:7587124044582770059:2476], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |92.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> PrivateApi::GetTask [GOOD] >> PrivateApi::Nodes >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] >> DqUnboxedValueToNativeArrowConversion::VariantOverTupleWithOptionals [GOOD] >> HashShuffle::BackPressureInMemory [GOOD] >> HashShuffle::BackPressureInMemoryMulti [GOOD] >> HashShuffle::BackPressureInMemoryLoad >> TestKinesisHttpProxy::ListShardsEmptyFields >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 >> TestYmqHttpProxy::TestListQueues >> Yq_1::Basic [GOOD] >> Yq_1::Basic_EmptyList >> TestYmqHttpProxy::TestTagQueue >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |92.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings >> DqOutputChannelTests::SingleRead [GOOD] >> DqOutputChannelTests::PopAll [GOOD] >> DqOutputChannelWithStorageTests::Spill [GOOD] >> DqOutputChannelWithStorageTests::Overflow [GOOD] >> DqOutputWideChannelTests::Overflow [GOOD] >> DqOutputWideChannelTests::BigRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 21534, MsgBus: 6154 2025-12-23T18:33:56.113150Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123448876111524:2255];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:56.113315Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003c6b/r3tmp/tmpKZEkxk/pdisk_1.dat 2025-12-23T18:33:56.542586Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:56.546367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:56.546508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:56.650953Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123448876111305:2081] 1766514836052681 != 1766514836052684 2025-12-23T18:33:56.665543Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:56.670625Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21534, node 1 2025-12-23T18:33:56.834974Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:56.857272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:56.857310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:56.857317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:56.857394Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6154 2025-12-23T18:33:57.111985Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6154 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514836730 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:57.826569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:33:57.892528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:58.215895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:33:58.415900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:58.510241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:01.114969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123448876111524:2255];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:01.115066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:34:01.465003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123470350949665:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:01.465144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:01.465630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123470350949675:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:01.465680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.209592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.269341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.317336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.367787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.456252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.554679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.705636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.804677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.954942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123474645917847:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.955053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.955528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123474645917852:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.955583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123474645917853:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.955616Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 20 ... nExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:34:50.728979Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:34:50.736878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:34:50.749737Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:50.845244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:51.199743Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:51.315943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:54.315058Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123699714047641:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:54.315142Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:54.315639Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123699714047650:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:54.315697Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:54.412196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:54.473217Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:54.528484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:54.558725Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587123678239209383:2131];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:54.558843Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:34:54.572857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:54.614942Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:54.667343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:54.731319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:54.843868Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:55.099912Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123704009015826:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:55.100019Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:55.100871Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123704009015829:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:55.100944Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:55.101028Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587123704009015833:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:55.106224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:34:55.129959Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587123704009015835:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:34:55.189417Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587123704009015886:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:34:58.992794Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:04.822866Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:35:04.822905Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:07.330261Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [2:7587124013246739408:8485], TxId: 281474976713868, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67ygq335thekcv03vz3t0b. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=MTE2MGM3NzEtOTY2NTczMzgtNDQ1NDA4NWYtNGFhMDM0MWU=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-12-23T18:36:07.330647Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [2:7587124013246739409:8480], TxId: 281474976713868, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67ygq335thekcv03vz3t0b. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=MTE2MGM3NzEtOTY2NTczMzgtNDQ1NDA4NWYtNGFhMDM0MWU=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:7587124013246739397:4046], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T18:36:07.331359Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=MTE2MGM3NzEtOTY2NTczMzgtNDQ1NDA4NWYtNGFhMDM0MWU=, ActorId: [2:7587123918757434747:4046], ActorState: ExecuteState, TraceId: 01kd67ygq335thekcv03vz3t0b, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } Execute err: PRECONDITION_FAILED
: Error: Conflict with existing key., code: 2012 cont with status: PRECONDITION_FAILED finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> Yq_1::CreateQuery_With_Idempotency [GOOD] >> Yq_1::CreateQuery_Without_Connection >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 >> DqOutputWideChannelTests::SingleRead [GOOD] >> DqOutputWideChannelTests::PartialRead [GOOD] >> DqOutputWideChannelTests::PopAll [GOOD] >> DqOutputWideChannelWithStorageTests::Spill [GOOD] >> DqOutputWideChannelWithStorageTests::Overflow [GOOD] >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple >> Yq_1::ModifyConnections [GOOD] >> Yq_1::ModifyQuery >> Yq_1::DeleteConnections [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection >> DqUnboxedValueDoNotFitToArrow::DictUtf8ToInterval >> DqOutputWideChannelTests::BigRow [GOOD] >> DqOutputWideChannelTests::ChunkSizeLimit [GOOD] >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] >> Channels20::IcChannelLateBinding >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:36:04.382219Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:36:04.539769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:36:04.551586Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:36:04.552044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:36:04.552138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:36:04.888394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:36:04.888552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:36:04.989496Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514959848842 != 1766514959848846 2025-12-23T18:36:05.011810Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:05.060331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:36:05.213774Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0037b2/r3tmp/tmpnOnAR9/pdisk_1.dat 2025-12-23T18:36:05.644979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:36:05.645403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:36:05.647336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:36:05.648189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:36:05.650111Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-12-23T18:36:05.650188Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:399:2398] Proxy marker# C1 2025-12-23T18:36:05.672250Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:36:05.713940Z node 1 :HIVE DEBUG: hive_impl.cpp:2502: HIVE#72057594037968897 ProcessTabletBalancer [(72057594046644480:1,0)] MaxUsage=0.000000000 on #1 MinUsage=0.000000000 on #1 Scatter=0.000000000 2025-12-23T18:36:05.714084Z node 1 :HIVE DEBUG: hive_impl.cpp:406: HIVE#72057594037968897 Handle BalancerOut 2025-12-23T18:36:05.801172Z node 1 
:TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2025-12-23T18:36:05.801312Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-12-23T18:36:05.801608Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2025-12-23T18:36:05.802082Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2025-12-23T18:36:05.802149Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:399:2398] Proxy 2025-12-23T18:36:05.802601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:36:05.804536Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-12-23T18:36:05.804672Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-12-23T18:36:05.804731Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-12-23T18:36:05.804786Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2025-12-23T18:36:05.805122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:36:05.805221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-12-23T18:36:05.806399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-12-23T18:36:05.809956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:36:05.812282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:36:05.812373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:05.813424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-12-23T18:36:05.817824Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-12-23T18:36:05.829301Z node 1 :HIVE DEBUG: 
tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-12-23T18:36:05.829440Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-12-23T18:36:05.829689Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-12-23T18:36:05.829772Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:449: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,38): {} 2025-12-23T18:36:05.829844Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-12-23T18:36:05.830026Z node 1 :HIVE DEBUG: hive_impl.cpp:2888: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-12-23T18:36:05.830660Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-12-23T18:36:05.830858Z node 1 :HIVE DEBUG: hive_impl.cpp:1108: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2025-12-23T18:36:05.831572Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-12-23T18:36:05.831999Z node 1 :HIVE DEBUG: hive_impl.cpp:461: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2025-12-23T18:36:05.832143Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136835509374880}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-12-23T18:36:05.832226Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136835509374880}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-12-23T18:36:05.832371Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136835509374880}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-12-23T18:36:05.832460Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2025-12-23T18:36:05.832525Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Stopped -> Booting 2025-12-23T18:36:05.832576Z node 1 :HIVE DEBUG: hive_impl.cpp:370: HIVE#72057594037968897 ProcessBootQueue (1) 2025-12-23T18:36:05.832765Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 
THive::TTxProcessBootQueue()::Execute 2025-12-23T18:36:05.832810Z node 1 :HIVE DEBUG: hive_impl.cpp:250: HIVE#72057594037968897 Handle ProcessBootQueue (size: 1) 2025-12-23T18:36:05.832865Z node 1 :HIVE DEBUG: hive_impl.cpp:1254: HIVE#72057594037968897 [FBN] Finding best node for tablet DataShard.72075186224037888.Leader.0 2025-12-23T18:36:05.833012Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1) 2025-12-23T18:36:05.833210Z node 1 :HIVE DEBUG: hive_impl.cpp:330: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-12-23T18:36:05.833338Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (7207 ... 94037968897 THive::TTxBlockStorageResult::Execute(72075186224037888 OK) 2025-12-23T18:36:20.420623Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2025-12-23T18:36:20.420854Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:36:20.420957Z node 2 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-12-23T18:36:20.421004Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-12-23T18:36:20.421052Z node 2 :HIVE DEBUG: hive_impl.cpp:925: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2025-12-23T18:36:20.421471Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2025-12-23T18:36:20.434252Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:36:20.445208Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2025-12-23T18:36:20.466476Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=DataShard 72075186224037889 is blocked by a schema operation;tx_id=281474976715662; 2025-12-23T18:36:20.466672Z node 2 :TX_DATASHARD INFO: datashard_pipeline.cpp:1318: Outdated Tx 281474976715662 is cleaned at tablet 72075186224037889 and outdatedStep# 34500 2025-12-23T18:36:20.466818Z node 2 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:38: Cleaned up old txs at 72075186224037889 TxInFly 0 2025-12-23T18:36:20.467084Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:36:20.467156Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715666 ssId 72057594046644480 seqNo 2:4 2025-12-23T18:36:20.467215Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715666 at tablet 72075186224037889 2025-12-23T18:36:20.467435Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:36:20.467608Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 
(dry run) active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:36:20.467752Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:36:20.467787Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:36:20.467830Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 1 2025-12-23T18:36:20.478798Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:36:20.478943Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:36:20.480476Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 281474976715666 HANDLE EvProposeTransaction marker# C0 2025-12-23T18:36:20.480549Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 281474976715666 step# 35000 Status# 16 SEND to# [2:398:2397] Proxy marker# C1 2025-12-23T18:36:20.543249Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 281474976715666 has been planned 2025-12-23T18:36:20.543372Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 2025-12-23T18:36:20.543420Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 2025-12-23T18:36:20.543727Z node 2 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 35500 in 0.500000s at 35.450000s 2025-12-23T18:36:20.544187Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 35000, txid# 281474976715666 marker# C2 2025-12-23T18:36:20.544276Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 281474976715666 stepId# 35000 Status# 17 SEND EvProposeTransactionStatus to# [2:398:2397] Proxy 2025-12-23T18:36:20.545004Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715666 at step 35000 at tablet 72075186224037889 { Transactions { TxId: 281474976715666 AckTo { RawX1: 0 RawX2: 0 } } Step: 35000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-12-23T18:36:20.545073Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:36:20.545270Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 35000, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:36:20.545738Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:36:20.545797Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:36:20.545850Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [35000:281474976715666] in PlanQueue unit at 72075186224037889 2025-12-23T18:36:20.546062Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 35000:281474976715666 keys extracted: 0 
2025-12-23T18:36:20.546208Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:36:20.546489Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:36:20.546575Z node 2 :TX_DATASHARD INFO: drop_table_unit.cpp:72: Trying to DROP TABLE at 72075186224037889 2025-12-23T18:36:20.547076Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:36:20.549229Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 35000} 2025-12-23T18:36:20.549336Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:36:20.550026Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-12-23T18:36:20.550154Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-12-23T18:36:20.550215Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-12-23T18:36:20.550249Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 acknowledged 2025-12-23T18:36:20.550302Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 acknowledged 2025-12-23T18:36:20.550731Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:36:20.550825Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [35000 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:36:20.550896Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715666 state PreOffline TxInFly 0 2025-12-23T18:36:20.550995Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:36:20.551902Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 281474976715666, done: 0, blocked: 1 2025-12-23T18:36:20.556010Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715666:0 2025-12-23T18:36:20.556107Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715666, publications: 2, subscribers: 1 2025-12-23T18:36:20.556428Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715666 datashard 72075186224037889 state PreOffline 2025-12-23T18:36:20.556484Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 
72075186224037889 2025-12-23T18:36:20.557148Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 1 2025-12-23T18:36:20.557554Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:36:20.573690Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-12-23T18:36:20.574085Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-12-23T18:36:20.576087Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:36:20.577234Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-12-23T18:36:20.578452Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186224037889 2025-12-23T18:36:20.578527Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-12-23T18:36:20.578657Z node 2 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-12-23T18:36:20.578860Z node 2 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-12-23T18:36:20.579007Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minstep/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqOutputWideChannelTests::ChunkSizeLimit [GOOD] >> Yq_1::DescribeConnection [GOOD] >> Yq_1::DeleteQuery |92.8%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:36:15.898471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:36:15.898554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:15.898607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:36:15.898648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:36:15.898686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:36:15.898724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:36:15.898866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:15.898958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:36:15.899829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:36:15.900130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:36:16.025536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:36:16.025595Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:16.046462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:36:16.046659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:36:16.050980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:36:16.076355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:36:16.076613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:36:16.077369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:16.077583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:36:16.090862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:16.091082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:16.092517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:16.092587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:16.092749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:36:16.092862Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:16.092917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:16.093067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:36:16.370464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.371525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.371661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.371794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.371870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.371961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.372031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.372113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.372199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.372292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.372384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.372471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.372571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.372665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:16.372832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... thId: 37] was 0 2025-12-23T18:36:22.182721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-12-23T18:36:22.187070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.187261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:22.187316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:36:22.187499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:22.187718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:22.188055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-12-23T18:36:22.188452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.188593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.189262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.189341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.189604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.189777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.189869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.189963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.190185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.190293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.190699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at 
schemeshard: 72057594046678944 2025-12-23T18:36:22.191572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.191705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.191777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.191959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.192025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.192072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:22.200247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:22.210274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:22.210383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:22.211402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:36:22.211466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:22.211513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:22.217569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1163:3115] sender: [1:1218:2058] recipient: [1:15:2062] 2025-12-23T18:36:22.265688Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:36:22.265945Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 292us result status StatusSuccess 2025-12-23T18:36:22.266396Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" 
TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 0 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82624 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:36:22.269230Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:36:22.269455Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 266us result status StatusSuccess 2025-12-23T18:36:22.269976Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 
281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "SomeTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest |92.8%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |92.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 |92.8%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} >> ConvertUnboxedValueToArrowAndBack::Tuple >> ConvertUnboxedValueToArrowAndBack::Tuple [GOOD] >> ConvertUnboxedValueToArrowAndBack::VariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::VariantOverTupleWithOptionals [GOOD] >> DqOutputChannelTests::PartialRead [GOOD] >> DqOutputChannelTests::Overflow [GOOD] >> DqOutputChannelTests::BigRow |92.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestKinesisHttpProxy::TestWrongStream >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 >> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD] >> DqOutputChannelTests::BigRow [GOOD] >> DqOutputChannelTests::ChunkSizeLimit [GOOD] >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> Yq_1::CreateConnections_With_Idempotency >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 >> Channels20::LocalChannelBackPressure >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor >> TActorTest::TestWaitFor [GOOD] >> BsControllerConfig::MergeIntersectingBoxes [GOOD] >> BsControllerConfig::MoveGroups |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqOutputChannelTests::ChunkSizeLimit [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> TActorTest::TestWaitForFirstEvent >> TActorTest::TestWaitForFirstEvent [GOOD] >> DqUnboxedValueToNativeArrowConversion::Struct [GOOD] >> DqUnboxedValueToNativeArrowConversion::Tuple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitFor [GOOD] Test command err: ... waiting for value = 42 ... waiting for value = 42 (done) |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/testlib/actors/ut/unittest >> DqUnboxedValueToNativeArrowConversion::Tuple [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalListOfOptional [GOOD] >> DqUnboxedValueToNativeArrowConversion::VariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverTupleWithOptionals [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 >> DqUnboxedValueDoNotFitToArrow::DictUtf8ToInterval [GOOD] >> DqUnboxedValueDoNotFitToArrow::OptionalOfOptional [GOOD] >> DqUnboxedValueDoNotFitToArrow::LargeVariant >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitForFirstEvent [GOOD] Test command err: ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger (done) ... waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger ... 
waiting for NKikimr::NTestSuiteTActorTest::TTestCaseTestWaitForFirstEvent::Execute_(NUnitTest::TTestContext&)::TEvTrigger (done) |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/testlib/actors/ut/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverTupleWithOptionals [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] >> TActorTest::TestCreateChildActor [GOOD] >> TActorTest::TestBlockEvents >> TActorTest::TestBlockEvents [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 >> BsControllerConfig::DeleteStoragePool [GOOD] >> BsControllerConfig::CommandRollbackWhenCombined ------- [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestBlockEvents [GOOD] Test command err: ... waiting for blocked 3 events ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor cookie 0 ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor cookie 0 ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor cookie 0 ... waiting for blocked 3 events (done) ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for blocked 1 more event ... blocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor cookie 0 ... waiting for blocked 1 more event (done) ... waiting for processed 2 more events ... waiting for processed 2 more events (done) ... 
unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... unblocking NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TEvTrigger from NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TSourceActor to NKikimr::NTestSuiteTActorTest::TTestCaseTestBlockEvents::Execute_(NUnitTest::TTestContext&)::TTargetActor ... waiting for processed 3 more events ... waiting for processed 3 more events (done) |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestHandleEvent [GOOD] >> TActorTest::TestGetCtxTime [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:36:15.381108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:36:15.381202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:15.381254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:36:15.381298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:36:15.381338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:36:15.381377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:36:15.381450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:15.381540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:36:15.382523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:36:15.382850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:36:15.508067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:36:15.508124Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2025-12-23T18:36:15.531346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:36:15.531566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:36:15.531738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:36:15.549605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:36:15.549859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:36:15.550605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:15.550873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:36:15.556279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:15.556479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:15.557916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:15.557979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:15.558134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:36:15.558195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:15.558256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:15.558393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:36:15.802663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.806788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.806959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.807087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.807179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.807277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.807353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.807424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.807506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.807578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 
281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.807657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.807729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.807842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.807938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:15.808068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2025-12-23T18:36:27.666000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.666115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.666495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.666573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.666876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.667057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.667109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.667224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.667466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.667560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.668021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.668309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.668391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.668447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.668635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.668707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.668755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:36:27.669144Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:36:27.680418Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:36:27.680605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:27.686236Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435083, Sender [1:1539:3470], 
Recipient [1:1539:3470]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-12-23T18:36:27.686300Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-12-23T18:36:27.686645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:27.686718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:27.687022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:1539:3470], Recipient [1:1539:3470]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:36:27.687062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:36:27.687209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:36:27.687259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:27.687304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:27.687343Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:36:27.687523Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 274399233, Sender [1:1575:3470], Recipient [1:1539:3470]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-12-23T18:36:27.687563Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5420: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-12-23T18:36:27.687597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1539:3470] sender: [1:1594:2058] recipient: [1:15:2062] 2025-12-23T18:36:27.744919Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:1593:3513], Recipient [1:1539:3470]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-12-23T18:36:27.745019Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:36:27.745145Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:36:27.745424Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 279us result status StatusSuccess 2025-12-23T18:36:27.746270Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 24345 Memory: 141504 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> BsControllerConfig::CommandRollbackWhenCombined [GOOD] >> TActorTest::TestScheduleEvent [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier >> TActorTest::TestScheduleReaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus 2025-12-23 18:36:05,464 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-12-23 18:36:05,602 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 1060001 58.9M 59.0M 33.0M test_tool run_ut @/home/runner/.ya/build/build_root/8ptz/005b33/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/test_tool.args 1060117 577M 579M 573M └─ ydb-core-cms-ut_sentinel_unstable --trace-path-append /home/runner/.ya/build/build_root/8ptz/005b33/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/ytest.report Test command err: 2025-12-23T18:26:07.173988Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2025-12-23T18:26:07.174046Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2025-12-23T18:26:07.174092Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:26:07.174117Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2025-12-23T18:26:07.174149Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-12-23T18:26:07.174215Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-12-23T18:26:07.174857Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: 
DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-12-23T18:26:07.178185Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } 
GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 
VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
d::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: CommonLoggerInitError } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 799870110 2025-12-23T18:36:05.506899Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 799870110 2025-12-23T18:36:05.507031Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 799870110 2025-12-23T18:36:05.507098Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:36:05.519508Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:36:05.519598Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2025-12-23T18:36:05.519749Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2025-12-23T18:36:05.519801Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2025-12-23T18:36:05.519840Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2025-12-23T18:36:05.519874Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2025-12-23T18:36:05.519910Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2025-12-23T18:36:05.519949Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] 
[StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2025-12-23T18:36:05.519986Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2025-12-23T18:36:05.520020Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2025-12-23T18:36:05.520345Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 799880110 2025-12-23T18:36:05.521248Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 799880110 2025-12-23T18:36:05.521401Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 799880110 2025-12-23T18:36:05.521556Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: 
"/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 799880110 2025-12-23T18:36:05.521706Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: CommonLoggerInitError } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 799880110 2025-12-23T18:36:05.521851Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 799880110 2025-12-23T18:36:05.522040Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: CommonLoggerInitError } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 799880110 2025-12-23T18:36:05.522174Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: CommonLoggerInitError } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 799880110 2025-12-23T18:36:05.522237Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s Traceback (most recent 
call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/005b33/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/005b33/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> TestYmqHttpProxy::TestTagQueue [GOOD] >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId >> TActorTest::TestSendEvent [GOOD] >> TActorTest::TestSendAfterDelay >> TestYmqHttpProxy::BillingRecordsForJsonApi [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestGetCtxTime [GOOD] >> TActorTest::TestSendAfterDelay [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/testlib/actors/ut/unittest >> TestYmqHttpProxy::TestListQueues [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel_unstable/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestScheduleReaction [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/testlib/actors/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::CommandRollbackWhenCombined [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2964:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2964:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2964:2117] 2025-12-23T18:36:12.438503Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:12.439792Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:12.440206Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:12.443380Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:12.444178Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:12.444471Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:12.444512Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 
2025-12-23T18:36:12.444851Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:12.455036Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:12.455180Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:12.455364Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:12.455489Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:12.455584Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:12.455680Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 2025-12-23T18:36:12.470189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:12.470355Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:12.535392Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:12.535547Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:12.535635Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:12.535718Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:12.535853Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:12.535920Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:12.535970Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:12.536066Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:12.547360Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:12.547541Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:12.559393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} 
Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:12.559560Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:12.560913Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:12.560989Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:12.561191Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:12.561250Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:12.594607Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 
12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } } 2025-12-23T18:36:12.598012Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-12-23T18:36:12.598095Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-12-23T18:36:12.598133Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-12-23T18:36:12.598163Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-12-23T18:36:12.598185Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-12-23T18:36:12.598214Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-12-23T18:36:12.598252Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-12-23T18:36:12.598274Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-12-23T18:36:12.598296Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-12-23T18:36:12.598317Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-12-23T18:36:12.598338Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-12-23T18:36:12.598359Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-12-23T18:36:12.598384Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-12-23T18:36:12.598420Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-12-23T18:36:12.598475Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-12-23T18:36:12.598496Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-12-23T18:36:12.598515Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-12-23T18:36:12.598537Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-12-23T18:36:12.598561Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-12-23T18:36:12.598582Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-12-23T18:36:12.598615Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-12-23T18:36:12.598644Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk ... UG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:29.300230Z node 101 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:29.300398Z node 101 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:29.300432Z node 101 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:29.300639Z node 101 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:29.312402Z node 101 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:29.312562Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:29.312678Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:29.312796Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:29.312925Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:29.313033Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [101:28:2070] sender: [101:49:2057] recipient: [101:11:2058] 2025-12-23T18:36:29.327559Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:29.327726Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:29.350131Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:29.350248Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:29.350315Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:29.350375Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:29.350485Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:29.350547Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:29.350591Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:29.350635Z 
node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:29.361369Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:29.361535Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:29.372318Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:29.372467Z node 101 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:29.373915Z node 101 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:29.373985Z node 101 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:29.374217Z node 101 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:29.374270Z node 101 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:29.376099Z node 101 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" } } Command { ReadHostConfig { } } Rollback: true } Status { Success: true } Status { Success: true } Status { Success: true HostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" ItemConfigGeneration: 1 } } ErrorDescription: "transaction rollback" RollbackSuccess: true 2025-12-23T18:36:29.377252Z node 101 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } } Status { Success: true } Success: true ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [102:26:2057] recipient: [102:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [102:26:2057] recipient: [102:24:2068] Leader for TabletID 72057594037932033 is [102:28:2070] sender: [102:29:2057] recipient: [102:24:2068] 2025-12-23T18:36:29.628935Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:29.630028Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:29.630319Z node 102 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:29.631587Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:29.632967Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:29.633315Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:29.633342Z node 102 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:29.633563Z node 102 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 
2025-12-23T18:36:29.643859Z node 102 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:29.644024Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:29.644142Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:29.644265Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:29.644360Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:29.644424Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [102:28:2070] sender: [102:49:2057] recipient: [102:11:2058] 2025-12-23T18:36:29.657930Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:29.658095Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:29.682281Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:29.682422Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:29.682498Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:29.682586Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:29.682753Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:29.682816Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:29.682851Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:29.682893Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:29.696757Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:29.696910Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:29.707724Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:29.707868Z node 102 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:29.709203Z node 102 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:29.709254Z node 102 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:29.709451Z node 102 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:29.709495Z node 102 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:29.710166Z node 102 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" } } Command { ReadHostConfig { } } Rollback: true } Status { Success: true } Status { Success: true } Status { Success: true HostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" ItemConfigGeneration: 1 } } ErrorDescription: "transaction rollback" RollbackSuccess: true 2025-12-23T18:36:29.715596Z node 102 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } } Status { Success: true } Success: true ConfigTxSeqNo: 1 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendAfterDelay [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/testlib/actors/ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibility >> TestYmqHttpProxy::TestUntagQueue >> TestYmqHttpProxy::TestPurgeQueue >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 >> TActorTest::TestDie [GOOD] >> TActorTest::TestFilteredGrab >> TActorTest::TestFilteredGrab [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestFilteredGrab [GOOD] |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |92.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |92.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/testlib/actors/ut/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |92.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 [GOOD] >> TestKinesisHttpProxy::TestWrongStream [GOOD] >> Channels20::LocalChannelBackPressure [GOOD] >> TSchemeShardAuditSettings::CreateExtSubdomain >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD] >> PrivateApi::Nodes [GOOD] >> Channels20::IcChannelLateBinding [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedQueue [GOOD] >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] >> TSchemeShardAuditSettings::CreateSubdomain >> TSchemeShardAuditSettings::AlterSubdomain >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [FAIL] >> TestYmqHttpProxy::TestDeleteQueue [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification [GOOD] >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [GOOD] >> YdbIndexTable::OnlineBuild [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 [GOOD] >> TestYmqHttpProxy::TestUntagQueue [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TestYmqHttpProxy::TestPurgeQueue [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier [GOOD] >> Yq_1::Basic_TaggedLiteral [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] >> BsControllerConfig::MoveGroups [GOOD] >> Yq_1::Basic_EmptyList [GOOD] >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> Channels20::LocalChannelAsyncRead >> TestKinesisHttpProxy::TestWrongStream2 >> KqpWorkloadServiceDistributed::TestNodeDisconnect >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 >> TestKinesisHttpProxy::ListShardsTimestamp >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 >> YdbIndexTable::OnlineBuildWithDataColumn >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks >> Yq_1::ModifyQuery [GOOD] >> Yq_1::CreateQuery_Without_Connection [GOOD] >> Yq_1::DeleteQuery [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] >> Yq_1::CreateConnections_With_Idempotency [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 >> Yq_1::Basic_EmptyDict >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] >> Channels20::IcChannelAsyncRead >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 >> TestYmqHttpProxy::TestListDeadLetterSourceQueues >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] >> Channels20::LocalChannelAsyncRead [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 >> TestYmqHttpProxy::TestSendMessageBatch >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [GOOD] >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |92.8%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |92.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |92.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2025-12-23T18:35:49.665137Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123933934763829:2201];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:49.665207Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:50.281469Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] E1223 18:35:50.287678228 1206263 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:35:50.287819167 1206263 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:35:50.545371Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.545405Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:50.683889Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:50.695083Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:13354: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:13354 2025-12-23T18:35:50.695550Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:35:50.743334Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.802933Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.837711Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:50.847060Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.851251Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.852963Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.871767Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.872532Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.894248Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.894330Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.901444Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.901575Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.901608Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.901639Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.904359Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.951074Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:50.992674Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:51.075120Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:51.094996Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:51.098434Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:51.117068Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:51.122692Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:51.139550Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:51.151561Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:51.152415Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:13354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13354 } ] 2025-12-23T18:35:51.154056Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Roo ... P_COMPUTE DEBUG: kqp_read_actor.cpp:684: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. Add point to new shardId: 72075186224037891 2025-12-23T18:36:41.243174Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. Pending shards States: TShardState{ TabletId: 72075186224037891, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudpbkmodb96s7b393)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudpbkmodb96s7b393)], RetryAttempt: 0, ResolveAttempt: 1 }; 2025-12-23T18:36:41.243196Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. effective maxinflight 1024 sorted 0 2025-12-23T18:36:41.243208Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. BEFORE: 1.0 2025-12-23T18:36:41.243257Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. Send EvRead to shardId: 72075186224037891, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2025-12-23T18:36:41.243305Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. AFTER: 0.1 2025-12-23T18:36:41.243318Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-12-23T18:36:41.244279Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. Recv TEvReadResult from ShardID=72075186224037891, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-12-23T18:36:41.244310Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. Taken 0 locks 2025-12-23T18:36:41.244323Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. new data for read #0 seqno = 1 finished = 1 2025-12-23T18:36:41.244350Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124160995231808:2940], TxId: 281474976715737, task: 1. Ctx: { TraceId : 01kd67zhwfd1132wyydtgzs12c. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzdjNTJjYWQtMjc3ZDQ3YjUtYTkxNWYxZDctYzUyY2Q3MTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 276037645 2025-12-23T18:36:41.244370Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124160995231808:2940], TxId: 281474976715737, task: 1. Ctx: { TraceId : 01kd67zhwfd1132wyydtgzs12c. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzdjNTJjYWQtMjc3ZDQ3YjUtYTkxNWYxZDctYzUyY2Q3MTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. 
CA StateFunc 271646922 2025-12-23T18:36:41.244398Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-12-23T18:36:41.244416Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. enter pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-12-23T18:36:41.244449Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. exit pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 1 freeSpace: 8388572 2025-12-23T18:36:41.244470Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. returned 1 rows; processed 1 rows 2025-12-23T18:36:41.244510Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. dropping batch for read #0 2025-12-23T18:36:41.244522Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. effective maxinflight 1024 sorted 0 2025-12-23T18:36:41.244536Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-12-23T18:36:41.244552Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715737, task: 1, CA Id [4:7587124160995231808:2940]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-12-23T18:36:41.244785Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7587124160995231808:2940], TxId: 281474976715737, task: 1. Ctx: { TraceId : 01kd67zhwfd1132wyydtgzs12c. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzdjNTJjYWQtMjc3ZDQ3YjUtYTkxNWYxZDctYzUyY2Q3MTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:36:41.244819Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124160995231808:2940], TxId: 281474976715737, task: 1. Ctx: { TraceId : 01kd67zhwfd1132wyydtgzs12c. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzdjNTJjYWQtMjc3ZDQ3YjUtYTkxNWYxZDctYzUyY2Q3MTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:41.244859Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715737, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-12-23T18:36:41.244881Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124160995231809:2941], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zhwfd1132wyydtgzs12c. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzdjNTJjYWQtMjc3ZDQ3YjUtYTkxNWYxZDctYzUyY2Q3MTM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
CA StateFunc 271646923 2025-12-23T18:36:41.244909Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715737, task: 2. Finish input channelId: 1, from: [4:7587124160995231808:2940] 2025-12-23T18:36:41.244962Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124160995231809:2941], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zhwfd1132wyydtgzs12c. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzdjNTJjYWQtMjc3ZDQ3YjUtYTkxNWYxZDctYzUyY2Q3MTM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:41.245108Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7587124160995231809:2941], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zhwfd1132wyydtgzs12c. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzdjNTJjYWQtMjc3ZDQ3YjUtYTkxNWYxZDctYzUyY2Q3MTM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:36:41.245123Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124160995231808:2940], TxId: 281474976715737, task: 1. Ctx: { TraceId : 01kd67zhwfd1132wyydtgzs12c. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzdjNTJjYWQtMjc3ZDQ3YjUtYTkxNWYxZDctYzUyY2Q3MTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646927 2025-12-23T18:36:41.245148Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124160995231808:2940], TxId: 281474976715737, task: 1. Ctx: { TraceId : 01kd67zhwfd1132wyydtgzs12c. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzdjNTJjYWQtMjc3ZDQ3YjUtYTkxNWYxZDctYzUyY2Q3MTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:41.245171Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715737, task: 1. Tasks execution finished 2025-12-23T18:36:41.245185Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [4:7587124160995231808:2940], TxId: 281474976715737, task: 1. Ctx: { TraceId : 01kd67zhwfd1132wyydtgzs12c. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzdjNTJjYWQtMjc3ZDQ3YjUtYTkxNWYxZDctYzUyY2Q3MTM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2025-12-23T18:36:41.245311Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715737, task: 1. pass away 2025-12-23T18:36:41.245416Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715737;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:36:41.245875Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124160995231809:2941], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zhwfd1132wyydtgzs12c. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzdjNTJjYWQtMjc3ZDQ3YjUtYTkxNWYxZDctYzUyY2Q3MTM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:41.245917Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715737, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-12-23T18:36:41.245927Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715737, task: 2. Tasks execution finished 2025-12-23T18:36:41.245939Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [4:7587124160995231809:2941], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zhwfd1132wyydtgzs12c. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NzdjNTJjYWQtMjc3ZDQ3YjUtYTkxNWYxZDctYzUyY2Q3MTM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:36:41.245990Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715737, task: 2. pass away 2025-12-23T18:36:41.246043Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715737;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:36:41.368541Z node 4 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:16606: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:16606 >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true >> RetryPolicy::TWriteSession_TestBrokenPolicy >> TestKinesisHttpProxy::TestWrongStream2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 >> TestKinesisHttpProxy::ListShardsToken >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 >> Channels20::IcChannelTrivial >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2964:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2964:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2964:2117] 2025-12-23T18:36:03.459708Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:03.461249Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:03.461914Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:03.464195Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:03.465031Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:03.465343Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:03.465392Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:03.465723Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:03.492152Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:03.492291Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:03.492468Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:03.492565Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:03.492652Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:03.492736Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 
2025-12-23T18:36:03.507676Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:03.507856Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:03.615426Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:03.615567Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:03.615647Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:03.615765Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:03.615891Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:03.615942Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:03.615977Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:03.616041Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:03.628239Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:03.628385Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:03.639366Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:03.639521Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:03.640933Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:03.640989Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:03.641196Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:03.641243Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:03.682344Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { 
Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2025-12-23T18:36:03.691414Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-12-23T18:36:03.691510Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-12-23T18:36:03.691548Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-12-23T18:36:03.691570Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-12-23T18:36:03.691592Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-12-23T18:36:03.691615Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-12-23T18:36:03.691654Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-12-23T18:36:03.691677Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-12-23T18:36:03.691698Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-12-23T18:36:03.691718Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-12-23T18:36:03.691743Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-12-23T18:36:03.691763Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-12-23T18:36:03.691793Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-12-23T18:36:03.691820Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-12-23T18:36:03.691872Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-12-23T18:36:03.691913Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-12-23T18:36:03.691943Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-12-23T18:36:03.691965Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-12-23T18:36:03.691985Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-12-23T18:36:03.692009Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-12-23T18:36:03.692029Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-12-23T18:36:03.692050Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-12-23T18:36:03.692086Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-12-23T18:36:03.692126Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-12-23T18:36:03.692147Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-12-23T18:36:03.692167Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-12-23T18:36:03.692188Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-12-23T18:36:03.692208Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-12-23T18:36:03.692229Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-12-23T18:36:03.692282Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2025-12-23T18:36:03.692339Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-12-23T18:36:03.692383Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-12-23T18:36:03.692428Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Cr ... 
R NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1000 Path# /dev/disk1 2025-12-23T18:36:33.548522Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1001 Path# /dev/disk2 2025-12-23T18:36:33.548551Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1002 Path# /dev/disk3 2025-12-23T18:36:33.548590Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1000 Path# /dev/disk1 2025-12-23T18:36:33.548623Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1001 Path# /dev/disk2 2025-12-23T18:36:33.548650Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1002 Path# /dev/disk3 2025-12-23T18:36:33.548678Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1000 Path# /dev/disk1 2025-12-23T18:36:33.548708Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1001 Path# /dev/disk2 2025-12-23T18:36:33.548737Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1002 Path# /dev/disk3 2025-12-23T18:36:33.548782Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1000 Path# /dev/disk1 2025-12-23T18:36:33.548834Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1001 Path# /dev/disk2 2025-12-23T18:36:33.548862Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1002 Path# /dev/disk3 2025-12-23T18:36:33.548890Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1000 Path# /dev/disk1 2025-12-23T18:36:33.548919Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1001 Path# /dev/disk2 2025-12-23T18:36:33.548949Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1002 Path# /dev/disk3 2025-12-23T18:36:33.548978Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1000 Path# /dev/disk1 2025-12-23T18:36:33.549015Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1001 Path# /dev/disk2 2025-12-23T18:36:33.549047Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1002 Path# /dev/disk3 2025-12-23T18:36:33.549076Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1000 Path# /dev/disk1 2025-12-23T18:36:33.549105Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1001 Path# /dev/disk2 2025-12-23T18:36:33.549135Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1002 Path# /dev/disk3 2025-12-23T18:36:34.008235Z node 161 :BS_CONTROLLER ERROR: {BSC07@impl.h:2212} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.466481s 2025-12-23T18:36:34.008480Z node 161 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.466751s 2025-12-23T18:36:34.068125Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { 
DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } 
HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2025-12-23T18:36:34.070495Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1000 Path# /dev/disk1 2025-12-23T18:36:34.070591Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2025-12-23T18:36:34.070625Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1002 Path# /dev/disk3 2025-12-23T18:36:34.070656Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1000 Path# /dev/disk1 2025-12-23T18:36:34.070691Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2025-12-23T18:36:34.070721Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1002 Path# /dev/disk3 2025-12-23T18:36:34.070769Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1000 Path# /dev/disk1 2025-12-23T18:36:34.070799Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2025-12-23T18:36:34.070829Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1002 Path# /dev/disk3 2025-12-23T18:36:34.070859Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1000 Path# /dev/disk1 2025-12-23T18:36:34.070890Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1001 Path# /dev/disk2 2025-12-23T18:36:34.070919Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2025-12-23T18:36:34.070948Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1000 Path# /dev/disk1 2025-12-23T18:36:34.070976Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1001 Path# /dev/disk2 2025-12-23T18:36:34.071005Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 2025-12-23T18:36:34.071036Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1000 Path# /dev/disk1 2025-12-23T18:36:34.071067Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1001 Path# /dev/disk2 2025-12-23T18:36:34.071096Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2025-12-23T18:36:34.071127Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1000 Path# /dev/disk1 2025-12-23T18:36:34.071155Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1001 Path# /dev/disk2 2025-12-23T18:36:34.071183Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 
217:1002 Path# /dev/disk3 2025-12-23T18:36:34.071211Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1000 Path# /dev/disk1 2025-12-23T18:36:34.071243Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1001 Path# /dev/disk2 2025-12-23T18:36:34.071273Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2025-12-23T18:36:34.071302Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1000 Path# /dev/disk1 2025-12-23T18:36:34.071330Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1001 Path# /dev/disk2 2025-12-23T18:36:34.071358Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2025-12-23T18:36:34.071423Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1000 Path# /dev/disk1 2025-12-23T18:36:34.071461Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1001 Path# /dev/disk2 2025-12-23T18:36:34.071489Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 >> TestKinesisHttpProxy::TestWrongRequest >> Channels20::IcChannelAsyncRead [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [GOOD] >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [GOOD] >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 >> TestKinesisHttpProxy::TestWrongRequest [GOOD] >> Channels20::IcChannelEarlyFinish >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:36:34.341174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, 
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:36:34.341276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:34.341318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:36:34.341359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:36:34.341398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:36:34.341426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:36:34.341506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:34.341614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:36:34.342475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:36:34.345464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:36:34.480156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:36:34.480243Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:34.486701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:36:34.486975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:36:34.487109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:36:34.490656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:36:34.490930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:36:34.491687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:34.491927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:36:34.494269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:34.494477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:34.495869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:34.495932Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:34.496058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:36:34.496103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:34.496147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:34.496289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:36:34.688893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.689948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.690094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.690164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.700577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.700831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.700906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.701008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.701086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.701209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.701286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.701349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.701449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.701520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:34.701614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... at step: 5000112 FAKE_COORDINATOR: advance: minStep5000112 State->FrontStep: 5000111 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000112 2025-12-23T18:36:38.424689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000112, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:38.424801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000112 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:36:38.424847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:190: TDropExtSubdomain TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000112, at schemeshard: 72057594046678944 2025-12-23T18:36:38.424919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 62] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:38.424948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2025-12-23T18:36:38.424982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 128 -> 134 2025-12-23T18:36:38.426166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-12-23T18:36:38.426598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-12-23T18:36:38.428169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-12-23T18:36:38.428216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:36:38.428310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 134 -> 135 2025-12-23T18:36:38.428454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:36:38.428507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 FAKE_COORDINATOR: Erasing txId 175 2025-12-23T18:36:38.430301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:38.430358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:38.430497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2025-12-23T18:36:38.430597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:38.430628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-12-23T18:36:38.430683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 175, path id: 62 2025-12-23T18:36:38.431211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-12-23T18:36:38.431257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 175:0 ProgressState 2025-12-23T18:36:38.431286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 135 -> 240 2025-12-23T18:36:38.431917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2025-12-23T18:36:38.431999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2025-12-23T18:36:38.432040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-12-23T18:36:38.432076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 106 2025-12-23T18:36:38.432107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:36:38.432807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-12-23T18:36:38.432916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-12-23T18:36:38.432953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-12-23T18:36:38.432987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 62], version: 18446744073709551615 2025-12-23T18:36:38.433014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 3 2025-12-23T18:36:38.433083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-12-23T18:36:38.435948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-12-23T18:36:38.435996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 175:0 ProgressState 2025-12-23T18:36:38.436075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2025-12-23T18:36:38.436107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-12-23T18:36:38.436139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2025-12-23T18:36:38.436172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-12-23T18:36:38.436200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-12-23T18:36:38.436232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-12-23T18:36:38.436261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 175:0 2025-12-23T18:36:38.436288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 175:0 2025-12-23T18:36:38.436337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 2025-12-23T18:36:38.437130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:36:38.437173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2025-12-23T18:36:38.437232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 1 2025-12-23T18:36:38.437540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:36:38.437578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2025-12-23T18:36:38.437638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:36:38.438299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-12-23T18:36:38.438428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-12-23T18:36:38.440513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:36:38.440623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-12-23T18:36:38.441913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-12-23T18:36:38.441950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-12-23T18:36:38.443519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-12-23T18:36:38.443631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-12-23T18:36:38.443665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:3016:5004] TestWaitNotification: OK eventTxId 175 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 >> TestKinesisHttpProxy::ErroneousRequestGetRecords >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 >> TestKinesisHttpProxy::ListShardsToken [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2025-12-23T18:35:53.665541Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123953964719153:2089];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:53.665713Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:53.686954Z node 2 :BS_NODE WARN: 
{NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.016147s E1223 18:35:53.873164293 1208400 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:35:53.873440643 1208400 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:35:54.442369Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.483600Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.483673Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.488986Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.594723Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.596012Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.596207Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.610287Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.650821Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.650965Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.651012Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.683249Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:54.690410Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.778394Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.890734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:54.890872Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:35:54.893388Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.941839Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.947657Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:54.949168Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:1908 2025-12-23T18:35:54.971297Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:55.020889Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:55.023949Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:55.034655Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:55.038878Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:55.068260Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:55.084183Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:55.084306Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:55.084361Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:55.084389Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:1908 } ] 2025-12-23T18:35:55.087911Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:1908: Failed to connect to remote ... 77\377\377\377\377\377\377\001`\000h\347\247\200\200\200\200@p\004z\000z\000\240\001\000\270\001\000\300\001\001\312\001\000" } WatermarksMode: WATERMARKS_MODE_DISABLED } } 2025-12-23T18:36:52.605251Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124207985928193:2334], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zwzr864xbpwb6c4bqw16. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YmU4ZGI2NGUtYTEwNTA1ZGEtZTA0YjFhZWQtNzZlZjcwYzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2025-12-23T18:36:52.605350Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1107: SelfId: [4:7587124207985928193:2334], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zwzr864xbpwb6c4bqw16. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YmU4ZGI2NGUtYTEwNTA1ZGEtZTA0YjFhZWQtNzZlZjcwYzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 SrcEndpoint { ActorId { RawX1: 7587124207985928193 RawX2: 4503616807242014 } } DstEndpoint { ActorId { RawX1: 7587124207985928189 RawX2: 4503616807242014 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2025-12-23T18:36:52.605428Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:454: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. Shards State: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : idempotency_key)], RetryAttempt: 0, ResolveAttempt: 0 } 2025-12-23T18:36:52.605518Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:521: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. Sending TEvResolveKeySet update for table 'Root/yq/idempotency_keys', range: [(String : yandexcloud://Execute_folder_id, String : idempotency_key) ; (String : yandexcloud://Execute_folder_id, String : idempotency_key)], attempt #1 2025-12-23T18:36:52.605619Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124207985928193:2334], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zwzr864xbpwb6c4bqw16. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YmU4ZGI2NGUtYTEwNTA1ZGEtZTA0YjFhZWQtNzZlZjcwYzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:52.605636Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-12-23T18:36:52.605653Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-12-23T18:36:52.605727Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:538: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. 
Received TEvResolveKeySetResult update for table 'Root/yq/idempotency_keys' 2025-12-23T18:36:52.605792Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. Processing resolved ShardId# 72075186224037896, partition range: [(String : yandexcloud://Execute_folder_id, String : idempotency_key) ; ()), i: 0, state ranges: 0, points: 1 2025-12-23T18:36:52.605809Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:684: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. Add point to new shardId: 72075186224037896 2025-12-23T18:36:52.605912Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. Pending shards States: TShardState{ TabletId: 72075186224037896, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : idempotency_key)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://Execute_folder_id, String : idempotency_key)], RetryAttempt: 0, ResolveAttempt: 1 }; 2025-12-23T18:36:52.605927Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. effective maxinflight 1024 sorted 0 2025-12-23T18:36:52.605941Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. BEFORE: 1.0 2025-12-23T18:36:52.605983Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. Send EvRead to shardId: 72075186224037896, tablePath: Root/yq/idempotency_keys, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=18446744073709551615,step=1766515012619), lockTxId = 281474976715751, lockNodeId = 4 2025-12-23T18:36:52.606020Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. AFTER: 0.1 2025-12-23T18:36:52.606032Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-12-23T18:36:52.606762Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. Recv TEvReadResult from ShardID=72075186224037896, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= LockId: 281474976715751 DataShard: 72075186224037896 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 51, BrokenTxLocks= 2025-12-23T18:36:52.606789Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. Taken 1 locks 2025-12-23T18:36:52.606807Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. new data for read #0 seqno = 1 finished = 1 2025-12-23T18:36:52.606842Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124207985928193:2334], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zwzr864xbpwb6c4bqw16. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YmU4ZGI2NGUtYTEwNTA1ZGEtZTA0YjFhZWQtNzZlZjcwYzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
CA StateFunc 276037645 2025-12-23T18:36:52.606868Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124207985928193:2334], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zwzr864xbpwb6c4bqw16. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YmU4ZGI2NGUtYTEwNTA1ZGEtZTA0YjFhZWQtNzZlZjcwYzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:52.606894Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-12-23T18:36:52.606917Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. enter pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-12-23T18:36:52.606964Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. exit pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 1 freeSpace: 8388586 2025-12-23T18:36:52.606987Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. returned 1 rows; processed 1 rows 2025-12-23T18:36:52.607087Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. dropping batch for read #0 2025-12-23T18:36:52.607106Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. effective maxinflight 1024 sorted 0 2025-12-23T18:36:52.607121Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-12-23T18:36:52.607140Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715751, task: 1, CA Id [4:7587124207985928193:2334]. returned async data processed rows 1 left freeSpace 8388586 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-12-23T18:36:52.607326Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7587124207985928193:2334], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zwzr864xbpwb6c4bqw16. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YmU4ZGI2NGUtYTEwNTA1ZGEtZTA0YjFhZWQtNzZlZjcwYzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:36:52.607425Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124207985928193:2334], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zwzr864xbpwb6c4bqw16. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YmU4ZGI2NGUtYTEwNTA1ZGEtZTA0YjFhZWQtNzZlZjcwYzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:52.607469Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715751, task: 1. 
Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-12-23T18:36:52.607507Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124207985928193:2334], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zwzr864xbpwb6c4bqw16. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YmU4ZGI2NGUtYTEwNTA1ZGEtZTA0YjFhZWQtNzZlZjcwYzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:52.607528Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715751, task: 1. Tasks execution finished 2025-12-23T18:36:52.607543Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [4:7587124207985928193:2334], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zwzr864xbpwb6c4bqw16. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YmU4ZGI2NGUtYTEwNTA1ZGEtZTA0YjFhZWQtNzZlZjcwYzM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:36:52.607677Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715751, task: 1. pass away 2025-12-23T18:36:52.607812Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715751;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:36:52.754291Z node 4 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:3760: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:3760 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2025-12-23T18:35:49.134001Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123937862102293:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:49.134036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:50.176526Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] E1223 18:35:50.250325451 1205891 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:35:50.250501566 1205891 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:35:50.293638Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:50.314589Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.316843Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:50.316922Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:35:50.465995Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.490922Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:63654 2025-12-23T18:35:50.538855Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.577789Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:50.623968Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.683308Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.683370Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.683398Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.683589Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.709544Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.719759Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.764994Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.765126Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.765171Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.786789Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.826820Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.869313Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.875740Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:50.918837Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:51.066905Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:51.144037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:51.169506Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:51.231203Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:51.262070Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:51.273826Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:51.277973Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:63654: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:63654 } ] 2025-12-23T18:35:51.279116Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last er ... s String; DECLARE $job as String; DECLARE $query as String; DECLARE $status as Int64; DECLARE $internal as String; DECLARE $result_id as String; DECLARE $query_id as String; DECLARE $now as Timestamp; DECLARE $ttl as Timestamp; DECLARE $retry_counter_update_time as Timestamp; DECLARE $retry_rate as Double; DECLARE $retry_counter as Uint64; DECLARE $owner as String; UPDATE `pending_small` SET `last_seen_at` = $now, `assigned_until` = $ttl, `retry_counter` = $retry_counter, `retry_counter_updated_at` = $retry_counter_update_time, `retry_rate` = $retry_rate, `owner` = $owner WHERE `tenant` = $tenant AND `scope` = $scope AND `query_id` = $query_id; UPSERT INTO `jobs` (`scope`, `query_id`, `job_id`, `job`) VALUES($scope, $query_id, $job_id, $job); UPDATE `queries` SET `query` = $query, `status` = $status, `internal` = $internal, `result_sets_expire_at` = NULL, `expire_at` = NULL, `meta_revision` = `meta_revision` + 1 WHERE `scope` = $scope AND `query_id` = $query_id; 2025-12-23T18:36:50.574826Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710758;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:36:50.576035Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710758, task: 4. pass away 2025-12-23T18:36:50.576181Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710758;task_id=4;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:36:50.576401Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124196853222259:3031], TxId: 281474976710758, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67ztzpbxe3dpe26fzwk7t2. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjMxMjcwNDUtOWE4YzcyNTMtNzgzMGIzMzEtZGU0N2M5ZmY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2025-12-23T18:36:50.576459Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976710758, task: 2. Finish input channelId: 1, from: [4:7587124196853222257:3029] 2025-12-23T18:36:50.576514Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124196853222259:3031], TxId: 281474976710758, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67ztzpbxe3dpe26fzwk7t2. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjMxMjcwNDUtOWE4YzcyNTMtNzgzMGIzMzEtZGU0N2M5ZmY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2025-12-23T18:36:50.576557Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [4:7587124196853222259:3031], TxId: 281474976710758, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67ztzpbxe3dpe26fzwk7t2. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjMxMjcwNDUtOWE4YzcyNTMtNzgzMGIzMzEtZGU0N2M5ZmY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [4:7587124196853222253:2333], status: ABORTED, reason: {
: Error: Terminate execution } 2025-12-23T18:36:50.576609Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710758, task: 2. pass away 2025-12-23T18:36:50.576659Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710758;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:36:50.576891Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124196853222257:3029], TxId: 281474976710758, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67ztzpbxe3dpe26fzwk7t2. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjMxMjcwNDUtOWE4YzcyNTMtNzgzMGIzMzEtZGU0N2M5ZmY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2025-12-23T18:36:50.576928Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [4:7587124196853222257:3029], TxId: 281474976710758, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67ztzpbxe3dpe26fzwk7t2. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjMxMjcwNDUtOWE4YzcyNTMtNzgzMGIzMzEtZGU0N2M5ZmY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [4:7587124196853222253:2333], status: ABORTED, reason: {
: Error: Terminate execution } 2025-12-23T18:36:50.577004Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710758, task: 1. pass away 2025-12-23T18:36:50.577070Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710758;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:36:50.577218Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124196853222261:3033], TxId: 281474976710758, task: 5. Ctx: { CheckpointId : . TraceId : 01kd67ztzpbxe3dpe26fzwk7t2. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjMxMjcwNDUtOWE4YzcyNTMtNzgzMGIzMzEtZGU0N2M5ZmY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2025-12-23T18:36:50.577251Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [4:7587124196853222261:3033], TxId: 281474976710758, task: 5. Ctx: { CheckpointId : . TraceId : 01kd67ztzpbxe3dpe26fzwk7t2. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YjMxMjcwNDUtOWE4YzcyNTMtNzgzMGIzMzEtZGU0N2M5ZmY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [4:7587124196853222253:2333], status: ABORTED, reason: {
: Error: Terminate execution } 2025-12-23T18:36:50.577285Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710758, task: 5. pass away 2025-12-23T18:36:50.577324Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710758;task_id=5;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:36:50.577499Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2928: SelfId: [4:7587124196853222252:2333], SessionActorId: [4:7587124110953871355:2333], Create new TableWriteActor for table `Root/yq/jobs` ([72057594046644480:55:1]). lockId=281474976710755. ActorId=[4:7587124196853222278:2333] 2025-12-23T18:36:50.577552Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:477: Table: `Root/yq/jobs` ([72057594046644480:55:1]), SessionActorId: [4:7587124110953871355:2333]Open: token=0 2025-12-23T18:36:50.578392Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=YjMxMjcwNDUtOWE4YzcyNTMtNzgzMGIzMzEtZGU0N2M5ZmY=, ActorId: [4:7587124110953871355:2333], ActorState: ExecuteState, TraceId: 01kd67ztzpbxe3dpe26fzwk7t2, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Client lost" severity: 1 } 2025-12-23T18:36:50.581070Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:775: PingTaskRequest - PingTaskResult: {owner_id: "1651b421-adf3317b-13b56ff3-2d37cfd217" query_id { value: "utqudpbkmla7mrtoi7jo" } scope: "yandexcloud://Execute_folder_id" started_at { seconds: 1766515010 nanos: 460000000 } deadline { seconds: 1766601410 nanos: 399261000 } tenant: "TestTenant" } ERROR: [ {
: Error: GRpc error: (1): Cancelled on the server side } {
: Error: Grpc error response on endpoint localhost:18245 } ] 2025-12-23T18:36:50.587817Z node 4 :YQL_PRIVATE_PROXY ERROR: task_ping.cpp:69: PrivatePingTask - QueryId: utqudpbkmla7mrtoi7jo, Owner: 1651b421-adf3317b-13b56ff3-2d37cfd217, Failed with code: GENERIC_ERROR Details:
: Error: GRpc error: (1): Cancelled on the server side
: Error: Grpc error response on endpoint localhost:18245
: Error: ControlPlane PingTaskError 2025-12-23T18:36:50.587970Z node 4 :FQ_PINGER WARN: pinger.cpp:358: QueryId: utqudpbkmla7mrtoi7jo, Owner: 1651b421-adf3317b-13b56ff3-2d37cfd217 Ping response error: [ {
: Error: GRpc error: (1): Cancelled on the server side } {
: Error: Grpc error response on endpoint [::]:18245 } ]. Retry after: 0.000000s 2025-12-23T18:36:50.587987Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3537: SelfId: [4:7587124196853222252:2333], SessionActorId: [4:7587124110953871355:2333], Start rollback 2025-12-23T18:36:50.589548Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3607: SelfId: [4:7587124196853222252:2333], SessionActorId: [4:7587124110953871355:2333], Send EvWrite (external) to ShardID=72075186224037900, isPrepare=0, isRollback=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710755 DataShard: 72075186224037900 Generation: 1 Counter: 9 SchemeShard: 72057594046644480 PathId: 55, Size=0, Cookie=1, OperationsCount=0, IsFinal=1, Attempts=0 2025-12-23T18:36:50.589649Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3607: SelfId: [4:7587124196853222252:2333], SessionActorId: [4:7587124110953871355:2333], Send EvWrite (external) to ShardID=72075186224037895, isPrepare=0, isRollback=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710755 DataShard: 72075186224037895 Generation: 1 Counter: 11 SchemeShard: 72057594046644480 PathId: 50, Size=0, Cookie=1, OperationsCount=0, IsFinal=1, Attempts=0 2025-12-23T18:36:50.589706Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3607: SelfId: [4:7587124196853222252:2333], SessionActorId: [4:7587124110953871355:2333], Send EvWrite (external) to ShardID=72075186224037891, isPrepare=0, isRollback=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710755 DataShard: 72075186224037891 Generation: 1 Counter: 11 SchemeShard: 72057594046644480 PathId: 44, Size=0, Cookie=1, OperationsCount=0, IsFinal=1, Attempts=0 2025-12-23T18:36:50.591619Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4194: SelfId: [4:7587124196853222252:2333], SessionActorId: [4:7587124110953871355:2333], Recv EvWriteResult (external) from ShardID=72075186224037900, Status=STATUS_COMPLETED, TxId=22, Locks= , Cookie=1 2025-12-23T18:36:50.592336Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4194: SelfId: [4:7587124196853222252:2333], SessionActorId: [4:7587124110953871355:2333], Recv EvWriteResult (external) from ShardID=72075186224037895, Status=STATUS_COMPLETED, TxId=47, Locks= , Cookie=1 2025-12-23T18:36:50.592364Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4194: SelfId: [4:7587124196853222252:2333], SessionActorId: [4:7587124110953871355:2333], Recv EvWriteResult (external) from ShardID=72075186224037891, Status=STATUS_COMPLETED, TxId=53, Locks= , Cookie=1 2025-12-23T18:36:50.592377Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4541: SelfId: [4:7587124196853222252:2333], SessionActorId: [4:7587124110953871355:2333], RolledBack TxId=0 2025-12-23T18:36:50.595085Z node 4 :FQ_PINGER WARN: pinger.cpp:358: QueryId: utqudpbkmla7mrtoi7jo, Owner: 1651b421-adf3317b-13b56ff3-2d37cfd217 Ping response error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:18245: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint [::]:18245 } ]. Retry after: 0.071960s 2025-12-23T18:36:50.676039Z node 4 :FQ_PINGER WARN: pinger.cpp:358: QueryId: utqudpbkmla7mrtoi7jo, Owner: 1651b421-adf3317b-13b56ff3-2d37cfd217 Ping response error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:18245: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint [::]:18245 } ]. Retry after: 0.102375s >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [GOOD] >> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [GOOD] >> TestYmqHttpProxy::TestListQueueTags >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD] >> TestKinesisHttpProxy::GoodRequestCreateStream >> TestYmqHttpProxy::TestListQueueTags [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2025-12-23T18:35:48.901201Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123933813018392:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:48.901245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:48.976496Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007667s 2025-12-23T18:35:49.911380Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:49.931166Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; E1223 18:35:50.264895599 1205637 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:35:50.265056242 1205637 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:35:50.266241Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.495674Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.538286Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:50.538410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:35:50.551150Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.582615Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.582799Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.612243Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.634850Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.671979Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.695782Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.696119Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30299 2025-12-23T18:35:50.707323Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.770023Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.823318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:50.827098Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.835557Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.865341Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.875563Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.906877Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.954934Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:50.972039Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:51.015199Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:51.043940Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:51.068532Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:51.158764Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:51.158848Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:51.178087Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30299 } ] 2025-12-23T18:35:51.183952Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30299: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on ... t. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:50.833942Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715751, task: 1, CA Id [4:7587124200123671390:2972]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-12-23T18:36:50.833958Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715751, task: 1, CA Id [4:7587124200123671390:2972]. enter pack cells method shardId: 72075186224037900 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-12-23T18:36:50.833976Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715751, task: 1, CA Id [4:7587124200123671390:2972]. exit pack cells method shardId: 72075186224037900 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-12-23T18:36:50.833987Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715751, task: 1, CA Id [4:7587124200123671390:2972]. returned 0 rows; processed 0 rows 2025-12-23T18:36:50.834020Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715751, task: 1, CA Id [4:7587124200123671390:2972]. dropping batch for read #0 2025-12-23T18:36:50.834031Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715751, task: 1, CA Id [4:7587124200123671390:2972]. effective maxinflight 1024 sorted 0 2025-12-23T18:36:50.834043Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715751, task: 1, CA Id [4:7587124200123671390:2972]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-12-23T18:36:50.834056Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715751, task: 1, CA Id [4:7587124200123671390:2972]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-12-23T18:36:50.834106Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1107: SelfId: [4:7587124200123671391:2973], TxId: 281474976715751, task: 2. Ctx: { TraceId : 01kd67zts57ahdyw07q0zw8c6f. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7587124200123671390 RawX2: 4503616807242652 } } DstEndpoint { ActorId { RawX1: 7587124200123671391 RawX2: 4503616807242653 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7587124200123671391 RawX2: 4503616807242653 } } DstEndpoint { ActorId { RawX1: 7587124200123671386 RawX2: 4503616807242217 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2025-12-23T18:36:50.834122Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1114: SelfId: [4:7587124200123671391:2973], TxId: 281474976715751, task: 2. Ctx: { TraceId : 01kd67zts57ahdyw07q0zw8c6f. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
Update input channelId: 1, peer: [4:7587124200123671390:2972] 2025-12-23T18:36:50.834125Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7587124200123671390:2972], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zts57ahdyw07q0zw8c6f. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:36:50.834209Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124200123671391:2973], TxId: 281474976715751, task: 2. Ctx: { TraceId : 01kd67zts57ahdyw07q0zw8c6f. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2025-12-23T18:36:50.834324Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1107: SelfId: [4:7587124200123671391:2973], TxId: 281474976715751, task: 2. Ctx: { TraceId : 01kd67zts57ahdyw07q0zw8c6f. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7587124200123671390 RawX2: 4503616807242652 } } DstEndpoint { ActorId { RawX1: 7587124200123671391 RawX2: 4503616807242653 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7587124200123671391 RawX2: 4503616807242653 } } DstEndpoint { ActorId { RawX1: 7587124200123671386 RawX2: 4503616807242217 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2025-12-23T18:36:50.834351Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124200123671391:2973], TxId: 281474976715751, task: 2. Ctx: { TraceId : 01kd67zts57ahdyw07q0zw8c6f. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:50.834373Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124200123671391:2973], TxId: 281474976715751, task: 2. Ctx: { TraceId : 01kd67zts57ahdyw07q0zw8c6f. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2025-12-23T18:36:50.834393Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715751, task: 2. Finish input channelId: 1, from: [4:7587124200123671390:2972] 2025-12-23T18:36:50.834447Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124200123671391:2973], TxId: 281474976715751, task: 2. Ctx: { TraceId : 01kd67zts57ahdyw07q0zw8c6f. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:50.834469Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124200123671390:2972], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zts57ahdyw07q0zw8c6f. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646927 2025-12-23T18:36:50.834497Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7587124200123671391:2973], TxId: 281474976715751, task: 2. Ctx: { TraceId : 01kd67zts57ahdyw07q0zw8c6f. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:36:50.834498Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124200123671390:2972], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zts57ahdyw07q0zw8c6f. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:50.834538Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715751, task: 1. Tasks execution finished 2025-12-23T18:36:50.834552Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124200123671391:2973], TxId: 281474976715751, task: 2. Ctx: { TraceId : 01kd67zts57ahdyw07q0zw8c6f. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:50.834555Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [4:7587124200123671390:2972], TxId: 281474976715751, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zts57ahdyw07q0zw8c6f. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2025-12-23T18:36:50.834585Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715751, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-12-23T18:36:50.834596Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715751, task: 2. Tasks execution finished 2025-12-23T18:36:50.834612Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [4:7587124200123671391:2973], TxId: 281474976715751, task: 2. Ctx: { TraceId : 01kd67zts57ahdyw07q0zw8c6f. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MWI2MzVjNGEtMTZmZWM1NzgtNDc2Njk0MzYtMzhkOWI2NWE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
Compute state finished. All channels and sinks finished 2025-12-23T18:36:50.834691Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715751, task: 2. pass away 2025-12-23T18:36:50.834710Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715751, task: 1. pass away 2025-12-23T18:36:50.834799Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715751;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:36:50.834815Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715751;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:36:50.841285Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:775: DescribeQueryRequest - DescribeQueryResult: {query_id: "utqudpbkmhsm9t68avc3" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:669: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } 2025-12-23T18:36:51.071149Z node 4 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: Client is stopped ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2025-12-23T18:35:51.713289Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123946007520234:2262];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:51.713378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1223 18:35:52.212639461 1207367 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:35:52.212774471 1207367 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:35:52.347956Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.564577Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.707606Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:17413 2025-12-23T18:35:52.707737Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.712061Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:52.713517Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.732735Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.761416Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.834301Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.834386Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.834421Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.834480Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.881855Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.881946Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.881981Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.882010Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.882043Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.935179Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.935256Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.935558Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:52.938668Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:53.017927Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:53.019389Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:53.019496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:35:53.040758Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:53.054347Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:53.093012Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:53.114841Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:53.152313Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:53.152397Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:17413 } ] 2025-12-23T18:35:53.156103Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:17413: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint local ... 976710733, task: 1, CA Id [1:7587124189071888876:2885]. Received TEvResolveKeySetResult update for table 'Root/yq/pending_small' 2025-12-23T18:36:48.261020Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. Processing resolved ShardId# 72075186224037890, partition range: [(String : TestTenant, String : NULL, String : NULL) ; ()), i: 0, state ranges: 1, points: 0 2025-12-23T18:36:48.261096Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:655: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. Compare range #0 [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)] with partition range [(String : TestTenant, String : NULL, String : NULL) ; ()) : 0 2025-12-23T18:36:48.261143Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:662: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. Add range to new shardId: 72075186224037890, range: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)] 2025-12-23T18:36:48.261257Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. Pending shards States: TShardState{ TabletId: 72075186224037890, Last Key , Ranges: [#0: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [#0: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 1 }; 2025-12-23T18:36:48.261270Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. effective maxinflight 1024 sorted 0 2025-12-23T18:36:48.261282Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. BEFORE: 1.0 2025-12-23T18:36:48.261359Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. Send EvRead to shardId: 72075186224037890, tablePath: Root/yq/pending_small, ranges: [(String : TestTenant) ; (String : TestTenant)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2025-12-23T18:36:48.261395Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. AFTER: 0.1 2025-12-23T18:36:48.261406Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-12-23T18:36:48.262110Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. Recv TEvReadResult from ShardID=72075186224037890, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-12-23T18:36:48.262125Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. Taken 0 locks 2025-12-23T18:36:48.262136Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. new data for read #0 seqno = 1 finished = 1 2025-12-23T18:36:48.262157Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124189071888876:2885], TxId: 281474976710733, task: 1. 
Ctx: { CheckpointId : . TraceId : 01kd67zrp3418rtnjq4z93k312. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=Y2EzOWJiYjctNjRjYzE5YzItMzJkNWNkN2YtODc1MTVlMDM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 276037645 2025-12-23T18:36:48.262172Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124189071888876:2885], TxId: 281474976710733, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zrp3418rtnjq4z93k312. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=Y2EzOWJiYjctNjRjYzE5YzItMzJkNWNkN2YtODc1MTVlMDM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:48.262188Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-12-23T18:36:48.262203Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. enter pack cells method shardId: 72075186224037890 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-12-23T18:36:48.262217Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. exit pack cells method shardId: 72075186224037890 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-12-23T18:36:48.262227Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. returned 0 rows; processed 0 rows 2025-12-23T18:36:48.262261Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. dropping batch for read #0 2025-12-23T18:36:48.262271Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. effective maxinflight 1024 sorted 0 2025-12-23T18:36:48.262283Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-12-23T18:36:48.262295Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710733, task: 1, CA Id [1:7587124189071888876:2885]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-12-23T18:36:48.262361Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7587124189071888876:2885], TxId: 281474976710733, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zrp3418rtnjq4z93k312. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=Y2EzOWJiYjctNjRjYzE5YzItMzJkNWNkN2YtODc1MTVlMDM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:36:48.262380Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124189071888877:2886], TxId: 281474976710733, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zrp3418rtnjq4z93k312. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=Y2EzOWJiYjctNjRjYzE5YzItMzJkNWNkN2YtODc1MTVlMDM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. 
CA StateFunc 271646923 2025-12-23T18:36:48.262399Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976710733, task: 2. Finish input channelId: 1, from: [1:7587124189071888876:2885] 2025-12-23T18:36:48.262424Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124189071888877:2886], TxId: 281474976710733, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zrp3418rtnjq4z93k312. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=Y2EzOWJiYjctNjRjYzE5YzItMzJkNWNkN2YtODc1MTVlMDM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:48.262465Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7587124189071888877:2886], TxId: 281474976710733, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zrp3418rtnjq4z93k312. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=Y2EzOWJiYjctNjRjYzE5YzItMzJkNWNkN2YtODc1MTVlMDM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:36:48.262476Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124189071888876:2885], TxId: 281474976710733, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zrp3418rtnjq4z93k312. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=Y2EzOWJiYjctNjRjYzE5YzItMzJkNWNkN2YtODc1MTVlMDM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646927 2025-12-23T18:36:48.262495Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124189071888876:2885], TxId: 281474976710733, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zrp3418rtnjq4z93k312. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=Y2EzOWJiYjctNjRjYzE5YzItMzJkNWNkN2YtODc1MTVlMDM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:48.262512Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710733, task: 1. Tasks execution finished 2025-12-23T18:36:48.262524Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [1:7587124189071888876:2885], TxId: 281474976710733, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zrp3418rtnjq4z93k312. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=Y2EzOWJiYjctNjRjYzE5YzItMzJkNWNkN2YtODc1MTVlMDM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2025-12-23T18:36:48.262627Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710733, task: 1. pass away 2025-12-23T18:36:48.262723Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710733;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:36:48.263098Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124189071888877:2886], TxId: 281474976710733, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zrp3418rtnjq4z93k312. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=Y2EzOWJiYjctNjRjYzE5YzItMzJkNWNkN2YtODc1MTVlMDM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:48.263135Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710733, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-12-23T18:36:48.263144Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710733, task: 2. Tasks execution finished 2025-12-23T18:36:48.263154Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [1:7587124189071888877:2886], TxId: 281474976710733, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zrp3418rtnjq4z93k312. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=Y2EzOWJiYjctNjRjYzE5YzItMzJkNWNkN2YtODc1MTVlMDM=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2025-12-23T18:36:48.263202Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710733, task: 2. pass away 2025-12-23T18:36:48.263249Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710733;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; [good] Yq_1::CreateQuery_Without_Connection ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2025-12-23T18:35:52.372428Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123948193156680:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:52.372517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1223 18:35:53.010007254 1207730 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:35:53.010128198 1207730 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:35:53.089599Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.112424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:53.447249Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.454267Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:53.454460Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:35:53.454901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:53.598990Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.622903Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.626826Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.636451Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:22615 2025-12-23T18:35:53.654822Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.658565Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.659463Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.691439Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.691596Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.691657Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.691674Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.720438Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.725379Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.743544Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.813336Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.827650Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.856598Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.931453Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.931715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:53.981240Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:53.985778Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:54.001128Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:54.001233Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:54.009053Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:54.018839Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:22615: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:22615 } ] 2025-12-23T18:35:54.026071Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UN ... 760, task: 2. Add data: 233 / 233 2025-12-23T18:36:51.013331Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4974: SelfId: [4:7587124202796163884:3031], TxId: 281474976715760, task: 2. Send data=233, closed=1, bufferActorId=[4:7587124202796163873:2386] 2025-12-23T18:36:51.013351Z node 4 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:423: SelfId: [4:7587124202796163879:3031], TxId: 281474976715760, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zv3kamsdxx6msfejatnp. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjgzMzdjYWMtYWJjMjNkNzEtM2ZhMmRiYzctMWU2NDIzNDk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 233 2025-12-23T18:36:51.013380Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715760, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-12-23T18:36:51.013390Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715760, task: 2. Tasks execution finished 2025-12-23T18:36:51.013401Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2121: SelfId: [4:7587124202796163879:3031], TxId: 281474976715760, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zv3kamsdxx6msfejatnp. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjgzMzdjYWMtYWJjMjNkNzEtM2ZhMmRiYzctMWU2NDIzNDk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Waiting finish of sink[0] 2025-12-23T18:36:51.013422Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124202796163878:3030], TxId: 281474976715760, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zv3kamsdxx6msfejatnp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjgzMzdjYWMtYWJjMjNkNzEtM2ZhMmRiYzctMWU2NDIzNDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2025-12-23T18:36:51.013449Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124202796163878:3030], TxId: 281474976715760, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zv3kamsdxx6msfejatnp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjgzMzdjYWMtYWJjMjNkNzEtM2ZhMmRiYzctMWU2NDIzNDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:51.013469Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715760, task: 1. Tasks execution finished 2025-12-23T18:36:51.013483Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [4:7587124202796163878:3030], TxId: 281474976715760, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zv3kamsdxx6msfejatnp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjgzMzdjYWMtYWJjMjNkNzEtM2ZhMmRiYzctMWU2NDIzNDk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:36:51.013626Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715760, task: 1. 
pass away 2025-12-23T18:36:51.013736Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715760;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:36:51.014711Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2928: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Create new TableWriteActor for table `Root/yq/connections` ([72057594046644480:56:1]). lockId=281474976715753. ActorId=[4:7587124202796163885:2386] 2025-12-23T18:36:51.014800Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:477: Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7587124104011911335:2386]Open: token=0 2025-12-23T18:36:51.015124Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:485: SelfId: [4:7587124202796163885:2386], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7587124104011911335:2386]Write: token=0 2025-12-23T18:36:51.015237Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:492: SelfId: [4:7587124202796163885:2386], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7587124104011911335:2386]Close: token=0 2025-12-23T18:36:51.015281Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4879: SelfId: [4:7587124202796163884:3031], TxId: 281474976715760, task: 2. TKqpForwardWriteActor recieve EvBufferWriteResult from [4:7587124202796163873:2386] 2025-12-23T18:36:51.015298Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4897: SelfId: [4:7587124202796163884:3031], TxId: 281474976715760, task: 2. Finished 2025-12-23T18:36:51.015316Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124202796163879:3031], TxId: 281474976715760, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zv3kamsdxx6msfejatnp. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjgzMzdjYWMtYWJjMjNkNzEtM2ZhMmRiYzctMWU2NDIzNDk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:51.015359Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715760, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-12-23T18:36:51.015368Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715760, task: 2. Tasks execution finished 2025-12-23T18:36:51.015382Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [4:7587124202796163879:3031], TxId: 281474976715760, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zv3kamsdxx6msfejatnp. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjgzMzdjYWMtYWJjMjNkNzEtM2ZhMmRiYzctMWU2NDIzNDk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2025-12-23T18:36:51.015444Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715760, task: 2. pass away 2025-12-23T18:36:51.015502Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715760;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:36:51.015873Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3473: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Start prepare for distributed commit 2025-12-23T18:36:51.015888Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1063: SelfId: [4:7587124202796163885:2386], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7587124104011911335:2386]SetPrepare; txId=281474976715760 2025-12-23T18:36:51.015903Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3396: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Flush data 2025-12-23T18:36:51.016160Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1231: SelfId: [4:7587124202796163885:2386], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7587124104011911335:2386]Send EvWrite to ShardID=72075186224037899, isPrepare=1, isImmediateCommit=0, TxId=281474976715760, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715753 DataShard: 72075186224037899 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 56, Size=320, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=1, BufferMemory=320 2025-12-23T18:36:51.016264Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3607: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Send EvWrite (external) to ShardID=72075186224037890, isPrepare=1, isRollback=0, TxId=281474976715760, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715753 DataShard: 72075186224037890 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 47, Size=0, Cookie=0, OperationsCount=0, IsFinal=1, Attempts=0 2025-12-23T18:36:51.017102Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:736: SelfId: [4:7587124202796163885:2386], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7587124104011911335:2386]Recv EvWriteResult from ShardID=72075186224037899, Status=STATUS_PREPARED, TxId=281474976715760, Locks= , Cookie=1 2025-12-23T18:36:51.017151Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3396: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Flush data 2025-12-23T18:36:51.017182Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4137: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Recv EvWriteResult (external) from ShardID=72075186224037890, Status=STATUS_PREPARED, TxId=281474976715760, Locks= , Cookie=0 2025-12-23T18:36:51.017196Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4455: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Got prepared result TxId=281474976715760, TabletId=72075186224037890, Cookie=0 2025-12-23T18:36:51.017220Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3519: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Start distributed commit with TxId=281474976715760 2025-12-23T18:36:51.017231Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1071: SelfId: [4:7587124202796163885:2386], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7587124104011911335:2386]SetDistributedCommit; txId=281474976715760 2025-12-23T18:36:51.017257Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3706: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Execute planned transaction, coordinator: 72057594046316545, volitale: 1, shards: 2 2025-12-23T18:36:51.017958Z node 4 :KQP_COMPUTE DEBUG: 
kqp_write_actor.cpp:3783: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Got transaction status, status: 16 2025-12-23T18:36:51.017971Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3783: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Got transaction status, status: 17 2025-12-23T18:36:51.021816Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4169: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Recv EvWriteResult (external) from ShardID=72075186224037890, Status=STATUS_COMPLETED, TxId=281474976715760, Locks= , Cookie=0 2025-12-23T18:36:51.021846Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4484: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Got completed result TxId=281474976715760, TabletId=72075186224037890, Cookie=0, Locks= 2025-12-23T18:36:51.022523Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:736: SelfId: [4:7587124202796163885:2386], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7587124104011911335:2386]Recv EvWriteResult from ShardID=72075186224037899, Status=STATUS_COMPLETED, TxId=281474976715760, Locks= , Cookie=0 2025-12-23T18:36:51.022553Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1019: SelfId: [4:7587124202796163885:2386], Table: `Root/yq/connections` ([72057594046644480:56:1]), SessionActorId: [4:7587124104011911335:2386]Got completed result TxId=281474976715760, TabletId=72075186224037899, Cookie=0, Mode=2, Locks= 2025-12-23T18:36:51.022573Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4522: SelfId: [4:7587124202796163873:2386], SessionActorId: [4:7587124104011911335:2386], Committed TxId=281474976715760 2025-12-23T18:36:51.187104Z node 4 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: Client is stopped >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2025-12-23T18:35:50.382268Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123939229149561:2265];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:50.382323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:50.442263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:35:51.383988Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:51.448620Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] E1223 18:35:51.509704516 1206792 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:35:51.566862199 1206792 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:35:51.687131Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:51.687389Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:51.786817Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:51.786950Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:51.786997Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:51.808619Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:51.840432Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:51.840754Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:16841 2025-12-23T18:35:51.840843Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:51.841010Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:51.875516Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:51.875560Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:51.888970Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:51.897257Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:52.026892Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:52.075737Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:52.101977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:52.123615Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:35:52.126242Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:52.149918Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:52.151592Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:52.191370Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:52.206186Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:52.250873Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:52.267024Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:52.272647Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:52.299830Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:52.316734Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16841 } ] 2025-12-23T18:35:52.316795Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16841: F ... 23T18:36:32.663802Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710681, task: 1. Tasks execution finished 2025-12-23T18:36:32.663815Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2121: SelfId: [7:7587124122122586149:2474], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67z9bzbzrfdh80kgz76rqp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YTlkMTFkZjQtY2Y3YmVkZS00NmI5NWFlLWYxOTc3ODNm. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2025-12-23T18:36:32.663834Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [7:7587124122122586149:2474], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67z9bzbzrfdh80kgz76rqp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YTlkMTFkZjQtY2Y3YmVkZS00NmI5NWFlLWYxOTc3ODNm. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2025-12-23T18:36:32.663853Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1107: SelfId: [7:7587124122122586149:2474], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67z9bzbzrfdh80kgz76rqp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YTlkMTFkZjQtY2Y3YmVkZS00NmI5NWFlLWYxOTc3ODNm. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: 2025-12-23T18:36:32.663866Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710681, task: 1. Tasks execution finished 2025-12-23T18:36:32.663871Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2121: SelfId: [7:7587124122122586149:2474], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67z9bzbzrfdh80kgz76rqp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YTlkMTFkZjQtY2Y3YmVkZS00NmI5NWFlLWYxOTc3ODNm. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2025-12-23T18:36:32.663894Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [7:7587124122122586149:2474], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67z9bzbzrfdh80kgz76rqp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YTlkMTFkZjQtY2Y3YmVkZS00NmI5NWFlLWYxOTc3ODNm. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:32.663903Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710681, task: 1. Tasks execution finished 2025-12-23T18:36:32.663921Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2121: SelfId: [7:7587124122122586149:2474], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67z9bzbzrfdh80kgz76rqp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YTlkMTFkZjQtY2Y3YmVkZS00NmI5NWFlLWYxOTc3ODNm. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2025-12-23T18:36:32.663958Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2928: SelfId: [7:7587124122122586141:2474], SessionActorId: [7:7587124113532650203:2474], Create new TableWriteActor for table `Root/yq/nodes` ([72057594046644480:50:1]). 
lockId=281474976710679. ActorId=[7:7587124122122586156:2474] 2025-12-23T18:36:32.664009Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:477: Table: `Root/yq/nodes` ([72057594046644480:50:1]), SessionActorId: [7:7587124113532650203:2474]Open: token=0 2025-12-23T18:36:32.664112Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:485: SelfId: [7:7587124122122586156:2474], Table: `Root/yq/nodes` ([72057594046644480:50:1]), SessionActorId: [7:7587124113532650203:2474]Write: token=0 2025-12-23T18:36:32.664187Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:492: SelfId: [7:7587124122122586156:2474], Table: `Root/yq/nodes` ([72057594046644480:50:1]), SessionActorId: [7:7587124113532650203:2474]Close: token=0 2025-12-23T18:36:32.664222Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4879: SelfId: [7:7587124122122586153:2474], TxId: 281474976710681, task: 1. TKqpForwardWriteActor recieve EvBufferWriteResult from [7:7587124122122586141:2474] 2025-12-23T18:36:32.664234Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4897: SelfId: [7:7587124122122586153:2474], TxId: 281474976710681, task: 1. Finished 2025-12-23T18:36:32.664246Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [7:7587124122122586149:2474], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67z9bzbzrfdh80kgz76rqp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YTlkMTFkZjQtY2Y3YmVkZS00NmI5NWFlLWYxOTc3ODNm. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:32.664257Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710681, task: 1. Tasks execution finished 2025-12-23T18:36:32.664267Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [7:7587124122122586149:2474], TxId: 281474976710681, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67z9bzbzrfdh80kgz76rqp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YTlkMTFkZjQtY2Y3YmVkZS00NmI5NWFlLWYxOTc3ODNm. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:36:32.664327Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710681, task: 1. pass away 2025-12-23T18:36:32.664395Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710681;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:36:32.664717Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3498: SelfId: [7:7587124122122586141:2474], SessionActorId: [7:7587124113532650203:2474], Start immediate commit 2025-12-23T18:36:32.664727Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1077: SelfId: [7:7587124122122586156:2474], Table: `Root/yq/nodes` ([72057594046644480:50:1]), SessionActorId: [7:7587124113532650203:2474]SetImmediateCommit 2025-12-23T18:36:32.664738Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3396: SelfId: [7:7587124122122586141:2474], SessionActorId: [7:7587124113532650203:2474], Flush data 2025-12-23T18:36:32.664879Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1231: SelfId: [7:7587124122122586156:2474], Table: `Root/yq/nodes` ([72057594046644480:50:1]), SessionActorId: [7:7587124113532650203:2474]Send EvWrite to ShardID=72075186224037890, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710679 DataShard: 72075186224037890 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 50, Size=228, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3, BufferMemory=228 2025-12-23T18:36:32.665220Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:492: SelfId: [7:7587124122122586155:2342], Table: `Root/yq/nodes` ([72057594046644480:50:1]), SessionActorId: [7:7587124100647747522:2342]Close: token=0 2025-12-23T18:36:32.665262Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4879: SelfId: [7:7587124122122586154:2342], TxId: 281474976710680, task: 1. TKqpForwardWriteActor recieve EvBufferWriteResult from [7:7587124122122586142:2342] 2025-12-23T18:36:32.665275Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4897: SelfId: [7:7587124122122586154:2342], TxId: 281474976710680, task: 1. Finished 2025-12-23T18:36:32.665293Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [7:7587124122122586150:2342], TxId: 281474976710680, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67z9cr48wqat5wvt87j8bf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=YjQwYWM4Ni03YjlkYjU4Ny01MjdhYzFmNy1jZTFkZTBhMg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:36:32.665316Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710680, task: 1. Tasks execution finished 2025-12-23T18:36:32.665327Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [7:7587124122122586150:2342], TxId: 281474976710680, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67z9cr48wqat5wvt87j8bf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=YjQwYWM4Ni03YjlkYjU4Ny01MjdhYzFmNy1jZTFkZTBhMg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:36:32.665384Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710680, task: 1. pass away 2025-12-23T18:36:32.665466Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710680;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:36:32.666027Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3498: SelfId: [7:7587124122122586142:2342], SessionActorId: [7:7587124100647747522:2342], Start immediate commit 2025-12-23T18:36:32.666050Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1077: SelfId: [7:7587124122122586155:2342], Table: `Root/yq/nodes` ([72057594046644480:50:1]), SessionActorId: [7:7587124100647747522:2342]SetImmediateCommit 2025-12-23T18:36:32.666063Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3396: SelfId: [7:7587124122122586142:2342], SessionActorId: [7:7587124100647747522:2342], Flush data 2025-12-23T18:36:32.666186Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1231: SelfId: [7:7587124122122586155:2342], Table: `Root/yq/nodes` ([72057594046644480:50:1]), SessionActorId: [7:7587124100647747522:2342]Send EvWrite to ShardID=72075186224037890, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710678 DataShard: 72075186224037890 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 50, Size=212, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3, BufferMemory=212 2025-12-23T18:36:32.676848Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:736: SelfId: [7:7587124122122586156:2474], Table: `Root/yq/nodes` ([72057594046644480:50:1]), SessionActorId: [7:7587124113532650203:2474]Recv EvWriteResult from ShardID=72075186224037890, Status=STATUS_COMPLETED, TxId=4, Locks= , Cookie=1 2025-12-23T18:36:32.676874Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:736: SelfId: [7:7587124122122586155:2342], Table: `Root/yq/nodes` ([72057594046644480:50:1]), SessionActorId: [7:7587124100647747522:2342]Recv EvWriteResult from ShardID=72075186224037890, Status=STATUS_COMPLETED, TxId=5, Locks= , Cookie=1 2025-12-23T18:36:32.676893Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1019: SelfId: [7:7587124122122586155:2342], Table: `Root/yq/nodes` ([72057594046644480:50:1]), SessionActorId: [7:7587124100647747522:2342]Got completed result TxId=5, TabletId=72075186224037890, Cookie=1, Mode=3, Locks= 2025-12-23T18:36:32.676893Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1019: SelfId: [7:7587124122122586156:2474], Table: `Root/yq/nodes` ([72057594046644480:50:1]), SessionActorId: [7:7587124113532650203:2474]Got completed result TxId=4, TabletId=72075186224037890, Cookie=1, Mode=3, Locks= 2025-12-23T18:36:32.676957Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4522: SelfId: [7:7587124122122586141:2474], SessionActorId: [7:7587124113532650203:2474], Committed TxId=0 2025-12-23T18:36:32.676958Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4522: SelfId: [7:7587124122122586142:2342], SessionActorId: [7:7587124100647747522:2342], Committed TxId=0 |92.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestWrongRequest [GOOD] Test command err: 2025-12-23T18:35:37.893967Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123883211961404:2189];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:37.894263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d30/r3tmp/tmp9kqJ6J/pdisk_1.dat 
2025-12-23T18:35:38.667597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:38.667689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:38.699617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:38.809328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:39.005163Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:39.022990Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123883211961248:2081] 1766514937822045 != 1766514937822048 2025-12-23T18:35:39.024473Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14720, node 1 2025-12-23T18:35:39.263536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:39.311128Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:39.311151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:39.311159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:39.311254Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7589 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514938958 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:40.064084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:35:40.079770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:7589 2025-12-23T18:35:40.572403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:35:40.581913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:35:40.649036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:40.809476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:40.868918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:40.971971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:41.050234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:41.138114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:41.203696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:41.293373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:41.387359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:41.448403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:42.870911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123883211961404:2189];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:42.871000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:44.719223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123913276733992:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:44.719405Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:44.719594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123913276734004:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:44.722790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123913276734006:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:44.722888Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:44.724643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:44.749936Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123913276734007:2376], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:35:44.819318Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123913276734059:3112] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, Local ... settings": [], "truncated": false} 2025-12-23T18:36:50.242664Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 37ms 2025-12-23T18:36:50.243268Z node 8 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:36:50.243301Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 35ms 2025-12-23T18:36:50.243730Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { 
Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:36:50.243762Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-12-23T18:36:50.243883Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 36ms 2025-12-23T18:36:50.244305Z node 8 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:36:50.244673Z node 8 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" 
Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:36:50.413717Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7587124198272666762:2439]: Pool not found 2025-12-23T18:36:50.414762Z node 8 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-12-23T18:36:50.754296Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7587124198272666757:2435]: Pool not found 2025-12-23T18:36:50.754494Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-12-23T18:36:50.763173Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587124198272666906:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:50.763305Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7587124198272666907:2457], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:36:50.763382Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:50.768459Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587124198272666910:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:50.768537Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:51.080286Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7587124198272666904:2455]: Pool not found 2025-12-23T18:36:51.080524Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete Http output full {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 400 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 2025-12-23T18:36:51.196740Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:39862) incoming connection opened 2025-12-23T18:36:51.196829Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:39862) -> (POST /, 87 bytes) 2025-12-23T18:36:51.197036Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d813:885c:8d7b:0:c013:885c:8d7b:0] request [CreateStream] url [/] database [] requestId: 95d2201-a54e3159-8697d043-91245a12 2025-12-23T18:36:51.197683Z node 8 :HTTP_PROXY WARN: http_req.cpp:971: http request [CreateStream] requestId [95d2201-a54e3159-8697d043-91245a12] got new request with incorrect json from [d813:885c:8d7b:0:c013:885c:8d7b:0] database '' 2025-12-23T18:36:51.197910Z node 8 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateStream] requestId [95d2201-a54e3159-8697d043-91245a12] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName 2025-12-23T18:36:51.198256Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:39862) <- (400 InvalidArgumentException, 135 bytes) 2025-12-23T18:36:51.198313Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:39862) Request: POST / HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "ShardCount":5, "StreamName":"testtopic", "WrongStreamName":"WrongStreamName" } 2025-12-23T18:36:51.198349Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:39862) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 95d2201-a54e3159-8697d043-91245a12 Content-Type: application/x-amz-json-1.1 Content-Length: 135 2025-12-23T18:36:51.198445Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:39862) connection closed >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:36:36.650974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2025-12-23T18:36:36.651095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:36.651140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:36:36.651193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:36:36.651232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:36:36.651267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:36:36.651329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:36.651452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:36:36.652393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:36:36.652734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:36:36.753250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:36:36.753333Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:36.760429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:36:36.760678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:36:36.760823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:36:36.764364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:36:36.764557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:36:36.765285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:36.765432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:36:36.768636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:36.768829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:36.770202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:36.770266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:36.770391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:36:36.770434Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:36.770480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:36.770599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:36:36.939468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.944821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.945023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.945122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.945200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.945267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:36:36.945331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.945427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.945513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.945610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.945686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.945756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.945862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.945936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.946032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 65: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-12-23T18:36:40.313378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000112 FAKE_COORDINATOR: advance: minStep5000112 State->FrontStep: 5000111 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000112 2025-12-23T18:36:40.314656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000112, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:40.314786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000112 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:36:40.314832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_unsafe.cpp:47: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000112, at schemeshard: 72057594046678944 2025-12-23T18:36:40.314877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 62] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:40.314904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2025-12-23T18:36:40.315022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 128 -> 130 2025-12-23T18:36:40.315191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:36:40.315247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 2025-12-23T18:36:40.316480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-12-23T18:36:40.320761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2025-12-23T18:36:40.322037Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:40.322079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:40.322226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2025-12-23T18:36:40.322398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:40.322445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-12-23T18:36:40.322490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 175, path id: 62 2025-12-23T18:36:40.322932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-12-23T18:36:40.322979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-12-23T18:36:40.323046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2025-12-23T18:36:40.323076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-12-23T18:36:40.323105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2025-12-23T18:36:40.323150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-12-23T18:36:40.323188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2025-12-23T18:36:40.323218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-12-23T18:36:40.323249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 175:0 2025-12-23T18:36:40.323274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 175:0 2025-12-23T18:36:40.323349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 3 2025-12-23T18:36:40.323380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2025-12-23T18:36:40.323407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 106 2025-12-23T18:36:40.323432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 62], 
18446744073709551615 2025-12-23T18:36:40.324100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2025-12-23T18:36:40.324191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2025-12-23T18:36:40.324224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 2025-12-23T18:36:40.324265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 106 2025-12-23T18:36:40.324295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:36:40.325368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-12-23T18:36:40.325463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-12-23T18:36:40.325499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2025-12-23T18:36:40.325532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 62], version: 18446744073709551615 2025-12-23T18:36:40.325565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 2025-12-23T18:36:40.325643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2025-12-23T18:36:40.325918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:36:40.325956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2025-12-23T18:36:40.326039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 1 2025-12-23T18:36:40.326878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:36:40.326942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: 
TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2025-12-23T18:36:40.327004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:36:40.329803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-12-23T18:36:40.330059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-12-23T18:36:40.331758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:36:40.331983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-12-23T18:36:40.333293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-12-23T18:36:40.333338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-12-23T18:36:40.335354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-12-23T18:36:40.335502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-12-23T18:36:40.335542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2891:4880] TestWaitNotification: OK eventTxId 175 |92.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2025-12-23T18:35:49.103075Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.011075s 2025-12-23T18:35:49.148608Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123937109773332:2153];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:49.148655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:49.308226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown E1223 18:35:49.469647548 1205863 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:35:49.469858107 1205863 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:35:49.811286Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.246820Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:14868 2025-12-23T18:35:50.258997Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:50.308994Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.414060Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.414140Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.456091Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.456215Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.456252Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.456289Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.459419Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.581776Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.611658Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.611755Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.611850Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.611920Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.611987Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.639816Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.744798Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.768038Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.768221Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.768409Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.768481Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.768528Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.771360Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.935660Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:50.962909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:50.963038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:35:50.972241Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14868 } ] 2025-12-23T18:35:51.006882Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14868: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost ... KS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7587124162235649207 RawX2: 4503616807242582 } } DstEndpoint { ActorId { RawX1: 7587124162235649202 RawX2: 4503616807242202 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2025-12-23T18:36:42.338108Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1114: SelfId: [4:7587124162235649207:2902], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Update input channelId: 1, peer: [4:7587124162235649206:2901] 2025-12-23T18:36:42.338159Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124162235649207:2902], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646926 2025-12-23T18:36:42.338253Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1107: SelfId: [4:7587124162235649207:2902], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7587124162235649206 RawX2: 4503616807242581 } } DstEndpoint { ActorId { RawX1: 7587124162235649207 RawX2: 4503616807242582 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7587124162235649207 RawX2: 4503616807242582 } } DstEndpoint { ActorId { RawX1: 7587124162235649202 RawX2: 4503616807242202 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2025-12-23T18:36:42.338285Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124162235649207:2902], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:42.339477Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715737, task: 1, CA Id [4:7587124162235649206:2901]. Recv TEvReadResult from ShardID=72075186224037889, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-12-23T18:36:42.339501Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715737, task: 1, CA Id [4:7587124162235649206:2901]. Taken 0 locks 2025-12-23T18:36:42.339514Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715737, task: 1, CA Id [4:7587124162235649206:2901]. 
new data for read #0 seqno = 1 finished = 1 2025-12-23T18:36:42.339536Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124162235649206:2901], TxId: 281474976715737, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 276037645 2025-12-23T18:36:42.339564Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124162235649206:2901], TxId: 281474976715737, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:42.339581Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715737, task: 1, CA Id [4:7587124162235649206:2901]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-12-23T18:36:42.339598Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715737, task: 1, CA Id [4:7587124162235649206:2901]. enter pack cells method shardId: 72075186224037889 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-12-23T18:36:42.339615Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715737, task: 1, CA Id [4:7587124162235649206:2901]. exit pack cells method shardId: 72075186224037889 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-12-23T18:36:42.339627Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715737, task: 1, CA Id [4:7587124162235649206:2901]. returned 0 rows; processed 0 rows 2025-12-23T18:36:42.339656Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715737, task: 1, CA Id [4:7587124162235649206:2901]. dropping batch for read #0 2025-12-23T18:36:42.339668Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715737, task: 1, CA Id [4:7587124162235649206:2901]. effective maxinflight 1 sorted 1 2025-12-23T18:36:42.339680Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715737, task: 1, CA Id [4:7587124162235649206:2901]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-12-23T18:36:42.339695Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715737, task: 1, CA Id [4:7587124162235649206:2901]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-12-23T18:36:42.339771Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7587124162235649206:2901], TxId: 281474976715737, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:36:42.339774Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124162235649207:2902], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . 
PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646923 2025-12-23T18:36:42.339795Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715737, task: 2. Finish input channelId: 1, from: [4:7587124162235649206:2901] 2025-12-23T18:36:42.339851Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124162235649207:2902], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:42.339905Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7587124162235649207:2902], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:36:42.339918Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124162235649206:2901], TxId: 281474976715737, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646927 2025-12-23T18:36:42.339954Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124162235649206:2901], TxId: 281474976715737, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:42.339996Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715737, task: 1. Tasks execution finished 2025-12-23T18:36:42.340026Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [4:7587124162235649206:2901], TxId: 281474976715737, task: 1. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2025-12-23T18:36:42.340154Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715737, task: 1. pass away 2025-12-23T18:36:42.340252Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715737;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:36:42.340462Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587124162235649207:2902], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:36:42.340508Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715737, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-12-23T18:36:42.340518Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715737, task: 2. Tasks execution finished 2025-12-23T18:36:42.340548Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [4:7587124162235649207:2902], TxId: 281474976715737, task: 2. Ctx: { CheckpointId : . TraceId : 01kd67zjfyfww5awfj5netj6r0. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=OWU4YWY4ZmUtNWY4MTY0LTE0ZmRhNDE4LTYwNzU0NzE3. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2025-12-23T18:36:42.340627Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715737, task: 2. pass away 2025-12-23T18:36:42.340677Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715737;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; |92.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:36:34.841752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:36:34.841837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:34.841911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:36:34.841940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:36:34.841971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:36:34.841994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:36:34.842033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:34.842110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:36:34.842803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:36:34.843103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:36:34.924469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:36:34.924544Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:34.937795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:36:34.938163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:36:34.938306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:36:34.945949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:36:34.946238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:36:34.947115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:34.947445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:36:34.950598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:34.950833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:34.954934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:34.955014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:34.955155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:36:34.955204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:34.955246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:34.955397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:36:35.136347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.137419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.137565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.137636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.137708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.137771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.137836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.137918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.138010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.138136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.138234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 
281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.138299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.138393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.138485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:35.138574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
112 at step: 5000049 FAKE_COORDINATOR: advance: minStep5000049 State->FrontStep: 5000048 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000049 2025-12-23T18:36:36.624908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000049, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:36.625007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000049 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:36:36.625050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:190: TDropExtSubdomain TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000049, at schemeshard: 72057594046678944 2025-12-23T18:36:36.625153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 43] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:36.625189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2025-12-23T18:36:36.625222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 128 -> 134 2025-12-23T18:36:36.627583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-12-23T18:36:36.628297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-12-23T18:36:36.629745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-12-23T18:36:36.629815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 112:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:36:36.629956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 134 -> 135 2025-12-23T18:36:36.630135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:36:36.630188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 FAKE_COORDINATOR: Erasing txId 112 2025-12-23T18:36:36.632181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:36.632228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:36.632416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 
43] 2025-12-23T18:36:36.632562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:36.632618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-12-23T18:36:36.632690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 112, path id: 43 2025-12-23T18:36:36.633071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-12-23T18:36:36.633129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 112:0 ProgressState 2025-12-23T18:36:36.633166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 135 -> 240 2025-12-23T18:36:36.633979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:36:36.634080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:36:36.634123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-12-23T18:36:36.634212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 30 2025-12-23T18:36:36.634248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:36:36.635932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:36:36.636026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:36:36.636076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-12-23T18:36:36.636149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2025-12-23T18:36:36.636182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2025-12-23T18:36:36.636252Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-12-23T18:36:36.638863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-12-23T18:36:36.638914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 112:0 ProgressState 2025-12-23T18:36:36.638992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2025-12-23T18:36:36.639040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-12-23T18:36:36.639077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2025-12-23T18:36:36.639101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-12-23T18:36:36.639131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: true 2025-12-23T18:36:36.639164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-12-23T18:36:36.639193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 112:0 2025-12-23T18:36:36.639226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 112:0 2025-12-23T18:36:36.639305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2025-12-23T18:36:36.639663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:36:36.639706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2025-12-23T18:36:36.639758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2025-12-23T18:36:36.640279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:36:36.640317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2025-12-23T18:36:36.640375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:36:36.641182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-12-23T18:36:36.641308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-12-23T18:36:36.643626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:36:36.644863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-12-23T18:36:36.645225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-12-23T18:36:36.645261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-12-23T18:36:36.646162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-12-23T18:36:36.646264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-12-23T18:36:36.646297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1058:3048] TestWaitNotification: OK eventTxId 112 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:36:36.487073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:36:36.487173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:36.487212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:36:36.487252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:36:36.487294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:36:36.487324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:36:36.487397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:36.487532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-12-23T18:36:36.488643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:36:36.488976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:36:36.600702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:36:36.600784Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:36.609937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:36:36.610227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:36:36.610383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:36:36.614280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:36:36.614516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:36:36.615371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:36.615627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:36:36.617963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:36.618164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:36.619572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:36.619653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:36.619805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:36:36.619856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:36.619903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:36.620060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:36:36.806352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.810368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.810535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.810603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.810669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.810733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.810810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.810892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.810981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.811056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.811137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.811197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.811287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.811351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:36.811439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
pp:665: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-12-23T18:36:37.911375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000049 FAKE_COORDINATOR: advance: minStep5000049 State->FrontStep: 5000048 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000049 2025-12-23T18:36:37.912318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000049, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:37.912431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000049 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:36:37.912496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_unsafe.cpp:47: TDropForceUnsafe TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000049, at schemeshard: 72057594046678944 2025-12-23T18:36:37.912577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 43] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:37.912619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2025-12-23T18:36:37.912726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 128 -> 130 2025-12-23T18:36:37.912905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:36:37.913001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2025-12-23T18:36:37.913870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-12-23T18:36:37.916382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2025-12-23T18:36:37.919881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:37.919920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:37.920056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2025-12-23T18:36:37.920198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-12-23T18:36:37.920239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-12-23T18:36:37.920278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 112, path id: 43 2025-12-23T18:36:37.920580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-12-23T18:36:37.920623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2025-12-23T18:36:37.920692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2025-12-23T18:36:37.920739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-12-23T18:36:37.920772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2025-12-23T18:36:37.920835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-12-23T18:36:37.920877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-12-23T18:36:37.920905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-12-23T18:36:37.920931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 112:0 2025-12-23T18:36:37.920955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 112:0 2025-12-23T18:36:37.921017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2025-12-23T18:36:37.921047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2025-12-23T18:36:37.921073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 30 2025-12-23T18:36:37.921096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 43], 18446744073709551615 2025-12-23T18:36:37.921794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:36:37.921899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:36:37.921940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 
2025-12-23T18:36:37.921969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 30 2025-12-23T18:36:37.921999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:36:37.922502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:36:37.922579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-12-23T18:36:37.922635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-12-23T18:36:37.922680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2025-12-23T18:36:37.922714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2025-12-23T18:36:37.922864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-12-23T18:36:37.923339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:36:37.923382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2025-12-23T18:36:37.923483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2025-12-23T18:36:37.923866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:36:37.923902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2025-12-23T18:36:37.923947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:36:37.928371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-12-23T18:36:37.928653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-12-23T18:36:37.930358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:36:37.930445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-12-23T18:36:37.930811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-12-23T18:36:37.930881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-12-23T18:36:37.931428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-12-23T18:36:37.931532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-12-23T18:36:37.931563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1070:3060] TestWaitNotification: OK eventTxId 112 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 |92.9%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2025-12-23T18:35:38.623405Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123888995948791:2181];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:38.623971Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:38.682667Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d31/r3tmp/tmpl1jYqz/pdisk_1.dat 2025-12-23T18:35:39.184617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:39.184748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:39.331367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:39.357832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:39.623138Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:39.767458Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123888995948631:2081] 1766514938595639 != 1766514938595642 2025-12-23T18:35:39.775204Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:39.846838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 30854, node 1 2025-12-23T18:35:40.162752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:40.162773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:40.162779Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:40.162854Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514939441 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:40.878591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:40.889252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:30547 2025-12-23T18:35:41.324821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:35:41.379274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:35:41.388222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:35:41.434865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-12-23T18:35:41.463419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:41.759480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:41.872663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2025-12-23T18:35:41.887888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:42.004151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:42.059404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:42.165345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:42.263376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:42.313261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:42.402072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:42.465590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:43.614936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123888995948791:2181];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:43.615001Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:45.371398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123919060721372:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:45.371535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:45.371781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123919060721384:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:45.376741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:45.376958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123919060721386:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:45.377021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workl ... nding answer to proxy actor [7:7587124192932815963:2499]: SendMessageBatch { RequestId: "35c0f875-24b62748-a9ecc9d3-84dceada" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "7d63754d-5ecda814-8360eb9d-49d1746" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "f9497e24-9810a57a-e423fdc4-3b8cbe84" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "35c0f875-24b62748-a9ecc9d3-84dceada" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-12-23T18:36:50.089599Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7587124192932815967:3755]. Found: 1 2025-12-23T18:36:50.089642Z node 7 :SQS TRACE: executor.cpp:192: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program: ( (let queueIdNumber (Parameter 'QUEUE_ID_NUMBER (DataType 'Uint64))) (let queueIdNumberHash (Parameter 'QUEUE_ID_NUMBER_HASH (DataType 'Uint64))) (let timeFrom (Parameter 'TIME_FROM (DataType 'Uint64))) (let sentTsIdx '/Root/SQS/.FIFO/SentTimestampIdx) (let sentIdxRange '( '('QueueIdNumberHash queueIdNumberHash queueIdNumberHash) '('QueueIdNumber queueIdNumber queueIdNumber) '('SentTimestamp timeFrom (Uint64 '18446744073709551615)) '('Offset (Uint64 '0) (Uint64 '18446744073709551615)))) (let sentIdxSelect '( 'SentTimestamp 'Offset)) (let selectResult (SelectRange sentTsIdx sentIdxRange sentIdxSelect '('('"ItemsLimit" (Uint64 '1))))) (let messages (Member selectResult 'List)) (return (Extend (AsList (SetResult 'messages messages)) )) ) 2025-12-23T18:36:50.089761Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [35c0f875-24b62748-a9ecc9d3-84dceada] HandleResponse: { SendMessageBatch { RequestId: "35c0f875-24b62748-a9ecc9d3-84dceada" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "7d63754d-5ecda814-8360eb9d-49d1746" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "f9497e24-9810a57a-e423fdc4-3b8cbe84" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "35c0f875-24b62748-a9ecc9d3-84dceada" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true }, status: OK 2025-12-23T18:36:50.089935Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [35c0f875-24b62748-a9ecc9d3-84dceada] Sending reply from proxy actor: { SendMessageBatch { RequestId: "35c0f875-24b62748-a9ecc9d3-84dceada" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "7d63754d-5ecda814-8360eb9d-49d1746" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "f9497e24-9810a57a-e423fdc4-3b8cbe84" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." 
ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "35c0f875-24b62748-a9ecc9d3-84dceada" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2025-12-23T18:36:50.090329Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [SendMessageBatch] requestId [35c0f875-24b62748-a9ecc9d3-84dceada] Got succesfult GRPC response. 2025-12-23T18:36:50.090561Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [SendMessageBatch] requestId [35c0f875-24b62748-a9ecc9d3-84dceada] reply ok 2025-12-23T18:36:50.090696Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [SendMessageBatch] requestId [35c0f875-24b62748-a9ecc9d3-84dceada] Send metering event. HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 1063 ResponseSizeInBytes: 618 SourceAddress: b8d4:3c41:fe7b:0:a0d4:3c41:fe7b:0 ResourceId: 000000000000000101v0 Action: SendMessageBatch 2025-12-23T18:36:50.090801Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:43726) <- (200 , 464 bytes) 2025-12-23T18:36:50.090915Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:43726) connection closed 2025-12-23T18:36:50.092528Z node 7 :SQS TRACE: executor.cpp:256: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program response: { Status: 48 MiniKQLCompileResults { CompiledProgram: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001>\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } } 2025-12-23T18:36:50.092567Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] compilation duration: 3ms 2025-12-23T18:36:50.092615Z node 7 :SQS DEBUG: queue_leader.cpp:464: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) has been prepared 2025-12-23T18:36:50.092628Z node 7 :SQS DEBUG: queue_leader.cpp:514: Request [] Executing compiled query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) 2025-12-23T18:36:50.092704Z node 7 :SQS DEBUG: executor.cpp:83: Request [] Starting executor actor for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID). 
Mode: COMPILE_AND_EXEC 2025-12-23T18:36:50.092790Z node 7 :SQS TRACE: executor.cpp:154: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "TIME_FROM": 0} 2025-12-23T18:36:50.093109Z node 7 :SQS TRACE: executor.cpp:203: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001>\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } Params { Bin: "\037\000\005\205\n\203\010\203\010\203\010\203\004\203\010> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2025-12-23T18:35:37.507610Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123885402737164:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:37.509608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d38/r3tmp/tmpDkVKxP/pdisk_1.dat 2025-12-23T18:35:38.077099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:38.170928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:38.171024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:38.346099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:38.461852Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:38.463131Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123885402737132:2081] 1766514937469587 != 
1766514937469590 TServer::EnableGrpc on GrpcPort 9422, node 1 2025-12-23T18:35:38.487332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:38.555603Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:38.662494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:38.662528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:38.662539Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:38.662622Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514938426 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:39.618332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:39.735996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:21003 2025-12-23T18:35:40.145270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:35:40.155727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:35:40.164032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:35:40.207393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-12-23T18:35:40.223199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:40.572350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:40.667056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2025-12-23T18:35:40.683633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:40.800614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:40.883347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:40.942661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:35:41.013824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:41.108872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:41.188088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:41.268255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:42.498863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123885402737164:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:42.498935Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:44.410364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123915467509883:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:44.416252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:44.418997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123915467509871:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:44.419154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:44.423237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123915467509913:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:44.423330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:44.4 ... :36:50.261320Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_STATE_ID) Queue [cloud4/000000000000000101v0] execution duration: 8ms 2025-12-23T18:36:50.261488Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [] Sending executed reply 2025-12-23T18:36:50.261661Z node 7 :SQS DEBUG: queue_leader.cpp:288: Handle state for [cloud4/000000000000000101v0] 2025-12-23T18:36:50.262172Z node 7 :SQS TRACE: executor.cpp:286: Request [cb1d11df-184f8939-8a66eaa8-d5c66c0d] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976710714 Step: 1766515010302 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2025-12-23T18:36:50.262203Z node 7 :SQS DEBUG: executor.cpp:287: Request [cb1d11df-184f8939-8a66eaa8-d5c66c0d] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 10ms 2025-12-23T18:36:50.262722Z node 7 :SQS TRACE: executor.cpp:325: Request [cb1d11df-184f8939-8a66eaa8-d5c66c0d] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976710714 Step: 1766515010302 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 
ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2025-12-23T18:36:50.262824Z node 7 :SQS TRACE: executor.cpp:327: Request [cb1d11df-184f8939-8a66eaa8-d5c66c0d] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": false, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": "{}"} 2025-12-23T18:36:50.262965Z node 7 :SQS DEBUG: executor.cpp:401: Request [cb1d11df-184f8939-8a66eaa8-d5c66c0d] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000101v0] execution duration: 13ms 2025-12-23T18:36:50.264009Z node 7 :SQS DEBUG: executor.cpp:114: Request [] Sending execute request for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) to queue leader 2025-12-23T18:36:50.264108Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [cb1d11df-184f8939-8a66eaa8-d5c66c0d] Sending executed reply 2025-12-23T18:36:50.264329Z node 7 :SQS DEBUG: queue_leader.cpp:514: Request [] Executing compiled query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) 2025-12-23T18:36:50.264398Z node 7 :SQS DEBUG: executor.cpp:83: Request [] Starting executor actor for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID). 
Mode: COMPILE_AND_EXEC 2025-12-23T18:36:50.264678Z node 7 :SQS TRACE: executor.cpp:154: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 1, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 5923258363543965525, "TIME_FROM": 1766515009850} 2025-12-23T18:36:50.265017Z node 7 :SQS TRACE: executor.cpp:203: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\n\203\010\203\010\203\004\203\010\203\010\213\n?$?&?(\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001=\000\000\000\000\000\000\000?\014\005?\"\003?\036\n\003? \010\003\013?.\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?T\003?V> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] Test command err: 2025-12-23T18:35:42.763252Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123905023743743:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:42.763703Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:42.917712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d2c/r3tmp/tmpbM9nUn/pdisk_1.dat 2025-12-23T18:35:43.297039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:43.297175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:43.309606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:43.423287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:43.600956Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:43.605938Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123905023743641:2081] 1766514942725291 != 1766514942725294 TServer::EnableGrpc on GrpcPort 18054, node 1 2025-12-23T18:35:43.758980Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:43.875033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:43.975618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:43.975644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:43.975651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:43.975739Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514944019 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:44.569532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:44.577346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:5144 2025-12-23T18:35:45.020200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:35:45.035784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:35:45.044355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 
2025-12-23T18:35:45.090205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:45.350575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:45.439670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-12-23T18:35:45.445431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:45.527751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-12-23T18:35:45.533778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:45.585296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:45.638074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:45.736882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:45.822032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:45.878967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:45.926433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:47.732678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123905023743743:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:47.732759Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:48.066976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123930793549090:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:48.066976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123930793549081:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:48.067085Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:48.067416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123930793549096:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:48.067465Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:48.070690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESc ... g.cpp:320: (#74,[::1]:42500) Response: HTTP/1.1 500 InternalFailure Connection: close x-amzn-requestid: f3f5d6a7-e376c9d9-5723ed5c-ecb064be Content-Type: application/x-amz-json-1.1 Content-Length: 96 2025-12-23T18:36:50.033591Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7587124192676623746:5406]. Found: 1 2025-12-23T18:36:50.033648Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#74,[::1]:42500) connection closed 2025-12-23T18:36:50.033886Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [880b6063-b0d6c01d-9aba2836-6900606] HandleResponse: { UntagQueue { Error { Status: 500 Message: "Untag queue query failed, conflicting query in parallel" ErrorCode: "InternalFailure" } RequestId: "880b6063-b0d6c01d-9aba2836-6900606" } RequestId: "880b6063-b0d6c01d-9aba2836-6900606" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k20" Value: "v" } }, status: OK 2025-12-23T18:36:50.033968Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [880b6063-b0d6c01d-9aba2836-6900606] Sending reply from proxy actor: { UntagQueue { Error { Status: 500 Message: "Untag queue query failed, conflicting query in parallel" ErrorCode: "InternalFailure" } RequestId: "880b6063-b0d6c01d-9aba2836-6900606" } RequestId: "880b6063-b0d6c01d-9aba2836-6900606" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k20" Value: "v" } } 2025-12-23T18:36:50.034103Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:450: http request [UntagQueue] requestId [880b6063-b0d6c01d-9aba2836-6900606] Not retrying GRPC response. Code: 500, Error: InternalFailure 2025-12-23T18:36:50.034121Z node 7 :HTTP_PROXY INFO: http_req.cpp:1602: http request [UntagQueue] requestId [880b6063-b0d6c01d-9aba2836-6900606] reply with status: STATUS_UNDEFINED message: Untag queue query failed, conflicting query in parallel 2025-12-23T18:36:50.034191Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [UntagQueue] requestId [880b6063-b0d6c01d-9aba2836-6900606] Send metering event. 
HttpStatusCode: 500 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 565 ResponseSizeInBytes: 263 SourceAddress: b8ba:1c4a:7e7b:0:a0ba:1c4a:7e7b:0 ResourceId: 000000000000000301v0 Action: UntagQueue 2025-12-23T18:36:50.034271Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#104,[::1]:42490) <- (500 InternalFailure, 96 bytes) 2025-12-23T18:36:50.034317Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#104,[::1]:42490) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.UntagQueue X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"http://ghrun-ou4bsyjxme.auto.internal:8771/cloud4/000000000000000301v0/ExampleQueueName.fifo", "TagKeys": [ "k24" ] } 2025-12-23T18:36:50.034340Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#104,[::1]:42490) Response: HTTP/1.1 500 InternalFailure Connection: close x-amzn-requestid: 880b6063-b0d6c01d-9aba2836-6900606 Content-Type: application/x-amz-json-1.1 Content-Length: 96 2025-12-23T18:36:50.034396Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#104,[::1]:42490) connection closed 2025-12-23T18:36:50.035123Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:450: http request [UntagQueue] requestId [acb9d146-2e4c09d6-85a9ee78-339c6508] Not retrying GRPC response. Code: 500, Error: InternalFailure 2025-12-23T18:36:50.035154Z node 7 :HTTP_PROXY INFO: http_req.cpp:1602: http request [UntagQueue] requestId [acb9d146-2e4c09d6-85a9ee78-339c6508] reply with status: STATUS_UNDEFINED message: Untag queue query failed, conflicting query in parallel 2025-12-23T18:36:50.035215Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [UntagQueue] requestId [acb9d146-2e4c09d6-85a9ee78-339c6508] Send metering event. HttpStatusCode: 500 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 565 ResponseSizeInBytes: 264 SourceAddress: 7892:1b4a:7e7b:0:6092:1b4a:7e7b:0 ResourceId: 000000000000000301v0 Action: UntagQueue 2025-12-23T18:36:50.035291Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#137,[::1]:42550) <- (500 InternalFailure, 96 bytes) 2025-12-23T18:36:50.035326Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#137,[::1]:42550) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.UntagQueue X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"http://ghrun-ou4bsyjxme.auto.internal:8771/cloud4/000000000000000301v0/ExampleQueueName.fifo", "TagKeys": [ "k22" ] } 2025-12-23T18:36:50.035348Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#137,[::1]:42550) Response: HTTP/1.1 500 InternalFailure Connection: close x-amzn-requestid: acb9d146-2e4c09d6-85a9ee78-339c6508 Content-Type: application/x-amz-json-1.1 Content-Length: 96 2025-12-23T18:36:50.035403Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#137,[::1]:42550) connection closed 2025-12-23T18:36:50.036892Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:42576) incoming connection opened 2025-12-23T18:36:50.036951Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:42576) -> (POST /Root, 111 bytes) 2025-12-23T18:36:50.037061Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d82b:a149:7e7b:0:c02b:a149:7e7b:0] request [ListQueueTags] url [/Root] database [/Root] requestId: ec57a4ed-109dd04e-f137d22e-e65d9c32 2025-12-23T18:36:50.037526Z node 7 :HTTP_PROXY INFO: http_req.cpp:543: http request [ListQueueTags] requestId 
[ec57a4ed-109dd04e-f137d22e-e65d9c32] got new request from [d82b:a149:7e7b:0:c02b:a149:7e7b:0] 2025-12-23T18:36:50.037950Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:477: http request [ListQueueTags] requestId [ec57a4ed-109dd04e-f137d22e-e65d9c32] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-12-23T18:36:50.037965Z node 7 :HTTP_PROXY INFO: http_req.cpp:301: http request [ListQueueTags] requestId [ec57a4ed-109dd04e-f137d22e-e65d9c32] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:36:50.038103Z node 7 :SQS DEBUG: ymq_proxy.cpp:148: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: ec57a4ed-109dd04e-f137d22e-e65d9c32 2025-12-23T18:36:50.038194Z node 7 :SQS DEBUG: proxy_actor.cpp:263: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Proxy actor: used user_name='cloud4', queue_name='000000000000000301v0', folder_id='folder4' 2025-12-23T18:36:50.038202Z node 7 :SQS DEBUG: proxy_actor.cpp:78: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Request proxy started 2025-12-23T18:36:50.038256Z node 7 :SQS DEBUG: service.cpp:761: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Answer configuration for queue [cloud4/000000000000000301v0] without leader 2025-12-23T18:36:50.038315Z node 7 :SQS DEBUG: proxy_actor.cpp:97: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Get configuration duration: 0ms 2025-12-23T18:36:50.038377Z node 7 :SQS DEBUG: proxy_service.cpp:246: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Send get leader node request to sqs service for cloud4/000000000000000301v0 2025-12-23T18:36:50.038395Z node 7 :SQS DEBUG: service.cpp:581: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Leader node for queue [cloud4/000000000000000301v0] is 7 2025-12-23T18:36:50.038416Z node 7 :SQS DEBUG: proxy_service.cpp:170: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Got leader node for queue response. Node id: 7. Status: 0 2025-12-23T18:36:50.038504Z node 7 :SQS TRACE: proxy_service.cpp:303: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Sending request from proxy to leader node 7: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" 2025-12-23T18:36:50.038572Z node 7 :SQS DEBUG: proxy_service.cpp:70: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Received Sqs Request: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" 2025-12-23T18:36:50.038615Z node 7 :SQS DEBUG: action.h:133: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Request started. Actor: [7:7587124196971591244:5606] 2025-12-23T18:36:50.038679Z node 7 :SQS TRACE: service.cpp:1472: Inc local leader ref for actor [7:7587124196971591244:5606] 2025-12-23T18:36:50.038705Z node 7 :SQS DEBUG: service.cpp:754: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Forward configuration request to queue [cloud4/000000000000000301v0] leader 2025-12-23T18:36:50.038801Z node 7 :SQS DEBUG: action.h:627: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Get configuration duration: 0ms 2025-12-23T18:36:50.038817Z node 7 :SQS TRACE: action.h:647: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Got configuration. 
Root url: http://ghrun-ou4bsyjxme.auto.internal:8771, Shards: 1, Fail: 0 2025-12-23T18:36:50.038832Z node 7 :SQS TRACE: action.h:427: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] DoRoutine 2025-12-23T18:36:50.038882Z node 7 :SQS TRACE: action.h:264: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] SendReplyAndDie from action actor { ListQueueTags { RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" } } 2025-12-23T18:36:50.038968Z node 7 :SQS TRACE: proxy_service.h:35: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Sending sqs response: { ListQueueTags { RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" } RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-12-23T18:36:50.039027Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7587124196971591244:5606]. Found: 1 2025-12-23T18:36:50.039076Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ListQueueTags { RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" } RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-12-23T18:36:50.039138Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7587124196971591243:2764]: ListQueueTags { RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" } RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-12-23T18:36:50.039513Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] HandleResponse: { ListQueueTags { RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" } RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true }, status: OK 2025-12-23T18:36:50.039564Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [ec57a4ed-109dd04e-f137d22e-e65d9c32] Sending reply from proxy actor: { ListQueueTags { RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" } RequestId: "ec57a4ed-109dd04e-f137d22e-e65d9c32" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-12-23T18:36:50.039705Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [ListQueueTags] requestId [ec57a4ed-109dd04e-f137d22e-e65d9c32] Got succesfult GRPC response. 2025-12-23T18:36:50.039759Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ListQueueTags] requestId [ec57a4ed-109dd04e-f137d22e-e65d9c32] reply ok 2025-12-23T18:36:50.039884Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [ListQueueTags] requestId [ec57a4ed-109dd04e-f137d22e-e65d9c32] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 154 SourceAddress: d82b:a149:7e7b:0:c02b:a149:7e7b:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2025-12-23T18:36:50.039982Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:42576) <- (200 , 2 bytes) 2025-12-23T18:36:50.040045Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:42576) connection closed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: 2025-12-23T18:35:34.367485Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2025-12-23T18:35:34.532011Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:35:34.532093Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:35:34.532152Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:34.532208Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2025-12-23T18:35:34.579764Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:34.622314Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:35:34.630695Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2025-12-23T18:35:34.633448Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2025-12-23T18:35:34.635752Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2025-12-23T18:35:34.637607Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 
'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2025-12-23T18:35:34.651308Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:35:34.651769Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c92796ca-a24b8c45-e67e26ab-1451c870_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:35:34.659240Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:35:34.659671Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|61cfc085-8821421-6d3b4c0d-6f53bd69_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:35:34.694709Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:35:34.695180Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c19d658c-94019f09-ccc8b030-714e60d8_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:244:2057] recipient: [1:103:2137] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:247:2057] recipient: [1:246:2242] Leader for TabletID 72057594037927937 is [1:248:2243] sender: [1:249:2057] recipient: [1:246:2242] 2025-12-23T18:35:34.825007Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:35:34.825102Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:35:34.826285Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:34.826346Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:35:34.827088Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:297:2243] 2025-12-23T18:35:34.829300Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:298:2243] 2025-12-23T18:35:34.838085Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:35:34.838549Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:35:34.839034Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:297:2243] 2025-12-23T18:35:34.839299Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:298:2243] 2025-12-23T18:35:34.848281Z node 1 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2025-12-23T18:35:34.851324Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [1:248:2243] sender: [1:326:2057] recipient: [1:14:2061] Got start offset = 0 2025-12-23T18:35:35.581352Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:113:2057] recipient: [2:106:2138] 2025-12-23T18:35:35.698638Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:35:35.698705Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:35:35.698776Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:35.698831Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927938 is [2:158:2176] sender: [2:159:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:184:2057] recipient: [2:14:2061] 2025-12-23T18:35:35.775667Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:35.776550Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 2 actor [2:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } 2025-12-23T18:35:35.777308Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:190:2142] 2025-12-23T18:35:35.780002Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:190:2142] 2025-12-23T18:35:35.781713Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:191:2142] 2025-12-23T18:35:35.791724Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:191:2142] 2025-12-23T18:35:35.803063Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' 
partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:35:35.803451Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bbbc706f-8baf22bd-db0d0842-ea872f1b_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:35:35.819375Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:35:35.819811Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e409f630-a534f745-b41d74a5-fba3fb8e_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:35:35.880339Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:35:35.880757Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|15c7d81b-6623329-d0349ebe-bbb18ea1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:112:2142]) on event NKikimr::TEvPersQueue::TEvOffsets ! Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:244:2057] recipient: [2:104:2137] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:247:2057] recipient: [2:246:2241] Leader for TabletID 72057594037927937 is [2:248:2242] sender: [2:249:2057] recipient: [2:246:2241] 2025-12-23T18:35:35.972018Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:35:35.972080Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:35:35.972909Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:35.972969Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12- ... 
7594037927937] Config applied version 55 actor [55:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 } 2025-12-23T18:36:34.637580Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [55:189:2142] 2025-12-23T18:36:34.641185Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:189:2142] 2025-12-23T18:36:34.643203Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [55:190:2142] 2025-12-23T18:36:34.645564Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:190:2142] 2025-12-23T18:36:34.657185Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:36:34.657588Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|9b779561-39fff0f-fc3170ca-3ae6930a_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:36:34.665435Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:36:34.665868Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|900e6eed-41ac13a1-e271712a-42c6364f_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:36:34.687748Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:36:34.688180Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|329a7a72-a9813d51-59504686-7bc17f73_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [55:111:2142] sender: [55:244:2057] recipient: [55:103:2137] Leader for TabletID 72057594037927937 is [55:111:2142] sender: [55:247:2057] recipient: [55:246:2242] Leader for TabletID 72057594037927937 is [55:248:2243] sender: [55:249:2057] recipient: [55:246:2242] 2025-12-23T18:36:34.753048Z node 55 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:36:34.753112Z node 55 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:36:34.753842Z node 55 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:36:34.753891Z node 55 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:36:34.754834Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: 
[72057594037927937][Partition][0][StateInit] bootstrapping 0 [55:297:2243] 2025-12-23T18:36:34.757432Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [55:298:2243] 2025-12-23T18:36:34.771167Z node 55 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:36:34.771767Z node 55 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:36:34.772631Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:297:2243] 2025-12-23T18:36:34.772889Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:298:2243] 2025-12-23T18:36:34.784673Z node 55 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2025-12-23T18:36:34.787571Z node 55 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 55 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [55:248:2243] sender: [55:326:2057] recipient: [55:14:2061] Got start offset = 0 2025-12-23T18:36:35.205234Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:107:2057] recipient: [56:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:107:2057] recipient: [56:105:2138] Leader for TabletID 72057594037927937 is [56:111:2142] sender: [56:112:2057] recipient: [56:105:2138] 2025-12-23T18:36:35.282051Z node 56 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:36:35.282109Z node 56 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:36:35.282152Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:36:35.282201Z node 56 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [56:153:2057] recipient: [56:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [56:153:2057] recipient: [56:151:2172] Leader for TabletID 72057594037927938 is [56:157:2176] sender: [56:158:2057] recipient: [56:151:2172] Leader for TabletID 72057594037927937 is [56:111:2142] sender: [56:183:2057] recipient: [56:14:2061] 2025-12-23T18:36:35.322242Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:36:35.323067Z node 56 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 56 actor [56:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 
TopicName: "rt3.dc1--asdfgs--topic" Version: 56 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 56 } 2025-12-23T18:36:35.323729Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [56:189:2142] 2025-12-23T18:36:35.333341Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [56:189:2142] 2025-12-23T18:36:35.337963Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [56:190:2142] 2025-12-23T18:36:35.341966Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [56:190:2142] 2025-12-23T18:36:35.358096Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:36:35.358495Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a99ae8ac-b0ce3cca-7315db07-35354888_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:36:35.371840Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:36:35.372261Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6e4802e9-b09f4fbd-8bb0d65d-fb884c87_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:36:35.439868Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:36:35.440291Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|edefc78-d2979080-aad3e07-7b1372eb_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [56:111:2142] sender: [56:244:2057] recipient: [56:103:2137] Leader for TabletID 72057594037927937 is [56:111:2142] sender: [56:247:2057] recipient: [56:246:2242] Leader for TabletID 72057594037927937 is [56:248:2243] sender: [56:249:2057] recipient: [56:246:2242] 2025-12-23T18:36:35.579632Z node 56 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:36:35.579694Z node 56 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:36:35.580475Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:36:35.580523Z node 56 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:36:35.581540Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [56:297:2243] 2025-12-23T18:36:35.584949Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [56:298:2243] 2025-12-23T18:36:35.596103Z node 56 :PERSQUEUE INFO: partition_init.cpp:1016: 
[rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:36:35.596568Z node 56 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:36:35.597260Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [56:297:2243] 2025-12-23T18:36:35.597527Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [56:298:2243] 2025-12-23T18:36:35.615187Z node 56 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2025-12-23T18:36:35.615708Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [56:248:2243] sender: [56:326:2057] recipient: [56:14:2061] Got start offset = 0 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2964:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2964:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2964:2117] 2025-12-23T18:36:07.274368Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:07.275828Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:07.276263Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:07.278438Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:07.279649Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:07.279982Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:07.280022Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:07.280427Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:07.300363Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:07.300514Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:07.300681Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:07.300787Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 
2025-12-23T18:36:07.300882Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:07.300963Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 2025-12-23T18:36:07.313196Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:07.313362Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:07.366407Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:07.366544Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:07.366635Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:07.366770Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:07.367010Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:07.367120Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:07.367182Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:07.367246Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:07.379055Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:07.379209Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:07.391512Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:07.391714Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:07.395480Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:07.395569Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:07.395824Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:07.395882Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:07.417950Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 150 PDiskFilter { Property { Type: 
ROT } } } } } 2025-12-23T18:36:07.419974Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-12-23T18:36:07.420069Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-12-23T18:36:07.420102Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-12-23T18:36:07.420167Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-12-23T18:36:07.420201Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-12-23T18:36:07.420231Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-12-23T18:36:07.420278Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-12-23T18:36:07.420309Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-12-23T18:36:07.420337Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-12-23T18:36:07.420363Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-12-23T18:36:07.420392Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-12-23T18:36:07.420419Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-12-23T18:36:07.420452Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-12-23T18:36:07.420487Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-12-23T18:36:07.420535Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-12-23T18:36:07.420564Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-12-23T18:36:07.420590Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-12-23T18:36:07.420616Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-12-23T18:36:07.420650Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-12-23T18:36:07.420693Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-12-23T18:36:07.420728Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:3 ... 
8:1000 Path# /dev/disk1 2025-12-23T18:36:36.256955Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 178:1001 Path# /dev/disk2 2025-12-23T18:36:36.256995Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 178:1002 Path# /dev/disk3 2025-12-23T18:36:36.257032Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1000 Path# /dev/disk1 2025-12-23T18:36:36.257073Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1001 Path# /dev/disk2 2025-12-23T18:36:36.257107Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1002 Path# /dev/disk3 2025-12-23T18:36:36.257149Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1000 Path# /dev/disk1 2025-12-23T18:36:36.257198Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1001 Path# /dev/disk2 2025-12-23T18:36:36.257251Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1002 Path# /dev/disk3 2025-12-23T18:36:36.257308Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1000 Path# /dev/disk1 2025-12-23T18:36:36.257353Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1001 Path# /dev/disk2 2025-12-23T18:36:36.257410Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1002 Path# /dev/disk3 2025-12-23T18:36:36.257445Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1000 Path# /dev/disk1 2025-12-23T18:36:36.257502Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1001 Path# /dev/disk2 2025-12-23T18:36:36.257538Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1002 Path# /dev/disk3 2025-12-23T18:36:36.257568Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1000 Path# /dev/disk1 2025-12-23T18:36:36.257619Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1001 Path# /dev/disk2 2025-12-23T18:36:36.257689Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1002 Path# /dev/disk3 2025-12-23T18:36:36.257736Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1000 Path# /dev/disk1 2025-12-23T18:36:36.257783Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1001 Path# /dev/disk2 2025-12-23T18:36:36.257830Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1002 Path# /dev/disk3 2025-12-23T18:36:36.257874Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1000 Path# /dev/disk1 2025-12-23T18:36:36.257926Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1001 Path# /dev/disk2 2025-12-23T18:36:36.257964Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1002 Path# /dev/disk3 2025-12-23T18:36:36.257994Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 
186:1000 Path# /dev/disk1 2025-12-23T18:36:36.258035Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 186:1001 Path# /dev/disk2 2025-12-23T18:36:36.258085Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 186:1002 Path# /dev/disk3 2025-12-23T18:36:36.258115Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1000 Path# /dev/disk1 2025-12-23T18:36:36.258142Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1001 Path# /dev/disk2 2025-12-23T18:36:36.258170Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1002 Path# /dev/disk3 2025-12-23T18:36:36.258200Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1000 Path# /dev/disk1 2025-12-23T18:36:36.258228Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1001 Path# /dev/disk2 2025-12-23T18:36:36.258261Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1002 Path# /dev/disk3 2025-12-23T18:36:36.258291Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1000 Path# /dev/disk1 2025-12-23T18:36:36.258325Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1001 Path# /dev/disk2 2025-12-23T18:36:36.258366Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1002 Path# /dev/disk3 2025-12-23T18:36:36.258394Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1000 Path# /dev/disk1 2025-12-23T18:36:36.258420Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1001 Path# /dev/disk2 2025-12-23T18:36:36.258449Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1002 Path# /dev/disk3 2025-12-23T18:36:36.258477Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1000 Path# /dev/disk1 2025-12-23T18:36:36.258509Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1001 Path# /dev/disk2 2025-12-23T18:36:36.258535Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1002 Path# /dev/disk3 2025-12-23T18:36:36.258578Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1000 Path# /dev/disk1 2025-12-23T18:36:36.258610Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1001 Path# /dev/disk2 2025-12-23T18:36:36.258636Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1002 Path# /dev/disk3 2025-12-23T18:36:36.258664Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1000 Path# /dev/disk1 2025-12-23T18:36:36.258691Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1001 Path# /dev/disk2 2025-12-23T18:36:36.258720Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1002 Path# /dev/disk3 2025-12-23T18:36:36.258764Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 
194:1000 Path# /dev/disk1 2025-12-23T18:36:36.258792Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 194:1001 Path# /dev/disk2 2025-12-23T18:36:36.258823Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 194:1002 Path# /dev/disk3 2025-12-23T18:36:36.258851Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1000 Path# /dev/disk1 2025-12-23T18:36:36.258885Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1001 Path# /dev/disk2 2025-12-23T18:36:36.258916Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2025-12-23T18:36:36.258943Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1000 Path# /dev/disk1 2025-12-23T18:36:36.258984Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1001 Path# /dev/disk2 2025-12-23T18:36:36.259036Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2025-12-23T18:36:36.259071Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1000 Path# /dev/disk1 2025-12-23T18:36:36.259113Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1001 Path# /dev/disk2 2025-12-23T18:36:36.259147Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2025-12-23T18:36:36.259174Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1000 Path# /dev/disk1 2025-12-23T18:36:36.259204Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1001 Path# /dev/disk2 2025-12-23T18:36:36.259232Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1002 Path# /dev/disk3 2025-12-23T18:36:36.259260Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1000 Path# /dev/disk1 2025-12-23T18:36:36.259303Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1001 Path# /dev/disk2 2025-12-23T18:36:36.259338Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1002 Path# /dev/disk3 2025-12-23T18:36:36.259366Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1000 Path# /dev/disk1 2025-12-23T18:36:36.259394Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1001 Path# /dev/disk2 2025-12-23T18:36:36.259422Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1002 Path# /dev/disk3 2025-12-23T18:36:36.736324Z node 151 :BS_CONTROLLER ERROR: {BSC07@impl.h:2212} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.486350s 2025-12-23T18:36:36.736497Z node 151 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.486554s 2025-12-23T18:36:36.774216Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-12-23T18:36:36.884246Z node 151 :BS_CONTROLLER DEBUG: 
{BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2025-12-23T18:36:36.912205Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-12-23T18:36:37.024809Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2025-12-23T18:36:37.042164Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-12-23T18:36:37.141882Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2025-12-23T18:36:37.170678Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:32:41.851869Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:32:41.957349Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:32:41.968333Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:32:41.968717Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:32:41.968800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:32:42.255775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:32:42.255898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:32:42.323166Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766514758406401 != 1766514758406405 2025-12-23T18:32:42.333091Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:32:42.376941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:32:42.465275Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c21/r3tmp/tmpibbpcA/pdisk_1.dat 2025-12-23T18:32:42.819931Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:830:2725], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:42.820005Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:42.820039Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046644480 2025-12-23T18:32:42.820166Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122432, Sender [1:828:2723], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-12-23T18:32:42.820198Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5258: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-12-23T18:32:42.965248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-12-23T18:32:42.965419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:42.965581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:32:42.965621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: 
[OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:32:42.965785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:32:42.965857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:32:42.965943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:42.966555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:32:42.966683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:32:42.966723Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:42.966772Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-12-23T18:32:42.966961Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:32:42.967000Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:32:42.967081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:42.967141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:32:42.967182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:32:42.967215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:32:42.967330Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:42.967855Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:42.967900Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-12-23T18:32:42.968025Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:32:42.968062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:32:42.968157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:42.968211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:32:42.968250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:32:42.968321Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:42.968660Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:42.968682Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-12-23T18:32:42.968774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:32:42.968808Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:32:42.968853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:42.968877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:32:42.968914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-12-23T18:32:42.968940Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-12-23T18:32:42.968965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:32:42.979015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:32:42.979542Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:32:42.979583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:32:42.979669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:32:42.979853Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2025-12-23T18:32:42.980139Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:836:2731], Recipient [1:399:2398]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:42.980173Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:32:42.980204Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046644480 2025-12-23T18:32:42.980272Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124996, Sender [1:828:2723], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-12-23T18:32:42.980291Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-12-23T18:32:42.980347Z node 1 :FLAT_TX_SCHEMESHARD DE ... 337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:36.999518Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:36.999549Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2025-12-23T18:36:37.010157Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [11:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:36:37.010260Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:36:37.010298Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-12-23T18:36:37.010377Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 2 2025-12-23T18:36:37.010416Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-12-23T18:36:37.010540Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 60 shard idx 72057594046644480:11 data size 271 row count 13 2025-12-23T18:36:37.010608Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037898 maps to shardIdx: 72057594046644480:11 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 60], pathId map=SequenceTable, is column=0, is olap=0, RowCount 13, DataSize 271, with borrowed parts 2025-12-23T18:36:37.010642Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037898, followerId 0 2025-12-23T18:36:37.010736Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037898: SplitByLoadNotEnabledForTable 2025-12-23T18:36:37.010807Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 68 shard idx 72057594046644480:17 data size 368 row count 16 2025-12-23T18:36:37.010850Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037906 maps to shardIdx: 72057594046644480:17 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 68], pathId map=SequenceTable, is column=0, is olap=0, RowCount 16, DataSize 368, with borrowed parts 2025-12-23T18:36:37.010877Z node 11 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037906, followerId 0 2025-12-23T18:36:37.010929Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037906: SplitByLoadNotEnabledForTable 2025-12-23T18:36:37.010995Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:36:37.021758Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [11:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:36:37.021855Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:36:37.021891Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-12-23T18:36:37.032437Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:37.032523Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.032559Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:37.032596Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.032653Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2025-12-23T18:36:37.058519Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:37.058619Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.058661Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:37.058697Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.058730Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:36:37.069756Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:37.069828Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.069862Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:37.069911Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.069942Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2025-12-23T18:36:37.080532Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:37.080610Z node 11 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.080646Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:37.080682Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.080713Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2025-12-23T18:36:37.105186Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:37.105260Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.105294Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:37.105335Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.105367Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:36:37.119102Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:37.119177Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.119211Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:37.119245Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.119275Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2025-12-23T18:36:37.131813Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:37.131890Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.131925Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:37.131975Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.132005Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2025-12-23T18:36:37.153592Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:37.153670Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.153706Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:37.153758Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2025-12-23T18:36:37.153793Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:36:37.164357Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:37.164426Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.164461Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:37.164497Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.164542Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2025-12-23T18:36:37.176288Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:37.176382Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.176420Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:37.176466Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:37.176497Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2025-12-23T18:36:37.643298Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037907, clientId# [11:3275:4465], serverId# [11:3276:4466], sessionId# [0:0:0] { items { uint32_value: 6 } items { uint32_value: 600 } }, { items { uint32_value: 7 } items { uint32_value: 70 } }, { items { uint32_value: 8 } items { uint32_value: 80 } }, { items { uint32_value: 9 } items { uint32_value: 90 } } |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |92.9%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest >> TActorTest::TestStateSwitch [GOOD] >> TActorTest::TestSendFromAnotherThread >> TActorTest::TestWaitFuture [GOOD] |92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |92.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |92.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |92.9%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |92.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |92.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListQueueTags [GOOD] Test command err: 2025-12-23T18:35:42.392162Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123904606482895:2083];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:42.392238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d2b/r3tmp/tmpn1pjIp/pdisk_1.dat 2025-12-23T18:35:43.058575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:43.058884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:43.060918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:43.083723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:43.464257Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:43.471208Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20284, node 1 2025-12-23T18:35:43.509394Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:43.854489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:43.854511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:43.854518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:43.854619Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63826 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514943452 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:44.546567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:44.562984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:63826 2025-12-23T18:35:44.799959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:35:44.808041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:35:44.810313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:35:44.843096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-12-23T18:35:44.850296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:45.020980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:35:45.091593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:45.175156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:45.223264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:45.259346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:45.314826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:45.362358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:45.413426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:45.459747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:47.392404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123904606482895:2083];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:47.392476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:47.750525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123926081320986:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:47.750526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123926081320998:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:47.750688Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:47.755883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:47.760356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123926081321030:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:47.760867Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:47.771658Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123926081321000:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:35:47.849784Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123926081321053:3112] txid# 281474976 ... \203\005@\203\001H?\322\030Invoke\000\003?\326\014Equals\003?\330\000\t\211\004?\322\207\203\001H?\322 Coalesce\000\t\211\004?\342\207\205\004\207\203\001H?\342\026\032\203\004\030Member\000\t\211\n?\354\203\005\004\200\205\004\203\004\203\004\026\032\213\004\203\001H\203\001H\203\004\036\000\003?\362 \000\001\205\000\000\000\000\001\'\000\000\000\000\000\000\000?\352\005?\370\003?\364\004\003?\366 \003\013?\376\t\351\000?\372\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?%\002\003?)\002\022USER_NAME\003\022\000\003?\374(000000000000000301v0\002\003?\001\002\000\037\003?\356\002\002\003?\322\004{}\002\003\003?\302\004{}?a\002\002\002\001\000/" } Params { Bin: "\037\000\005\205\010\203\001H\203\010\203\010\203\001H\020NAME> TActorTest::TestStateSwitch [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestWaitFuture [GOOD] |92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |92.9%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |92.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/testlib/actors/ut/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/testlib/actors/ut/unittest |92.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |92.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |92.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> ConvertUnboxedValueToArrowAndBack::Struct [GOOD] >> ConvertUnboxedValueToArrowAndBack::ListOfJsons [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalListOfOptional [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalVariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalVariantOverTupleWithOptionals [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalOfOptional [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition >> ConvertUnboxedValueToArrowAndBack::LargeVariant >> TestArrowBlockSplitter::SplitLargeBlock [GOOD] >> TestArrowBlockSplitter::SplitWithScalars [GOOD] >> TestArrowBlockSplitter::PassSmallBlock [GOOD] >> TestArrowBlockSplitter::CheckLargeRows [GOOD] >> TestArrowBlockSplitter::CheckLargeScalarRows [GOOD] >> HashShuffle::BackPressureWithSpillingMulti [GOOD] |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange >> TActorTest::TestSendFromAnotherThread [GOOD] |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| 
[TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2025-12-23T18:35:41.379090Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123901741227782:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:41.379383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:41.400352Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d2f/r3tmp/tmpGp3TIJ/pdisk_1.dat 2025-12-23T18:35:41.712835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:41.722912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:41.723069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:41.736187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:41.838693Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:41.842938Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123901741227541:2081] 1766514941317009 != 1766514941317012 TServer::EnableGrpc on GrpcPort 18326, node 1 2025-12-23T18:35:42.022861Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:42.071784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:42.071805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:42.071829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:42.071902Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14280 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:35:42.345092Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514941940 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:42.506401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:14280 2025-12-23T18:35:43.517132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:35:43.535448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:35:43.538484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:35:43.631787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:43.971539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:35:44.082933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-12-23T18:35:44.089187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:44.250961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-12-23T18:35:44.264661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:44.345859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:44.405453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:44.454842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:44.511262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:44.570377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:44.614687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:46.355727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123923216065684:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:46.355935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:46.356183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123923216065696:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:46.361510Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123901741227782:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:46.361636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123923216065698:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:46.361691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:46.361973Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:46.367103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create ... 5: (#37,[::1]:38088) connection closed Http output full {"NextToken":"CM6DgOS0MxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CM6DgOS0MxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-12-23T18:36:56.144559Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:38090) incoming connection opened 2025-12-23T18:36:56.144665Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:38090) -> (POST /Root, 157 bytes) 2025-12-23T18:36:56.144825Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [5852:98f9:327c:0:4052:98f9:327c:0] request [ListShards] url [/Root] database [/Root] requestId: b0f8faaf-6203f2ab-c6546e8e-58135c5d 2025-12-23T18:36:56.145232Z node 8 :HTTP_PROXY INFO: http_req.cpp:982: http request [ListShards] requestId [b0f8faaf-6203f2ab-c6546e8e-58135c5d] got new request from [5852:98f9:327c:0:4052:98f9:327c:0] database '/Root' stream 'teststream' 2025-12-23T18:36:56.145642Z node 8 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ListShards] requestId [b0f8faaf-6203f2ab-c6546e8e-58135c5d] [auth] Authorized successfully 2025-12-23T18:36:56.145730Z node 8 :HTTP_PROXY INFO: http_req.cpp:701: http request [ListShards] requestId [b0f8faaf-6203f2ab-c6546e8e-58135c5d] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1766515016.145812 1229119 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-12-23T18:36:56.146884Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037911] server connected, pipe [8:7587124223806626352:2490], now have 1 active actors on pipe 2025-12-23T18:36:56.146898Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037907] server connected, pipe [8:7587124223806626351:2489], now have 1 active actors on pipe 2025-12-23T18:36:56.147277Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037907] server disconnected, pipe [8:7587124223806626351:2489] destroyed 2025-12-23T18:36:56.147309Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037911] server disconnected, pipe [8:7587124223806626352:2490] destroyed 2025-12-23T18:36:56.147425Z node 8 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ListShards] requestId 
[b0f8faaf-6203f2ab-c6546e8e-58135c5d] reply ok 2025-12-23T18:36:56.147544Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:38090) <- (200 , 449 bytes) 2025-12-23T18:36:56.147651Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:38090) connection closed Http output full {"NextToken":"CNODgOS0MxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CNODgOS0MxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-12-23T18:36:56.206134Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:36:56.206177Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.206135Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:36:56.206210Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.206213Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:56.206224Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:56.206234Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.206241Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.206247Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist 2025-12-23T18:36:56.206253Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist 2025-12-23T18:36:56.209740Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:56.209784Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.209802Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:56.209824Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.209855Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037911][Partition][0][StateIdle] Try persist 2025-12-23T18:36:56.210183Z node 8 
:PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:36:56.210213Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.210224Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:56.210238Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.210250Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2025-12-23T18:36:56.214321Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:36:56.214366Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.214380Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:56.214401Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.214414Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037910][Partition][4][StateIdle] Try persist 2025-12-23T18:36:56.306507Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:36:56.306513Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:36:56.306538Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.306546Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.306557Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:56.306559Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:56.306577Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.306578Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.306591Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist 2025-12-23T18:36:56.306591Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist 2025-12-23T18:36:56.310093Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:36:56.310136Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.310153Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 
2025-12-23T18:36:56.310197Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.310211Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037911][Partition][0][StateIdle] Try persist 2025-12-23T18:36:56.310544Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:36:56.310567Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.310579Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:56.310594Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.310606Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2025-12-23T18:36:56.314656Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:36:56.314697Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.314713Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:36:56.314735Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:36:56.314760Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037910][Partition][4][StateIdle] Try persist |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> HashShuffle::BackPressureWithSpillingMulti [GOOD] |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/testlib/actors/ut/unittest >> TActorTest::TestSendFromAnotherThread [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/testlib/actors/ut/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| 
[TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 |92.9%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] |92.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TA] $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange [GOOD] |92.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpDataIntegrityTrails::BrokenReadLock-UseSink >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:36:59.413297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:36:59.413392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-12-23T18:36:59.413433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:36:59.413473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:36:59.413515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:36:59.413546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:36:59.413597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:59.413684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:36:59.414619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:36:59.414945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:36:59.515540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:36:59.515614Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:59.531666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:36:59.531947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:36:59.532118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:36:59.536013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:36:59.536210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:36:59.537071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:59.537293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:36:59.539772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:59.539949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:59.541454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:59.541526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:59.541657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2025-12-23T18:36:59.541737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:59.541788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:59.541938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:36:59.711783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.713052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.713205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.713290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.713392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.713481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.713558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.713668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.713834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.713930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.713996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.714095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.714209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.714298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.714404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } } } TxId: 110 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:37:00.823908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 110:0, at schemeshard: 72057594046678944 2025-12-23T18:37:00.824087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 110:1, propose status:StatusInvalidParameter, reason: Last patrition 2 doesn't have the highest bound "AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9", at schemeshard: 72057594046678944 2025-12-23T18:37:00.828436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 110, response: Status: StatusInvalidParameter Reason: "Last patrition 2 doesn\'t have the highest bound \"AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\"" TxId: 110 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:37:00.828745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 110, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Last patrition 2 doesn't have the highest bound "AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9", operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 2025-12-23T18:37:00.829131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 110: send EvNotifyTxCompletion 2025-12-23T18:37:00.829178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 110 2025-12-23T18:37:00.829623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2025-12-23T18:37:00.829726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2025-12-23T18:37:00.829771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [1:1015:2935] TestWaitNotification: OK eventTxId 110 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } TestModificationResults wait txId: 112 2025-12-23T18:37:00.834438Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } } } TxId: 112 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:37:00.834635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 112:0, at schemeshard: 72057594046678944 2025-12-23T18:37:00.835410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 112:1, propose status:StatusInvalidParameter, reason: Only 1 root partitions has new bounds, required: 3, at schemeshard: 72057594046678944 2025-12-23T18:37:00.841357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 112, response: Status: StatusInvalidParameter Reason: "Only 1 root partitions has new bounds, required: 3" TxId: 112 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:37:00.841712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 112, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Only 1 root partitions has new bounds, required: 3, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-12-23T18:37:00.842095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-12-23T18:37:00.842144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-12-23T18:37:00.842596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-12-23T18:37:00.842728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-12-23T18:37:00.842793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1022:2942] TestWaitNotification: OK eventTxId 112 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 CreatePartition: false } RootPartitionBoundaries { Partition: 2 CreatePartition: false } TestModificationResults wait txId: 114 2025-12-23T18:37:00.846566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 CreatePartition: false } RootPartitionBoundaries { Partition: 2 CreatePartition: false } } } TxId: 114 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:37:00.846937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 114:0, at schemeshard: 72057594046678944 2025-12-23T18:37:00.847124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:127: IgniteOperation, opId: 114:1, propose status:StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, at schemeshard: 72057594046678944 2025-12-23T18:37:00.851899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 114, response: Status: StatusInvalidParameter Reason: "KeyRange must be specified for root partition bounds" TxId: 114 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:37:00.852233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 114, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 114, wait until txId: 114 TestWaitNotification wait txId: 114 2025-12-23T18:37:00.852737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 114: send EvNotifyTxCompletion 2025-12-23T18:37:00.852794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 114 2025-12-23T18:37:00.853280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 114, at schemeshard: 72057594046678944 2025-12-23T18:37:00.853413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-12-23T18:37:00.853465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:1029:2949] TestWaitNotification: OK eventTxId 114 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } TestModificationResults wait txId: 116 2025-12-23T18:37:00.862035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } } } TxId: 116 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:37:00.862368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, 
pathId: , opId: 116:0, at schemeshard: 72057594046678944 2025-12-23T18:37:00.862565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 116:1, propose status:StatusInvalidParameter, reason: Partitions 0 and 0 have overlapped bounds at point "-inf", at schemeshard: 72057594046678944 2025-12-23T18:37:00.865461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 116, response: Status: StatusInvalidParameter Reason: "Partitions 0 and 0 have overlapped bounds at point \"-inf\"" TxId: 116 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:37:00.865780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 116, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Partitions 0 and 0 have overlapped bounds at point "-inf", operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 116, wait until txId: 116 TestWaitNotification wait txId: 116 2025-12-23T18:37:00.866218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 116: send EvNotifyTxCompletion 2025-12-23T18:37:00.866274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 116 2025-12-23T18:37:00.866860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 116, at schemeshard: 72057594046678944 2025-12-23T18:37:00.867003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2025-12-23T18:37:00.867075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:1036:2956] TestWaitNotification: OK eventTxId 116 |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |92.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:36:59.324490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:36:59.324594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-12-23T18:36:59.324632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:36:59.324723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:36:59.324792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:36:59.324828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:36:59.324916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:59.325020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:36:59.326011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:36:59.326342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:36:59.404665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:36:59.404728Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:59.414554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:36:59.415223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:36:59.415412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:36:59.421450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:36:59.421746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:36:59.423001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:59.423690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:36:59.427964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:59.428225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:59.430818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:59.430902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:59.431084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2025-12-23T18:36:59.431147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:59.431198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:59.431377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:36:59.603037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.604234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.604414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.604534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.604612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.604698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.604776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.604909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.604990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.605069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.605157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.605248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.605359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.605450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:59.605548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... on for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2025-12-23T18:37:01.149682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2025-12-23T18:37:01.149729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2025-12-23T18:37:01.149762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2025-12-23T18:37:01.149789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2025-12-23T18:37:01.149814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2025-12-23T18:37:01.150003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.150126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.150260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-12-23T18:37:01.150323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:37:01.150385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:37:01.150413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2025-12-23T18:37:01.150460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:37:01.150574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.150678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.151112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-12-23T18:37:01.151334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:37:01.151828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.151983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.152469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.152588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.152972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.153189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.153258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.153357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.153598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.153725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.154309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.154638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.154724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.159251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.159515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.159587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.159696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:37:01.183911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:37:01.196268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:37:01.196376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:01.196452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:37:01.197360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a 
serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:37:01.197458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:37:01.197684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1075:2977] sender: [1:1133:2058] recipient: [1:15:2062] 2025-12-23T18:37:01.252206Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:37:01.252611Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 410us result status StatusSuccess 2025-12-23T18:37:01.253560Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions 
{ PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |92.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TargetDiscoverer::Dirs >> KqpCost::IndexLookupJoin-StreamLookupJoin |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |93.0%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpCost::CTASWithRetry-isOlap |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |93.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 |93.0%| [TA] {RESULT} $(B)/ydb/core/testlib/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 >> KqpCost::PointLookup >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2025-12-23T18:35:37.137724Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123883716704154:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:37.138001Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:37.224649Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d3b/r3tmp/tmpjKIhNk/pdisk_1.dat 2025-12-23T18:35:37.770348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:37.770448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:37.823414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:37.961944Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:37.964041Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:37.967566Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123883716703940:2081] 1766514937024282 != 1766514937024285 TServer::EnableGrpc on GrpcPort 5431, node 1 2025-12-23T18:35:38.126972Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:38.162383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:35:38.163272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:35:38.163280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:35:38.163289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:35:38.166276Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17213 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514937901 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:38.659062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:35:38.687628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:17213 2025-12-23T18:35:39.363490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:35:39.377107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:35:39.379382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:35:39.431757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-12-23T18:35:39.441941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:39.708437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:35:39.791830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2025-12-23T18:35:39.796713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:35:39.882492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:39.950445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:40.012947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:40.062043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:40.101504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:40.163907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:35:40.226148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:35:42.105354Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123883716704154:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:42.105415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:43.100597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123909486509384:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:43.101069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123909486509375:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:43.101170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:43.106070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:43.106526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123909486509390:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:43.106813Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_worklo ... pp:2733: [PQ: 72075186224037909] server connected, pipe [8:7587124240896500380:2481], now have 1 active actors on pipe 2025-12-23T18:37:00.901411Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037911] server connected, pipe [8:7587124240896500382:2483], now have 1 active actors on pipe 2025-12-23T18:37:00.901419Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037907] server connected, pipe [8:7587124240896500378:2479], now have 1 active actors on pipe 2025-12-23T18:37:00.902362Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037910] server disconnected, pipe [8:7587124240896500381:2482] destroyed 2025-12-23T18:37:00.902390Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037909] server disconnected, pipe [8:7587124240896500380:2481] destroyed 2025-12-23T18:37:00.902407Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037911] server disconnected, pipe [8:7587124240896500382:2483] destroyed 2025-12-23T18:37:00.902436Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037907] server disconnected, pipe [8:7587124240896500378:2479] destroyed 2025-12-23T18:37:00.902454Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037908] server disconnected, pipe [8:7587124240896500379:2480] destroyed Http output full {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1766515021,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-12-23T18:37:00.903050Z node 8 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DescribeStream] requestId [688f3e8d-bdb585c9-8dfbb553-36a6fb3e] reply ok 200 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1766515021,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-12-23T18:37:00.903293Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:45718) <- (200 , 1672 bytes) 2025-12-23T18:37:00.903378Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:45718) connection closed 2025-12-23T18:37:00.904598Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#40,[::1]:45730) incoming connection opened 2025-12-23T18:37:00.904704Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#40,[::1]:45730) -> (POST /Root, 30 bytes) 2025-12-23T18:37:00.904820Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [3863:33b7:3d7c:0:2063:33b7:3d7c:0] request [DescribeStreamSummary] url [/Root] database [/Root] requestId: 92c5711c-563f4377-a74f4821-7686bd16 2025-12-23T18:37:00.905150Z node 8 :HTTP_PROXY INFO: http_req.cpp:982: http request [DescribeStreamSummary] requestId [92c5711c-563f4377-a74f4821-7686bd16] got new request from [3863:33b7:3d7c:0:2063:33b7:3d7c:0] database '/Root' stream 'testtopic' 2025-12-23T18:37:00.905624Z node 8 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DescribeStreamSummary] requestId [92c5711c-563f4377-a74f4821-7686bd16] [auth] Authorized successfully 2025-12-23T18:37:00.905722Z node 8 :HTTP_PROXY INFO: http_req.cpp:701: http request [DescribeStreamSummary] requestId [92c5711c-563f4377-a74f4821-7686bd16] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1766515.02,"StreamName":"testtopic"}} 2025-12-23T18:37:00.906720Z node 8 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DescribeStreamSummary] requestId [92c5711c-563f4377-a74f4821-7686bd16] reply ok 2025-12-23T18:37:00.906887Z node 8 :HTTP DEBUG: 
http_proxy_incoming.cpp:304: (#40,[::1]:45730) <- (200 , 238 bytes) 2025-12-23T18:37:00.906971Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#40,[::1]:45730) connection closed 200 {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1766515.02,"StreamName":"testtopic"}} 2025-12-23T18:37:00.907744Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:45746) incoming connection opened 2025-12-23T18:37:00.907823Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:45746) -> (POST /Root, 30 bytes) 2025-12-23T18:37:00.907937Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [98bb:29b7:3d7c:0:80bb:29b7:3d7c:0] request [DescribeStream] url [/Root] database [/Root] requestId: 5a7f1d1c-eb66b7bc-abe1ec14-9a62747 2025-12-23T18:37:00.908229Z node 8 :HTTP_PROXY INFO: http_req.cpp:982: http request [DescribeStream] requestId [5a7f1d1c-eb66b7bc-abe1ec14-9a62747] got new request from [98bb:29b7:3d7c:0:80bb:29b7:3d7c:0] database '/Root' stream 'testtopic' 2025-12-23T18:37:00.908623Z node 8 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DescribeStream] requestId [5a7f1d1c-eb66b7bc-abe1ec14-9a62747] [auth] Authorized successfully 2025-12-23T18:37:00.908687Z node 8 :HTTP_PROXY INFO: http_req.cpp:701: http request [DescribeStream] requestId [5a7f1d1c-eb66b7bc-abe1ec14-9a62747] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:37:00.909561Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037907] server connected, pipe [8:7587124240896500405:2491], now have 1 active actors on pipe 2025-12-23T18:37:00.909632Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037909] server connected, pipe [8:7587124240896500407:2493], now have 1 active actors on pipe 2025-12-23T18:37:00.909640Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037908] server connected, pipe [8:7587124240896500406:2492], now have 1 active actors on pipe 2025-12-23T18:37:00.909668Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037910] server connected, pipe [8:7587124240896500408:2494], now have 1 active actors on pipe 2025-12-23T18:37:00.909716Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037911] server connected, pipe [8:7587124240896500409:2495], now have 1 active actors on pipe 2025-12-23T18:37:00.910675Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037910] server disconnected, pipe [8:7587124240896500408:2494] destroyed 2025-12-23T18:37:00.910701Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037911] server disconnected, pipe [8:7587124240896500409:2495] destroyed 2025-12-23T18:37:00.910719Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037907] server disconnected, pipe [8:7587124240896500405:2491] destroyed 2025-12-23T18:37:00.910759Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037908] server disconnected, pipe [8:7587124240896500406:2492] destroyed 2025-12-23T18:37:00.910778Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037909] server disconnected, pipe [8:7587124240896500407:2493] destroyed 2025-12-23T18:37:00.910915Z node 8 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DescribeStream] requestId [5a7f1d1c-eb66b7bc-abe1ec14-9a62747] reply ok 2025-12-23T18:37:00.911330Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:45746) <- (200 , 1672 bytes) 
2025-12-23T18:37:00.911426Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:45746) connection closed Http output full {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1766515021,"StorageLimitMb":0,"StreamName":"testtopic"}} |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> KqpCost::QuerySeviceRangeFullScan >> KqpCost::CTASWithRetry+isOlap |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow-isSink+isOlap [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] >> KqpCost::OlapRange |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow-isSink+isOlap [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: 2025-12-23T18:35:32.759490Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 
localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2025-12-23T18:35:32.892635Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:35:32.892725Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:35:32.892785Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:32.892855Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2025-12-23T18:35:32.942000Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:32.986655Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:35:32.992123Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2025-12-23T18:35:32.999189Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2025-12-23T18:35:33.002170Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2025-12-23T18:35:33.008357Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2025-12-23T18:35:33.027260Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:35:33.027820Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8642a715-2aab7920-64d0a890-bb72a044_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:35:33.044843Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d9cda8ed-2e315ec2-a320a2c3-e7ca265f_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:35:33.101428Z node 1 :PERSQUEUE ERROR: 
pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:35:33.101909Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f986374a-1b2c2c24-cd901a8b-4209059_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:35:33.132780Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:35:33.133198Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|21cbf131-ce7628d4-56819c6b-b81aed22_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:35:33.164611Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:35:33.165059Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3cb30f00-99f5037f-62c289e3-1c91169a_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:35:33.200528Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:35:33.200982Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8573779b-5bf2bc41-253c2581-8b008984_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:35:34.159104Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:113:2057] recipient: [2:106:2138] 2025-12-23T18:35:34.293437Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:35:34.293531Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:35:34.293606Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:34.293677Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927938 is [2:158:2176] sender: [2:159:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:184:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:112:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! 
Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:186:2057] recipient: [2:104:2137] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:189:2057] recipient: [2:188:2197] Leader for TabletID 72057594037927937 is [2:190:2198] sender: [2:191:2057] recipient: [2:188:2197] 2025-12-23T18:35:34.460356Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:35:34.460458Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:35:34.460521Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:34.460580Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:112:2142]) rebooted! 2025-12-23T18:35:34.476060Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 !Reboot 72057594037927937 (actor [2:112:2142]) tablet resolver refreshed! new actor is[2:190:2198] 2025-12-23T18:35:34.551032Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:35:34.585507Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:35:34.599063Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:35:34.620122Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:35:34.697379Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:35:34.747071Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:35:34.879208Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:35:34.915365Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:35:35.239257Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:35:35.263040Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:35:35.651562Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:35:35.955485Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:35:36.087232Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 
disallowed 0 Leader for TabletID 72057594037927937 is [2:190:2198] sender: [2:270:2057] recipient: [2:14:2061] 2025-12-23T18:35:36.308281Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:35:36.309489Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 2 actor [2:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } 2025-12-23T18:35:36.310951Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:276:2198] 2025-12-23T18:35:36.313941Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partit ... LVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:37:03.345433Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:37:03.427560Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:37:03.611264Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:37:03.839229Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:37:03.863271Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [47:290:2277] sender: [47:390:2057] recipient: [47:14:2061] 2025-12-23T18:37:04.704609Z node 48 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 48 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:107:2057] recipient: [48:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:107:2057] recipient: [48:105:2138] Leader for TabletID 72057594037927937 is [48:111:2142] sender: [48:112:2057] recipient: [48:105:2138] 2025-12-23T18:37:04.904150Z node 48 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:37:04.904218Z node 48 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:37:04.904270Z node 48 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:37:04.904325Z node 48 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't 
have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:153:2057] recipient: [48:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:153:2057] recipient: [48:151:2172] Leader for TabletID 72057594037927938 is [48:157:2176] sender: [48:158:2057] recipient: [48:151:2172] Leader for TabletID 72057594037927937 is [48:111:2142] sender: [48:181:2057] recipient: [48:14:2061] 2025-12-23T18:37:04.940322Z node 48 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:37:04.941320Z node 48 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 48 actor [48:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 } 2025-12-23T18:37:04.942475Z node 48 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [48:187:2142] 2025-12-23T18:37:04.947272Z node 48 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:187:2142] 2025-12-23T18:37:04.949490Z node 48 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [48:188:2142] 2025-12-23T18:37:04.954147Z node 48 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:188:2142] 2025-12-23T18:37:04.969376Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:37:04.969922Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e203d5fe-306f1a36-6f00cc87-ebbf5579_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:37:04.980701Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|22b82a3d-5b757b9e-da17d3fb-5ded2617_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:37:05.070064Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:37:05.070669Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|98898e05-f652403e-c059e712-77b5f407_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:37:05.081865Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:37:05.082443Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|9a153706-930ffe06-246586d5-9733958f_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:37:05.096586Z node 48 :PERSQUEUE ERROR: 
pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:37:05.097148Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|34c127e2-21ebf28-346a441c-6665dc58_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:37:05.107606Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:37:05.108189Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3a72ba30-79b0db52-6abd86d6-cdc868c0_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:37:05.751511Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:107:2057] recipient: [49:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:107:2057] recipient: [49:105:2138] Leader for TabletID 72057594037927937 is [49:111:2142] sender: [49:112:2057] recipient: [49:105:2138] 2025-12-23T18:37:05.879226Z node 49 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:37:05.879299Z node 49 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:37:05.879354Z node 49 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:37:05.879431Z node 49 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:153:2057] recipient: [49:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:153:2057] recipient: [49:151:2172] Leader for TabletID 72057594037927938 is [49:157:2176] sender: [49:158:2057] recipient: [49:151:2172] Leader for TabletID 72057594037927937 is [49:111:2142] sender: [49:183:2057] recipient: [49:14:2061] 2025-12-23T18:37:05.902769Z node 49 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:37:05.903678Z node 49 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 49 actor [49:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 } 2025-12-23T18:37:05.904810Z node 49 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [49:189:2142] 2025-12-23T18:37:05.907864Z node 49 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:189:2142] 2025-12-23T18:37:05.909967Z node 49 :PERSQUEUE INFO: 
partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [49:190:2142] 2025-12-23T18:37:05.912383Z node 49 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:190:2142] 2025-12-23T18:37:05.922719Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:37:05.923472Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|963076c-6d6c1627-3fe57fa8-128fdf99_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:37:05.930619Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3d0da39e-e57d8b87-e3461d47-8ebe0ce7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:37:05.968591Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:37:05.969170Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|dd17956-29df229d-2f8db741-4a926c0f_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:37:05.980344Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:37:05.980932Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6fb36a47-740c9392-a586015f-9014c364_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:37:05.995015Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:37:05.995591Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f994a6-29b171bc-7ab45024-48edbba5_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:37:06.006196Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:37:06.006777Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|56c680f9-9bd62563-d49964ad-ff06b8c8_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] >> TargetDiscoverer::Dirs [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow+isSink+isOlap [GOOD] >> KqpCost::ScanQueryRangeFullScan+SourceRead >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |93.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |93.0%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |93.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 >> KqpCost::IndexLookup+useSink |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow+isSink+isOlap [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 16987, MsgBus: 61330 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0030b5/r3tmp/tmp3dJd4S/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16987, node 1 TClient is connected to server localhost:61330 TClient is connected to server localhost:61330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515020550 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.0%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5398, MsgBus: 18843 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0030c0/r3tmp/tmpzjF4bv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5398, node 1 TClient is connected to server localhost:18843 TClient is connected to server localhost:18843 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515020151 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2025-12-23T18:37:02.854349Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124248602227765:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:02.854404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004110/r3tmp/tmpiASec1/pdisk_1.dat 2025-12-23T18:37:03.254884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:03.311167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:03.311291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:03.398462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:03.418454Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:03.426902Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124248602227744:2081] 1766515022853503 != 1766515022853506 2025-12-23T18:37:03.578899Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28031 TServer::EnableGrpc on GrpcPort 12550, node 1 2025-12-23T18:37:03.876936Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:03.914653Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:03.914676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:03.914682Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:03.914784Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515023518 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:04.497638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:04.504603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:37:04.540837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:04.882903Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1766515024547, tx_id: 1 } } } 2025-12-23T18:37:04.882928Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-12-23T18:37:04.918107Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1766515023518, tx_id: 281474976710657 } }, { name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1766515024582, tx_id: 281474976715658 } }] } } 2025-12-23T18:37:04.918133Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-12-23T18:37:04.929603Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766515024666, tx_id: 281474976715659 } }] } } 2025-12-23T18:37:04.929628Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2025-12-23T18:37:07.645125Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766515024666, tx_id: 281474976715659 } } } 2025-12-23T18:37:07.645150Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2025-12-23T18:37:07.645183Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table 2025-12-23T18:37:07.859108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124248602227765:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:07.859196Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |93.0%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22069, MsgBus: 22788 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0030b6/r3tmp/tmp7dXYCD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22069, node 1 TClient is connected to server localhost:22788 TClient is connected to server localhost:22788 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515020851 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpCost::OlapPointLookup |93.0%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> KqpCost::CTAS-isOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:36:54.714918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:36:54.715024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:54.715059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:36:54.715098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:36:54.715133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:36:54.715162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:36:54.715220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:36:54.715340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:36:54.716150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:36:54.716427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:36:54.800828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:36:54.800904Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:54.806713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:36:54.806961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:36:54.807077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:36:54.810326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:36:54.810535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:36:54.811244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:36:54.811464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:36:54.813273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:54.813463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:36:54.814658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:36:54.814711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:36:54.814853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:36:54.814897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:36:54.814948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:36:54.815074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:36:54.953624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.954604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.954728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.954815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.954879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.954940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.954993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.955070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.955155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:36:54.955258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.955324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.955382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.955474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.955544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:36:54.955631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
57594046678944, cookie: 175 2025-12-23T18:37:07.762140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2025-12-23T18:37:07.762171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-12-23T18:37:07.762206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 106 2025-12-23T18:37:07.762242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:37:07.770327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-12-23T18:37:07.770466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-12-23T18:37:07.770500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-12-23T18:37:07.770541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 62], version: 18446744073709551615 2025-12-23T18:37:07.770580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 6 2025-12-23T18:37:07.770687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-12-23T18:37:07.773299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at schemeshard 72057594046678944 2025-12-23T18:37:07.773400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:74 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:37:07.773433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:73 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:37:07.773459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:75 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:37:07.773912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-12-23T18:37:07.773961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 175:0 ProgressState 2025-12-23T18:37:07.774053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 
2025-12-23T18:37:07.774087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-12-23T18:37:07.774127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2025-12-23T18:37:07.774157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-12-23T18:37:07.774191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-12-23T18:37:07.774229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-12-23T18:37:07.774261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 175:0 2025-12-23T18:37:07.774290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 175:0 2025-12-23T18:37:07.774463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 5 2025-12-23T18:37:07.780920Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 0 TabletID: 72075186233409619 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 0 TabletID: 72075186233409619 2025-12-23T18:37:07.782504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 74, at schemeshard: 72057594046678944 2025-12-23T18:37:07.783059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 4 Forgetting tablet 72075186233409619 2025-12-23T18:37:07.783937Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 0 TabletID: 72075186233409618 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 0 TabletID: 72075186233409618 2025-12-23T18:37:07.784324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6957: Transaction 281474976840657 reset current state at schemeshard 72075186233409618 because pipe to tablet 72075186233409619 disconnected 2025-12-23T18:37:07.784374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6986: Pipe attached message is found and resent into the new pipe, opId:281474976840657:4294967295, dst tableId: 72075186233409619, msg type: 269090816, msg cookie: 0:281474976840657, at schemeshardId: 72075186233409618 2025-12-23T18:37:07.784588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-12-23T18:37:07.784781Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 0 TabletID: 72075186233409620 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 0 TabletID: 72075186233409620 2025-12-23T18:37:07.786003Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:37:07.848951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 Forgetting tablet 72075186233409618 2025-12-23T18:37:07.850510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 73, at schemeshard: 72057594046678944 2025-12-23T18:37:07.851007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 3 Forgetting tablet 72075186233409620 2025-12-23T18:37:07.851455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 75, at schemeshard: 72057594046678944 2025-12-23T18:37:07.851655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 2025-12-23T18:37:07.860053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:37:07.860148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2025-12-23T18:37:07.860288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 1 2025-12-23T18:37:07.860679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:37:07.860724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2025-12-23T18:37:07.860790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:37:07.864825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:74 2025-12-23T18:37:07.864892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:74 tabletId 72075186233409619 2025-12-23T18:37:07.864977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:73 2025-12-23T18:37:07.865002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:73 tabletId 72075186233409618 2025-12-23T18:37:07.865213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:75 2025-12-23T18:37:07.865259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:75 tabletId 72075186233409620 2025-12-23T18:37:07.867602Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:37:07.867783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-12-23T18:37:07.876073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-12-23T18:37:07.876145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-12-23T18:37:07.877937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-12-23T18:37:07.878110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-12-23T18:37:07.878150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:8311:9337] TestWaitNotification: OK eventTxId 175 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] >> KqpCost::VectorIndexLookup+useSink ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16696, MsgBus: 7284 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0030b2/r3tmp/tmpHw1LUZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16696, node 1 TClient is connected to server localhost:7284 TClient is connected to server localhost:7284 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515021138 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |93.0%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] |93.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} >> Yq_1::Basic_EmptyDict [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 30014, MsgBus: 18807 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0030a9/r3tmp/tmpkZCnYG/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30014, node 1 TClient is connected to server localhost:18807 TClient is connected to server localhost:18807 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515022979 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |93.0%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 >> KqpCost::ScanScriptingRangeFullScan+SourceRead >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes |93.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |93.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails-isSink+isOlap [GOOD] >> KqpCost::PointLookup [GOOD] |93.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.0%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2348, MsgBus: 25499 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0030af/r3tmp/tmp1nwXcE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2348, node 1 TClient is connected to server localhost:25499 TClient is connected to server localhost:25499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515022160 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> DqUnboxedValueDoNotFitToArrow::LargeVariant [GOOD] >> DqUnboxedValueToNativeArrowConversion::ListOfJsons [GOOD] >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverTupleWithOptionals [GOOD] |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpCost::WriteRowInsertFails+isSink+isOlap [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails-isSink+isOlap [GOOD] |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails+isSink-isOlap >> KqpCost::QuerySeviceRangeFullScan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2025-12-23T18:35:48.897828Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123929790377579:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:48.898913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1223 18:35:49.645541389 1205718 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:35:49.645665133 1205718 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:35:49.656099Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:49.959110Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:49.968173Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:49.968240Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:49.969228Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:49.969284Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:49.969555Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:49.969613Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:49.969656Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:49.969699Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:49.970091Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.149963Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.155429Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.181708Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.245841Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.246704Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.255558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:50.255678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:35:50.303214Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30257 2025-12-23T18:35:50.336440Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.387298Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.391434Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.394911Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.410785Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.416233Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.416941Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.443326Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.477736Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.492785Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.512011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:50.515888Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30257 } ] 2025-12-23T18:35:50.523457Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". C ... 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. Processing resolved ShardId# 72075186224037893, partition range: [(String : yandexcloud://some_folder_id, String : utqudpbklu5ek0qq72dn) ; ()), i: 0, state ranges: 0, points: 1 2025-12-23T18:37:09.416473Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:684: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. Add point to new shardId: 72075186224037893 2025-12-23T18:37:09.416615Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. Pending shards States: TShardState{ TabletId: 72075186224037893, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudpbklu5ek0qq72dn)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudpbklu5ek0qq72dn)], RetryAttempt: 0, ResolveAttempt: 1 }; 2025-12-23T18:37:09.416631Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. effective maxinflight 1024 sorted 0 2025-12-23T18:37:09.416645Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. BEFORE: 1.0 2025-12-23T18:37:09.416694Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. Send EvRead to shardId: 72075186224037893, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2025-12-23T18:37:09.416742Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. AFTER: 0.1 2025-12-23T18:37:09.416756Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2025-12-23T18:37:09.419259Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. Recv TEvReadResult from ShardID=72075186224037893, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-12-23T18:37:09.419288Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. Taken 0 locks 2025-12-23T18:37:09.419303Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. new data for read #0 seqno = 1 finished = 1 2025-12-23T18:37:09.419330Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [7:7587124278964856783:2911], TxId: 281474976710735, task: 1. Ctx: { CheckpointId : . TraceId : 01kd680dcv2hpghkwxhswq03cx. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=OWQ5Y2FkMzQtNjg2NDJmYzItOTRmOWIyM2ItZDk2YTU3MmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 276037645 2025-12-23T18:37:09.419352Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [7:7587124278964856783:2911], TxId: 281474976710735, task: 1. Ctx: { CheckpointId : . 
TraceId : 01kd680dcv2hpghkwxhswq03cx. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=OWQ5Y2FkMzQtNjg2NDJmYzItOTRmOWIyM2ItZDk2YTU3MmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:37:09.419377Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-12-23T18:37:09.419398Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. enter pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-12-23T18:37:09.419431Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. exit pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 1 freeSpace: 8388572 2025-12-23T18:37:09.419451Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. returned 1 rows; processed 1 rows 2025-12-23T18:37:09.419493Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. dropping batch for read #0 2025-12-23T18:37:09.419508Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. effective maxinflight 1024 sorted 0 2025-12-23T18:37:09.419524Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-12-23T18:37:09.419542Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710735, task: 1, CA Id [7:7587124278964856783:2911]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-12-23T18:37:09.419753Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7587124278964856783:2911], TxId: 281474976710735, task: 1. Ctx: { CheckpointId : . TraceId : 01kd680dcv2hpghkwxhswq03cx. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=OWQ5Y2FkMzQtNjg2NDJmYzItOTRmOWIyM2ItZDk2YTU3MmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:37:09.419771Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [7:7587124278964856783:2911], TxId: 281474976710735, task: 1. Ctx: { CheckpointId : . TraceId : 01kd680dcv2hpghkwxhswq03cx. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=OWQ5Y2FkMzQtNjg2NDJmYzItOTRmOWIyM2ItZDk2YTU3MmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:37:09.419824Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976710735, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-12-23T18:37:09.419845Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [7:7587124278964856784:2912], TxId: 281474976710735, task: 2. Ctx: { CheckpointId : . TraceId : 01kd680dcv2hpghkwxhswq03cx. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. 
PoolId : default. SessionId : ydb://session/3?node_id=7&id=OWQ5Y2FkMzQtNjg2NDJmYzItOTRmOWIyM2ItZDk2YTU3MmY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2025-12-23T18:37:09.419879Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976710735, task: 2. Finish input channelId: 1, from: [7:7587124278964856783:2911] 2025-12-23T18:37:09.419921Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [7:7587124278964856784:2912], TxId: 281474976710735, task: 2. Ctx: { CheckpointId : . TraceId : 01kd680dcv2hpghkwxhswq03cx. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=OWQ5Y2FkMzQtNjg2NDJmYzItOTRmOWIyM2ItZDk2YTU3MmY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:37:09.420064Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7587124278964856784:2912], TxId: 281474976710735, task: 2. Ctx: { CheckpointId : . TraceId : 01kd680dcv2hpghkwxhswq03cx. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=OWQ5Y2FkMzQtNjg2NDJmYzItOTRmOWIyM2ItZDk2YTU3MmY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:37:09.420079Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [7:7587124278964856783:2911], TxId: 281474976710735, task: 1. Ctx: { CheckpointId : . TraceId : 01kd680dcv2hpghkwxhswq03cx. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=OWQ5Y2FkMzQtNjg2NDJmYzItOTRmOWIyM2ItZDk2YTU3MmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646927 2025-12-23T18:37:09.420108Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [7:7587124278964856783:2911], TxId: 281474976710735, task: 1. Ctx: { CheckpointId : . TraceId : 01kd680dcv2hpghkwxhswq03cx. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=OWQ5Y2FkMzQtNjg2NDJmYzItOTRmOWIyM2ItZDk2YTU3MmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2025-12-23T18:37:09.420134Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710735, task: 1. Tasks execution finished 2025-12-23T18:37:09.420150Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [7:7587124278964856783:2911], TxId: 281474976710735, task: 1. Ctx: { CheckpointId : . TraceId : 01kd680dcv2hpghkwxhswq03cx. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=OWQ5Y2FkMzQtNjg2NDJmYzItOTRmOWIyM2ItZDk2YTU3MmY=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2025-12-23T18:37:09.420290Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710735, task: 1. pass away 2025-12-23T18:37:09.420405Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710735;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:37:09.420960Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [7:7587124278964856784:2912], TxId: 281474976710735, task: 2. Ctx: { CheckpointId : . TraceId : 01kd680dcv2hpghkwxhswq03cx. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=OWQ5Y2FkMzQtNjg2NDJmYzItOTRmOWIyM2ItZDk2YTU3MmY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:37:09.421007Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710735, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-12-23T18:37:09.421019Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710735, task: 2. Tasks execution finished 2025-12-23T18:37:09.421032Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [7:7587124278964856784:2912], TxId: 281474976710735, task: 2. Ctx: { CheckpointId : . TraceId : 01kd680dcv2hpghkwxhswq03cx. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=OWQ5Y2FkMzQtNjg2NDJmYzItOTRmOWIyM2ItZDk2YTU3MmY=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:37:09.421091Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710735, task: 2. pass away 2025-12-23T18:37:09.421147Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710735;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails+isSink+isOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 21302, MsgBus: 11472 2025-12-23T18:37:02.949311Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124249014181403:2225];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:02.949378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:02.998095Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b40/r3tmp/tmpnDUpKi/pdisk_1.dat 2025-12-23T18:37:03.435787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:03.435894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:03.442595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:03.493700Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:03.537966Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:03.541066Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124249014181215:2081] 1766515022892623 != 1766515022892626 TServer::EnableGrpc on GrpcPort 21302, node 1 2025-12-23T18:37:03.774324Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:03.774857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:03.774878Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:03.774891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:03.774966Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:03.970536Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:11472 TClient is connected to server localhost:11472 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515023581 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:04.430199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:04.437587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:04.449792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:04.617284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:04.878073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:04.964334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:07.022352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124270489019564:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:07.022502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:07.024181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124270489019574:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:07.024260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:07.394251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:07.454890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:07.511899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:07.580317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:07.657959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:07.745979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:07.804268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:07.866066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:07.952450Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124249014181403:2225];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:07.956778Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:07.985205Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124270489020439:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:07.985268Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:07.985629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124270489020444:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:07.985669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124270489020445:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:07.985790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:07.990624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:08.007525Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124270489020448:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:08.102377Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124274783987800:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:10.537425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:10.593680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:10.647811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/Join1_2 1 19 /Root/Join1_1 8 136 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 14193, MsgBus: 23049 2025-12-23T18:37:03.942485Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124253484937518:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:03.942545Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b3b/r3tmp/tmpmkHpWP/pdisk_1.dat 2025-12-23T18:37:04.280678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:04.364988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:04.365103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:04.380553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:04.497089Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:04.506122Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124253484937492:2081] 1766515023940564 != 1766515023940567 2025-12-23T18:37:04.509983Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 14193, node 1 2025-12-23T18:37:04.718136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:04.718157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:04.718168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:04.718271Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:04.944540Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:23049 TClient is connected to server localhost:23049 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515024435 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:05.480851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:05.507004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:37:05.526184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:05.689054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:05.855812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:05.996143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:07.941499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124270664808563:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:07.941651Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:07.942127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124270664808573:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:07.942191Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:08.289429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:08.363896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:08.450851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:08.507682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:08.566397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:08.666817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:08.737619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:08.816207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:08.947206Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124253484937518:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:08.947290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:08.962028Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124274959776747:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:08.962134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:08.963245Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124274959776752:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:08.963298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124274959776753:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:08.963347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:08.967186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:08.980473Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124274959776756:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:37:09.059947Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124279254744104:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.1%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverTupleWithOptionals [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:119:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:119:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046447617 is [1:125:2150] sender: [1:129:2058] recipient: [1:112:2143] Leader for TabletID 72057594046316545 is [1:134:2157] sender: [1:135:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:139:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:33:20.072301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:33:20.072402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:20.072444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:33:20.072499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:33:20.072550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:33:20.072576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:33:20.072649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:33:20.072752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:33:20.073609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:33:20.073834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:33:20.200681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:33:20.200786Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:20.201756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:125:2150] sender: [1:196:2058] recipient: [1:15:2062] 2025-12-23T18:33:20.218504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:33:20.218601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:33:20.218718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:33:20.223102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:33:20.223345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:33:20.224098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:33:20.224379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:33:20.227448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:20.227691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:33:20.228974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:33:20.229040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:33:20.229187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:33:20.229237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:33:20.229280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:33:20.229479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:33:20.402473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.403588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.403734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.403806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.403922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.404008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.404079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.404177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.404274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.404344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.404488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.404577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:33:20.404658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 
in: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1661 } } CommitVersion { Step: 5000040 TxId: 1003 } 2025-12-23T18:37:11.445593Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-12-23T18:37:11.449896Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1661 } } CommitVersion { Step: 5000040 TxId: 1003 } 2025-12-23T18:37:11.450086Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1661 } } CommitVersion { Step: 5000040 TxId: 1003 } debug: NTableState::TProposedWaitParts operationId# 1003:0 2025-12-23T18:37:11.451029Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 841 RawX2: 420906797818 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-12-23T18:37:11.451092Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-12-23T18:37:11.451244Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 841 RawX2: 420906797818 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-12-23T18:37:11.451311Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:37:11.451424Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 841 RawX2: 420906797818 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-12-23T18:37:11.451516Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:37:11.451569Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:37:11.451620Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 
1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:37:11.451675Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:37:11.451726Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 129 -> 240 2025-12-23T18:37:11.467304Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:37:11.467624Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:37:11.468099Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:37:11.468180Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2025-12-23T18:37:11.468252Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-12-23T18:37:11.468305Z node 98 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2025-12-23T18:37:11.468399Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-12-23T18:37:11.468451Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 240 -> 240 2025-12-23T18:37:11.477797Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:37:11.477886Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-12-23T18:37:11.478024Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2025-12-23T18:37:11.478069Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-12-23T18:37:11.478122Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2025-12-23T18:37:11.478166Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-12-23T18:37:11.478215Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-12-23T18:37:11.478266Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-12-23T18:37:11.478312Z node 98 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2025-12-23T18:37:11.478354Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1003:0 
2025-12-23T18:37:11.478551Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2025-12-23T18:37:11.478598Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-12-23T18:37:11.488709Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-12-23T18:37:11.488779Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-12-23T18:37:11.489242Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-12-23T18:37:11.489357Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-12-23T18:37:11.489399Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [98:932:2892] TestWaitNotification: OK eventTxId 1003 2025-12-23T18:37:11.489928Z node 98 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:37:11.490166Z node 98 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 277us result status StatusSuccess 2025-12-23T18:37:11.500524Z node 98 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> KqpCost::IndexLookupAndTake+useSink |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpCost::VectorIndexLookup-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 23714, MsgBus: 7748 2025-12-23T18:37:04.596831Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124258640869117:2253];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:04.597067Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b3a/r3tmp/tmp88piZp/pdisk_1.dat 2025-12-23T18:37:05.202920Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:05.211504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:05.211650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:05.295768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:05.386882Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124258640868893:2081] 1766515024550074 != 1766515024550077 2025-12-23T18:37:05.413649Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:05.430679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 23714, node 1 2025-12-23T18:37:05.603332Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:05.683708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:05.683730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:05.683742Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:05.683819Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7748 TClient is connected to server localhost:7748 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515025478 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:06.386342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:06.399728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:37:06.425801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:06.651978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:06.868400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:06.989992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:09.422306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124280115707255:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:09.422430Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:09.423621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124280115707265:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:09.423689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:09.586844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124258640869117:2253];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:09.586916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:09.897935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:09.989123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:10.068659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:10.138945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:10.220544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:10.287348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:10.353198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:10.435630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:10.553444Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124284410675437:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.553532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.553852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124284410675442:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.553900Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124284410675443:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.553925Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.557522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:10.583455Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124284410675446:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:37:10.638127Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124284410675499:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> ResourcePoolClassifiersDdl::TestExplicitPoolId [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 >> KqpCost::ScanScriptingRangeFullScan-SourceRead >> KqpCost::WriteRow+isSink-isOlap |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 24892, MsgBus: 8458 2025-12-23T18:37:05.727474Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124260950408114:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:05.734196Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b36/r3tmp/tmp4dQCCO/pdisk_1.dat 2025-12-23T18:37:06.182946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:06.215359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:06.215450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:06.290783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:06.294842Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124260950407983:2081] 1766515025693446 != 1766515025693449 2025-12-23T18:37:06.297883Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24892, node 1 2025-12-23T18:37:06.469237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:06.495661Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:06.495683Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:06.495691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:06.495804Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:06.749974Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:8458 TClient is connected to server localhost:8458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515026353 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:07.322122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:07.350621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:07.679847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:07.951159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:08.083048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:10.489890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124282425246332:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.490016Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.490628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124282425246342:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.490689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.729560Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124260950408114:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:10.729634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:10.979676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.036965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.099792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.179338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.244539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.313292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.374079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.458086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.598286Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124286720214516:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:11.598371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:11.599019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124286720214521:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:11.599094Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124286720214522:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:11.599445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:11.604283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:11.639417Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124286720214525:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:11.715861Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124286720214576:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:13.670654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] Test command err: Trying to start YDB, gRPC: 31836, MsgBus: 10532 2025-12-23T18:37:05.779078Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124260977225359:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:05.779153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b37/r3tmp/tmpT71d0x/pdisk_1.dat 2025-12-23T18:37:06.316026Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:06.325933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:06.326297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:06.508002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:06.600378Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:06.603054Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124260977225336:2081] 1766515025773037 != 1766515025773040 TServer::EnableGrpc on GrpcPort 31836, node 1 2025-12-23T18:37:06.622902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:06.746635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:06.746660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:06.746669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:06.746752Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: 
got bad distributable configuration 2025-12-23T18:37:06.822903Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10532 TClient is connected to server localhost:10532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515026570 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:07.603713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:07.615531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:07.641837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:07.877643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:08.064655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:37:08.153130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:10.652851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124282452063684:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.652982Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.653632Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124282452063694:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.653680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:10.779503Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124260977225359:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:10.779578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:11.126817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.217288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.304313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.371811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.437897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.545087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.649364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.801143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:12.039185Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124291041999173:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:12.039288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:12.039706Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124291041999178:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:12.039740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124291041999179:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:12.039839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:12.044054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:12.074273Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124291041999182:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:12.155698Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124291041999233:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 >> KqpCost::WriteRowInsertFails-isSink-isOlap |93.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] |93.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |93.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> KqpCost::CTAS+isOlap >> KqpCost::IndexLookupJoin+StreamLookupJoin |93.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> KqpCost::OlapRange [GOOD] >> KqpCost::ScanQueryRangeFullScan-SourceRead >> KqpCost::AAARangeFullScan >> KqpWorkloadServiceDistributed::TestNodeDisconnect [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit >> KqpCost::CTASWithRetry-isOlap [GOOD] >> KqpCost::CTAS-isOlap [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] |93.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |93.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> KqpCost::IndexLookup+useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRange [GOOD] Test command err: Trying to start YDB, gRPC: 7878, MsgBus: 14034 2025-12-23T18:37:06.450885Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124265538351413:2236];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:06.451376Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b33/r3tmp/tmpiS6Gkv/pdisk_1.dat 2025-12-23T18:37:06.977969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-12-23T18:37:06.997508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:06.997604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:07.137640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:07.255975Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:07.266894Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124265538351206:2081] 1766515026423226 != 1766515026423229 2025-12-23T18:37:07.282838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 7878, node 1 2025-12-23T18:37:07.456448Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:07.512964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:07.512983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:07.512989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:07.513076Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14034 TClient is connected to server localhost:14034 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515027291 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:08.364712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:37:08.378768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:08.629444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:08.855075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:08.953364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:11.444468Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124265538351413:2236];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:11.444534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:12.564271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124291308156866:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:12.564363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:12.564656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124291308156876:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:12.564683Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:13.058929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:13.107865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:13.148798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:13.193614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:13.231498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:13.301991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:13.398235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:13.483684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:13.641937Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124295603125047:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:13.642030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:13.642405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124295603125052:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:13.642443Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124295603125053:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:13.642546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2 ... d=72075186224037935;tx_id=281474976715673;this=136254325218880;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1766515037179;max=18446744073709551615;plan=0;src=[1:7587124265538351545:2139];cookie=482:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.192658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7587124308488027435:2545];ev=NActors::IEventHandle;tablet_id=72075186224037930;tx_id=281474976715673;this=136254325221344;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1766515037192;max=18446744073709551615;plan=0;src=[1:7587124265538351545:2139];cookie=432:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.195703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;self_id=[1:7587124308488027412:2540];ev=NActors::IEventHandle;tablet_id=72075186224037934;tx_id=281474976715673;this=136254325222912;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1766515037195;max=18446744073709551615;plan=0;src=[1:7587124265538351545:2139];cookie=472:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.196410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;self_id=[1:7587124308488027463:2549];ev=NActors::IEventHandle;tablet_id=72075186224037932;tx_id=281474976715673;this=136254325223584;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1766515037196;max=18446744073709551615;plan=0;src=[1:7587124265538351545:2139];cookie=452:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.197070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;self_id=[1:7587124308488027539:2550];ev=NActors::IEventHandle;tablet_id=72075186224037933;tx_id=281474976715673;this=136254325224256;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1766515037196;max=18446744073709551615;plan=0;src=[1:7587124265538351545:2139];cookie=462:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.198051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;self_id=[1:7587124308488027413:2541];ev=NActors::IEventHandle;tablet_id=72075186224037928;tx_id=281474976715673;this=136254325256512;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1766515037197;max=18446744073709551615;plan=0;src=[1:7587124265538351545:2139];cookie=412:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.198988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7587124308488027411:2539];ev=NActors::IEventHandle;tablet_id=72075186224037936;tx_id=281474976715673;this=136254325257184;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1766515037198;max=18446744073709551615;plan=0;src=[1:7587124265538351545:2139];cookie=492:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.200037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037931;self_id=[1:7587124308488027440:2546];ev=NActors::IEventHandle;tablet_id=72075186224037931;tx_id=281474976715673;this=136254325258080;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1766515037199;max=18446744073709551615;plan=0;src=[1:7587124265538351545:2139];cookie=442:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.200616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;self_id=[1:7587124308488027445:2548];ev=NActors::IEventHandle;tablet_id=72075186224037929;tx_id=281474976715673;this=136254325226272;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1766515037200;max=18446744073709551615;plan=0;src=[1:7587124265538351545:2139];cookie=422:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.216828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.216939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.216964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.219142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.219195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.219212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.274091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.274187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.274207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.274308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.274357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.274372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.287059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.287127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.287147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.303724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.303794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.303810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.322338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.322415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.322432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.341545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.341618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.341634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.355897Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.355966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.355985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.365172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.365241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:17.365259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |93.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 25558, MsgBus: 7950 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0030c2/r3tmp/tmplxsfyh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25558, node 1 TClient is connected to server localhost:7950 TClient is connected to server localhost:7950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515019822 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
|93.1%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TS3WrapperTests::PutObject [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTAS-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 17944, MsgBus: 18813 2025-12-23T18:37:10.721151Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124285384599313:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:10.727075Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:10.806714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b23/r3tmp/tmpvLBDNh/pdisk_1.dat 2025-12-23T18:37:11.215972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:11.216083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:11.289473Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:11.303248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:11.362913Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17944, node 1 2025-12-23T18:37:11.584629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:11.701423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:11.701448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:11.701454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:11.701546Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:11.755092Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18813 TClient is connected to server localhost:18813 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515031435 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:12.502118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:12.513663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:12.529969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:12.684036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:12.874917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:12.961035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:15.705420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124306859437610:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.705632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.711227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124306859437620:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.711379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.722865Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124285384599313:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:15.722944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:16.228030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.298229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.355884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.429087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.509144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.606922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.748093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.868324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:17.070839Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124315449373091:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.070932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.071273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124315449373096:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.071309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124315449373097:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.071569Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.075592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:17.095301Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124315449373100:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:17.186910Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124315449373151:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:19.105656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:19.415583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 9731 table_access { name: "/Root/.tmp/sessions/ce4ba8ef-4fbe-77a7-a8d4-76b350381910/Root/TestTable2_6dba3fa6-4f60-0bf5-b112-db863f797fce" updates { rows: 4 bytes: 80 } partitions_count: 1 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 80 } partitions_count: 1 } cpu_time_us: 3807 affected_shards: 1 } compilation { duration_us: 10463 cpu_time_us: 6404 } process_cpu_time_us: 847 total_duration_us: 354570 total_cpu_time_us: 11058 2025-12-23T18:37:19.627834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:37:19.636404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710680, at schemeshard: 72057594046644480 2025-12-23T18:37:19.640782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |93.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] >> KqpCost::OlapPointLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTASWithRetry-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 24876, MsgBus: 24592 ... 
waiting for SysViewsRoster update finished 2025-12-23T18:37:07.025762Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:07.147411Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:37:07.159702Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:37:07.160154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:37:07.160246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:07.501524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:07.501655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:07.581866Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515023273137 != 1766515023273141 2025-12-23T18:37:07.604522Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:07.658682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:07.875108Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b3f/r3tmp/tmpfFaBx5/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24876, node 1 2025-12-23T18:37:08.330205Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:08.330292Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:08.330334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:08.330993Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:08.343581Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24592 TClient is connected to server localhost:24592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:37:08.953507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:09.004618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:09.353401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:09.997746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:10.372578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:11.377978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1909:3516], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:11.378339Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:11.383972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1983:3535], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:11.384147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:11.423794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:11.731690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:12.061566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:12.396414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:12.721705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:13.009012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:13.338552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:13.735002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:14.448244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2800:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:14.448472Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:14.448961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2803:4184], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:14.449146Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:14.449210Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2807:4188], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:14.469265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:14.699246Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2809:4190], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:37:14.752269Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2864:4226] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:17.705373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:18.819333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 1766515027930189 table_access { name: "/Root/.tmp/sessions/a55ba454-4576-1ced-2861-ffbc759e032f/Root/TestTable2_876868b6-4e40-a07c-dd19-7c963a1f52cc" updates { rows: 4 bytes: 80 } partitions_count: 1 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 80 } partitions_count: 1 } cpu_time_us: 5309 affected_shards: 1 } compilation { duration_us: 23586 cpu_time_us: 7407 } process_cpu_time_us: 1371 total_duration_us: 1440798 total_cpu_time_us: 14087 2025-12-23T18:37:19.703406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2025-12-23T18:37:22.912860Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# F4C76F39-3AAE-4394-BD97-CB54910AEA96, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:24307 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6EDF80CD-4AA2-43C3-A2E0-897D5E92CF38 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-12-23T18:37:22.934154Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# F4C76F39-3AAE-4394-BD97-CB54910AEA96, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 20444, MsgBus: 1801 2025-12-23T18:37:09.606210Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124280772419714:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:09.606990Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b26/r3tmp/tmpL2QmZT/pdisk_1.dat 2025-12-23T18:37:10.071104Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:10.075847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:10.075964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:10.132728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:10.266858Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124280772419676:2081] 1766515029598551 != 1766515029598554 2025-12-23T18:37:10.286061Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:10.330067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 20444, node 1 2025-12-23T18:37:10.627857Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:10.628203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:10.628210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:10.628219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:10.628296Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1801 TClient is connected to server localhost:1801 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515030371 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:37:11.726593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:11.733891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:11.752842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:12.086051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:12.334007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:12.454079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:14.607246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124280772419714:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:14.607315Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:15.292724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124306542225326:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.292875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.293276Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124306542225336:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.293331Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.805216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:15.858817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:15.896645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:15.933331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:15.976403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.052118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.163545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.345278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.540955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124310837193502:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.541047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.541540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124310837193507:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.541575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124310837193508:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.541605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.546001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:16.564198Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124310837193511:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:16.631440Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124310837193564:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:19.071568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 16452, MsgBus: 29922 2025-12-23T18:37:09.256579Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124279491837136:2211];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:09.256858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:09.274160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b29/r3tmp/tmpLAHIbk/pdisk_1.dat 2025-12-23T18:37:09.812820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:09.812914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:09.814892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:09.848230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:09.997065Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:10.034861Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124279491836938:2081] 1766515029167290 != 1766515029167293 2025-12-23T18:37:10.089951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 16452, node 1 2025-12-23T18:37:10.259286Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:10.339432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:10.339453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:10.339459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:10.339526Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29922 TClient is connected to server localhost:29922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515030077 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:11.203036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:11.218884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:11.235888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:11.675119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:11.992443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:12.124289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:14.259024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124279491837136:2211];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:14.259113Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:15.508532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124305261642594:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.508676Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.509363Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124305261642604:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.509404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.215853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.315326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.381254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.445174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.518473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.597671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.681464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.772444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.926307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124309556610792:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.926392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.931799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124309556610798:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.931855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124309556610799:2492], DatabaseId: /Root, PoolId: defau ... .h:526: SelfId: [1:7587124326736480356:2540], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kd680qs9fn0b7ez6cdn93hbd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:37:20.385938Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124326736480356:2540], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kd680qs9fn0b7ez6cdn93hbd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. CA StateFunc 271646922 2025-12-23T18:37:20.386010Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976710674, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-12-23T18:37:20.386034Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124326736480357:2541], TxId: 281474976710674, task: 2. Ctx: { TraceId : 01kd680qs9fn0b7ez6cdn93hbd. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646923 2025-12-23T18:37:20.386061Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976710674, task: 2. Finish input channelId: 1, from: [1:7587124326736480356:2540] 2025-12-23T18:37:20.386138Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124326736480357:2541], TxId: 281474976710674, task: 2. Ctx: { TraceId : 01kd680qs9fn0b7ez6cdn93hbd. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:37:20.386309Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7587124326736480357:2541], TxId: 281474976710674, task: 2. Ctx: { TraceId : 01kd680qs9fn0b7ez6cdn93hbd. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:37:20.386326Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124326736480356:2540], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kd680qs9fn0b7ez6cdn93hbd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=. CurrentExecutionId : . DatabaseId : /Root. 
Database : /Root. }. CA StateFunc 271646927 2025-12-23T18:37:20.386353Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124326736480356:2540], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kd680qs9fn0b7ez6cdn93hbd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. CA StateFunc 271646922 2025-12-23T18:37:20.386373Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710674, task: 1. Tasks execution finished 2025-12-23T18:37:20.386391Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [1:7587124326736480356:2540], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kd680qs9fn0b7ez6cdn93hbd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:37:20.386509Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710674, task: 1. pass away 2025-12-23T18:37:20.386625Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:37:20.386771Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710674, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:37:20.387018Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:326: ActorId: [1:7587124326736480352:2532] TxId: 281474976710674. Ctx: { TraceId: 01kd680qs9fn0b7ez6cdn93hbd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=, PoolId: default, IsStreamingQuery: 0}. Send TEvStreamData to [1:7587124326736480324:2532], seqNo: 1, nRows: 1 2025-12-23T18:37:20.387176Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [1:7587124326736480352:2532] TxId: 281474976710674. Ctx: { TraceId: 01kd680qs9fn0b7ez6cdn93hbd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [1:7587124326736480356:2540], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 21552 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 1190 FinishTimeMs: 1766515040386 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } IngressRows: 3 ComputeCpuTimeUs: 145 BuildCpuTimeUs: 1045 HostName: "ghrun-ou4bsyjxme" NodeId: 1 StartTimeMs: 1766515040385 CreateTimeMs: 1766515040362 UpdateTimeMs: 1766515040386 } MaxMemoryUsage: 1048576 } 2025-12-23T18:37:20.387236Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710674. Ctx: { TraceId: 01kd680qs9fn0b7ez6cdn93hbd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7587124326736480356:2540] 2025-12-23T18:37:20.387288Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [1:7587124326736480352:2532] TxId: 281474976710674. Ctx: { TraceId: 01kd680qs9fn0b7ez6cdn93hbd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7587124326736480357:2541], 2025-12-23T18:37:20.387467Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515040 AvailableComputeActors: 9999 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 9999 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2025-12-23T18:37:20.390360Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:515: TxId: 281474976710674, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388468, to: [1:7587124326736480359:2541] 2025-12-23T18:37:20.390415Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124326736480357:2541], TxId: 281474976710674, task: 2. Ctx: { TraceId : 01kd680qs9fn0b7ez6cdn93hbd. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:37:20.390478Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710674, task: 2. 
Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-12-23T18:37:20.390486Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710674, task: 2. Tasks execution finished 2025-12-23T18:37:20.390496Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [1:7587124326736480357:2541], TxId: 281474976710674, task: 2. Ctx: { TraceId : 01kd680qs9fn0b7ez6cdn93hbd. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:37:20.390558Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710674, task: 2. pass away 2025-12-23T18:37:20.390626Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710674;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:37:20.390797Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710674, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:37:20.390882Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 2025-12-23T18:37:22.384606Z, after 1.994386s 2025-12-23T18:37:20.391001Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [1:7587124326736480352:2532] TxId: 281474976710674. Ctx: { TraceId: 01kd680qs9fn0b7ez6cdn93hbd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [1:7587124326736480357:2541], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 3885 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 1053 FinishTimeMs: 1766515040390 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 153 BuildCpuTimeUs: 900 HostName: "ghrun-ou4bsyjxme" NodeId: 1 CreateTimeMs: 1766515040374 UpdateTimeMs: 1766515040390 } MaxMemoryUsage: 1048576 } 2025-12-23T18:37:20.391047Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710674. Ctx: { TraceId: 01kd680qs9fn0b7ez6cdn93hbd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7587124326736480357:2541] 2025-12-23T18:37:20.391183Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [1:7587124326736480352:2532] TxId: 281474976710674. Ctx: { TraceId: 01kd680qs9fn0b7ez6cdn93hbd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=, PoolId: default, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:37:20.391227Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [1:7587124326736480352:2532] TxId: 281474976710674. Ctx: { TraceId: 01kd680qs9fn0b7ez6cdn93hbd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjJiMzc0NGEtMzliMGI2NjItZGI5NDA1ZTYtNDJmOTE0NTE=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval: ComputeTime: 0.025437s ReadRows: 1 ReadBytes: 20 ru: 16 rate limiter was not found force flag: 1 2025-12-23T18:37:20.391958Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515040402, txId: 281474976710673] shutting down |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 >> KqpCost::IndexLookupAndTake-useSink [GOOD] >> TS3WrapperTests::GetUnknownObject >> TS3WrapperTests::MultipartUpload >> TS3WrapperTests::GetUnknownObject [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 23796, MsgBus: 7509 2025-12-23T18:37:12.744606Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124291697887815:2194];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:12.744764Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b21/r3tmp/tmp64kFjr/pdisk_1.dat 2025-12-23T18:37:13.159164Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:13.181309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:13.181395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:13.281378Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:13.296085Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124291697887646:2081] 1766515032666926 != 1766515032666929 2025-12-23T18:37:13.326096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:13.358889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 23796, node 1 2025-12-23T18:37:13.541026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:13.541051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:13.541061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:13.541143Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:13.755992Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7509 TClient is connected to server localhost:7509 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515033402 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:14.569544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:14.582944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:14.603195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:14.844997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:15.085170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:15.207274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:17.249707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124313172726006:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.249822Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.254568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124313172726015:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.254644Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.688350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:17.734581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:17.743951Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124291697887815:2194];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:17.744076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:17.780325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:17.830400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:17.865102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:17.907455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:17.957962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:18.012923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:18.126467Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124317467694181:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:18.126553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:18.126995Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124317467694186:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:18.127047Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124317467694187:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:18.127080Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:18.131974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:18.155031Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124317467694190:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:18.259325Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124317467694243:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:20.605547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] >> TS3WrapperTests::HeadObject >> TS3WrapperTests::AbortMultipartUpload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 26602, MsgBus: 10705 2025-12-23T18:37:10.537980Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124283766039140:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:10.538056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:10.611427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b24/r3tmp/tmpvWIaNv/pdisk_1.dat 2025-12-23T18:37:11.006735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:11.019974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:11.036730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:11.162381Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:11.182195Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124283766038929:2081] 1766515030492820 != 1766515030492823 2025-12-23T18:37:11.184487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26602, node 1 2025-12-23T18:37:11.437486Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2025-12-23T18:37:11.478219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:11.478243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:11.478249Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:11.478405Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:11.542910Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10705 TClient is connected to server localhost:10705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515031253 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:12.300985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:12.313503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:12.587959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:12.810612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:12.881559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:15.030088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124305240877286:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.030215Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.030576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124305240877296:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.030621Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.399173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:15.478405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:15.542963Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124283766039140:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:15.543054Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:15.639731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:15.742672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:15.832308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:15.932868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.029334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.191286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.532729Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124309535845480:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.532857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.533377Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124309535845485:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.533422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124309535845486:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.533456Z node 1 :KQP_WORKLOAD_SE ... 72075186224037932;tx_id=281474976710673;this=136558738367360;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1766515040652;max=18446744073709551615;plan=0;src=[1:7587124283766039302:2156];cookie=452:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.655956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7587124326715715139:2543];ev=NActors::IEventHandle;tablet_id=72075186224037930;tx_id=281474976710673;this=136558738368032;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1766515040655;max=18446744073709551615;plan=0;src=[1:7587124283766039302:2156];cookie=432:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.659961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7587124326715715141:2545];ev=NActors::IEventHandle;tablet_id=72075186224037936;tx_id=281474976710673;this=136558738346976;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1766515040659;max=18446744073709551615;plan=0;src=[1:7587124283766039302:2156];cookie=492:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.660573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7587124326715715136:2540];ev=NActors::IEventHandle;tablet_id=72075186224037931;tx_id=281474976710673;this=136558738370496;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1766515040660;max=18446744073709551615;plan=0;src=[1:7587124283766039302:2156];cookie=442:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.663302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;self_id=[1:7587124326715715150:2547];ev=NActors::IEventHandle;tablet_id=72075186224037933;tx_id=281474976710673;this=136558738371168;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1766515040662;max=18446744073709551615;plan=0;src=[1:7587124283766039302:2156];cookie=462:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.664835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;self_id=[1:7587124326715715154:2549];ev=NActors::IEventHandle;tablet_id=72075186224037929;tx_id=281474976710673;this=136558738348768;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1766515040664;max=18446744073709551615;plan=0;src=[1:7587124283766039302:2156];cookie=422:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.665433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;self_id=[1:7587124326715715142:2546];ev=NActors::IEventHandle;tablet_id=72075186224037935;tx_id=281474976710673;this=136558738349440;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1766515040665;max=18446744073709551615;plan=0;src=[1:7587124283766039302:2156];cookie=482:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.667958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037927;self_id=[1:7587124326715715137:2541];ev=NActors::IEventHandle;tablet_id=72075186224037927;tx_id=281474976710673;this=136558738350784;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1766515040667;max=18446744073709551615;plan=0;src=[1:7587124283766039302:2156];cookie=402:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.669905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;self_id=[1:7587124326715715138:2542];ev=NActors::IEventHandle;tablet_id=72075186224037928;tx_id=281474976710673;this=136558738373856;method=TTxController::StartProposeOnExecute;tx_info=281474976710673:TX_KIND_SCHEMA;min=1766515040669;max=18446744073709551615;plan=0;src=[1:7587124283766039302:2156];cookie=412:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.674786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.674844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.674923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.674947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.675171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.675201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.720329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.720394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.720410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.720445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.720492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.720506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.730866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.730935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.730950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.739189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.739254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.739270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.750063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.750136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.750155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.753280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.753364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.753381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.761820Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.761884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.761899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.770220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.770314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:20.770335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TS3WrapperTests::CompleteUnknownUpload >> TS3WrapperTests::HeadObject [GOOD] |93.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2025-12-23T18:37:25.710227Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 240189AF-B8B4-4F93-A4F4-28B4F893F3E9, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:18701 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 37B706A8-6251-4697-9E34-7C789D5855C4 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-12-23T18:37:25.716435Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 240189AF-B8B4-4F93-A4F4-28B4F893F3E9, response# No response body. 
|93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 >> TS3WrapperTests::CompleteUnknownUpload [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] Test command err: 2025-12-23T18:37:26.057893Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 6C29B947-8F0F-4A29-8B4D-9148EB183479, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:62078 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3C5F295D-2398-4FD1-8A8D-34EC36E0F91B amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-12-23T18:37:26.093143Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 6C29B947-8F0F-4A29-8B4D-9148EB183479, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-12-23T18:37:26.095005Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 34FBA9DB-8053-49D4-936B-6244075DD6D7, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:62078 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9BAF376F-2224-4F3A-9EDF-5C6A8FB0ADB0 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2025-12-23T18:37:26.110640Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 34FBA9DB-8053-49D4-936B-6244075DD6D7, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-12-23T18:37:26.111374Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 51F1E5CA-656D-4B14-ABCD-CF2F1AE59697, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:62078 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1B7FAC7B-83B1-4347-9732-5DCAE024BAFA amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2025-12-23T18:37:26.120720Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 51F1E5CA-656D-4B14-ABCD-CF2F1AE59697, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-12-23T18:37:26.121527Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 465ABACB-C466-4846-B0C1-B9465A645528, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:62078 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: 
AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 732E0E53-4F17-4F43-8D10-45D7BEE9BCED amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-12-23T18:37:26.125105Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 465ABACB-C466-4846-B0C1-B9465A645528, response# GetObjectResult { } |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 16289, MsgBus: 17864 2025-12-23T18:37:13.185843Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124297915343709:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:13.186343Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b20/r3tmp/tmp5fsLWD/pdisk_1.dat 2025-12-23T18:37:13.306955Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:13.774887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:13.977168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:13.977281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:14.018543Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:14.031235Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124297915343663:2081] 1766515033089082 != 1766515033089085 2025-12-23T18:37:14.163092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:14.164205Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 16289, node 1 2025-12-23T18:37:14.199005Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:14.443464Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:14.443499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:14.443507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2025-12-23T18:37:14.443605Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17864 TClient is connected to server localhost:17864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515034228 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:15.635910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:15.645259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:15.654327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:15.899296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:16.135979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:16.316074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:18.158608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124297915343709:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:18.158693Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:19.217052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124323685149334:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:19.217176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:19.217977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124323685149344:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:19.218026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:19.597145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:19.649325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:19.716280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:19.781403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:19.849824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:19.938310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:20.015351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:20.114198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:20.235957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124327980117516:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:20.236037Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:20.236334Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124327980117521:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:20.236367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124327980117522:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:20.236394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:20.240841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:20.261384Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124327980117525:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:20.330126Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124327980117578:3789] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:23.563035Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515043566, txId: 281474976710673] shutting down ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2025-12-23T18:37:26.369964Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 2B807A04-4446-4159-8EFE-07075A94C726, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:23124 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FAFB1D55-EC5D-41CE-8229-FD749357689F amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-12-23T18:37:26.385440Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 2B807A04-4446-4159-8EFE-07075A94C726, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-12-23T18:37:26.386111Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 30B2E33C-B90B-47D6-82B7-7C8BD82C7A89, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:23124 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 94CFB5B3-08E5-41B0-A371-90B26CF1A020 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2025-12-23T18:37:26.403458Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 30B2E33C-B90B-47D6-82B7-7C8BD82C7A89, response# AbortMultipartUploadResult { } 2025-12-23T18:37:26.404036Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 0D871CC8-6EED-4A93-B721-8BA85CF3E090, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:23124 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FCE0492B-A21F-4AB2-9C51-80469B12994B amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-12-23T18:37:26.415733Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 0D871CC8-6EED-4A93-B721-8BA85CF3E090, response# No response body. 
|93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 18946, MsgBus: 62430 2025-12-23T18:37:13.024603Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124298278706998:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:13.037717Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b1e/r3tmp/tmpWEVvs1/pdisk_1.dat 2025-12-23T18:37:13.491090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:13.491186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:13.653799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:13.665907Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124293983739670:2081] 1766515033010728 != 1766515033010731 2025-12-23T18:37:13.679316Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:13.687621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18946, node 1 2025-12-23T18:37:13.979128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:14.023776Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:14.023797Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:14.023808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:14.023901Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:14.047471Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:62430 TClient is connected to server localhost:62430 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515033752 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:14.794639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:14.820338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:14.975389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:15.177188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:15.296410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:18.021357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124298278706998:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:18.021437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:18.144640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124319753545333:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:18.144747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:18.145068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124319753545343:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:18.145109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:18.572896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:18.615324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:18.665695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:18.726590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:18.775893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:18.842496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:18.894602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:18.953911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:19.069932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124324048513517:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:19.070013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:19.070396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124324048513522:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:19.070438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124324048513523:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:19.070471Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:19.074907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:19.088088Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124324048513526:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:19.185419Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124324048513581:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:22.311432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2025-12-23T18:37:26.660041Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# F3555BA4-8184-406D-A6AA-A2247A799051, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:2268 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5FC2DF8D-8FD9-4C0D-8C3B-14F2E905A760 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-12-23T18:37:26.679406Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# F3555BA4-8184-406D-A6AA-A2247A799051, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-12-23T18:37:26.680114Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 7312FE0A-6D54-4CAF-9AD8-30B00E97B42F, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:2268 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 68BE3EC9-6FCD-4A48-8EC0-6E3580DF2AF3 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-12-23T18:37:26.696274Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 7312FE0A-6D54-4CAF-9AD8-30B00E97B42F, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject |93.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |93.1%| [LD] {RESULT} $(B)/ydb/core/mon/ut/ydb-core-mon-ut ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2025-12-23T18:37:26.960617Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 62C3042F-FE26-459C-B5FB-E806086B8656, request# CompleteMultipartUpload { Bucket: TEST Key: key 
UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:9281 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C608959A-D17D-4688-B88D-00208154D9A0 amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2025-12-23T18:37:26.968540Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 62C3042F-FE26-459C-B5FB-E806086B8656, response# |93.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |93.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut >> TS3WrapperTests::HeadUnknownObject [GOOD] >> TS3WrapperTests::UploadUnknownPart |93.1%| [LD] {RESULT} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |93.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut >> IcebergClusterProcessor::ValidateDdlCreationForHiveWithS3 [GOOD] >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 >> IcebergClusterProcessor::ValidateDdlCreationForHadoopWithS3 [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] >> TS3WrapperTests::UploadUnknownPart [GOOD] >> TS3WrapperTests::GetObject >> BsControllerConfig::MergeBoxes [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 >> KqpCost::WriteRowInsertFails+isSink-isOlap [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] Test command err: test case: 1 test case: 2 test case: 3 test case: 4 test case: 5 test case: 6 test case: 7 test case: 8 test case: 9 |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] >> TS3WrapperTests::GetObject [GOOD] |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2025-12-23T18:37:28.481109Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# FF6BFF6B-27B0-4768-B2B4-D5EF1CB3FFEB, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:5607 
Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B3ED8C0B-3144-4D3D-9F4D-72B21B9D29AD amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2025-12-23T18:37:28.499386Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# FF6BFF6B-27B0-4768-B2B4-D5EF1CB3FFEB, response# No response body. |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> KqpCost::IndexLookupAndTake+useSink [GOOD] >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> SplitterBasic::LimitExceed [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2025-12-23T18:37:29.013560Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# D68F0F49-D634-44A2-97DD-C74963E41519, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:12690 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B010177B-17DF-43EC-BA5D-DA4A38C63299 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2025-12-23T18:37:29.025468Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# D68F0F49-D634-44A2-97DD-C74963E41519, response# |93.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |93.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |93.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |93.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> EscapingBasics::HideSecretsShouldWork [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] >> EntityId::Distinct [GOOD] >> EntityId::MaxId [GOOD] >> EntityId::CheckId [GOOD] >> EntityId::Order >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 >> IssuesTextFiltering::ShouldRemoveDatabasePath >> Cache::Test1 [GOOD] >> Cache::Test2 [GOOD] >> Cache::Test3 [GOOD] >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> TS3WrapperTests::AbortUnknownUpload |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11114:2156] recipient: [1:10914:2167] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11114:2156] recipient: [1:10914:2167] Leader for TabletID 72057594037932033 is [1:11116:2169] sender: [1:11117:2156] recipient: [1:10914:2167] 2025-12-23T18:36:04.456701Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-12-23T18:36:04.458198Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828673 Event# 
NKikimr::TEvTablet::TEvRestored 2025-12-23T18:36:04.458622Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-12-23T18:36:04.462311Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:36:04.462726Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-12-23T18:36:04.463267Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2088} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:04.463313Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-12-23T18:36:04.463789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-12-23T18:36:04.474092Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-12-23T18:36:04.474256Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-12-23T18:36:04.474440Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-12-23T18:36:04.474553Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:04.474637Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-12-23T18:36:04.474760Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:11116:2169] sender: [1:11138:2156] recipient: [1:110:2157] 2025-12-23T18:36:04.489073Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-12-23T18:36:04.489246Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:04.575754Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-12-23T18:36:04.575938Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:04.576024Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-12-23T18:36:04.576104Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:04.576212Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-12-23T18:36:04.576274Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:04.576326Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-12-23T18:36:04.576398Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:04.589010Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-12-23T18:36:04.589189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:04.600128Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-12-23T18:36:04.600287Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-12-23T18:36:04.601670Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-12-23T18:36:04.601754Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2219} LoadFinished 2025-12-23T18:36:04.601985Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-12-23T18:36:04.602043Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-12-23T18:36:04.620247Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: 
"::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 
12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Ke ... } Create new pdisk PDiskId# 275:1002 Path# /dev/disk3 2025-12-23T18:37:21.251086Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1000 Path# /dev/disk1 2025-12-23T18:37:21.251112Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1001 Path# /dev/disk2 2025-12-23T18:37:21.251156Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1002 Path# /dev/disk3 2025-12-23T18:37:21.251202Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1000 Path# /dev/disk1 2025-12-23T18:37:21.251230Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2025-12-23T18:37:21.251283Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1002 Path# /dev/disk3 2025-12-23T18:37:21.251325Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1000 Path# /dev/disk1 2025-12-23T18:37:21.251361Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2025-12-23T18:37:21.251390Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1002 Path# /dev/disk3 2025-12-23T18:37:21.251416Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1000 Path# /dev/disk1 2025-12-23T18:37:21.251444Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2025-12-23T18:37:21.251473Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1002 Path# /dev/disk3 2025-12-23T18:37:21.251500Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1000 Path# /dev/disk1 2025-12-23T18:37:21.251537Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2025-12-23T18:37:21.251575Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1002 Path# /dev/disk3 2025-12-23T18:37:21.251602Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1000 Path# /dev/disk1 2025-12-23T18:37:21.251631Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2025-12-23T18:37:21.251659Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1002 Path# /dev/disk3 2025-12-23T18:37:21.251686Z 
node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1000 Path# /dev/disk1 2025-12-23T18:37:21.251730Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2025-12-23T18:37:21.251784Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1002 Path# /dev/disk3 2025-12-23T18:37:21.251820Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1000 Path# /dev/disk1 2025-12-23T18:37:21.251862Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2025-12-23T18:37:21.251894Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1002 Path# /dev/disk3 2025-12-23T18:37:21.251920Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1000 Path# /dev/disk1 2025-12-23T18:37:21.251949Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2025-12-23T18:37:21.251976Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1002 Path# /dev/disk3 2025-12-23T18:37:21.252002Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1000 Path# /dev/disk1 2025-12-23T18:37:21.252028Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2025-12-23T18:37:21.252064Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1002 Path# /dev/disk3 2025-12-23T18:37:21.252100Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1000 Path# /dev/disk1 2025-12-23T18:37:21.252128Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1001 Path# /dev/disk2 2025-12-23T18:37:21.252156Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1002 Path# /dev/disk3 2025-12-23T18:37:21.252197Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1000 Path# /dev/disk1 2025-12-23T18:37:21.252226Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2025-12-23T18:37:21.252271Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1002 Path# /dev/disk3 2025-12-23T18:37:21.252299Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1000 Path# /dev/disk1 2025-12-23T18:37:21.252325Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2025-12-23T18:37:21.252352Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1002 Path# /dev/disk3 2025-12-23T18:37:21.252393Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1000 Path# /dev/disk1 2025-12-23T18:37:21.252432Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1001 Path# /dev/disk2 2025-12-23T18:37:21.252499Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1002 Path# /dev/disk3 2025-12-23T18:37:21.252528Z 
node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1000 Path# /dev/disk1 2025-12-23T18:37:21.252573Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2025-12-23T18:37:21.252602Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1002 Path# /dev/disk3 2025-12-23T18:37:21.252638Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1000 Path# /dev/disk1 2025-12-23T18:37:21.252690Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2025-12-23T18:37:21.252722Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1002 Path# /dev/disk3 2025-12-23T18:37:21.252747Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1000 Path# /dev/disk1 2025-12-23T18:37:21.252773Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1001 Path# /dev/disk2 2025-12-23T18:37:21.252814Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2025-12-23T18:37:21.252865Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1000 Path# /dev/disk1 2025-12-23T18:37:21.252897Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1001 Path# /dev/disk2 2025-12-23T18:37:21.252927Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1002 Path# /dev/disk3 2025-12-23T18:37:21.252956Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1000 Path# /dev/disk1 2025-12-23T18:37:21.252989Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2025-12-23T18:37:21.253036Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2025-12-23T18:37:21.253065Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1000 Path# /dev/disk1 2025-12-23T18:37:21.253092Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1001 Path# /dev/disk2 2025-12-23T18:37:21.253120Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2025-12-23T18:37:21.253155Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1000 Path# /dev/disk1 2025-12-23T18:37:21.253192Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1001 Path# /dev/disk2 2025-12-23T18:37:21.253221Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2025-12-23T18:37:21.253259Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1000 Path# /dev/disk1 2025-12-23T18:37:21.253305Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1001 Path# /dev/disk2 2025-12-23T18:37:21.253336Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2025-12-23T18:37:21.253364Z 
node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1000 Path# /dev/disk1 2025-12-23T18:37:21.253406Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2025-12-23T18:37:21.253436Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2025-12-23T18:37:21.253462Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1000 Path# /dev/disk1 2025-12-23T18:37:21.253504Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1001 Path# /dev/disk2 2025-12-23T18:37:21.253558Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2025-12-23T18:37:21.253590Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1000 Path# /dev/disk1 2025-12-23T18:37:21.253616Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1001 Path# /dev/disk2 2025-12-23T18:37:21.253646Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2025-12-23T18:37:21.519754Z node 251 :BS_CONTROLLER ERROR: {BSC07@impl.h:2212} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.271091s 2025-12-23T18:37:21.519980Z node 251 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.271344s 2025-12-23T18:37:21.569255Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2025-12-23T18:37:21.596797Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [GOOD] >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> Cache::Test4 [GOOD] >> Cache::Test5 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2025-12-23T18:37:29.500673Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 9FB31612-52B0-4C31-8B1D-C96A2CF74B91, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:15196 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 92E7ECE9-A638-4179-B2E4-7C3721803A4F amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-12-23T18:37:29.522068Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 9FB31612-52B0-4C31-8B1D-C96A2CF74B91, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-12-23T18:37:29.522991Z node 1 
:S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 72444300-C7FC-4533-B4FC-210E89FEAD2C, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:15196 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9DB5840D-3013-45C9-A00F-D0E8FD8CC4AE amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-12-23T18:37:29.543470Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 72444300-C7FC-4533-B4FC-210E89FEAD2C, response# GetObjectResult { } |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |93.2%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test3 [GOOD] >> TS3WrapperTests::AbortUnknownUpload [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] |93.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |93.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |93.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |93.2%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> Cache::Test5 [GOOD] >> Cache::Test6 |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |93.2%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... 
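The TS3WrapperTests::GetObject output above captures a complete PutObject followed by a ranged GetObject against the local S3 mock, issued through aws-sdk-cpp/1.11.37 (the SDK named in the request headers). Below is a minimal sketch of the same exchange written directly against aws-sdk-cpp; the endpoint port, bucket and key are taken from the log, while the client configuration, payload and output handling are illustrative assumptions, not the test's actual code.

// --- illustrative sketch, not part of the test output ---
#include <aws/core/Aws.h>
#include <aws/core/utils/memory/stl/AWSStringStream.h>
#include <aws/s3/S3Client.h>
#include <aws/s3/model/PutObjectRequest.h>
#include <aws/s3/model/GetObjectRequest.h>
#include <iostream>

int main() {
    Aws::SDKOptions options;
    Aws::InitAPI(options);
    {
        Aws::Client::ClientConfiguration cfg;
        cfg.endpointOverride = "localhost:15196";  // S3 mock port from the log above
        cfg.scheme = Aws::Http::Scheme::HTTP;

        // Legacy constructor overload: unsigned payloads and path-style addressing,
        // which a local mock typically requires.
        Aws::S3::S3Client s3(cfg,
                             Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never,
                             /*useVirtualAddressing=*/false);

        // PUT /TEST/key with a 4-byte body, as in the logged request.
        Aws::S3::Model::PutObjectRequest put;
        put.SetBucket("TEST");
        put.SetKey("key");
        auto body = Aws::MakeShared<Aws::StringStream>("put-body");
        *body << "test";  // illustrative payload; the test's real bytes are not shown in the log
        put.SetBody(body);
        auto putOutcome = s3.PutObject(put);

        // GET /TEST/key with "range: bytes=0-3", matching the logged GetObject request.
        Aws::S3::Model::GetObjectRequest get;
        get.SetBucket("TEST");
        get.SetKey("key");
        get.SetRange("bytes=0-3");
        auto getOutcome = s3.GetObject(get);
        if (putOutcome.IsSuccess() && getOutcome.IsSuccess()) {
            std::cout << getOutcome.GetResult().GetBody().rdbuf() << std::endl;
        }
    }
    Aws::ShutdownAPI(options);
    return 0;
}
// --- end of sketch ---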
results_accumulator.log} >> KqpCost::AAARangeFullScan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails+isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 1979, MsgBus: 3911 2025-12-23T18:37:15.238509Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124306527448915:2138];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:15.238689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:15.295133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b19/r3tmp/tmpnUpwS8/pdisk_1.dat 2025-12-23T18:37:15.886920Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:16.159131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:16.159247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:16.175035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:16.246902Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124306527448815:2081] 1766515035210936 != 1766515035210939 2025-12-23T18:37:16.278761Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:16.279239Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:16.312142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 1979, node 1 2025-12-23T18:37:16.611490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:16.611512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:16.611522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:16.611640Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3911 TClient is connected to server localhost:3911 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515036216 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:37:17.890814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:37:17.912349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:18.181960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:18.398857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:18.523496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:20.236578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124306527448915:2138];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:20.236639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:21.511970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124332297254464:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:21.512128Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:21.519338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124332297254474:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:21.519416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:22.373569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:22.440936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:22.487788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:22.531044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:22.593640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:22.648116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:22.721607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:22.845904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:23.000504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124336592222649:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:23.000623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:23.007192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124336592222654:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:23.007262Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124336592222655:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:23.007430Z node 1 :KQP_WORKLOAD_SERVICE ... ssue_code: 2012 severity: 1 } } query_phases { duration_us: 7003 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 1949 } compilation { duration_us: 100384 cpu_time_us: 94126 } process_cpu_time_us: 908 total_duration_us: 109592 total_cpu_time_us: 96983 2025-12-23T18:37:27.418671Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=6; 2025-12-23T18:37:27.419028Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [1:7587124358067059899:2542], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7587124349477124937:2542]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7587124358067059899:2542].{
: Error: Conflict with existing key., code: 2012 } 2025-12-23T18:37:27.419103Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587124358067059892:2542], SessionActorId: [1:7587124349477124937:2542], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7587124349477124937:2542]. 2025-12-23T18:37:27.419325Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=OTBjMjMwN2QtNjgwOGQ3ZGQtN2Q2Y2Q0MTQtYWJjZjFjNzA=, ActorId: [1:7587124349477124937:2542], ActorState: ExecuteState, TraceId: 01kd680yt2531mp4je234abyk3, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7587124358067059893:2542] from: [1:7587124358067059892:2542] 2025-12-23T18:37:27.419401Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587124358067059893:2542] TxId: 281474976715687. Ctx: { TraceId: 01kd680yt2531mp4je234abyk3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTBjMjMwN2QtNjgwOGQ3ZGQtN2Q2Y2Q0MTQtYWJjZjFjNzA=, PoolId: default, IsStreamingQuery: 0}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-12-23T18:37:27.419648Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=OTBjMjMwN2QtNjgwOGQ3ZGQtN2Q2Y2Q0MTQtYWJjZjFjNzA=, ActorId: [1:7587124349477124937:2542], ActorState: ExecuteState, TraceId: 01kd680yt2531mp4je234abyk3, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } query_phases { duration_us: 5069 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1837 } compilation { duration_us: 175564 cpu_time_us: 169114 } process_cpu_time_us: 914 total_duration_us: 184945 total_cpu_time_us: 171865 2025-12-23T18:37:27.555500Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=7; 2025-12-23T18:37:27.556973Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [1:7587124358067059922:2542], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7587124349477124937:2542]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7587124358067059922:2542].{
: Error: Conflict with existing key., code: 2012 } 2025-12-23T18:37:27.557062Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587124358067059915:2542], SessionActorId: [1:7587124349477124937:2542], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7587124349477124937:2542]. 2025-12-23T18:37:27.557264Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=OTBjMjMwN2QtNjgwOGQ3ZGQtN2Q2Y2Q0MTQtYWJjZjFjNzA=, ActorId: [1:7587124349477124937:2542], ActorState: ExecuteState, TraceId: 01kd680z04f8eqky2s8nbcr1sf, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7587124358067059916:2542] from: [1:7587124358067059915:2542] 2025-12-23T18:37:27.557343Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587124358067059916:2542] TxId: 281474976715689. Ctx: { TraceId: 01kd680z04f8eqky2s8nbcr1sf, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTBjMjMwN2QtNjgwOGQ3ZGQtN2Q2Y2Q0MTQtYWJjZjFjNzA=, PoolId: default, IsStreamingQuery: 0}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-12-23T18:37:27.557637Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=OTBjMjMwN2QtNjgwOGQ3ZGQtN2Q2Y2Q0MTQtYWJjZjFjNzA=, ActorId: [1:7587124349477124937:2542], ActorState: ExecuteState, TraceId: 01kd680z04f8eqky2s8nbcr1sf, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } query_phases { duration_us: 9865 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1880 } compilation { duration_us: 115029 cpu_time_us: 107402 } process_cpu_time_us: 747 total_duration_us: 128753 total_cpu_time_us: 110029 2025-12-23T18:37:27.843855Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=8; 2025-12-23T18:37:27.844215Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [1:7587124358067059948:2542], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7587124349477124937:2542]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7587124358067059948:2542].{
: Error: Conflict with existing key., code: 2012 } 2025-12-23T18:37:27.844278Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587124358067059941:2542], SessionActorId: [1:7587124349477124937:2542], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7587124349477124937:2542]. 2025-12-23T18:37:27.844533Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=OTBjMjMwN2QtNjgwOGQ3ZGQtN2Q2Y2Q0MTQtYWJjZjFjNzA=, ActorId: [1:7587124349477124937:2542], ActorState: ExecuteState, TraceId: 01kd680z4fcvcbma8y9r68q77m, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7587124358067059942:2542] from: [1:7587124358067059941:2542] 2025-12-23T18:37:27.844612Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587124358067059942:2542] TxId: 281474976715691. Ctx: { TraceId: 01kd680z4fcvcbma8y9r68q77m, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTBjMjMwN2QtNjgwOGQ3ZGQtN2Q2Y2Q0MTQtYWJjZjFjNzA=, PoolId: default, IsStreamingQuery: 0}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-12-23T18:37:27.844903Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=OTBjMjMwN2QtNjgwOGQ3ZGQtN2Q2Y2Q0MTQtYWJjZjFjNzA=, ActorId: [1:7587124349477124937:2542], ActorState: ExecuteState, TraceId: 01kd680z4fcvcbma8y9r68q77m, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } query_phases { duration_us: 8388 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 2 bytes: 40 } partitions_count: 1 } cpu_time_us: 1721 } compilation { duration_us: 265902 cpu_time_us: 249254 } process_cpu_time_us: 830 total_duration_us: 277334 total_cpu_time_us: 251805 2025-12-23T18:37:28.046617Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=9; 2025-12-23T18:37:28.046860Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 9 at tablet 72075186224037928 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:37:28.046980Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 9 at tablet 72075186224037928 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:37:28.047162Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [1:7587124362362027272:2542], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7587124349477124937:2542]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7587124362362027272:2542].{
: Error: Conflict with existing key., code: 2012 } 2025-12-23T18:37:28.047223Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587124362362027265:2542], SessionActorId: [1:7587124349477124937:2542], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7587124349477124937:2542]. 2025-12-23T18:37:28.047421Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=OTBjMjMwN2QtNjgwOGQ3ZGQtN2Q2Y2Q0MTQtYWJjZjFjNzA=, ActorId: [1:7587124349477124937:2542], ActorState: ExecuteState, TraceId: 01kd680zdr8byd1fdrp5pr2xae, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7587124362362027266:2542] from: [1:7587124362362027265:2542] 2025-12-23T18:37:28.047558Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587124362362027266:2542] TxId: 281474976715693. Ctx: { TraceId: 01kd680zdr8byd1fdrp5pr2xae, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTBjMjMwN2QtNjgwOGQ3ZGQtN2Q2Y2Q0MTQtYWJjZjFjNzA=, PoolId: default, IsStreamingQuery: 0}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-12-23T18:37:28.047882Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=OTBjMjMwN2QtNjgwOGQ3ZGQtN2Q2Y2Q0MTQtYWJjZjFjNzA=, ActorId: [1:7587124349477124937:2542], ActorState: ExecuteState, TraceId: 01kd680zdr8byd1fdrp5pr2xae, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } query_phases { duration_us: 4655 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 3 bytes: 60 } partitions_count: 1 } cpu_time_us: 1990 } compilation { duration_us: 167571 cpu_time_us: 154448 } process_cpu_time_us: 880 total_duration_us: 182720 total_cpu_time_us: 157318 |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TS3WrapperTests::CopyPartUpload |93.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 11864, MsgBus: 19960 2025-12-23T18:37:16.632480Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124309993417690:2172];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:16.632688Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b18/r3tmp/tmpRHxwfv/pdisk_1.dat 2025-12-23T18:37:17.416996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:17.424162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:17.424265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:17.455745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11864, node 1 2025-12-23T18:37:17.653046Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.006040s 2025-12-23T18:37:17.659471Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:17.668294Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:17.668660Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124309993417555:2081] 1766515036588299 != 1766515036588302 2025-12-23T18:37:17.712883Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:17.916080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:17.916106Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:17.916112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:17.916183Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19960 TClient is connected to server localhost:19960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515037665 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:18.886001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:18.900907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:18.921742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:19.343895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:19.843422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
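(Aside: the PRECONDITION_FAILED entries earlier in this trace — issue code 2012, "Constraint violated. Table: `/Root/TestTable2`." with sub-issue "Conflict with existing key." — are the pattern produced by inserting a row whose primary key already exists. A minimal sketch of that pattern, assuming the ydb Python SDK, a hypothetical Key/Value schema for TestTable2, and a local endpoint; none of these names come from the log, and the exception class is the SDK's mapping of PRECONDITION_FAILED.)

import ydb

# Assumed local endpoint/database; the test cluster in the trace uses its own ports.
driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def duplicate_insert(session):
    # Hypothetical schema: TestTable2 (Key Int32, Value String, PRIMARY KEY (Key)).
    tx = session.transaction(ydb.SerializableReadWrite())
    tx.execute('INSERT INTO TestTable2 (Key, Value) VALUES (1, "a");', commit_tx=True)
    # Second INSERT of the same primary key is what yields
    # PRECONDITION_FAILED / "Conflict with existing key." (issue code 2012).
    tx = session.transaction(ydb.SerializableReadWrite())
    tx.execute('INSERT INTO TestTable2 (Key, Value) VALUES (1, "b");', commit_tx=True)

try:
    pool.retry_operation_sync(duplicate_insert)
except ydb.issues.PreconditionFailed as e:
    print(e)  # mirrors the PRECONDITION_FAILED status and constraint-violation issues seen above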
2025-12-23T18:37:20.029398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:21.638883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124309993417690:2172];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:21.651142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:23.817158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124340058190510:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:23.817253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:23.825575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124340058190520:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:23.825659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:24.285806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:24.349295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:24.419128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:24.490319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:24.536954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:24.625632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:24.731239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:24.837205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:24.983921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124344353158713:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:24.984019Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:24.984378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124344353158718:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:24.984430Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:24.984438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124344353158719:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:24.989272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:25.005126Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124344353158722:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:25.063160Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124348648126071:3784] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:27.201084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] >> TS3WrapperTests::CopyPartUpload [GOOD] >> KqpCost::WriteRow+isSink-isOlap [GOOD] >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |93.2%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpCost::OlapWriteRow |93.2%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2025-12-23T18:37:31.429092Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# DA8DB6A5-6581-4BE4-A1B7-65EF40460060, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:20345 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B97DD2B8-19F7-45A3-B4CA-D4E9B52BC8C8 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2025-12-23T18:37:31.438012Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# DA8DB6A5-6581-4BE4-A1B7-65EF40460060, response# |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |93.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |93.2%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2025-12-23T18:37:32.676699Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# CE9A16C2-D695-433C-ADCD-BECE00464DCF, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:7164 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EAC03173-000A-4690-BA6D-A08C9FD70C31 amz-sdk-request: attempt=1 
content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-12-23T18:37:32.695402Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# CE9A16C2-D695-433C-ADCD-BECE00464DCF, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-12-23T18:37:32.696006Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 52413C34-65A0-4325-A9FE-C50F579CA7FE, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:7164 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B21481D9-86E6-4FF0-9EA4-7B493BEB7791 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2025-12-23T18:37:32.708048Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 52413C34-65A0-4325-A9FE-C50F579CA7FE, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2025-12-23T18:37:32.715031Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 98E7EAF8-5DC8-4E48-A36B-6BE8C05089A0, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:7164 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 41DE9B71-3C55-42B1-BDBB-6D991AE151C2 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2025-12-23T18:37:32.727533Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 98E7EAF8-5DC8-4E48-A36B-6BE8C05089A0, response# UploadPartCopyResult { } 2025-12-23T18:37:32.728592Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 4EA2722F-1A1A-4165-8AA4-1506F1588D98, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:7164 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 97AC4A82-A5BE-4FBB-90AC-1FD437E6B9EC amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2025-12-23T18:37:32.740846Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 4EA2722F-1A1A-4165-8AA4-1506F1588D98, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2025-12-23T18:37:32.742995Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 1CFA1FEF-C34E-4BF2-AD60-806C99041951, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:7164 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: 
AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 03B3268C-E9FA-4402-B617-ECAAB39CDB97 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2025-12-23T18:37:32.755418Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 1CFA1FEF-C34E-4BF2-AD60-806C99041951, response# GetObjectResult { } |93.2%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> KqpCost::OlapRangeFullScan >> KqpCost::WriteRow-isSink-isOlap >> Cache::Test6 [GOOD] >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 8472, MsgBus: 18125 2025-12-23T18:37:20.302812Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124325169997851:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:20.302979Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:20.323680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b0a/r3tmp/tmpV9IER1/pdisk_1.dat 2025-12-23T18:37:20.826901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:20.846088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:20.846223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:20.881643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:21.034908Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124325169997621:2081] 1766515040241777 != 1766515040241780 2025-12-23T18:37:21.056268Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:21.083205Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 8472, node 1 2025-12-23T18:37:21.318144Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:21.383386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:21.383424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:21.383433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
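(Aside: the TS3WrapperTests::CopyPartUpload trace above exercises the standard S3 multipart-copy sequence: PutObject, CreateMultipartUpload, UploadPartCopy with a byte range, CompleteMultipartUpload, then a ranged GetObject. A minimal sketch of the same sequence, assuming boto3 and an S3-compatible mock endpoint; the port and credentials are placeholders, only the bucket/key names and byte ranges come from the trace, and the test itself drives this through its own C++ wrapper.)

import boto3

# Placeholder endpoint/credentials; the mock server in the trace listened on a random port.
s3 = boto3.client(
    "s3",
    endpoint_url="http://localhost:7164",
    aws_access_key_id="dummy",
    aws_secret_access_key="dummy",
)

s3.put_object(Bucket="TEST", Key="key", Body=b"test")       # PutObject: 4 bytes, as in the trace
mp = s3.create_multipart_upload(Bucket="TEST", Key="key1")  # CreateMultipartUpload -> UploadId

# UploadPartCopy: copy bytes 1-2 of TEST/key into part 1 of TEST/key1.
part = s3.upload_part_copy(
    Bucket="TEST",
    Key="key1",
    UploadId=mp["UploadId"],
    PartNumber=1,
    CopySource={"Bucket": "TEST", "Key": "key"},
    CopySourceRange="bytes=1-2",
)

# CompleteMultipartUpload with the ETag returned for the copied part.
s3.complete_multipart_upload(
    Bucket="TEST",
    Key="key1",
    UploadId=mp["UploadId"],
    MultipartUpload={"Parts": [{"PartNumber": 1, "ETag": part["CopyPartResult"]["ETag"]}]},
)

obj = s3.get_object(Bucket="TEST", Key="key1", Range="bytes=0-1")  # ranged read-back, as in the trace
print(obj["Body"].read())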
2025-12-23T18:37:21.383536Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18125 TClient is connected to server localhost:18125 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515041130 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:22.611450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:37:22.649245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:23.057861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:23.389102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:23.516422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:25.298838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124325169997851:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:25.298924Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:26.672441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124350939803281:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.672592Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.673028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124350939803291:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.673082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.044798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.090790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.127414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.195489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.241241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.346769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.434637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.616079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.815208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124355234771458:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.815322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.815906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124355234771463:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.815947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124355234771464:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.816058Z node 1 :KQP_WORKLOAD_SERV ... 1"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} query_phases { duration_us: 8369 table_access { name: "/Root/Test" reads { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 6393 affected_shards: 1 } compilation { duration_us: 368210 cpu_time_us: 357694 } process_cpu_time_us: 318 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Test\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"Test\",\"ReadColumns\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\",\"Amount\",\"Comment\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"},{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"E-Rows\":\"0\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"0\",\"E-Cost\":\"0\"}],\"Node Type\":\"Limit-Filter\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[4,19]}},\"Name\":\"4\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":2812,\"Max\":2812,\"Min\":2812,\"History\":[4,2812]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[4,1048576]},\"Introspections\":[\"1 tasks default for source 
scan\"],\"Tasks\":1,\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"PhysicalStageId\":0,\"Mkql\":{},\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Test\",\"ReadRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ReadBytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20}}],\"BaseTimeMs\":1766515050830,\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"CpuTimeUs\":{\"Count\":1,\"Sum\":1255,\"Max\":1255,\"Min\":1255,\"History\":[4,1255]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[4,192]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[4,192]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":2881,\"Max\":2881,\"Min\":2881,\"History\":[4,2881]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":3}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Group (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\",\"Stats\":{\"UseLlvm\":\"undefined\",\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"Introspections\":[\"1 minimum tasks for 
compute\"],\"DurationUs\":{\"Count\":1,\"Sum\":1000,\"Max\":1000,\"Min\":1000},\"Mkql\":{},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[5,1048576]},\"BaseTimeMs\":1766515050830,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[5,19]}},\"Name\":\"RESULT\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"PauseMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1345,\"Max\":1345,\"Min\":1345,\"History\":[5,1345]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":3,\"Min\":2}}}],\"CpuTimeUs\":{\"Count\":1,\"Sum\":1491,\"Max\":1491,\"Min\":1491,\"History\":[5,1491]},\"StageDurationUs\":1000,\"ResultRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResultBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[5,19]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[5,19]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1316,\"Max\":1316,\"Min\":1316,\"History\":[5,1316]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":3,\"Min\":2}}}],\"UpdateTimeMs\":4,\"Tasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":368210,\"CpuTimeUs\":357694},\"ProcessCpuTimeUs\":318,\"TotalDurationUs\":386372,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":803},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"Test\",\"ReadColumns\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\",\"Amount\",\"Comment\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Rows\":\"0\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"0\",\"E-Cost\":\"0\"}],\"Node 
Type\":\"Filter\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":1.255,\"A-Cpu\":1.255,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":1.491,\"A-Cpu\":2.746,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Test\" \'\"72057594046644480:45\" \'\"\" \'1))\n(let $2 \'(\'\"Amount\" \'\"Comment\" \'\"Group\" \'\"Name\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $4 (Uint64 \'1))\n(let $5 (OptionalType (DataType \'String)))\n(let $6 (StructType \'(\'\"Amount\" (OptionalType (DataType \'Uint64))) \'(\'\"Comment\" $5) \'(\'\"Group\" (OptionalType (DataType \'Uint32))) \'(\'\"Name\" $5)))\n(let $7 \'(\'(\'\"_logical_id\" \'560) \'(\'\"_id\" \'\"8924aed6-2a7024de-be0715a8-e55a48f2\") \'(\'\"_wide_channels\" $6)))\n(let $8 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($12) (block \'(\n (let $13 (lambda \'($16) (block \'(\n (let $17 (Member $16 \'\"Amount\"))\n (return $17 (Member $16 \'\"Comment\") (Member $16 \'\"Group\") (Member $16 \'\"Name\") (Coalesce (< $17 (Uint64 \'\"5000\")) (Bool \'false)))\n ))))\n (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda \'($18 $19 $20 $21 $22) $22) $4))\n (let $15 (lambda \'($23 $24 $25 $26 $27) $23 $24 $25 $26))\n (return (FromFlow (WideMap $14 $15)))\n))) $7))\n(let $9 (DqCnMerge (TDqOutput $8 \'0) \'(\'(\'\"2\" \'\"Asc\"))))\n(let $10 (DqPhyStage \'($9) (lambda \'($28) (FromFlow (NarrowMap (Take (ToFlow $28) $4) (lambda \'($29 $30 $31 $32) (AsStruct \'(\'\"Amount\" $29) \'(\'\"Comment\" $30) \'(\'\"Group\" $31) \'(\'\"Name\" $32)))))) \'(\'(\'\"_logical_id\" \'573) \'(\'\"_id\" \'\"8d55dfb0-c235daa8-fd8ef9f0-b54365a1\"))))\n(let $11 (DqCnResult (TDqOutput $10 \'0) \'()))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($8 $10) \'($11) \'() \'(\'(\'\"type\" \'\"data\")))) \'((KqpTxResultBinding (ListType $6) \'0 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 386372 total_cpu_time_us: 364405 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Test\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":45},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Amount\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Comment\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Group\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Name\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Group\\\",\\\"Name\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1766515050\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"8c71761-11a4ac77-8b175556-22c9a16b\",\"version\":\"1.0\"}" >> KqpCost::CTASWithRetry+isOlap [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 29990, MsgBus: 1845 2025-12-23T18:37:18.636481Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124318311260133:2213];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:18.636656Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b14/r3tmp/tmpwugOsK/pdisk_1.dat 2025-12-23T18:37:19.279590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:19.279690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:19.343773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:19.359163Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:19.406922Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124318311259957:2081] 1766515038617025 != 1766515038617028 2025-12-23T18:37:19.432117Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29990, node 1 2025-12-23T18:37:19.565259Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:19.695548Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:19.812694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:19.812715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:19.812721Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:19.812799Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1845 TClient is connected to server localhost:1845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515039597 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:20.679367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:20.707627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:20.718980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
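(Aside: the KqpCost::AAARangeFullScan plan and AST above describe a TableFullScan over /Root/Test reading Group, Name, Amount, Comment, a Filter on item.Amount < 5000, a Limit of 1, and a Merge ordered by Group. A statement of roughly that shape, sketched with the ydb Python SDK's scan_query; the endpoint is an assumption, identifier quoting may differ, and the test itself issues the query through its C++ harness rather than this client.)

import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed endpoint
driver.wait(timeout=5)

# Roughly the statement implied by the plan/AST above: full scan of Test,
# filter on Amount, order by the leading key column Group, take one row.
query = """
SELECT `Group`, Name, Amount, Comment
FROM Test
WHERE Amount < 5000
ORDER BY `Group`
LIMIT 1;
"""

# scan_query streams result parts; pull only the first one here.
for part in driver.table_client.scan_query(query):
    print(part.result_set.rows)
    break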
2025-12-23T18:37:20.996905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:21.287576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:21.414385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:23.639906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124318311260133:2213];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:23.639994Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:25.165226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124348376032929:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:25.165371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:25.165914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124348376032939:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:25.165960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:25.605251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:25.681663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:25.744593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:25.802089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:25.853061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:25.903702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:25.970303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:26.054816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:26.208322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124352671001113:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.208412Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.208904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124352671001118:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.208956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124352671001119:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.209066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.213888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:26.251714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-12-23T18:37:26.252011Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124352671001122:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:26.325170Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124352671001181:3788] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:29.738516Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515049754, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow+isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 4078, MsgBus: 8733 2025-12-23T18:37:18.732581Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124316403556352:2160];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:18.732764Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b13/r3tmp/tmpnRtvoX/pdisk_1.dat 2025-12-23T18:37:19.498797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:19.527142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:19.527254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:19.620017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:19.755419Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:19.782938Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124316403556230:2081] 1766515038724314 != 1766515038724317 2025-12-23T18:37:19.788111Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:19.834891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 4078, node 1 2025-12-23T18:37:20.146420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:20.146458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:20.146466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:20.146536Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8733 TClient is connected to server localhost:8733 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515039891 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:37:21.353756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:37:21.367325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:21.386957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:21.698532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:22.037446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
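The recurring "Resource pool default not found or you don't have access permissions" warnings (above, and again a few lines below) are emitted while KQP's workload service lazily creates /Root/.metadata/workload_manager/pools/default on first use: several TPoolFetcherActor lookups race ahead of the TPoolCreatorActor, and the later "path exist, request accepts it" message shows the concurrent creation being accepted as idempotent. On the client side the usual pattern is simply to run the first query through the SDK's retry helper and let the server-side creation settle. A minimal sketch, assuming the ydb Python SDK and the placeholder gRPC port 4078 from this run:

import ydb

def first_query(session):
    # Any trivial first statement is enough to trigger the lazy pool creation.
    return session.transaction(ydb.SerializableReadWrite()).execute(
        "SELECT 1;", commit_tx=True)

driver = ydb.Driver(ydb.DriverConfig("grpc://localhost:4078", "/Root"))
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)
pool.retry_operation_sync(first_query)  # retries transient statuses while the default pool is created
pool.stop()
driver.stop()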
2025-12-23T18:37:22.252112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:23.735053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124316403556352:2160];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:23.739143Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:25.684911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124346468329188:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:25.685030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:25.685504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124346468329198:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:25.685555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.858786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:26.945195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.012892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.072142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.130608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.221020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.305509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.427538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.653261Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124355058264687:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.653344Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.653774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124355058264692:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.653844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124355058264693:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.654144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.658236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:27.704884Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124355058264696:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:27.800013Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124355058264759:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:30.260751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 12983 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1484 affected_shards: 1 } compilation { duration_us: 152384 cpu_time_us: 143462 } process_cpu_time_us: 948 total_duration_us: 179922 total_cpu_time_us: 145894 query_phases { duration_us: 15015 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1456 affected_shards: 1 } compilation { duration_us: 109975 cpu_time_us: 99487 } process_cpu_time_us: 661 total_duration_us: 127373 total_cpu_time_us: 101604 2025-12-23T18:37:30.982472Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=5; 2025-12-23T18:37:31.023294Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 5 at tablet 72075186224037927 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:37:31.023533Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037927 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:37:31.023816Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [1:7587124367943167123:2544], Table: `/Root/TestTable` ([72057594046644480:54:1]), SessionActorId: [1:7587124367943166961:2544]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037927, Sink=[1:7587124367943167123:2544].{
: Error: Conflict with existing key., code: 2012 } 2025-12-23T18:37:31.024541Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587124367943167116:2544], SessionActorId: [1:7587124367943166961:2544], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7587124367943166961:2544]. 2025-12-23T18:37:31.024869Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=MWMxZWIzMTEtMmU3NWIyZmQtZmYwMGFjNTQtMjU0OGFjNGQ=, ActorId: [1:7587124367943166961:2544], ActorState: ExecuteState, TraceId: 01kd6812am9a9a7tq7krrx194d, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7587124367943167117:2544] from: [1:7587124367943167116:2544] 2025-12-23T18:37:31.024985Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587124367943167117:2544] TxId: 281474976710677. Ctx: { TraceId: 01kd6812am9a9a7tq7krrx194d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MWMxZWIzMTEtMmU3NWIyZmQtZmYwMGFjNTQtMjU0OGFjNGQ=, PoolId: default, IsStreamingQuery: 0}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-12-23T18:37:31.025395Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=MWMxZWIzMTEtMmU3NWIyZmQtZmYwMGFjNTQtMjU0OGFjNGQ=, ActorId: [1:7587124367943166961:2544], ActorState: ExecuteState, TraceId: 01kd6812am9a9a7tq7krrx194d, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `/Root/TestTable`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } query_phases { duration_us: 46006 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 1637 } compilation { duration_us: 134422 cpu_time_us: 119009 } process_cpu_time_us: 934 total_duration_us: 188565 total_cpu_time_us: 121580 query_phases { duration_us: 5903 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1623 affected_shards: 1 } compilation { duration_us: 117169 cpu_time_us: 111464 } process_cpu_time_us: 561 total_duration_us: 126800 total_cpu_time_us: 113648 query_phases { duration_us: 3560 cpu_time_us: 1744 affected_shards: 1 } compilation { duration_us: 151112 cpu_time_us: 126311 } process_cpu_time_us: 712 total_duration_us: 158513 total_cpu_time_us: 128767 query_phases { duration_us: 5349 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1948 affected_shards: 1 } compilation { duration_us: 171508 cpu_time_us: 163581 } process_cpu_time_us: 4859 total_duration_us: 185572 total_cpu_time_us: 170388 query_phases { duration_us: 5332 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1249 affected_shards: 1 } compilation { duration_us: 57446 cpu_time_us: 52250 } process_cpu_time_us: 497 total_duration_us: 64462 total_cpu_time_us: 53996 query_phases { duration_us: 12062 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1743 affected_shards: 1 } compilation { duration_us: 136204 cpu_time_us: 116923 } process_cpu_time_us: 805 total_duration_us: 152598 total_cpu_time_us: 119471 |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 2162, MsgBus: 27199 2025-12-23T18:37:19.741276Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124323807634767:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:19.741600Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b10/r3tmp/tmp7WoSYS/pdisk_1.dat 2025-12-23T18:37:20.241438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:20.319004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
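The WriteRow+isSink-isOlap output above ends with a deliberate duplicate-key write: the datashard rejects the second insert with STATUS_CONSTRAINT_VIOLATION ("Conflict with existing key.", issue code 2012), KQP's write actor surfaces it to the session as PRECONDITION_FAILED, and the surrounding query_phases blocks record the per-phase table_access rows and bytes that the cost test checks. A minimal sketch of that kind of write, assuming the ydb Python SDK and a hypothetical TestTable schema rather than the test's real DDL:

import ydb

def insert_duplicate_key(session):
    # The first INSERT writes one row (compare the "updates { rows: 1 bytes: 20 }" phases above).
    session.transaction(ydb.SerializableReadWrite()).execute(
        'INSERT INTO TestTable (Key, Value) VALUES (1u, "a");', commit_tx=True)
    # Re-inserting the same primary key raises PRECONDITION_FAILED carrying
    # issue code 2012 ("Conflict with existing key."); an UPSERT or REPLACE would not.
    session.transaction(ydb.SerializableReadWrite()).execute(
        'INSERT INTO TestTable (Key, Value) VALUES (1u, "b");', commit_tx=True)

driver = ydb.Driver(ydb.DriverConfig("grpc://localhost:4078", "/Root"))
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)
try:
    pool.retry_operation_sync(insert_duplicate_key)
except ydb.Error as exc:
    print("write rejected:", exc)
pool.stop()
driver.stop()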
2025-12-23T18:37:20.319108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:20.446113Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:20.456990Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124323807634535:2081] 1766515039675455 != 1766515039675458 2025-12-23T18:37:20.473680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2162, node 1 2025-12-23T18:37:20.600502Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:20.769083Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:20.809438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:20.809469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:20.809479Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:20.809558Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27199 TClient is connected to server localhost:27199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515040528 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:21.741111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
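The test that starts here, KqpCost::IndexLookupJoin+StreamLookupJoin, drives a join whose right side is resolved through an index lookup and then prints what look like per-table read totals at the end of its output (the "/Root/Join1_2 1 19" and "/Root/Join1_1 8 136" lines further below). The sketch below only illustrates the shape of such a query in YQL; the table, column and index names are assumptions, not the test's real schema:

import ydb

JOIN_YQL = """
-- Hypothetical index-lookup join: VIEW selects a secondary index on the right table,
-- so each left row is resolved by point lookups rather than a full right-side scan.
SELECT l.Key, r.Value
FROM Join1_1 AS l
JOIN Join1_2 VIEW ix_fk AS r ON r.Fk = l.Key
WHERE l.Key < 10;
"""

def run_join(session):
    return session.transaction(ydb.SerializableReadWrite()).execute(
        JOIN_YQL, commit_tx=True)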
2025-12-23T18:37:21.747911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:21.758263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:22.237560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:22.838834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:22.940514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:24.743060Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124323807634767:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:24.743138Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:26.080982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124353872407483:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.081136Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.081502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124353872407493:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.081561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.640747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:26.706551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:26.777211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:26.831706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:26.883640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:26.983256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.055585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.144253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.355982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124358167375664:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.356067Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.356454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124358167375669:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.356524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124358167375670:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.356760Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.361233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:27.401692Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124358167375673:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:27.475517Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124358167375724:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:30.100164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:30.139346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:30.179705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/Join1_2 1 19 /Root/Join1_1 8 136 |93.2%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |93.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] |93.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test6 [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 28261, MsgBus: 12540 2025-12-23T18:37:20.161068Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124327808952958:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:20.182778Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b0b/r3tmp/tmpedXreL/pdisk_1.dat 2025-12-23T18:37:20.773823Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:20.830209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:20.830336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:20.895126Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124327808952920:2081] 1766515040099276 != 1766515040099279 2025-12-23T18:37:20.909718Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:20.911746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:20.966981Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 28261, node 1 2025-12-23T18:37:21.196502Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:21.207278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:21.207298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:21.207305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:21.207387Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12540 TClient is connected to server localhost:12540 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515040976 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:22.576329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:22.588076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:37:22.602933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:22.857120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:23.139696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:23.274171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
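The ScanQueryRangeFullScan test that starts here reads /Root/Test with a scan query; the compute-actor trace further below reports the resulting table stats (ReadRows: 3, ReadBytes: 96 for /Root/Test) and the TEvStreamData/channel messages used to stream the result back. A minimal client-side sketch of such a full-scan read, assuming the ydb Python SDK's scan-query interface and the placeholder gRPC port 28261 from this run:

import ydb

driver = ydb.Driver(ydb.DriverConfig("grpc://localhost:28261", "/Root"))
driver.wait(timeout=5)

# A scan query streams result parts rather than buffering a single result set;
# without a predicate on the key it is planned as a full range scan of the table.
it = driver.table_client.scan_query(ydb.ScanQuery("SELECT * FROM Test;", {}))
for part in it:
    print(part.result_set.rows)

driver.stop()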
2025-12-23T18:37:25.166896Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124327808952958:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:25.185165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:26.721851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124353578758574:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.721972Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.723068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124353578758584:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.723152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.074293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.143942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.205054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.263046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.358331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.475442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.580564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.700847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.916199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124357873726775:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.916285Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.916850Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124357873726780:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.916915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124357873726781:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.916954Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [W ... id=[1:7587124375053596351:2543];scan_id=1;tx_id=281474976715674;fline=kqp_scan_compute_manager.h:100;event=stop_scanner;actor_id=[1:7587124375053596355:2073];message=;final_flag=1; 2025-12-23T18:37:31.344061Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:594: SelfId: [1:7587124375053596351:2543]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, pending resolve shards: 0, average read rows: 3, average read bytes: 0, 2025-12-23T18:37:31.344108Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7587124375053596351:2543];scan_id=1;tx_id=281474976715674;fline=kqp_scan_compute_manager.h:441;event=wait_all_scanner_finished;scans=0; 2025-12-23T18:37:31.344169Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:685: SelfId: [1:7587124375053596351:2543]. EVLOGKQP(max_in_flight:1) InFlightScans:InFlightShards:;wScans=0;wShards=0; {SHARD(72075186224037914):CHUNKS=1;D=0.000000s;PacksCount=1;RowsCount=3;BytesCount=0;MinPackSize=3;MaxPackSize=3;CAVG=0.000000s;CMIN=0.000000s;CMAX=0.000000s;}; 2025-12-23T18:37:31.344296Z node 1 :KQP_COMPUTE DEBUG: log.h:466: kqp_scan_compute_actor.cpp:180 :TEvSendData: [1:7587124375053596351:2543]/[1:7587124375053596349:2541] 2025-12-23T18:37:31.344543Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7587124375053596349:2541], TxId: 281474976715674, task: 1. Ctx: { CheckpointId : . TraceId : 01kd6812cq125431scj2n0ktvv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:37:31.344564Z node 1 :KQP_COMPUTE DEBUG: log.h:466: kqp_scan_compute_actor.cpp:212 :TEvFetcherFinished: [1:7587124375053596351:2543] 2025-12-23T18:37:31.344633Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715674, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-12-23T18:37:31.344652Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124375053596350:2542], TxId: 281474976715674, task: 2. Ctx: { CheckpointId : . TraceId : 01kd6812cq125431scj2n0ktvv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. CA StateFunc 271646923 2025-12-23T18:37:31.344675Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715674, task: 2. Finish input channelId: 1, from: [1:7587124375053596349:2541] 2025-12-23T18:37:31.344734Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124375053596350:2542], TxId: 281474976715674, task: 2. Ctx: { CheckpointId : . TraceId : 01kd6812cq125431scj2n0ktvv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. 
CA StateFunc 271646922 2025-12-23T18:37:31.345005Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7587124375053596350:2542], TxId: 281474976715674, task: 2. Ctx: { CheckpointId : . TraceId : 01kd6812cq125431scj2n0ktvv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:37:31.345061Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715674, task: 1. Tasks execution finished 2025-12-23T18:37:31.345079Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [1:7587124375053596349:2541], TxId: 281474976715674, task: 1. Ctx: { CheckpointId : . TraceId : 01kd6812cq125431scj2n0ktvv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:37:31.345224Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715674, task: 1. pass away 2025-12-23T18:37:31.345301Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:37:31.345568Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715674, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:37:31.346189Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:326: ActorId: [1:7587124375053596345:2532] TxId: 281474976715674. Ctx: { TraceId: 01kd6812cq125431scj2n0ktvv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=, PoolId: default, IsStreamingQuery: 0}. Send TEvStreamData to [1:7587124370758629018:2532], seqNo: 1, nRows: 1 2025-12-23T18:37:31.346385Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [1:7587124375053596345:2532] TxId: 281474976715674. Ctx: { TraceId: 01kd6812cq125431scj2n0ktvv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [1:7587124375053596349:2541], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 3918 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 1123 FinishTimeMs: 1766515051345 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 128 BuildCpuTimeUs: 995 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-ou4bsyjxme" NodeId: 1 StartTimeMs: 1766515051344 CreateTimeMs: 1766515051295 UpdateTimeMs: 1766515051345 } MaxMemoryUsage: 1048576 } 2025-12-23T18:37:31.346456Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715674. Ctx: { TraceId: 01kd6812cq125431scj2n0ktvv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7587124375053596349:2541] 2025-12-23T18:37:31.346528Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [1:7587124375053596345:2532] TxId: 281474976715674. Ctx: { TraceId: 01kd6812cq125431scj2n0ktvv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7587124375053596350:2542], 2025-12-23T18:37:31.347050Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515051 AvailableComputeActors: 9999 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 9999 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2025-12-23T18:37:31.349821Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:515: TxId: 281474976715674, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388468, to: [1:7587124375053596353:2542] 2025-12-23T18:37:31.349944Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [1:7587124375053596350:2542], TxId: 281474976715674, task: 2. Ctx: { CheckpointId : . TraceId : 01kd6812cq125431scj2n0ktvv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. CA StateFunc 271646922 2025-12-23T18:37:31.350027Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715674, task: 2. 
Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-12-23T18:37:31.350047Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715674, task: 2. Tasks execution finished 2025-12-23T18:37:31.350070Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [1:7587124375053596350:2542], TxId: 281474976715674, task: 2. Ctx: { CheckpointId : . TraceId : 01kd6812cq125431scj2n0ktvv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:37:31.350178Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715674, task: 2. pass away 2025-12-23T18:37:31.350284Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715674;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:37:31.350499Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715674, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:37:31.350759Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [1:7587124375053596345:2532] TxId: 281474976715674. Ctx: { TraceId: 01kd6812cq125431scj2n0ktvv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [1:7587124375053596350:2542], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 38829 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 962 FinishTimeMs: 1766515051349 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 222 BuildCpuTimeUs: 740 HostName: "ghrun-ou4bsyjxme" NodeId: 1 CreateTimeMs: 1766515051297 UpdateTimeMs: 1766515051350 } MaxMemoryUsage: 1048576 } 2025-12-23T18:37:31.350813Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715674. Ctx: { TraceId: 01kd6812cq125431scj2n0ktvv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7587124375053596350:2542] 2025-12-23T18:37:31.350977Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [1:7587124375053596345:2532] TxId: 281474976715674. Ctx: { TraceId: 01kd6812cq125431scj2n0ktvv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=, PoolId: default, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:37:31.351031Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [1:7587124375053596345:2532] TxId: 281474976715674. Ctx: { TraceId: 01kd6812cq125431scj2n0ktvv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTMyNTUwODktYzY0NGJhMWItZjE0N2NlNjMtNWIzYmM2ZWU=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval: ComputeTime: 0.042747s ReadRows: 3 ReadBytes: 96 ru: 28 rate limiter was not found force flag: 1 2025-12-23T18:37:31.352024Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515051329, txId: 281474976715673] shutting down 2025-12-23T18:37:31.352122Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 2025-12-23T18:37:33.342885Z, after 1.991113s >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> XdsBootstrapConfigInitializer::CanNotSetEnvIfXdsBootstrapConfigIsAbsent [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> ActorHandler::HttpOk |93.2%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 |93.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |93.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |93.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] |93.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |93.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanNotSetEnvIfXdsBootstrapConfigIsAbsent [GOOD] >> TestSetCloudPermissions::CanSetPermissionsForRootDb >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 |93.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal >> TTxLocatorTest::TestAllocateAll |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |93.2%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTASWithRetry+isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 20910, MsgBus: 29876 ... waiting for SysViewsRoster update finished 2025-12-23T18:37:08.996203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:09.124248Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:37:09.140402Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:37:09.140880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:37:09.140973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:09.514570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:09.514781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:09.620880Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515025024598 != 1766515025024602 2025-12-23T18:37:09.674645Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:09.728730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:09.965846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b39/r3tmp/tmpxKYRxh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20910, node 1 2025-12-23T18:37:10.495780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:10.495830Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:10.495863Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:10.496174Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:10.511884Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:29876 TClient is connected to server localhost:29876 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:37:11.050288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:11.077332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:37:11.094684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:11.650275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:12.093323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:12.398828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:13.378606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1908:3515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:13.378899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:13.380258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1982:3534], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:13.380388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:13.416260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:13.674581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:14.090420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:14.408673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:14.690022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:15.054094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:15.365146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:15.735970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.360414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.360702Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.361264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2800:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.361379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2803:4182], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.361444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_worklo ... 075186224037997 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539173Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037996 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539227Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037995 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539283Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539342Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037993 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539409Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539472Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037991 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539531Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539610Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037989 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539690Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539753Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037987 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539823Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539881Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037985 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.539944Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: 
NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540001Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037983 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540079Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540173Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037981 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540234Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540292Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037979 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540351Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540422Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037977 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540522Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540585Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037975 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540641Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540712Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540786Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540844Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037970 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540904Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: 
TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.540962Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037968 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541019Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541078Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037966 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541131Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541228Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037964 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541295Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541353Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037962 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541407Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541466Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037960 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541534Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541593Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037958 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541652Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037957 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541728Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037955 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 
2025-12-23T18:37:32.541800Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.541860Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037953 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2025-12-23T18:37:32.546377Z node 1 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd680se22c8r0hkaseqggcj0", SessionId: ydb://session/3?node_id=1&id=ZjFkMjA0ZDAtOTJhYjk5Y2YtZTZkYjRhNjgtNmNhYzI4NmI=, Slow query, duration: 10.814620s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/TestTable2` (PRIMARY KEY (Group, Name)) WITH (STORE=COLUMN) AS SELECT * FROM `/Root/TestTable`;\n ", parameters: 0b query_phases { duration_us: 1766515040519453 table_access { name: "/Root/.tmp/sessions/a55ba454-4576-1ced-2861-ffbc759e032f/Root/TestTable2_5a965fda-49a2-15f9-fd7f-e895d912fd2f" updates { rows: 4 bytes: 1472 } partitions_count: 4 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 144 } } cpu_time_us: 51931 } compilation { duration_us: 14075 cpu_time_us: 7878 } process_cpu_time_us: 1454 total_duration_us: 10814620 total_cpu_time_us: 61263 |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |93.2%| [LD] {RESULT} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |93.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |93.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest |93.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |93.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot >> TTxLocatorTest::TestAllocateAll [GOOD] >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx >> TTxLocatorTest::TestZeroRange >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] >> TTxLocatorTest::Boot [GOOD] >> TTxLocatorTest::TestWithReboot |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2025-12-23T18:37:37.781274Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 
LockedInitializationPath Marker# TSYS32 2025-12-23T18:37:37.781795Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-12-23T18:37:37.782584Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-12-23T18:37:37.787990Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:37.788541Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-12-23T18:37:37.802120Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:37.802224Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:37.802286Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:37.802378Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-12-23T18:37:37.802526Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:37.802634Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-12-23T18:37:37.802773Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-12-23T18:37:37.803590Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#281474976710655 2025-12-23T18:37:37.804133Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:37.804192Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:37.804295Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2025-12-23T18:37:37.804337Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2025-12-23T18:37:37.810726Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#1 2025-12-23T18:37:37.810933Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-12-23T18:37:37.810978Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> TTxLocatorTest::TestImposibleSize |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> KqpCost::CTAS+isOlap [GOOD] |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |93.2%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2025-12-23T18:37:38.348621Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-12-23T18:37:38.349223Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-12-23T18:37:38.350180Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-12-23T18:37:38.351923Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.352394Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-12-23T18:37:38.364569Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.364665Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.364747Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.364826Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-12-23T18:37:38.364955Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.365068Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-12-23T18:37:38.365193Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-12-23T18:37:38.366860Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2025-12-23T18:37:38.367384Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2025-12-23T18:37:38.367706Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2025-12-23T18:37:38.368146Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2025-12-23T18:37:38.368590Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2025-12-23T18:37:38.368972Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.369113Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:90:2125] requested range size#100000 2025-12-23T18:37:38.369316Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.369498Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2025-12-23T18:37:38.369719Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 
0} 2025-12-23T18:37:38.369854Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.370031Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.370114Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2025-12-23T18:37:38.370386Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.370444Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.370649Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2025-12-23T18:37:38.370864Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.370946Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.371031Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2025-12-23T18:37:38.371262Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-12-23T18:37:38.371311Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2025-12-23T18:37:38.371489Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.371646Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.371733Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-12-23T18:37:38.371760Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2025-12-23T18:37:38.371873Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.371961Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-12-23T18:37:38.371986Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2025-12-23T18:37:38.372110Z node 1 
:TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.372188Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-12-23T18:37:38.372229Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2025-12-23T18:37:38.372296Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.372361Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2025-12-23T18:37:38.372388Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 400000 to# 500000 2025-12-23T18:37:38.372488Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.372597Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.372649Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-12-23T18:37:38.372673Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:90:2125] TEvAllocateResult from# 500000 to# 600000 2025-12-23T18:37:38.372733Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-12-23T18:37:38.372767Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2025-12-23T18:37:38.372949Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.372996Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-12-23T18:37:38.373018Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2025-12-23T18:37:38.373143Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.373245Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.373296Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-12-23T18:37:38.373319Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 
72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2025-12-23T18:37:38.373417Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.373485Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-12-23T18:37:38.373511Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-12-23T18:37:38.380713Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 720575 ... erved from# 8600000 Reserved to# 8700000 2025-12-23T18:37:38.501785Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:405:2439] TEvAllocateResult from# 8600000 to# 8700000 2025-12-23T18:37:38.501881Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:91:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.502010Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.502079Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2025-12-23T18:37:38.502124Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:407:2441] TEvAllocateResult from# 8700000 to# 8800000 2025-12-23T18:37:38.502215Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.502307Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2025-12-23T18:37:38.502331Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:409:2443] TEvAllocateResult from# 8800000 to# 8900000 2025-12-23T18:37:38.502426Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2025-12-23T18:37:38.502453Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:411:2445] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-12-23T18:37:38.507054Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:433:2467] requested range size#100000 2025-12-23T18:37:38.507558Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:435:2469] requested range size#100000 2025-12-23T18:37:38.507977Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# 
[1:437:2471] requested range size#100000 2025-12-23T18:37:38.508141Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.508333Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.508514Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:439:2473] requested range size#100000 2025-12-23T18:37:38.508825Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.509000Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:441:2475] requested range size#100000 2025-12-23T18:37:38.509147Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.509418Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:443:2477] requested range size#100000 2025-12-23T18:37:38.509842Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:445:2479] requested range size#100000 2025-12-23T18:37:38.510001Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.510175Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.510225Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.510356Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:447:2481] requested range size#100000 2025-12-23T18:37:38.510512Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.510702Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.511020Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:449:2483] requested range size#100000 2025-12-23T18:37:38.511154Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.511288Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.511454Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 
TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2025-12-23T18:37:38.511495Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:433:2467] TEvAllocateResult from# 9000000 to# 9100000 2025-12-23T18:37:38.511563Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.511701Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:451:2485] requested range size#100000 2025-12-23T18:37:38.511841Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.512032Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-12-23T18:37:38.512071Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:435:2469] TEvAllocateResult from# 9100000 to# 9200000 2025-12-23T18:37:38.512124Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.512272Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-12-23T18:37:38.512308Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:437:2471] TEvAllocateResult from# 9200000 to# 9300000 2025-12-23T18:37:38.512359Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.512479Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-12-23T18:37:38.512519Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:439:2473] TEvAllocateResult from# 9300000 to# 9400000 2025-12-23T18:37:38.512575Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.512725Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-12-23T18:37:38.512751Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:441:2475] TEvAllocateResult from# 9400000 to# 9500000 2025-12-23T18:37:38.512812Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.512939Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2025-12-23T18:37:38.512967Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:443:2477] TEvAllocateResult from# 9500000 to# 9600000 
2025-12-23T18:37:38.513038Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.513190Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-12-23T18:37:38.513218Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:445:2479] TEvAllocateResult from# 9600000 to# 9700000 2025-12-23T18:37:38.513272Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.513382Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-12-23T18:37:38.513407Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:447:2481] TEvAllocateResult from# 9700000 to# 9800000 2025-12-23T18:37:38.513452Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.513547Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-12-23T18:37:38.513570Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:449:2483] TEvAllocateResult from# 9800000 to# 9900000 2025-12-23T18:37:38.513661Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-12-23T18:37:38.513697Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:451:2485] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2025-12-23T18:37:38.489552Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-12-23T18:37:38.490074Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-12-23T18:37:38.490859Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-12-23T18:37:38.492634Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.493127Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-12-23T18:37:38.504777Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.504877Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.504957Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.505037Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-12-23T18:37:38.505170Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.505280Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-12-23T18:37:38.505419Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-12-23T18:37:38.506176Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#0 2025-12-23T18:37:38.506818Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.506886Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.506974Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2025-12-23T18:37:38.507014Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 0 expected SUCCESS >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [FAIL] |93.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 >> TTxLocatorTest::TestWithReboot [GOOD] |93.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test command err: 2025-12-23T18:37:38.350346Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-12-23T18:37:38.351130Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-12-23T18:37:38.352005Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: 
Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-12-23T18:37:38.354526Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.355044Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-12-23T18:37:38.367490Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.367599Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.367663Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.367762Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-12-23T18:37:38.367917Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.368031Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-12-23T18:37:38.368162Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 |93.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> KqpCost::WriteRowInsertFails-isSink-isOlap [GOOD] >> TestSetCloudPermissions::CanSetAllPermissions |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> HttpRequest::Probe >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2025-12-23T18:37:38.998338Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-12-23T18:37:39.007046Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-12-23T18:37:39.007967Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-12-23T18:37:39.009762Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.010235Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-12-23T18:37:39.031602Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.031697Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.031761Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.031853Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-12-23T18:37:39.032043Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.032193Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-12-23T18:37:39.032320Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-12-23T18:37:39.033098Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#281474976710656 2025-12-23T18:37:39.033266Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2025-12-23T18:37:39.036953Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-12-23T18:37:39.037463Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2110] requested range size#123456 2025-12-23T18:37:39.037974Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.038039Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.038137Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2025-12-23T18:37:39.038177Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:76:2110] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2025-12-23T18:37:39.038546Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2114] requested range size#281474976587200 2025-12-23T18:37:39.038673Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved 
from# 123456 Reserved to# 0 2025-12-23T18:37:39.038711Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:80:2114] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-12-23T18:37:39.039086Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2117] requested range size#246912 2025-12-23T18:37:39.039571Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.039720Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.039813Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2025-12-23T18:37:39.039868Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:83:2117] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2025-12-23T18:37:39.040265Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:87:2121] requested range size#281474976340288 2025-12-23T18:37:39.040374Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2025-12-23T18:37:39.040416Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:87:2121] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] Test command err: 2025-12-23T18:37:38.806807Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-12-23T18:37:38.807331Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-12-23T18:37:38.808097Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-12-23T18:37:38.810031Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.810492Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-12-23T18:37:38.823095Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.823207Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.823278Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.823371Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-12-23T18:37:38.823527Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.823638Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-12-23T18:37:38.823760Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-12-23T18:37:38.825333Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2025-12-23T18:37:38.825830Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2025-12-23T18:37:38.826152Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2025-12-23T18:37:38.826574Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2025-12-23T18:37:38.827257Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2025-12-23T18:37:38.827660Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.827804Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:90:2125] requested range size#100000 2025-12-23T18:37:38.828017Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.828193Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2025-12-23T18:37:38.828383Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 
0} 2025-12-23T18:37:38.828509Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.828663Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.828742Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2025-12-23T18:37:38.828987Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.829031Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.829230Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2025-12-23T18:37:38.829386Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.829468Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.829555Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2025-12-23T18:37:38.829791Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-12-23T18:37:38.829836Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2025-12-23T18:37:38.830021Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.830182Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.830264Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-12-23T18:37:38.830290Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2025-12-23T18:37:38.830391Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.830488Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-12-23T18:37:38.830514Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2025-12-23T18:37:38.830643Z node 1 
:TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.830720Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-12-23T18:37:38.831039Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2025-12-23T18:37:38.831156Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.831225Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2025-12-23T18:37:38.831252Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 400000 to# 500000 2025-12-23T18:37:38.831347Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.831457Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.831506Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-12-23T18:37:38.831530Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:90:2125] TEvAllocateResult from# 500000 to# 600000 2025-12-23T18:37:38.831590Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-12-23T18:37:38.831618Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2025-12-23T18:37:38.831763Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.831804Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-12-23T18:37:38.831827Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2025-12-23T18:37:38.831926Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.832024Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.832078Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-12-23T18:37:38.832100Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 
72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2025-12-23T18:37:38.832182Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:38.832256Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-12-23T18:37:38.832286Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-12-23T18:37:38.838194Z node 1 :TABLET_MAIN NOTICE: tablet_sys.cpp:1925: Tablet: 7205759404 ... ccessed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-12-23T18:37:39.329280Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:619:2556] TEvAllocateResult from# 9400000 to# 9500000 2025-12-23T18:37:39.329420Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.329541Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.329642Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2025-12-23T18:37:39.329672Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:621:2558] TEvAllocateResult from# 9500000 to# 9600000 2025-12-23T18:37:39.329752Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-12-23T18:37:39.329784Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:623:2560] TEvAllocateResult from# 9600000 to# 9700000 2025-12-23T18:37:39.329879Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.330011Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.330091Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-12-23T18:37:39.330114Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:625:2562] TEvAllocateResult from# 9700000 to# 9800000 2025-12-23T18:37:39.330203Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.330348Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
2025-12-23T18:37:39.330449Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-12-23T18:37:39.330485Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:627:2564] TEvAllocateResult from# 9800000 to# 9900000 2025-12-23T18:37:39.330617Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.330696Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.330971Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-12-23T18:37:39.331007Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:629:2566] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-12-23T18:37:39.335871Z node 1 :TABLET_MAIN NOTICE: tablet_sys.cpp:1925: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2025-12-23T18:37:39.337607Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:876: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2025-12-23T18:37:39.338790Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:421: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2025-12-23T18:37:39.338861Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617 2025-12-23T18:37:39.339066Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1485:0],] for 72057594046447617 2025-12-23T18:37:39.339115Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-12-23T18:37:39.339162Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-12-23T18:37:39.339197Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-12-23T18:37:39.339231Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id 
[72057594046447617:11:5:0:0:71:0], refs: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2025-12-23T18:37:39.339330Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2025-12-23T18:37:39.339377Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2025-12-23T18:37:39.339428Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2025-12-23T18:37:39.339467Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2025-12-23T18:37:39.339527Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2025-12-23T18:37:39.339661Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:632: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2025-12-23T18:37:39.339696Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1485:0],] for 72057594046447617 2025-12-23T18:37:39.339738Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-12-23T18:37:39.339765Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-12-23T18:37:39.339787Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-12-23T18:37:39.339809Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2025-12-23T18:37:39.339840Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:6:1:24576:78:0],] 2025-12-23T18:37:39.339867Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2025-12-23T18:37:39.339897Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: 
[[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2025-12-23T18:37:39.339923Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2025-12-23T18:37:39.339945Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2025-12-23T18:37:39.339969Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2025-12-23T18:37:39.341739Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2025-12-23T18:37:39.343941Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.347427Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-12-23T18:37:39.347742Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-12-23T18:37:39.348682Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2025-12-23T18:37:39.348829Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1641:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.348884Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.348967Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2025-12-23T18:37:39.911963Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-12-23T18:37:39.912519Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-12-23T18:37:39.913314Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-12-23T18:37:39.915183Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.915724Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-12-23T18:37:39.937307Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.937414Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.937506Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.937585Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-12-23T18:37:39.937725Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.937839Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-12-23T18:37:39.937964Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-12-23T18:37:39.938714Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#8796093022207 2025-12-23T18:37:39.939331Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.939388Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.939473Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2025-12-23T18:37:39.939511Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2025-12-23T18:37:39.943419Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#8796093022207 2025-12-23T18:37:39.943930Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.943991Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.944097Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2025-12-23T18:37:39.944141Z node 1 :TX_ALLOCATOR DEBUG: 
txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2025-12-23T18:37:39.944511Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#8796093022207 2025-12-23T18:37:39.944928Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.944991Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.945116Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2025-12-23T18:37:39.945156Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2025-12-23T18:37:39.945496Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2119] requested range size#8796093022207 2025-12-23T18:37:39.945794Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.945893Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.945974Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2025-12-23T18:37:39.946009Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:85:2119] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected SUCCESS 2025-12-23T18:37:39.946481Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:89:2123] requested range size#8796093022207 2025-12-23T18:37:39.946874Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.946929Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.947015Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2025-12-23T18:37:39.947055Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:89:2123] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2025-12-23T18:37:39.947450Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:93:2127] requested range size#8796093022207 2025-12-23T18:37:39.947750Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK 
StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.947836Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.947910Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2025-12-23T18:37:39.947946Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:93:2127] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2025-12-23T18:37:39.948377Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:97:2131] requested range size#8796093022207 2025-12-23T18:37:39.948713Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.948776Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.948865Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2025-12-23T18:37:39.948906Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:97:2131] TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2025-12-23T18:37:39.949361Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:101:2135] requested range size#8796093022207 2025-12-23T18:37:39.949639Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.949714Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.949833Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2025-12-23T18:37:39.949878Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:101:2135] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 2025-12-23T18:37:39.950297Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:105:2139] requested range size#8796093022207 2025-12-23T18:37:39.950576Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.950638Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.950730Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2025-12-23T18:37:39.951071Z 
node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:105:2139] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2025-12-23T18:37:39.951533Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:109:2143] requested range size#8796093022207 2025-12-23T18:37:39.951846Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.951954Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.952036Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Suc ... node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:157:2191] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2025-12-23T18:37:39.979070Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:161:2195] requested range size#8796093022207 2025-12-23T18:37:39.979563Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.979645Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.979809Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2025-12-23T18:37:39.979852Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:161:2195] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2025-12-23T18:37:39.980642Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:165:2199] requested range size#8796093022207 2025-12-23T18:37:39.981020Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.981124Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.981238Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2025-12-23T18:37:39.981285Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:165:2199] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 2025-12-23T18:37:39.981963Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:169:2203] requested range size#8796093022207 2025-12-23T18:37:39.982347Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.982450Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.982547Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2025-12-23T18:37:39.982608Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:169:2203] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2025-12-23T18:37:39.985817Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:173:2207] requested range size#8796093022207 2025-12-23T18:37:39.986187Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:28:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.986244Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.986348Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2025-12-23T18:37:39.986390Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:173:2207] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2025-12-23T18:37:39.987125Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:177:2211] requested range size#8796093022207 2025-12-23T18:37:39.987473Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.987561Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.987648Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2025-12-23T18:37:39.987690Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:177:2211] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2025-12-23T18:37:39.988369Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:181:2215] requested range size#8796093022207 2025-12-23T18:37:39.988697Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.988756Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.988830Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2025-12-23T18:37:39.988867Z node 1 
:TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:181:2215] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected SUCCESS 2025-12-23T18:37:39.989587Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:185:2219] requested range size#8796093022207 2025-12-23T18:37:39.990047Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.990121Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.990237Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 246290604621796 Reserved to# 255086697644003 2025-12-23T18:37:39.990273Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:185:2219] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2025-12-23T18:37:39.991054Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:189:2223] requested range size#8796093022207 2025-12-23T18:37:39.991377Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.991441Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.991541Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2025-12-23T18:37:39.991583Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:189:2223] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2025-12-23T18:37:39.992380Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:193:2227] requested range size#8796093022207 2025-12-23T18:37:39.992742Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.992796Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.992901Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2025-12-23T18:37:39.992946Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:193:2227] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2025-12-23T18:37:39.993693Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:197:2231] requested range size#8796093022207 2025-12-23T18:37:39.993976Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# 
[72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.994099Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.994982Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2025-12-23T18:37:39.995046Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:197:2231] TEvAllocateResult from# 272678883688417 to# 281474976710624 expected SUCCESS 2025-12-23T18:37:39.995940Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:201:2235] requested range size#31 2025-12-23T18:37:39.996264Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.996325Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:37:39.996428Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2025-12-23T18:37:39.996483Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:201:2235] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2025-12-23T18:37:39.997322Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:205:2239] requested range size#1 2025-12-23T18:37:39.997444Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-12-23T18:37:39.997484Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:205:2239] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTAS+isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 4009, MsgBus: 30217 2025-12-23T18:37:19.886225Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124323730821879:2194];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:19.886362Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:19.935297Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b11/r3tmp/tmpDfkRx0/pdisk_1.dat 2025-12-23T18:37:20.520686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:20.520805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:20.610301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:20.638228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:20.649237Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:20.702835Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124323730821719:2081] 1766515039859939 != 1766515039859942 TServer::EnableGrpc on GrpcPort 4009, node 1 2025-12-23T18:37:20.950987Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:20.981267Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:21.001809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:21.001828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:21.001840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:21.001919Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30217 TClient is connected to server localhost:30217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515040661 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:22.328322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:22.342610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:22.357771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:22.756887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:23.166079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:23.327565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:24.891800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124323730821879:2194];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:24.892180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:26.580954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124353795594670:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.581074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.581886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124353795594680:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.581933Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.069368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.118084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.172088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.219272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.300999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.358332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.423399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.534400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.757333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124358090562855:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.757413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.757867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124358090562860:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.757907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124358090562861:2492], DatabaseId: /Root, PoolId: default ... 56: TColumnShard.StateWork at 72075186224037968 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108070Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037970 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108125Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037964 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108173Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037966 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108225Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037960 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108267Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037962 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108314Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037958 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108363Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037987 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108410Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037989 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108472Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037950 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108520Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037948 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108565Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037946 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108614Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037944 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 
281474976710680 2025-12-23T18:37:37.108660Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037942 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108744Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037977 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108805Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038000 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108854Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037998 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108901Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037996 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.108969Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109019Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109065Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037939 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109114Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037937 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109159Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037956 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109208Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109259Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037952 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109331Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109385Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: 
NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109430Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109478Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109527Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109578Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109626Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109670Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109750Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109804Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109853Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109897Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109942Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.109986Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.110034Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.110079Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: 
TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2025-12-23T18:37:37.110127Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 query_phases { duration_us: 423558 table_access { name: "/Root/.tmp/sessions/66d33a6c-45ff-3ba3-a6fb-fab1a592cee2/Root/TestTable2_8d5af5e8-4c79-3233-3a35-a29ab3ca4730" updates { rows: 4 bytes: 1472 } partitions_count: 4 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 144 } } cpu_time_us: 68574 } compilation { duration_us: 30369 cpu_time_us: 21223 } process_cpu_time_us: 1045 total_duration_us: 5672126 total_cpu_time_us: 90842 2025-12-23T18:37:37.128972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:37:37.147720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 |93.3%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails-isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 13118, MsgBus: 12659 2025-12-23T18:37:19.406553Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124323009186936:2195];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:19.417551Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:19.446072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b12/r3tmp/tmpTlb7cj/pdisk_1.dat 2025-12-23T18:37:19.972874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:19.972968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:20.073221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:20.128287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:20.228620Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13118, node 1 2025-12-23T18:37:20.405262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:20.418918Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:20.489594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:20.489623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:20.489633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:20.489716Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12659 TClient is connected to server localhost:12659 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515040283 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:21.565985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:21.573922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:21.586189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:21.907377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:22.243283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:37:22.428662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:24.407181Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124323009186936:2195];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:24.407243Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:26.832637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124353073959706:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.832733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.833095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124353073959716:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.833131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:27.638511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.737249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.826476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.913556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:27.962115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:28.045561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:28.121798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:28.224823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:28.504822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124361663895212:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:28.504929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:28.505375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124361663895217:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:28.505412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124361663895218:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:28.505491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kq ... db://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [1:7587124396023634383:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T18:37:36.193466Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [1:7587124396023634389:2659], TxId: 281474976710691, task: 4. Ctx: { TraceId : 01kd6816yydqe70amv2etng5t5. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7587124396023634383:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T18:37:36.194020Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=, ActorId: [1:7587124374548797435:2532], ActorState: ExecuteState, TraceId: 01kd6816yydqe70amv2etng5t5, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } query_phases { duration_us: 1260 cpu_time_us: 1260 } query_phases { duration_us: 12723 table_access { name: "/Root/TestTable" partitions_count: 1 } table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 10337 affected_shards: 2 } query_phases { duration_us: 4271 cpu_time_us: 3342 } compilation { duration_us: 576447 cpu_time_us: 562166 } process_cpu_time_us: 2455 total_duration_us: 611041 total_cpu_time_us: 579560 2025-12-23T18:37:36.694500Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [1:7587124396023634435:2673], TxId: 281474976710694, task: 1. Ctx: { TraceId : 01kd6817jqedt0ndx4ahq9dsd3. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-12-23T18:37:36.695171Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [1:7587124396023634436:2674], TxId: 281474976710694, task: 2. Ctx: { CheckpointId : . TraceId : 01kd6817jqedt0ndx4ahq9dsd3. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7587124396023634432:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T18:37:36.695588Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=, ActorId: [1:7587124374548797435:2532], ActorState: ExecuteState, TraceId: 01kd6817jqedt0ndx4ahq9dsd3, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } query_phases { duration_us: 1115 cpu_time_us: 1115 } query_phases { duration_us: 4709 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 2810 affected_shards: 1 } query_phases { duration_us: 2161 cpu_time_us: 1723 } compilation { duration_us: 456866 cpu_time_us: 444301 } process_cpu_time_us: 2439 total_duration_us: 476346 total_cpu_time_us: 452388 2025-12-23T18:37:37.292028Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [1:7587124400318601781:2685], TxId: 281474976710697, task: 1. Ctx: { CheckpointId : . TraceId : 01kd681827dx8s3t8t5ae68f6j. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-12-23T18:37:37.292415Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [1:7587124400318601782:2686], TxId: 281474976710697, task: 2. Ctx: { CheckpointId : . TraceId : 01kd681827dx8s3t8t5ae68f6j. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [1:7587124400318601778:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T18:37:37.292889Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=, ActorId: [1:7587124374548797435:2532], ActorState: ExecuteState, TraceId: 01kd681827dx8s3t8t5ae68f6j, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } query_phases { duration_us: 966 cpu_time_us: 966 } query_phases { duration_us: 5885 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 3975 affected_shards: 1 } query_phases { duration_us: 2303 cpu_time_us: 2178 } compilation { duration_us: 557328 cpu_time_us: 545381 } process_cpu_time_us: 2121 total_duration_us: 580424 total_cpu_time_us: 554621 2025-12-23T18:37:37.744440Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [1:7587124400318601825:2698], TxId: 281474976710700, task: 1. Ctx: { CheckpointId : . TraceId : 01kd6818nb9ah6vgmpb1d1qpm3. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-12-23T18:37:37.744954Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [1:7587124400318601826:2699], TxId: 281474976710700, task: 2. Ctx: { CheckpointId : . TraceId : 01kd6818nb9ah6vgmpb1d1qpm3. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7587124400318601822:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T18:37:37.745326Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=, ActorId: [1:7587124374548797435:2532], ActorState: ExecuteState, TraceId: 01kd6818nb9ah6vgmpb1d1qpm3, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } query_phases { duration_us: 916 cpu_time_us: 916 } query_phases { duration_us: 4205 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 3035 affected_shards: 1 } query_phases { duration_us: 2534 cpu_time_us: 1567 } compilation { duration_us: 405647 cpu_time_us: 390473 } process_cpu_time_us: 1860 total_duration_us: 421708 total_cpu_time_us: 397851 2025-12-23T18:37:38.145028Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [1:7587124404613569182:2716], TxId: 281474976710703, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68192zbp3d0qb4sndmzy3s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-12-23T18:37:38.145351Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [1:7587124404613569183:2717], TxId: 281474976710703, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68192zbp3d0qb4sndmzy3s. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7587124404613569179:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T18:37:38.145758Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=, ActorId: [1:7587124374548797435:2532], ActorState: ExecuteState, TraceId: 01kd68192zbp3d0qb4sndmzy3s, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } query_phases { duration_us: 871 cpu_time_us: 871 } query_phases { duration_us: 4754 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 2923 affected_shards: 1 } query_phases { duration_us: 2193 cpu_time_us: 1706 } compilation { duration_us: 370233 cpu_time_us: 356382 } process_cpu_time_us: 1887 total_duration_us: 386228 total_cpu_time_us: 363769 2025-12-23T18:37:38.675418Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [1:7587124404613569236:2736], TxId: 281474976710706, task: 1. Ctx: { CheckpointId : . TraceId : 01kd6819fkavgz3m7zfjbekpxf. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-12-23T18:37:38.675991Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [1:7587124404613569237:2737], TxId: 281474976710706, task: 2. Ctx: { TraceId : 01kd6819fkavgz3m7zfjbekpxf. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [1:7587124404613569233:2532], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T18:37:38.676474Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=M2M3YzkxMzctYjUxMGE1ODYtYTAxOTU5NDAtZjdjNjU0NzU=, ActorId: [1:7587124374548797435:2532], ActorState: ExecuteState, TraceId: 01kd6819fkavgz3m7zfjbekpxf, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Duplicated keys found." issue_code: 2012 severity: 1 } query_phases { duration_us: 925 cpu_time_us: 925 } query_phases { duration_us: 9040 table_access { name: "/Root/TestTable2" partitions_count: 1 } cpu_time_us: 3405 affected_shards: 1 } query_phases { duration_us: 2382 cpu_time_us: 1933 } compilation { duration_us: 482836 cpu_time_us: 454228 } process_cpu_time_us: 1948 total_duration_us: 512824 total_cpu_time_us: 462439 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |93.3%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [LD] {RESULT} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |93.3%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit >> TestSetCloudPermissions::CanSetPermissionsForRootDb [GOOD] |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |93.3%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::DedicatedTimeIntervals |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] >> TSchemeShardTest::CreateTable >> HttpRequest::Analyze >> TSchemeShardTest::CreateAlterTableWithCodec ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsForRootDb [GOOD] Test command err: 2025-12-23T18:37:37.057232Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124399908706087:2128];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:37.057271Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:37.186352Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to 
scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005711/r3tmp/tmppEkVYs/pdisk_1.dat 2025-12-23T18:37:37.726856Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:37.753727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:37.753825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:37.851369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:37.908416Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124399908705999:2081] 1766515057025791 != 1766515057025794 2025-12-23T18:37:37.910853Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:38.096666Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.tables.write ydb.databases.create ydb.databases.connect ydb.tables.select) 2025-12-23T18:37:38.096783Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:38.097030Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ca0386f1950] Connect to grpc://localhost:6088 2025-12-23T18:37:38.100185Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca0386f1950] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.databases.l...(truncated) } 2025-12-23T18:37:38.121379Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:38.150392Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ca0386f1950] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:37:38.155229Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:37:38.156549Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root, user: user1@as, from ip: |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TSchemeShardTest::AlterTableDropColumnReCreateSplit |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |93.3%| [LD] {RESULT} 
$(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TSchemeShardTest::InitRootAgain |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse [GOOD] Test command err: 2025-12-23T18:37:37.387813Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124400845051257:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:37.387845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005710/r3tmp/tmpuaUzdU/pdisk_1.dat 2025-12-23T18:37:37.978870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:37.987906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:37.988007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:38.070642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:38.200384Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:38.211107Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124400845051235:2081] 1766515057380602 != 1766515057380605 2025-12-23T18:37:38.228552Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.005788s 2025-12-23T18:37:38.258828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:38.480419Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.tables.write ydb.databases.create ydb.databases.connect ydb.tables.select) 2025-12-23T18:37:38.480600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:38.483956Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cee89b003d0] Connect to grpc://localhost:20280 2025-12-23T18:37:38.488416Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cee89b003d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "database12345" type: "ydb.database" } resource_path { id: "folder12345" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" 
type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "database12345" type: "ydb.databa...(truncated) } 2025-12-23T18:37:38.515340Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cee89b003d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:37:38.515875Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:37:38.516879Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: >> TSchemeShardCheckProposeSize::CopyTable >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 >> KqpSystemView::NodesRange2 |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |93.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut >> TestSetCloudPermissions::CanSetAllPermissions [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk >> KqpSysColV1::StreamSelectRange >> KqpSysColV0::InnerJoinSelect >> KqpSystemView::PartitionStatsSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 21087, MsgBus: 28436 2025-12-23T18:33:56.173525Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123450700577245:2186];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:56.173801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003c69/r3tmp/tmpBdqpuK/pdisk_1.dat 2025-12-23T18:33:56.814952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:56.872695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:56.872797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:57.025460Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123450700577097:2081] 1766514836077248 != 1766514836077251 2025-12-23T18:33:57.035391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:57.036019Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:57.068065Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 21087, node 1 2025-12-23T18:33:57.187561Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:57.262696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:57.262723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:57.262756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:57.262838Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28436 TClient is connected to server localhost:28436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514837094 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:57.997355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:33:58.011230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:33:58.033921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:58.361736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:33:58.673993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:58.828885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:34:01.146896Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123450700577245:2186];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:01.146988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:34:01.980659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123472175415455:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:01.980792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:01.981440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123472175415465:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:01.981483Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.674795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.762912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.856838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.913714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.000978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.129151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.212533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.313188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.420222Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123480765350950:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:03.420353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:03.420995Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123480765350955:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:03.421048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123480765350956:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:03.421118Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [W ... R WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:36:22.135563Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:36:22.251005Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12141 2025-12-23T18:36:22.735120Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12141 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514982106 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:36:22.997207Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:36:23.012955Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:36:23.028705Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:23.164650Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:36:23.562653Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:23.772597Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:26.687294Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587124073320260225:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:26.687386Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:36:27.228536Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124099090065817:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:27.228653Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:27.229153Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124099090065827:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:27.229222Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:27.336471Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:27.388083Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:27.438075Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:27.509760Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:27.574162Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:27.701609Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:27.781944Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:27.868383Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:28.019689Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124103385033998:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:28.019799Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:28.020135Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124103385034003:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:28.020171Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124103385034004:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:28.020321Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:28.030931Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:36:28.066878Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587124103385034007:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:36:28.136255Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124103385034060:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:36:32.260255Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:36.952960Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:36:36.952994Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateTableWithDate >> TExportToS3Tests::ExportStartTime >> KqpSysColV0::UpdateAndDelete >> ConvertUnboxedValueToArrowAndBack::LargeVariant [GOOD] >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 >> ActorHandler::HttpOk [GOOD] >> ActorHandler::InvalidTokenForbidden >> KqpSysColV0::InnerJoinTables >> KqpSysColV0::SelectRowById ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetAllPermissions [GOOD] Test command err: 2025-12-23T18:37:39.872754Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124409594264944:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:39.873702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:39.925642Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005701/r3tmp/tmpxfMbUt/pdisk_1.dat 2025-12-23T18:37:40.406841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:40.503529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:40.503628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:40.692342Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:40.806546Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:40.812534Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124409594264909:2081] 1766515059793295 != 1766515059793298 2025-12-23T18:37:40.817231Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:40.851126Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:40.931088Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.developerApi.update ydb.tables.write ydb.databases.create ydb.databases.connect ydb.developerApi.get ydb.tables.select) 2025-12-23T18:37:40.931151Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d5421901550] Connect to grpc://localhost:9888 2025-12-23T18:37:40.934490Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d5421901550] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "database12345" type: "ydb.database" } resource_path { id: "folder12345" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "database12345" type: "ydb.databa...(truncated) } 2025-12-23T18:37:41.093361Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d5421901550] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:37:41.094134Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:37:41.094832Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> KqpCost::OlapRangeFullScan [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CreateAlterTableWithCacheMode |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/bridge/ydb-tests-functional-bridge |93.3%| [LD] {RESULT} $(B)/ydb/tests/functional/bridge/ydb-tests-functional-bridge >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/bridge/ydb-tests-functional-bridge >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> 
TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> KqpCost::OlapWriteRow [GOOD] >> KqpCost::WriteRow-isSink-isOlap [GOOD] >> KqpCost::IndexLookup-useSink [GOOD] >> KqpSysColV1::StreamInnerJoinTables |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> ConvertUnboxedValueToArrowAndBack::LargeVariant [GOOD] >> KqpSysColV1::StreamSelectRowById |93.3%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |93.3%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> KqpSystemView::Join >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false >> TExportToS3Tests::ExportStartTime [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 12170, MsgBus: 21097 2025-12-23T18:37:34.116703Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124386154961270:2161];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:34.116907Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b09/r3tmp/tmp9yiC83/pdisk_1.dat 2025-12-23T18:37:34.710475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:34.717918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:34.847571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:34.890917Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124386154961146:2081] 1766515054089603 != 1766515054089606 2025-12-23T18:37:34.917382Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:34.932232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12170, node 1 2025-12-23T18:37:35.164424Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:35.279014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:35.295507Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:35.295535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:35.295543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:35.295640Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21097 TClient is connected to server localhost:21097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515054983 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:36.448908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:36.480181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:37:36.489973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:36.993443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:37.371877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:37.532636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:39.118997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124386154961270:2161];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:39.119079Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:40.101890Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124411924766801:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:40.102056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:40.102717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124411924766811:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:40.102778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:40.635168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.695679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.755258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.801803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.843545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.898099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.957865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:41.023600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:41.187822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124416219734978:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.187912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.188228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124416219734984:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.188268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124416219734983:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.188300Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [W ... 44.270031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:37:44.270127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:37:44.270151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:37:44.270184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:37:44.270211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:37:44.270245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:37:44.270270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:37:44.270368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:37:44.270393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-12-23T18:37:44.270461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-12-23T18:37:44.270525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-12-23T18:37:44.280484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7587124424809670075:2544];ev=NActors::IEventHandle;tablet_id=72075186224037931;tx_id=281474976715673;this=137085688655008;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1766515064280;max=18446744073709551615;plan=0;src=[1:7587124386154961496:2148];cookie=442:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.280628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037929;self_id=[1:7587124424809670084:2545];ev=NActors::IEventHandle;tablet_id=72075186224037929;tx_id=281474976715673;this=137085688682560;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1766515064280;max=18446744073709551615;plan=0;src=[1:7587124386154961496:2148];cookie=422:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.284181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7587124424809670096:2547];ev=NActors::IEventHandle;tablet_id=72075186224037930;tx_id=281474976715673;this=137085688683456;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1766515064283;max=18446744073709551615;plan=0;src=[1:7587124386154961496:2148];cookie=432:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.291223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.291318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.291338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.293314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.293372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.293389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.340469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.340533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.340548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.340698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.340740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.340753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.351077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.351134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.351148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.354388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.354446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.354460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.359803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.359861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.359876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.368470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.368527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.368542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.375539Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.375606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.375626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.377088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.377132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:44.377146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; query_phases { duration_us: 330441 table_access { name: "/Root/TestTable" reads { rows: 3 bytes: 108 } } cpu_time_us: 183412 } compilation { duration_us: 778187 cpu_time_us: 755308 } process_cpu_time_us: 448 total_duration_us: 1127996 total_cpu_time_us: 939168 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow-isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 9158, MsgBus: 15831 2025-12-23T18:37:34.323485Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124388199653014:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:34.324687Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b07/r3tmp/tmpydB3qn/pdisk_1.dat 2025-12-23T18:37:35.073704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:35.073823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:35.109327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:35.175718Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:35.267216Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:35.289705Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124388199652969:2081] 1766515054253075 != 
1766515054253078 TServer::EnableGrpc on GrpcPort 9158, node 1 2025-12-23T18:37:35.374922Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:35.462335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:35.590133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:35.590158Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:35.590168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:35.590261Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15831 TClient is connected to server localhost:15831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515055354 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:36.496365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:36.512670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:36.534085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:36.898797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:37.353654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:37.499304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:39.322982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124388199653014:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:39.323106Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:40.075001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124413969458636:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:40.075105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:40.075504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124413969458646:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:40.075549Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:40.596455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.667768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.717593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.759407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.799549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.846722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.924044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:41.008859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:41.152918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124418264426813:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.153025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.153569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124418264426818:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.153620Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124418264426819:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.153727Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.159225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:41.177309Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124418264426822:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:41.260116Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124418264426873:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:43.869976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 712 cpu_time_us: 712 } query_phases { duration_us: 7131 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1137 affected_shards: 1 } compilation { duration_us: 114533 cpu_time_us: 106488 } process_cpu_time_us: 1145 total_duration_us: 125383 total_cpu_time_us: 109482 query_phases { duration_us: 8294 cpu_time_us: 8294 } query_phases { duration_us: 27575 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1287 affected_shards: 1 } compilation { duration_us: 194821 cpu_time_us: 177485 } process_cpu_time_us: 8762 total_duration_us: 238444 total_cpu_time_us: 195828 2025-12-23T18:37:44.892845Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [1:7587124431149329216:2569], TxId: 281474976710678, task: 1. Ctx: { CheckpointId : . TraceId : 01kd681fpvds53ygpdg6bvw0b7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZWNlMDE4NS05NTE4ZTE4NS1jMGJkY2M0MC1iYjdhZjZiYQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-12-23T18:37:44.893383Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [1:7587124431149329217:2570], TxId: 281474976710678, task: 2. Ctx: { CheckpointId : . TraceId : 01kd681fpvds53ygpdg6bvw0b7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZWNlMDE4NS05NTE4ZTE4NS1jMGJkY2M0MC1iYjdhZjZiYQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7587124431149329213:2529], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T18:37:44.894004Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=ZWNlMDE4NS05NTE4ZTE4NS1jMGJkY2M0MC1iYjdhZjZiYQ==, ActorId: [1:7587124426854361757:2529], ActorState: ExecuteState, TraceId: 01kd681fpvds53ygpdg6bvw0b7, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } query_phases { duration_us: 981 cpu_time_us: 981 } query_phases { duration_us: 8957 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 8169 affected_shards: 1 } query_phases { duration_us: 26862 cpu_time_us: 26142 } compilation { duration_us: 298597 cpu_time_us: 291746 } process_cpu_time_us: 2113 total_duration_us: 354197 total_cpu_time_us: 329151 query_phases { duration_us: 804 cpu_time_us: 804 } query_phases { duration_us: 4337 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 3313 affected_shards: 1 } query_phases { duration_us: 2248 cpu_time_us: 1845 } query_phases { duration_us: 11149 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1319 affected_shards: 1 } compilation { duration_us: 426230 cpu_time_us: 418319 } process_cpu_time_us: 1865 total_duration_us: 462202 total_cpu_time_us: 427465 query_phases { duration_us: 727 cpu_time_us: 727 } query_phases { duration_us: 6036 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 3786 affected_shards: 1 } query_phases { duration_us: 1368 cpu_time_us: 563 affected_shards: 1 } compilation { duration_us: 301055 cpu_time_us: 292481 } process_cpu_time_us: 1616 total_duration_us: 321195 total_cpu_time_us: 299173 query_phases { duration_us: 777 cpu_time_us: 777 } query_phases { duration_us: 7531 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 3313 affected_shards: 1 } query_phases { duration_us: 8885 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1395 affected_shards: 1 } compilation { duration_us: 270073 cpu_time_us: 260734 } process_cpu_time_us: 1728 total_duration_us: 306685 total_cpu_time_us: 267947 query_phases { duration_us: 518 cpu_time_us: 518 } query_phases { duration_us: 7593 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1005 affected_shards: 1 } compilation { duration_us: 158433 cpu_time_us: 147018 } process_cpu_time_us: 910 total_duration_us: 169422 total_cpu_time_us: 149451 query_phases { duration_us: 608 cpu_time_us: 608 } query_phases { duration_us: 2814 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1070 affected_shards: 1 } compilation { duration_us: 123899 cpu_time_us: 114552 } process_cpu_time_us: 1033 total_duration_us: 133788 total_cpu_time_us: 117263 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TExportToS3Tests::ExportPartitioningSettings |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |93.3%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> KqpSystemView::Sessions-EnableRealSystemViewPaths >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableKeyColumns >> TSchemeShardTest::DropTableTwice [GOOD] >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::CreateIndexedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 61701, MsgBus: 8952 2025-12-23T18:37:35.682435Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124390286026025:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:35.682580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b05/r3tmp/tmpe4GBNF/pdisk_1.dat 2025-12-23T18:37:36.278865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:36.293344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:36.293437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:36.487931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:36.531878Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:36.558957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 61701, node 1 2025-12-23T18:37:36.761625Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:36.911441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:36.911472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:36.911480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:36.911563Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8952 TClient is connected to server localhost:8952 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515056607 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:37.858464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:37.866179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:37.883648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:38.126171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:38.481424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:38.582825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:40.686016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124390286026025:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:40.686119Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:41.457467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124416055831537:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.457611Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.458366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124416055831547:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.458440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:41.994515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:42.052673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:42.110924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:42.153657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:42.195581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:42.258447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:42.344981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:42.439805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:42.593042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124420350799723:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:42.593151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:42.593843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124420350799728:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:42.593889Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124420350799729:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:42.594183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:42.603027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:42.627209Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124420350799732:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:42.684560Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124420350799783:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:45.250190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow [GOOD] Test command err: Trying to start YDB, gRPC: 5638, MsgBus: 14433 2025-12-23T18:37:33.651045Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124380852338288:2271];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:33.651130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b08/r3tmp/tmp55Z7HB/pdisk_1.dat 2025-12-23T18:37:34.194969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:34.200242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:34.200349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:34.474695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:34.490922Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124380852338031:2081] 1766515053533542 != 1766515053533545 2025-12-23T18:37:34.562042Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:34.570650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:34.647008Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TServer::EnableGrpc on GrpcPort 5638, node 1 2025-12-23T18:37:35.043577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:35.043605Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:35.043617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2025-12-23T18:37:35.043738Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14433 TClient is connected to server localhost:14433 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515054640 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:36.091838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:36.127018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:36.386962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:36.765420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:36.885001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:38.650339Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124380852338288:2271];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:38.654654Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:39.295566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124406622143697:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:39.295878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:39.296336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124406622143707:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:39.296383Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:39.789384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:39.827951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:39.877298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:39.939881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:39.994223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.061470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.113732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.198630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:40.385390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124410917111877:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:40.385475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:40.385795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124410917111883:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:40.385826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:40.385866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124410917111882:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2 ... d; 2025-12-23T18:37:43.507268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:43.507317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:43.507317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:43.507330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:43.507330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:43.517722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:43.517784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:43.517797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:43.521729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:43.521791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2025-12-23T18:37:43.521806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; query_phases { duration_us: 13934 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 2216 affected_shards: 1 } query_phases { duration_us: 30116 cpu_time_us: 159 affected_shards: 1 } compilation { duration_us: 164209 cpu_time_us: 150082 } process_cpu_time_us: 856 total_duration_us: 213272 total_cpu_time_us: 153313 query_phases { duration_us: 9358 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 1875 affected_shards: 1 } query_phases { duration_us: 21098 cpu_time_us: 268 affected_shards: 1 } compilation { duration_us: 119403 cpu_time_us: 109229 
} process_cpu_time_us: 831 total_duration_us: 156483 total_cpu_time_us: 112203 query_phases { duration_us: 113482 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 2623 affected_shards: 1 } query_phases { duration_us: 13599 cpu_time_us: 254 affected_shards: 1 } compilation { duration_us: 130014 cpu_time_us: 116182 } process_cpu_time_us: 868 total_duration_us: 262745 total_cpu_time_us: 119927 query_phases { duration_us: 24678 table_access { name: "/Root/TestTable" updates { rows: 2 bytes: 744 } partitions_count: 2 } cpu_time_us: 2980 affected_shards: 2 } query_phases { duration_us: 25469 cpu_time_us: 141 affected_shards: 2 } compilation { duration_us: 109289 cpu_time_us: 99821 } process_cpu_time_us: 841 total_duration_us: 166655 total_cpu_time_us: 103783 query_phases { duration_us: 21446 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 2153 affected_shards: 1 } query_phases { duration_us: 8308 cpu_time_us: 155 affected_shards: 1 } compilation { duration_us: 231668 cpu_time_us: 216522 } process_cpu_time_us: 776 total_duration_us: 266072 total_cpu_time_us: 219606 query_phases { duration_us: 14556 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 2372 affected_shards: 1 } query_phases { duration_us: 24719 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 2417 affected_shards: 2 } query_phases { duration_us: 34372 cpu_time_us: 117 affected_shards: 2 } compilation { duration_us: 115204 cpu_time_us: 109083 } process_cpu_time_us: 1322 total_duration_us: 198472 total_cpu_time_us: 115311 2025-12-23T18:37:45.304852Z node 1 :TX_COLUMNSHARD_RESTORE WARN: log.cpp:841: tablet_id=72075186224037935;tablet_actor_id=[1:7587124419507047010:2553];this=136763089894464;activity=1;task_id=7911e502-e02e11f0-b16955dd-5998868a::4;fline=restore.cpp:28;event=merge_data_problems;write_id=4;tablet_id=72075186224037935;message=Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}; 2025-12-23T18:37:45.305655Z node 1 :TX_COLUMNSHARD_SCAN WARN: actor.cpp:152: Scan [1:7587124432391949719:2833] got AbortExecution txId: 281474976710689 scanId: 1 gen: 1 tablet: 72075186224037935 code: ABORTED reason: {
: Error: task finished: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]} } 2025-12-23T18:37:45.307074Z node 1 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037935;self_id=[1:7587124419507047010:2553];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteBlobsResult;tablet_id=72075186224037935;event=TEvWriteBlobsResult;fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]};tx_id=281474976710689; 2025-12-23T18:37:45.317283Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [1:7587124432391949716:2831], Table: `/Root/TestTable` ([72057594046644480:54:1]), SessionActorId: [0:0:0]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037935, Sink=[1:7587124432391949716:2831].{
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}, code: 2012 } 2025-12-23T18:37:45.317407Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2081: SelfId: [1:7587124432391949713:2831], TxId: 281474976710689, task: 1. Ctx: { CheckpointId : . TraceId : 01kd681g9d85yfffxaf7xkw2wz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZjVmNmM5MmUtMTY4NDU1MjEtOGU3OTA5MDQtM2NjZGY4NTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}, code: 2012 } } 2025-12-23T18:37:45.317548Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [1:7587124432391949713:2831], TxId: 281474976710689, task: 1. Ctx: { CheckpointId : . TraceId : 01kd681g9d85yfffxaf7xkw2wz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZjVmNmM5MmUtMTY4NDU1MjEtOGU3OTA5MDQtM2NjZGY4NTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}, code: 2012 } }. 2025-12-23T18:37:45.318333Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=ZjVmNmM5MmUtMTY4NDU1MjEtOGU3OTA5MDQtM2NjZGY4NTE=, ActorId: [1:7587124419507046865:2539], ActorState: ExecuteState, TraceId: 01kd681g9d85yfffxaf7xkw2wz, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `/Root/TestTable`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key. {\"sorting_columns\":[{\"name\":\"Group\",\"value\":\"1\"},{\"name\":\"Name\",\"value\":\"Anna\"}],\"fields\":[\"Group: Uint32\",\"Name: String\"]}" issue_code: 2012 severity: 1 } } query_phases { duration_us: 39676 cpu_time_us: 2060 } compilation { duration_us: 135425 cpu_time_us: 129539 } process_cpu_time_us: 814 total_duration_us: 184067 total_cpu_time_us: 132413 query_phases { duration_us: 21597 cpu_time_us: 2494 affected_shards: 1 } query_phases { duration_us: 11120 cpu_time_us: 154 affected_shards: 1 } compilation { duration_us: 80401 cpu_time_us: 74626 } process_cpu_time_us: 984 total_duration_us: 118155 total_cpu_time_us: 78258 query_phases { duration_us: 18055 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 2365 affected_shards: 1 } query_phases { duration_us: 14849 cpu_time_us: 162 affected_shards: 1 } compilation { duration_us: 69554 cpu_time_us: 63711 } process_cpu_time_us: 897 total_duration_us: 107502 total_cpu_time_us: 67135 query_phases { duration_us: 11025 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 2207 affected_shards: 1 } query_phases { duration_us: 7498 cpu_time_us: 184 affected_shards: 1 } compilation { duration_us: 80367 cpu_time_us: 74252 } process_cpu_time_us: 991 total_duration_us: 104871 total_cpu_time_us: 77634 query_phases { duration_us: 15655 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 2117 affected_shards: 1 } query_phases { duration_us: 16721 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 2432 affected_shards: 2 } query_phases { duration_us: 38263 cpu_time_us: 189 affected_shards: 2 } compilation { duration_us: 136465 cpu_time_us: 111984 } process_cpu_time_us: 1220 total_duration_us: 212873 total_cpu_time_us: 117942 query_phases { duration_us: 930 cpu_time_us: 930 } query_phases { duration_us: 238955 table_access { name: "/Root/TestTable" reads { rows: 2 bytes: 40 } deletes { rows: 2 } partitions_count: 2 } cpu_time_us: 58373 affected_shards: 10 } query_phases { duration_us: 37938 cpu_time_us: 251 affected_shards: 10 } compilation { duration_us: 425960 cpu_time_us: 417715 } process_cpu_time_us: 1975 total_duration_us: 720829 total_cpu_time_us: 479244 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpSystemView::CompileCacheQueriesOrderByDesc >> TSchemeShardTest::CreateAlterTableWithCacheMode [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 |93.3%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |93.3%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> KqpSystemView::NodesOrderByDesc >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::AlterTableConfig >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges >> KqpProxy::PassErrroViaSessionActor >> ScriptExecutionsTest::RunCheckLeaseStatus >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects >> TableCreation::SimpleTableCreation >> TSchemeShardTest::IgnoreUserColumnIds [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit >> TableCreation::ConcurrentTableCreation |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/s3_bench/ydb_s3_bench |93.3%| [LD] {RESULT} $(B)/ydb/tests/tools/s3_bench/ydb_s3_bench >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::AlterTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableFollowers >> TExportToS3Tests::ExportTableWithUniqueIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 |93.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/s3_bench/ydb_s3_bench >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 >> KqpSystemView::PartitionStatsSimple [GOOD] >> KqpSysColV0::InnerJoinSelect [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::DropTable >> ActorHandler::InvalidTokenForbidden [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 >> KqpSysColV1::StreamSelectRange [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] >> TSchemeShardTest::AlterTable [GOOD] >> TSchemeShardTest::AlterTableById >> KqpSysColV0::InnerJoinTables [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 >> KqpSysColV0::SelectRowById [GOOD] >> TSchemeShardTest::AlterTableFollowers [GOOD] >> 
TSchemeShardTest::AlterTableSizeToSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 11607, MsgBus: 61600 2025-12-23T18:37:45.851322Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124434050495168:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:45.851430Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:45.917296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e93/r3tmp/tmpBTF7wm/pdisk_1.dat 2025-12-23T18:37:46.419575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:46.419676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:46.500780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:46.606049Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:46.606211Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124434050494957:2081] 1766515065807173 != 1766515065807176 2025-12-23T18:37:46.615098Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11607, node 1 2025-12-23T18:37:46.806932Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:46.835477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:46.835504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:46.835510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:46.835608Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:46.920164Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61600 TClient is connected to server localhost:61600 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515066610 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:37:48.456578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:37:48.467665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:48.483425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:48.793556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:49.161940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:49.257484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:50.831643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124434050495168:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:50.831709Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:52.260496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124464115267913:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.260597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.270213Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124464115267923:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.270295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.712507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.760000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.812104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.867145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.971812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.060234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.117014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.193024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.280498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124468410236097:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.280592Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.281089Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124468410236102:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.281135Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124468410236103:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.281171Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.285739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:53.299034Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124468410236106:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:53.363332Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124468410236157:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:55.894962Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515075881, txId: 281474976710673] shutting down |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TExportToS3Tests::ExportTableWithUniqueIndex [GOOD] >> KqpSysColV1::StreamInnerJoinTables [GOOD] |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |93.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/ut/ydb-core-client-ut |93.3%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 >> KqpProxy::PassErrroViaSessionActor [GOOD] >> KqpProxy::NodeDisconnectedTest |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut >> TExportToS3Tests::IndexMaterializationDisabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 11298, MsgBus: 63423 2025-12-23T18:37:45.799155Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124436194269493:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:45.801796Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e90/r3tmp/tmpP0bMZ7/pdisk_1.dat 2025-12-23T18:37:46.488072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:46.599715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:46.599833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:46.704951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:46.718909Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124436194269465:2081] 1766515065757502 != 1766515065757505 2025-12-23T18:37:46.733726Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:46.772325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 11298, node 1 2025-12-23T18:37:46.811153Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:46.991467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:46.991491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:46.991498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:46.991587Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63423 TClient is connected to server localhost:63423 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515066785 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:47.822493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:47.839461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:48.063262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:48.225973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:48.298361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:50.767072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124436194269493:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:50.767161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:51.453580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124461964075122:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:51.453724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:51.454325Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124461964075132:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:51.454371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:51.987721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.044831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.125493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.187652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.242261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.308654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.410429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.502361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.661303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124466259043306:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.661414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.661686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124466259043311:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.661719Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124466259043312:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.661745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.665803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:52.706456Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124466259043315:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:52.784030Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124466259043368:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 1862, MsgBus: 28439 2025-12-23T18:37:45.742665Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124433238144955:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:45.750527Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e91/r3tmp/tmpObXVyr/pdisk_1.dat 2025-12-23T18:37:46.288805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:46.288922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:46.299805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:46.550107Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:46.550255Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124433238144895:2081] 1766515065624610 != 1766515065624613 2025-12-23T18:37:46.552722Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1862, node 1 2025-12-23T18:37:46.793235Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:46.805383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:46.809520Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:46.809548Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:46.809555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:46.809644Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28439 TClient is connected to server localhost:28439 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515066596 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:47.866139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:47.906989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:47.957250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:48.477810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:48.932212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:49.133542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:50.727062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124433238144955:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:50.727135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:52.171674Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124463302917862:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.171824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.172342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124463302917872:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.172425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.815973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.967748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.017286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.094580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.151241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.228929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.289104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.382489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.748580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124467597886049:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.748656Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.749054Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124467597886054:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.749104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124467597886055:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.749204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.754446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:53.783632Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124467597886058:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:53.857211Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124467597886119:3788] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:56.953949Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515076984, txId: 281474976710673] shutting down |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorHandler::InvalidTokenForbidden [GOOD] Test command err: 2025-12-23T18:37:36.676069Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124395820143333:2161];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:36.678933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:37:37.434860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:37.528412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:37.529188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:37.722672Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:37.743133Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124395820143209:2081] 1766515056633551 != 1766515056633554 2025-12-23T18:37:37.751387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:37.791280Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:37.801570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 23874, node 1 2025-12-23T18:37:38.167502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:38.167552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:38.167562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:38.167653Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16384 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515057874 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:38.879731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:38.885489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:37:38.973260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:37:39.004644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:37:39.019037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-12-23T18:37:47.299083Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124441855541668:2077];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:37:47.356611Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:47.382958Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.010913s 2025-12-23T18:37:47.407675Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.585855Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:47.601903Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124441855541628:2081] 1766515067249004 != 1766515067249007 2025-12-23T18:37:47.604002Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.608562Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:47.608675Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:47.635980Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9314, node 2 2025-12-23T18:37:47.847684Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:47.847716Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:47.847726Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:47.847838Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:48.205562Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:48.294926Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1716 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515067688 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:48.896413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:48.911485Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:49.009344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:37:49.048807Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:37:49.062415Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 19400, MsgBus: 22942 2025-12-23T18:37:45.756990Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124436160303471:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:45.757126Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e97/r3tmp/tmpnxCC7I/pdisk_1.dat 2025-12-23T18:37:46.258820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:46.280174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:46.280314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:46.296390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:46.473048Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:46.534991Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124436160303352:2081] 1766515065626695 != 1766515065626698 2025-12-23T18:37:46.554924Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 19400, node 1 2025-12-23T18:37:46.807686Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:46.855886Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:46.855909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:46.855914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:46.855992Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:46.942823Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22942 TClient is connected to server localhost:22942 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515066722 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:47.853706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:47.868082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:47.891834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:48.163074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:48.610415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:48.800412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:50.702868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124436160303471:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:50.702940Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:52.113347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124466225076313:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.113451Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.117036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124466225076323:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.117108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.977052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.029214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.079688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.129946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.171399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.234771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.323772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.398920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.536540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124470520044501:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.536627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.537049Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124470520044506:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.537092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124470520044507:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.537420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.541593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:53.565455Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124470520044510:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:53.646732Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124470520044561:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 8082, MsgBus: 23836 2025-12-23T18:37:47.223740Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124441518576279:2173];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:47.223895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e8e/r3tmp/tmpq2omSq/pdisk_1.dat 2025-12-23T18:37:47.763994Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.838925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:47.839027Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:47.901615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:47.970858Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124441518576123:2081] 1766515067150143 != 1766515067150146 2025-12-23T18:37:47.989232Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8082, node 1 2025-12-23T18:37:47.999814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:48.123386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:48.123407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:48.123414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:48.123500Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:48.239014Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:23836 TClient is connected to server localhost:23836 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515067947 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:49.121457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:49.147562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:49.416699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:37:49.603355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:49.693179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:52.071945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124462993414476:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.072089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.081006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124462993414486:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.081120Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.227361Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124441518576279:2173];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:52.227457Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:52.822987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.887390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:52.994250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.042947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.083545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.162198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.256624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.421893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.866465Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124467288382656:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.866544Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.869341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124467288382661:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.869409Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124467288382662:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.869450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.874034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:53.932065Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124467288382665:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:54.088585Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124471583350019:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 32103, MsgBus: 2054 2025-12-23T18:37:47.302536Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124444948114420:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:47.302711Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e86/r3tmp/tmpdcdtPT/pdisk_1.dat 2025-12-23T18:37:48.002881Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:48.067740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:48.067848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:48.241356Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:48.286488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:48.307108Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:48.318966Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124444948114176:2081] 1766515067235024 != 1766515067235027 2025-12-23T18:37:48.406827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 32103, node 1 2025-12-23T18:37:48.783303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:48.783322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:48.783328Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:48.783395Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2054 TClient is connected to server localhost:2054 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515068486 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:50.373480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:50.413797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:50.688824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:51.040815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:51.159305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:52.301156Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124444948114420:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:52.301213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:53.824259Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124470717919839:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.824426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.824963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124470717919849:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.825004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.252234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.292711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.342265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.391824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.442044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.536305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.634591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.843939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:55.066950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124479307855320:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.067051Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.068193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124479307855325:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.068243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124479307855326:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.068276Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.075668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:55.095406Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124479307855329:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:37:55.190055Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124479307855380:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TSchemeShardTest::CreateIndexedTableRejects [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop >> TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge >> TSchemeShardTest::DropTable [GOOD] >> TSchemeShardTest::DropTableById >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 14819, MsgBus: 8482 2025-12-23T18:37:48.029579Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124449167093403:2096];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:48.034965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e83/r3tmp/tmpZaJHxD/pdisk_1.dat 2025-12-23T18:37:48.470835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:48.612143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:48.612266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:48.623148Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:48.632268Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124444872126039:2081] 1766515068022071 != 1766515068022074 2025-12-23T18:37:48.660138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:48.663028Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 14819, node 1 2025-12-23T18:37:48.927594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:48.927621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:48.927631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:48.927754Z 
node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:49.035123Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:8482 TClient is connected to server localhost:8482 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515068710 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:49.787784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:49.802980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:49.823340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:50.089443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:50.319496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:50.402325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:52.552899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124466346964387:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.553015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.555500Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124466346964397:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:52.555595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.034879Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124449167093403:2096];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:53.034970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:53.348098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.413164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:53.824270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.173912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.401540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.485559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.567714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.686285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.874753Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124474936899870:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.874918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.875289Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124474936899875:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.875336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124474936899876:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.875657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.880165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:54.915248Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124474936899879:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:54.979660Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124474936899932:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] 2025-12-23T18:37:58.448533Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515078454, txId: 281474976710673] shutting down >> KqpSysColV0::UpdateAndDelete [GOOD] >> TExportToS3Tests::IndexMaterializationDisabled [GOOD] |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::AlterTableSplitSchema >> Config::ExcludeScope [GOOD] >> TExportToS3Tests::IndexMaterialization ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 65026, MsgBus: 13030 2025-12-23T18:37:48.434081Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124444975936497:2239];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:48.434166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e81/r3tmp/tmpexIkLo/pdisk_1.dat 2025-12-23T18:37:49.550291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:49.550446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:49.750696Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:49.760363Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:49.796116Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:49.817204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:49.834986Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124444975936298:2081] 1766515068393747 != 1766515068393750 TServer::EnableGrpc on GrpcPort 65026, node 1 2025-12-23T18:37:50.129416Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:50.151741Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:50.151780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
(empty maybe) 2025-12-23T18:37:50.151788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:50.151860Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13030 TClient is connected to server localhost:13030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515069865 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:51.207085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:51.222659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:51.255355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:51.462670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:51.841892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:52.002326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:53.541864Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124444975936497:2239];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:53.542018Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:55.581290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124475040709254:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.581400Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.591854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124475040709263:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.591961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:56.093622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:56.143511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:56.186530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:56.224486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:56.263835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:56.319987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:56.402677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:56.480416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:56.605588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124479335677446:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:56.605672Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:56.606105Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124479335677451:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:56.606142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124479335677452:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:56.606399Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:56.621307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:56.652946Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124479335677455:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:56.728892Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124479335677506:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:59.789530Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515079798, txId: 281474976710673] shutting down |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest >> ScriptExecutionsTest::RestartQueryWithGetOperation |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::AggregateStat >> FormatTimes::DurationUs [GOOD] >> StatsFormat::AggregateStat [GOOD] >> KqpCost::VectorIndexLookup+useSink [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> Config::ExcludeScope [GOOD] |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest >> TSchemeShardTest::DropTableById [GOOD] >> TSchemeShardTest::DropPQ |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::AggregateStat [GOOD] |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationUs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 24907, MsgBus: 11348 2025-12-23T18:37:46.577391Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124436610335928:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:46.578231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:46.609804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e8f/r3tmp/tmp0eKlI2/pdisk_1.dat 2025-12-23T18:37:47.170861Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.239499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:47.239617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:47.334892Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124436610335857:2081] 1766515066524287 != 1766515066524290 2025-12-23T18:37:47.392019Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:47.406814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:47.407524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24907, node 1 2025-12-23T18:37:47.590409Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:47.737496Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:47.737520Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:47.737527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:47.737604Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11348 TClient is connected to server localhost:11348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515067464 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:49.096286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:49.111954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:49.133666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:49.604925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:49.939491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:50.067753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:51.603430Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124436610335928:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:51.603782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:53.597019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124466675108834:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.597116Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.597808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124466675108843:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:53.597855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.304175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.345332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.403371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.487819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.526153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.589716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.648912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.736213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:54.889387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124470970077027:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.889494Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.889829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124470970077032:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.889861Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.896420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124470970077033:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.906870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:54.938325Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124470970077036:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:55.007243Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124475265044383:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterPersQueueGroup |93.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |93.3%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |93.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTableWithUniformPartitioning |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 >> KqpSystemView::CompileCacheQueriesOrderByDesc [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |93.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::VectorIndexLookup+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 29551, MsgBus: 13014 2025-12-23T18:37:11.241767Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124290012317463:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:11.241810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b22/r3tmp/tmpbedHfI/pdisk_1.dat 2025-12-23T18:37:11.706982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:11.734674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:11.734783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:11.871973Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:11.898847Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29551, node 1 2025-12-23T18:37:12.006482Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:12.195362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:12.195388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:12.195395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:12.195517Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:12.263796Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13014 TClient is connected to server localhost:13014 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515031953 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:12.791014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:12.803484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:12.814382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:12.973867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:13.183278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:13.279006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:15.793021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124307192188407:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.793189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.799584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124307192188417:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:15.799668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:16.243014Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124290012317463:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:16.243091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:16.332920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.379621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.437019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.511469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.583793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.662619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.715272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:16.829949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:17.020001Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124315782123887:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.020073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.020483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124315782123892:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.020538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124315782123893:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:17.020668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18 ... 54775816u;3u];["lS\2";9223372036854775817u;4u];["kO\2";9223372036854775818u;4u];["nI\2";9223372036854775819u;5u];["nN\2";9223372036854775820u;5u];["vB\2";9223372036854775821u;6u];["sF\2";9223372036854775822u;6u]] /Root/Vectors/vector_idx_covered/indexImplPostingTable: [[["bR\2"];[1];[10];9223372036854775815u];[["eQ\2"];[4];[40];9223372036854775815u];[["jX\2"];[9];[90];9223372036854775815u];[["mW\2"];[12];[120];9223372036854775815u];[["bR\2"];[27];[270];9223372036854775815u];[["eQ\2"];[30];[300];9223372036854775815u];[["jX\2"];[35];[350];9223372036854775815u];[["mW\2"];[38];[380];9223372036854775815u];[["bR\2"];[53];[530];9223372036854775815u];[["eQ\2"];[56];[560];9223372036854775815u];[["jX\2"];[61];[610];9223372036854775815u];[["mW\2"];[64];[640];9223372036854775815u];[["bR\2"];[79];[790];9223372036854775815u];[["eQ\2"];[82];[820];9223372036854775815u];[["jX\2"];[87];[870];9223372036854775815u];[["mW\2"];[90];[900];9223372036854775815u];[["dZ\2"];[3];[30];9223372036854775816u];[["gY\2"];[6];[60];9223372036854775816u];[["dZ\2"];[29];[290];9223372036854775816u];[["gY\2"];[32];[320];9223372036854775816u];[["dZ\2"];[55];[550];9223372036854775816u];[["gY\2"];[58];[580];9223372036854775816u];[["dZ\2"];[81];[810];9223372036854775816u];[["gY\2"];[84];[840];9223372036854775816u];[["hP\2"];[7];[70];9223372036854775817u];[["pV\2"];[15];[150];9223372036854775817u];[["hP\2"];[33];[330];9223372036854775817u];[["pV\2"];[41];[410];9223372036854775817u];[["hP\2"];[59];[590];9223372036854775817u];[["pV\2"];[67];[670];9223372036854775817u];[["hP\2"];[85];[850];9223372036854775817u];[["pV\2"];[93];[930];9223372036854775817u];[["cI\2"];[2];[20];9223372036854775818u];[["kO\2"];[10];[100];9223372036854775818u];[["sU\2"];[18];[180];9223372036854775818u];[["cI\2"];[28];[280];9223372036854775818u];[["kO\2"];[36];[360];9223372036854775818u];[["sU\2"];[44];[440];9223372036854775818u];[["cI\2"];[54];[540];9223372036854775818u];[["kO\2"];[62];[620];9223372036854775818u];[["sU\2"];[70];[700];9223372036854775818u];[["cI\2"];[80];[800];9223372036854775818u];[["kO\2"];[88];[880];9223372036854775818u];[["sU\2"];[96];[960];9223372036854775818u];[["aA\2"];[0];[0];9223372036854775819u];[["iG\2"];[8];[80];9223372036854775819u];[["lF\2"];[11];[110];9223372036854775819u];[["qM\2"];[16];[160];9223372036854775819u];[["tL\2"];[19];[190];9223372036854775819u];[["wK\2"];[22];[220];9223372036854775819u];[["yS\2"];[24];[240];9223372036854775819u];[["aA\2"];[26];[260];9223372036854775819u];[["iG\2"];[34];[340];9223372036854775819u];[["lF\2"];[37];[370];9223372036854775819u];[["qM\2"];[42];[420];9223372036854775819u];[["tL\2"];[45];[450];9223372036854775819u];[["wK\2"];[48];[480];9223372036854775819u];[["yS\2"];[50];[500];9223372036854775819u];[["aA\2"];[52];[520];9223372036854775819u];[["iG\2"];[60];[600];9223372036854775819u];[["lF\2"];[63];[630];9223372036854775819u];[["qM\2"];[68];[680];9223372036854775819u];[["tL\2"];[71];[710];9223372036854775819u];[["wK\2"];[74];[740];9223372036854775819u];[["yS\2"];[76];[760];9223372036854775819u];[["aA\2"];[78];[780];9223372036854775819u];[["iG\2"];[86];[860];9223372036854775819u];[["lF\2"];[89];[890];9223372036854775819u];[["qM\2"];[94];[940];9223372036854775819u];[["tL\2"];[97];[970];9223372036854775819u];[["fH\2"];[5];[50];9223372036854775820u];[["nN\2"];[13];[130];9223372036854775820u];[["vT\2"];[21];[210];9223372036854775820u];[["fH\2"];[31];[310];9223372036854775820u];[["nN\2"];[39];[390];92233
72036854775820u];[["vT\2"];[47];[470];9223372036854775820u];[["fH\2"];[57];[570];9223372036854775820u];[["nN\2"];[65];[650];9223372036854775820u];[["vT\2"];[73];[730];9223372036854775820u];[["fH\2"];[83];[830];9223372036854775820u];[["nN\2"];[91];[910];9223372036854775820u];[["vT\2"];[99];[990];9223372036854775820u];[["uC\2"];[20];[200];9223372036854775821u];[["xB\2"];[23];[230];9223372036854775821u];[["uC\2"];[46];[460];9223372036854775821u];[["xB\2"];[49];[490];9223372036854775821u];[["uC\2"];[72];[720];9223372036854775821u];[["xB\2"];[75];[750];9223372036854775821u];[["uC\2"];[98];[980];9223372036854775821u];[["oE\2"];[14];[140];9223372036854775822u];[["rD\2"];[17];[170];9223372036854775822u];[["zJ\2"];[25];[250];9223372036854775822u];[["oE\2"];[40];[400];9223372036854775822u];[["rD\2"];[43];[430];9223372036854775822u];[["zJ\2"];[51];[510];9223372036854775822u];[["oE\2"];[66];[660];9223372036854775822u];[["rD\2"];[69];[690];9223372036854775822u];[["zJ\2"];[77];[770];9223372036854775822u];[["oE\2"];[92];[920];9223372036854775822u];[["rD\2"];[95];[950];9223372036854775822u]] /Root/Vectors: [[["aA\2"];[0];[0];[0]];[["bR\2"];[1];[1];[10]];[["cI\2"];[2];[2];[20]];[["dZ\2"];[3];[3];[30]];[["eQ\2"];[4];[4];[40]];[["fH\2"];[5];[5];[50]];[["gY\2"];[6];[6];[60]];[["hP\2"];[7];[7];[70]];[["iG\2"];[8];[8];[80]];[["jX\2"];[9];[9];[90]];[["kO\2"];[10];[0];[100]];[["lF\2"];[11];[1];[110]];[["mW\2"];[12];[2];[120]];[["nN\2"];[13];[3];[130]];[["oE\2"];[14];[4];[140]];[["pV\2"];[15];[5];[150]];[["qM\2"];[16];[6];[160]];[["rD\2"];[17];[7];[170]];[["sU\2"];[18];[8];[180]];[["tL\2"];[19];[9];[190]];[["uC\2"];[20];[0];[200]];[["vT\2"];[21];[1];[210]];[["wK\2"];[22];[2];[220]];[["xB\2"];[23];[3];[230]];[["yS\2"];[24];[4];[240]];[["zJ\2"];[25];[5];[250]];[["aA\2"];[26];[6];[260]];[["bR\2"];[27];[7];[270]];[["cI\2"];[28];[8];[280]];[["dZ\2"];[29];[9];[290]];[["eQ\2"];[30];[0];[300]];[["fH\2"];[31];[1];[310]];[["gY\2"];[32];[2];[320]];[["hP\2"];[33];[3];[330]];[["iG\2"];[34];[4];[340]];[["jX\2"];[35];[5];[350]];[["kO\2"];[36];[6];[360]];[["lF\2"];[37];[7];[370]];[["mW\2"];[38];[8];[380]];[["nN\2"];[39];[9];[390]];[["oE\2"];[40];[0];[400]];[["pV\2"];[41];[1];[410]];[["qM\2"];[42];[2];[420]];[["rD\2"];[43];[3];[430]];[["sU\2"];[44];[4];[440]];[["tL\2"];[45];[5];[450]];[["uC\2"];[46];[6];[460]];[["vT\2"];[47];[7];[470]];[["wK\2"];[48];[8];[480]];[["xB\2"];[49];[9];[490]];[["yS\2"];[50];[0];[500]];[["zJ\2"];[51];[1];[510]];[["aA\2"];[52];[2];[520]];[["bR\2"];[53];[3];[530]];[["cI\2"];[54];[4];[540]];[["dZ\2"];[55];[5];[550]];[["eQ\2"];[56];[6];[560]];[["fH\2"];[57];[7];[570]];[["gY\2"];[58];[8];[580]];[["hP\2"];[59];[9];[590]];[["iG\2"];[60];[0];[600]];[["jX\2"];[61];[1];[610]];[["kO\2"];[62];[2];[620]];[["lF\2"];[63];[3];[630]];[["mW\2"];[64];[4];[640]];[["nN\2"];[65];[5];[650]];[["oE\2"];[66];[6];[660]];[["pV\2"];[67];[7];[670]];[["qM\2"];[68];[8];[680]];[["rD\2"];[69];[9];[690]];[["sU\2"];[70];[0];[700]];[["tL\2"];[71];[1];[710]];[["uC\2"];[72];[2];[720]];[["vT\2"];[73];[3];[730]];[["wK\2"];[74];[4];[740]];[["xB\2"];[75];[5];[750]];[["yS\2"];[76];[6];[760]];[["zJ\2"];[77];[7];[770]];[["aA\2"];[78];[8];[780]];[["bR\2"];[79];[9];[790]];[["cI\2"];[80];[0];[800]];[["dZ\2"];[81];[1];[810]];[["eQ\2"];[82];[2];[820]];[["fH\2"];[83];[3];[830]];[["gY\2"];[84];[4];[840]];[["hP\2"];[85];[5];[850]];[["iG\2"];[86];[6];[860]];[["jX\2"];[87];[7];[870]];[["kO\2"];[88];[8];[880]];[["lF\2"];[89];[9];[890]];[["mW\2"];[90];[0];[900]];[["nN\2"];[91];[1];[910]];[["oE\2"];[92];[2];[920]];[["pV\2"];[93];[3];[930]];[["qM\2"];[94];[4];[940
]];[["rD\2"];[95];[5];[950]];[["sU\2"];[96];[6];[960]];[["tL\2"];[97];[7];[970]];[["uC\2"];[98];[8];[980]];[["vT\2"];[99];[9];[990]]] /Root/Vectors/vector_idx_prefixed/indexImplLevelTable: [["nG\2";202u;201u];["jQ\2";203u;201u];["rD\2";9223372036854776411u;202u];["kI\2";9223372036854776412u;202u];["kO\2";9223372036854776413u;203u];["iT\2";9223372036854776414u;203u];["hV\2";205u;204u];["pK\2";206u;204u];["cV\2";9223372036854776417u;205u];["mW\2";9223372036854776418u;205u];["nN\2";9223372036854776419u;206u];["sI\2";9223372036854776420u;206u];["gQ\2";208u;207u];["oF\2";209u;207u];["gL\2";9223372036854776423u;208u];["hU\2";9223372036854776424u;208u];["mH\2";9223372036854776425u;209u];["rD\2";9223372036854776426u;209u];["rD\2";211u;210u];["jQ\2";212u;210u];["lF\2";9223372036854776429u;211u];["uC\2";9223372036854776430u;211u];["cV\2";9223372036854776431u;212u];["mP\2";9223372036854776432u;212u];["iS\2";214u;213u];["qK\2";215u;213u];["hU\2";9223372036854776435u;214u];["kO\2";9223372036854776436u;214u];["qM\2";9223372036854776437u;215u];["sH\2";9223372036854776438u;215u];["iV\2";217u;216u];["rH\2";218u;216u];["dZ\2";9223372036854776441u;217u];["kT\2";9223372036854776442u;217u];["mK\2";9223372036854776443u;218u];["vE\2";9223372036854776444u;218u];["nH\2";220u;219u];["jS\2";221u;219u];["mJ\2";9223372036854776447u;220u];["rD\2";9223372036854776448u;220u];["fU\2";9223372036854776449u;221u];["oR\2";9223372036854776450u;221u];["jR\2";223u;222u];["sH\2";224u;222u];["mP\2";9223372036854776453u;223u];["fU\2";9223372036854776454u;223u];["vG\2";9223372036854776455u;224u];["pI\2";9223372036854776456u;224u];["nG\2";226u;225u];["jR\2";227u;225u];["uC\2";9223372036854776459u;226u];["lH\2";9223372036854776460u;226u];["gY\2";9223372036854776461u;227u];["kQ\2";9223372036854776462u;227u];["rF\2";229u;228u];["jS\2";230u;228u];["pG\2";9223372036854776465u;229u];["xB\2";9223372036854776466u;229u];["nP\2";9223372036854776467u;230u];["eV\2";9223372036854776468u;230u]] /Root/Vectors/vector_idx_prefixed/indexImplPostingTable: 
[[[20];9223372036854776411u];[[40];9223372036854776411u];[[0];9223372036854776412u];[[50];9223372036854776412u];[[60];9223372036854776412u];[[10];9223372036854776413u];[[70];9223372036854776413u];[[80];9223372036854776413u];[[30];9223372036854776414u];[[90];9223372036854776414u];[[1];9223372036854776417u];[[81];9223372036854776417u];[[41];9223372036854776418u];[[61];9223372036854776418u];[[21];9223372036854776419u];[[31];9223372036854776419u];[[91];9223372036854776419u];[[11];9223372036854776420u];[[51];9223372036854776420u];[[71];9223372036854776420u];[[2];9223372036854776423u];[[62];9223372036854776423u];[[12];9223372036854776424u];[[32];9223372036854776424u];[[82];9223372036854776424u];[[22];9223372036854776425u];[[42];9223372036854776425u];[[52];9223372036854776425u];[[72];9223372036854776426u];[[92];9223372036854776426u];[[63];9223372036854776429u];[[23];9223372036854776430u];[[43];9223372036854776430u];[[3];9223372036854776431u];[[53];9223372036854776431u];[[13];9223372036854776432u];[[33];9223372036854776432u];[[73];9223372036854776432u];[[83];9223372036854776432u];[[93];9223372036854776432u];[[4];9223372036854776435u];[[64];9223372036854776435u];[[84];9223372036854776435u];[[44];9223372036854776436u];[[54];9223372036854776436u];[[24];9223372036854776437u];[[34];9223372036854776437u];[[94];9223372036854776437u];[[14];9223372036854776438u];[[74];9223372036854776438u];[[55];9223372036854776441u];[[15];9223372036854776442u];[[35];9223372036854776442u];[[85];9223372036854776442u];[[5];9223372036854776443u];[[45];9223372036854776443u];[[65];9223372036854776443u];[[25];9223372036854776444u];[[75];9223372036854776444u];[[95];9223372036854776444u];[[16];9223372036854776447u];[[26];9223372036854776447u];[[76];9223372036854776447u];[[86];9223372036854776447u];[[46];9223372036854776448u];[[66];9223372036854776448u];[[6];9223372036854776449u];[[56];9223372036854776449u];[[36];9223372036854776450u];[[96];9223372036854776450u];[[7];9223372036854776453u];[[47];9223372036854776453u];[[57];9223372036854776453u];[[67];9223372036854776453u];[[27];9223372036854776454u];[[87];9223372036854776454u];[[17];9223372036854776455u];[[77];9223372036854776455u];[[37];9223372036854776456u];[[97];9223372036854776456u];[[98];9223372036854776459u];[[8];9223372036854776460u];[[48];9223372036854776460u];[[68];9223372036854776460u];[[78];9223372036854776460u];[[58];9223372036854776461u];[[18];9223372036854776462u];[[28];9223372036854776462u];[[38];9223372036854776462u];[[88];9223372036854776462u];[[19];9223372036854776465u];[[69];9223372036854776465u];[[89];9223372036854776465u];[[49];9223372036854776466u];[[39];9223372036854776467u];[[59];9223372036854776467u];[[99];9223372036854776467u];[[9];9223372036854776468u];[[29];9223372036854776468u];[[79];9223372036854776468u]] /Root/Vectors/vector_idx_prefixed/indexImplPrefixTable: [[[0];201u];[[1];204u];[[2];207u];[[3];210u];[[4];213u];[[5];216u];[[6];219u];[[7];222u];[[8];225u];[[9];228u]] >> TExportToS3Tests::IndexMaterialization [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TSchemeShardTest::AlterTableSplitSchema [GOOD] >> TSchemeShardTest::AlterTableSettings |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest >> ResultFormatter::EmptyDict [GOOD] >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes >> 
AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] >> ResultFormatter::Utf8WithQuotes [GOOD] >> ResultFormatter::VariantStruct [GOOD] >> KqpSystemView::Join [GOOD] >> ResultFormatter::List [GOOD] >> ResultFormatter::Null [GOOD] >> KqpSystemView::Sessions-EnableRealSystemViewPaths [GOOD] |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |93.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantStruct [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2025-12-23T18:38:06.840683Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-12-23T18:38:06.841698Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-12-23T18:38:06.842087Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:38:06.842461Z node 3 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-12-23T18:38:06.842609Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [1:39:2059], tablet id = 2, status = OK 2025-12-23T18:38:06.842666Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:39:2059], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:38:06.849424Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2025-12-23T18:38:06.849705Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-12-23T18:38:06.849880Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 5, status = OK 2025-12-23T18:38:06.854856Z node 3 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:38:06.855248Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [1:40:2060], tablet id = 3, status = OK 2025-12-23T18:38:06.855339Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:40:2060], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:38:06.855400Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 2 2025-12-23T18:38:06.855576Z node 3 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 5 
2025-12-23T18:38:06.855639Z node 3 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2025-12-23T18:38:06.855758Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [2:46:2057], tablet id = 4, status = OK 2025-12-23T18:38:06.855805Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:46:2057], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:38:06.855865Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-12-23T18:38:06.855913Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:38:06.856015Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 5, status = ERROR 2025-12-23T18:38:06.856038Z node 3 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:38:06.856103Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [0:0:0], tablet id = 2, status = ERROR 2025-12-23T18:38:06.856135Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:38:06.856285Z node 4 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-12-23T18:38:06.856402Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 4 2025-12-23T18:38:06.856468Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2025-12-23T18:38:06.856649Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [0:0:0], tablet id = 3, status = ERROR 2025-12-23T18:38:06.856679Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:38:06.856719Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [4:49:2057], tablet id = 6, status = OK 2025-12-23T18:38:06.856773Z node 4 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [4:49:2057], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:38:06.856872Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-12-23T18:38:06.856894Z node 2 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:38:06.856935Z node 4 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 6 2025-12-23T18:38:06.856975Z node 4 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:38:06.857095Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-12-23T18:38:06.857210Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [0:0:0], tablet id = 6, status = ERROR 2025-12-23T18:38:06.857232Z node 4 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:38:06.857342Z node 2 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-12-23T18:38:06.857473Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 
2025-12-23T18:38:06.857642Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-12-23T18:38:06.857695Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries >> TableCreation::ConcurrentTableCreation [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation >> HttpRequest::Analyze [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> TLocksTest::Range_IncorrectDot1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheQueriesOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 3267, MsgBus: 31596 2025-12-23T18:37:51.452995Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124459216720035:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:51.453046Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e7e/r3tmp/tmp286qIa/pdisk_1.dat 2025-12-23T18:37:52.022712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:52.031626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:52.031744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:52.116798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:52.224635Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:52.227978Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124459216719983:2081] 1766515071427073 != 1766515071427076 2025-12-23T18:37:52.265926Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 3267, node 1 2025-12-23T18:37:52.531656Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:52.575382Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:52.575401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:52.575411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:52.575482Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31596 TClient is connected to server localhost:31596 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515072322 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:53.788758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:53.809277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:54.001264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:54.424275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:37:54.662625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:56.447821Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124459216720035:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:56.447900Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:57.975780Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124484986525651:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:57.975926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:57.976503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124484986525661:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:57.976580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:58.353294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:58.403842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:58.461199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:58.544173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:58.592498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:58.651082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:58.715848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:58.796646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:58.919773Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124489281493826:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:58.919875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:58.920169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124489281493831:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:58.920214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124489281493832:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:58.920349Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:58.923883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:58.944938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-12-23T18:37:58.945315Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124489281493835:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:59.041025Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124493576461184:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 >> TableCreation::SimpleTableCreation [GOOD] >> TableCreation::SimpleUpdateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::IndexMaterialization [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:37:46.680195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:37:46.680303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:46.680346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:37:46.680387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:37:46.680504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:37:46.680544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:37:46.680601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:46.680674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:37:46.681515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:37:46.681775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:37:46.825751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:37:46.825815Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were 
not loaded 2025-12-23T18:37:46.841404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:37:46.841655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:37:46.841798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:37:46.845139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:37:46.845324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:37:46.846071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:37:46.846286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:37:46.855412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:46.855616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:37:46.856762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:37:46.856834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:46.856981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:37:46.857031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:37:46.857079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:37:46.857223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:37:47.034455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.035583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.035712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.035782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.035876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.035983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.036059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.036129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.036226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.036345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.036460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.036572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.036654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.036771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:47.036838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
.cpp:1714: TOperation IsReadyToDone TxId: 281474976710762 ready parts: 1/1 2025-12-23T18:38:06.007638Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710762:0 2025-12-23T18:38:06.007693Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710762:0 2025-12-23T18:38:06.007776Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2025-12-23T18:38:06.007828Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710762, publications: 2, subscribers: 1 2025-12-23T18:38:06.007876Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710762, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-12-23T18:38:06.007928Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710762, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2025-12-23T18:38:06.009658Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710762 2025-12-23T18:38:06.011383Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:38:06.011417Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710762, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:38:06.011571Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710762, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2025-12-23T18:38:06.011680Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:38:06.011710Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:209:2210], at schemeshard: 72057594046678944, txId: 281474976710762, path id: 1 2025-12-23T18:38:06.011760Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:209:2210], at schemeshard: 72057594046678944, txId: 281474976710762, path id: 41 FAKE_COORDINATOR: Erasing txId 281474976710762 2025-12-23T18:38:06.012474Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710762 2025-12-23T18:38:06.012546Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710762 2025-12-23T18:38:06.012591Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 
281474976710762 2025-12-23T18:38:06.012616Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-12-23T18:38:06.012649Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:38:06.013226Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710762 2025-12-23T18:38:06.013304Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710762 2025-12-23T18:38:06.013327Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710762 2025-12-23T18:38:06.013358Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710762, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2025-12-23T18:38:06.013387Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:38:06.013458Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710762, subscribers: 1 2025-12-23T18:38:06.013488Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [6:127:2152] 2025-12-23T18:38:06.016981Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710762 2025-12-23T18:38:06.017446Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710762 2025-12-23T18:38:06.017528Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710762 2025-12-23T18:38:06.017591Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710762 2025-12-23T18:38:06.019663Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:38:06.019744Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:976:2913] TestWaitNotification: OK eventTxId 102 2025-12-23T18:38:06.020453Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 
72057594046678944 2025-12-23T18:38:06.020831Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index" took 415us result status StatusSuccess 2025-12-23T18:38:06.021742Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/index" PathDescription { Self { Name: "index" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "index" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TObjectStorageListingTest::CornerCases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions-EnableRealSystemViewPaths [GOOD] Test command err: Trying to start YDB, gRPC: 29077, MsgBus: 20374 2025-12-23T18:37:50.145073Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124457454322646:2202];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:50.145199Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:50.160473Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e7f/r3tmp/tmp9edji4/pdisk_1.dat 2025-12-23T18:37:50.710878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:50.752066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:50.752172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:50.754716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:50.951734Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:50.954981Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124457454322453:2081] 1766515070078899 != 1766515070078902 TServer::EnableGrpc on GrpcPort 29077, node 1 2025-12-23T18:37:51.223196Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:51.281702Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:51.295114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:51.295140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:51.295147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:51.295233Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20374 TClient is connected to server localhost:20374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:52.634003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:37:52.656541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:52.672174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:37:55.150903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124457454322646:2202];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:55.151000Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 1 2025-12-23T18:37:57.724812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124487519094491:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:57.724937Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:57.731177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124487519094504:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:57.731235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124487519094503:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:57.731285Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:57.739676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:57.767068Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124487519094507:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:37:57.874056Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124487519094560:2363] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ydb-cpp-sdk/dev 2025-12-23T18:38:05.567175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:38:05.567204Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:06.459934Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515086446, txId: 281474976710673] shutting down |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TFlatTest::SelectRangeItemsLimit >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] Test command err: Trying to start YDB, gRPC: 20037, MsgBus: 7867 2025-12-23T18:37:48.814729Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124449065933518:2196];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:48.815034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e80/r3tmp/tmpoli8jv/pdisk_1.dat 2025-12-23T18:37:49.687624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:49.687733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:49.763149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:49.816705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:49.831066Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:49.842842Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124449065933327:2081] 1766515068762866 != 1766515068762869 2025-12-23T18:37:49.849863Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20037, node 1 2025-12-23T18:37:50.106893Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:50.165427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:50.165451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:50.165457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:50.165556Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7867 TClient is connected to server localhost:7867 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515069949 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:51.056987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:51.064735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:51.070358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:51.345884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:51.630997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:51.731623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:53.814832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124449065933518:2196];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:53.814938Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:54.506120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124474835738997:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.506269Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.511015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124474835739007:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:54.511139Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.065639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:55.140781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:55.183536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:55.228251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:55.275339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:55.334021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:55.386987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:55.622253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:55.706705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124479130707181:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.706815Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.709203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124479130707186:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.709220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124479130707187:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.709302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:55.714173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:55.729280Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124479130707190:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:55.821476Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124479130707241:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:58.483302Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515078426, txId: 281474976710673] shutting down waiting... 2025-12-23T18:37:59.883716Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515079861, txId: 281474976710675] shutting down waiting... 2025-12-23T18:38:01.334751Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515081312, txId: 281474976710677] shutting down waiting... 2025-12-23T18:38:02.674165Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515082659, txId: 281474976710679] shutting down waiting... 2025-12-23T18:38:04.032446Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515084018, txId: 281474976710681] shutting down 2025-12-23T18:38:04.578430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:38:04.578487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded waiting... 2025-12-23T18:38:05.282997Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515085261, txId: 281474976710683] shutting down 2025-12-23T18:38:06.020540Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515085990, txId: 281474976710685] shutting down |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TSchemeShardTest::CreateTableWithConfig >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck >> TSchemeShardTest::DropPQ [GOOD] >> TSchemeShardTest::DropPQFail >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:37:48.085442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:48.466078Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:37:48.495172Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:37:48.495670Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:48.495789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:37:49.216011Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:49.521168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:49.521385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:49.569175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:49.670129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003266/r3tmp/tmpDYsyAi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1103, node 1 2025-12-23T18:37:50.531375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:37:50.532307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:50.532469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:50.532506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:50.533153Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:50.604070Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:20580 2025-12-23T18:37:51.349710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:37:56.198143Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:56.227703Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:37:56.238416Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:37:56.366719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:56.366941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:56.416100Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:37:56.420806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:56.674498Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:56.675335Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:56.676067Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:56.677031Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:56.677330Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:56.677482Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:56.677688Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:56.677924Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:56.678076Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:56.942517Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:37:56.968966Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:56.969107Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:56.987185Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:57.238124Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:57.300547Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:37:57.300672Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:37:57.343700Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:37:57.345591Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:37:57.345893Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:37:57.345975Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:37:57.346038Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:37:57.346098Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:37:57.346164Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:37:57.346224Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:37:57.355197Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:37:57.537499Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:37:57.537625Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:37:57.559957Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:37:57.561882Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:37:57.736614Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:37:57.737094Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:37:57.740213Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:37:57.740273Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:37:57.740385Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:37:57.745765Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:37:57.755850Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:57.769638Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:37:57.769830Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:37:57.808963Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:37:58.124008Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:37:59.000705Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:37:59.135834Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:37:59.135939Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:26 ... ;src=[2:1947:2453];cookie=151:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.601318Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[2:2960:3093];ev=NActors::IEventHandle;tablet_id=72075186224037903;tx_id=281474976715659;this=136782792276544;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1947:2453];cookie=161:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.601935Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;self_id=[2:2962:3094];ev=NActors::IEventHandle;tablet_id=72075186224037901;tx_id=281474976715659;this=136782792277664;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1947:2453];cookie=141:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.602496Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;self_id=[2:2974:3101];ev=NActors::IEventHandle;tablet_id=72075186224037904;tx_id=281474976715659;this=136782792278784;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1947:2453];cookie=171:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.603060Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;self_id=[2:2975:3102];ev=NActors::IEventHandle;tablet_id=72075186224037905;tx_id=281474976715659;this=136782792279904;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1947:2453];cookie=181:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.603660Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;self_id=[2:2982:3107];ev=NActors::IEventHandle;tablet_id=72075186224037907;tx_id=281474976715659;this=136782792281024;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1947:2453];cookie=201:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.657921Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037908;self_id=[2:2983:3108];ev=NActors::IEventHandle;tablet_id=72075186224037908;tx_id=281474976715659;this=136782792285280;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1947:2453];cookie=211:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.659118Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;self_id=[2:2984:3109];ev=NActors::IEventHandle;tablet_id=72075186224037906;tx_id=281474976715659;this=136782792286400;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1947:2453];cookie=191:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.857846Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.858003Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.858076Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.925762Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.925875Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.925915Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.938845Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.938959Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.939003Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.947491Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.947615Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.947659Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.958069Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.958193Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.958233Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.969700Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.969807Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.969845Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.979841Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.979947Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.979990Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.988679Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.988789Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:02.988865Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:03.029011Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:03.029118Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:03.029160Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:03.037761Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:03.037869Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:38:03.037910Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000020s 2025-12-23T18:38:05.482719Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3763:3688] 2025-12-23T18:38:05.514229Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3734:3516] , Record { OperationId: "\000\000\000\000\017\365$\264\254\307X\017\310#\240&" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Database: "" } 2025-12-23T18:38:05.514328Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `$X#&', DatabaseName: `', Types: 2025-12-23T18:38:05.514378Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `$X#&', PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: Answer: 'Analyze sent. 
OperationId: 00000003zn4jtashtr1z427816' |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TFlatTest::SplitEmptyToMany >> TLocksTest::GoodLock >> TCancelTx::CrossShardReadOnly >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |93.4%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering >> TFlatTest::WriteSplitByPartialKeyAndRead >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 8351, MsgBus: 11222 2025-12-23T18:37:45.938844Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.012583s 2025-12-23T18:37:45.947640Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007216s 2025-12-23T18:37:46.149589Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.006564s 2025-12-23T18:37:46.231300Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124440579662246:2265];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:46.231378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:46.426153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:46.511594Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124437191724191:2240];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:46.511656Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:46.529354Z node 4 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007803s test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e96/r3tmp/tmp8pZmeK/pdisk_1.dat 2025-12-23T18:37:46.773680Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:47.194261Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:47.475366Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.475499Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:47.506865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.512309Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:47.517252Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.547042Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:47.551339Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.555663Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.566362Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:47.565944Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:47.563386Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:47.703482Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.751294Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.782730Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.782626Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2025-12-23T18:37:47.787146Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:47.789621Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:47.819487Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:47.937818Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:48.023200Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:48.115394Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:48.296197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:48.327352Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:48.407567Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:48.506983Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:48.569729Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:48.571356Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:48.571578Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:48.872611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:48.872716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:48.888578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:48.888650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-12-23T18:37:48.891877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:48.897444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:48.897693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:48.897747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:48.983391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:48.983464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:49.135932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:49.138023Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:37:49.138076Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-12-23T18:37:49.138096Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:37:49.138110Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:37:49.175539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:49.175796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:49.179623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:49.263997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:49.414209Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:49.586201Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.018170s 2025-12-23T1 ... 
ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:50.499204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:50.499235Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:50.499243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:50.499367Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:51.234881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124440579662246:2265];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:51.237236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:51.490888Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587124437191724191:2240];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:51.490954Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:11222 TClient is connected to server localhost:11222 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515069711 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:53.688648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:53.731229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:54.274323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:54.962649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:55.334628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:00.044608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124500709206526:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:00.044758Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:00.045287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124500709206536:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:00.045348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:01.093726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:01.279783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:01.409469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:01.577568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:01.779252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:02.112445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:02.337148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:02.529368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:02.837463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124509299142148:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:02.837574Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:02.837941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124509299142153:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:02.837991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124509299142154:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:02.838115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:02.847741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:38:02.927061Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124509299142157:2420], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:38:02.994500Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124509299142231:4689] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:38:03.159059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:38:03.159095Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:06.163560Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515086135, txId: 281474976715673] shutting down |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 >> TObjectStorageListingTest::Split >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 >> TFlatTest::Ls >> KqpCost::VectorIndexLookup-useSink [GOOD] >> TLocksTest::SetLockFail >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |93.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut >> TSchemeShardTest::CopyTableWithAlterConfig [GOOD] >> TSchemeShardTest::CopyTableOmitFollowers >> TLocksTest::BrokenLockErase >> TFlatTest::CopyTableAndReturnPartAfterCompaction >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardTest::DropPQAbort >> TLocksTest::UpdateLockedKey >> TLocksTest::GoodDupLock |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ut/ydb-core-security-ut |93.4%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::VectorIndexLookup-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 5886, MsgBus: 28287 2025-12-23T18:37:17.528077Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124315680626936:2168];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:17.528702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:17.569799Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004b16/r3tmp/tmpKtOwJG/pdisk_1.dat 2025-12-23T18:37:18.089084Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:18.106849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:18.106981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:18.252935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124315680626797:2081] 1766515037423297 != 1766515037423300 2025-12-23T18:37:18.275875Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:18.277395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5886, node 1 2025-12-23T18:37:18.404624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:18.546179Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:18.575298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:18.575316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:18.575322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:18.575399Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28287 TClient is connected to server localhost:28287 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515038337 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:37:19.712278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:19.758576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:20.231236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:20.661863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:20.794151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:22.482920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124315680626936:2168];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:22.482992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:24.408794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124345745399757:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:24.408915Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:24.409334Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124345745399767:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:24.409406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:25.567994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:25.639467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:25.698872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:25.787960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:25.874008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:25.986061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:26.112651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:26.272042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:26.503430Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124354335335275:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.503530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.504106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124354335335282:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.504144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124354335335283:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:26.504276Z node 1 :KQP_WORKLOAD_SERV ... 54775816u;3u];["lS\2";9223372036854775817u;4u];["kO\2";9223372036854775818u;4u];["nI\2";9223372036854775819u;5u];["nN\2";9223372036854775820u;5u];["vB\2";9223372036854775821u;6u];["sF\2";9223372036854775822u;6u]] /Root/Vectors/vector_idx_covered/indexImplPostingTable: [[["bR\2"];[1];[10];9223372036854775815u];[["eQ\2"];[4];[40];9223372036854775815u];[["jX\2"];[9];[90];9223372036854775815u];[["mW\2"];[12];[120];9223372036854775815u];[["bR\2"];[27];[270];9223372036854775815u];[["eQ\2"];[30];[300];9223372036854775815u];[["jX\2"];[35];[350];9223372036854775815u];[["mW\2"];[38];[380];9223372036854775815u];[["bR\2"];[53];[530];9223372036854775815u];[["eQ\2"];[56];[560];9223372036854775815u];[["jX\2"];[61];[610];9223372036854775815u];[["mW\2"];[64];[640];9223372036854775815u];[["bR\2"];[79];[790];9223372036854775815u];[["eQ\2"];[82];[820];9223372036854775815u];[["jX\2"];[87];[870];9223372036854775815u];[["mW\2"];[90];[900];9223372036854775815u];[["dZ\2"];[3];[30];9223372036854775816u];[["gY\2"];[6];[60];9223372036854775816u];[["dZ\2"];[29];[290];9223372036854775816u];[["gY\2"];[32];[320];9223372036854775816u];[["dZ\2"];[55];[550];9223372036854775816u];[["gY\2"];[58];[580];9223372036854775816u];[["dZ\2"];[81];[810];9223372036854775816u];[["gY\2"];[84];[840];9223372036854775816u];[["hP\2"];[7];[70];9223372036854775817u];[["pV\2"];[15];[150];9223372036854775817u];[["hP\2"];[33];[330];9223372036854775817u];[["pV\2"];[41];[410];9223372036854775817u];[["hP\2"];[59];[590];9223372036854775817u];[["pV\2"];[67];[670];9223372036854775817u];[["hP\2"];[85];[850];9223372036854775817u];[["pV\2"];[93];[930];9223372036854775817u];[["cI\2"];[2];[20];9223372036854775818u];[["kO\2"];[10];[100];9223372036854775818u];[["sU\2"];[18];[180];9223372036854775818u];[["cI\2"];[28];[280];9223372036854775818u];[["kO\2"];[36];[360];9223372036854775818u];[["sU\2"];[44];[440];9223372036854775818u];[["cI\2"];[54];[540];9223372036854775818u];[["kO\2"];[62];[620];9223372036854775818u];[["sU\2"];[70];[700];9223372036854775818u];[["cI\2"];[80];[800];9223372036854775818u];[["kO\2"];[88];[880];9223372036854775818u];[["sU\2"];[96];[960];9223372036854775818u];[["aA\2"];[0];[0];9223372036854775819u];[["iG\2"];[8];[80];9223372036854775819u];[["lF\2"];[11];[110];9223372036854775819u];[["qM\2"];[16];[160];9223372036854775819u];[["tL\2"];[19];[190];9223372036854775819u];[["wK\2"];[22];[220];9223372036854775819u];[["yS\2"];[24];[240];9223372036854775819u];[["aA\2"];[26];[260];9223372036854775819u];[["iG\2"];[34];[340];9223372036854775819u];[["lF\2"];[37];[370];9223372036854775819u];[["qM\2"];[42];[420];9223372036854775819u];[["tL\2"];[45];[450];9223372036854775819u];[["wK\2"];[48];[480];9223372036854775819u];[["yS\2"];[50];[500];9223372036854775819u];[["aA\2"];[52];[520];9223372036854775819u];[["iG\2"];[60];[600];9223372036854775819u];[["lF\2"];[63];[630];9223372036854775819u];[["qM\2"];[68];[680];9223372036854775819u];[["tL\2"];[71];[710];9223372036854775819u];[["wK\2"];[74];[740];9223372036854775819u];[["yS\2"];[76];[760];9223372036854775819u];[["aA\2"];[78];[780];9223372036854775819u];[["iG\2"];[86];[860];9223372036854775819u];[["lF\2"];[89];[890];9223372036854775819u];[["qM\2"];[94];[940];9223372036854775819u];[["tL\2"];[97];[970];9223372036854775819u];[["fH\2"];[5];[50];9223372036854775820u];[["nN\2"];[13];[130];9223372036854775820u];[["vT\2"];[21];[210];9223372036854775820u];[["fH\2"];[31];[310];92233720
36854775820u];[["nN\2"];[39];[390];9223372036854775820u];[["vT\2"];[47];[470];9223372036854775820u];[["fH\2"];[57];[570];9223372036854775820u];[["nN\2"];[65];[650];9223372036854775820u];[["vT\2"];[73];[730];9223372036854775820u];[["fH\2"];[83];[830];9223372036854775820u];[["nN\2"];[91];[910];9223372036854775820u];[["vT\2"];[99];[990];9223372036854775820u];[["uC\2"];[20];[200];9223372036854775821u];[["xB\2"];[23];[230];9223372036854775821u];[["uC\2"];[46];[460];9223372036854775821u];[["xB\2"];[49];[490];9223372036854775821u];[["uC\2"];[72];[720];9223372036854775821u];[["xB\2"];[75];[750];9223372036854775821u];[["uC\2"];[98];[980];9223372036854775821u];[["oE\2"];[14];[140];9223372036854775822u];[["rD\2"];[17];[170];9223372036854775822u];[["zJ\2"];[25];[250];9223372036854775822u];[["oE\2"];[40];[400];9223372036854775822u];[["rD\2"];[43];[430];9223372036854775822u];[["zJ\2"];[51];[510];9223372036854775822u];[["oE\2"];[66];[660];9223372036854775822u];[["rD\2"];[69];[690];9223372036854775822u];[["zJ\2"];[77];[770];9223372036854775822u];[["oE\2"];[92];[920];9223372036854775822u];[["rD\2"];[95];[950];9223372036854775822u]] /Root/Vectors: [[["aA\2"];[0];[0];[0]];[["bR\2"];[1];[1];[10]];[["cI\2"];[2];[2];[20]];[["dZ\2"];[3];[3];[30]];[["eQ\2"];[4];[4];[40]];[["fH\2"];[5];[5];[50]];[["gY\2"];[6];[6];[60]];[["hP\2"];[7];[7];[70]];[["iG\2"];[8];[8];[80]];[["jX\2"];[9];[9];[90]];[["kO\2"];[10];[0];[100]];[["lF\2"];[11];[1];[110]];[["mW\2"];[12];[2];[120]];[["nN\2"];[13];[3];[130]];[["oE\2"];[14];[4];[140]];[["pV\2"];[15];[5];[150]];[["qM\2"];[16];[6];[160]];[["rD\2"];[17];[7];[170]];[["sU\2"];[18];[8];[180]];[["tL\2"];[19];[9];[190]];[["uC\2"];[20];[0];[200]];[["vT\2"];[21];[1];[210]];[["wK\2"];[22];[2];[220]];[["xB\2"];[23];[3];[230]];[["yS\2"];[24];[4];[240]];[["zJ\2"];[25];[5];[250]];[["aA\2"];[26];[6];[260]];[["bR\2"];[27];[7];[270]];[["cI\2"];[28];[8];[280]];[["dZ\2"];[29];[9];[290]];[["eQ\2"];[30];[0];[300]];[["fH\2"];[31];[1];[310]];[["gY\2"];[32];[2];[320]];[["hP\2"];[33];[3];[330]];[["iG\2"];[34];[4];[340]];[["jX\2"];[35];[5];[350]];[["kO\2"];[36];[6];[360]];[["lF\2"];[37];[7];[370]];[["mW\2"];[38];[8];[380]];[["nN\2"];[39];[9];[390]];[["oE\2"];[40];[0];[400]];[["pV\2"];[41];[1];[410]];[["qM\2"];[42];[2];[420]];[["rD\2"];[43];[3];[430]];[["sU\2"];[44];[4];[440]];[["tL\2"];[45];[5];[450]];[["uC\2"];[46];[6];[460]];[["vT\2"];[47];[7];[470]];[["wK\2"];[48];[8];[480]];[["xB\2"];[49];[9];[490]];[["yS\2"];[50];[0];[500]];[["zJ\2"];[51];[1];[510]];[["aA\2"];[52];[2];[520]];[["bR\2"];[53];[3];[530]];[["cI\2"];[54];[4];[540]];[["dZ\2"];[55];[5];[550]];[["eQ\2"];[56];[6];[560]];[["fH\2"];[57];[7];[570]];[["gY\2"];[58];[8];[580]];[["hP\2"];[59];[9];[590]];[["iG\2"];[60];[0];[600]];[["jX\2"];[61];[1];[610]];[["kO\2"];[62];[2];[620]];[["lF\2"];[63];[3];[630]];[["mW\2"];[64];[4];[640]];[["nN\2"];[65];[5];[650]];[["oE\2"];[66];[6];[660]];[["pV\2"];[67];[7];[670]];[["qM\2"];[68];[8];[680]];[["rD\2"];[69];[9];[690]];[["sU\2"];[70];[0];[700]];[["tL\2"];[71];[1];[710]];[["uC\2"];[72];[2];[720]];[["vT\2"];[73];[3];[730]];[["wK\2"];[74];[4];[740]];[["xB\2"];[75];[5];[750]];[["yS\2"];[76];[6];[760]];[["zJ\2"];[77];[7];[770]];[["aA\2"];[78];[8];[780]];[["bR\2"];[79];[9];[790]];[["cI\2"];[80];[0];[800]];[["dZ\2"];[81];[1];[810]];[["eQ\2"];[82];[2];[820]];[["fH\2"];[83];[3];[830]];[["gY\2"];[84];[4];[840]];[["hP\2"];[85];[5];[850]];[["iG\2"];[86];[6];[860]];[["jX\2"];[87];[7];[870]];[["kO\2"];[88];[8];[880]];[["lF\2"];[89];[9];[890]];[["mW\2"];[90];[0];[900]];[["nN\2"];[91];[1];[910]];[["oE\2"];[92];[2];[920]];[["pV\2"]
;[93];[3];[930]];[["qM\2"];[94];[4];[940]];[["rD\2"];[95];[5];[950]];[["sU\2"];[96];[6];[960]];[["tL\2"];[97];[7];[970]];[["uC\2"];[98];[8];[980]];[["vT\2"];[99];[9];[990]]] /Root/Vectors/vector_idx_prefixed/indexImplLevelTable: [["nG\2";202u;201u];["jQ\2";203u;201u];["rD\2";9223372036854776411u;202u];["kI\2";9223372036854776412u;202u];["kO\2";9223372036854776413u;203u];["iT\2";9223372036854776414u;203u];["hV\2";205u;204u];["pK\2";206u;204u];["cV\2";9223372036854776417u;205u];["mW\2";9223372036854776418u;205u];["nN\2";9223372036854776419u;206u];["sI\2";9223372036854776420u;206u];["gQ\2";208u;207u];["oF\2";209u;207u];["gL\2";9223372036854776423u;208u];["hU\2";9223372036854776424u;208u];["mH\2";9223372036854776425u;209u];["rD\2";9223372036854776426u;209u];["rD\2";211u;210u];["jQ\2";212u;210u];["lF\2";9223372036854776429u;211u];["uC\2";9223372036854776430u;211u];["cV\2";9223372036854776431u;212u];["mP\2";9223372036854776432u;212u];["iS\2";214u;213u];["qK\2";215u;213u];["hU\2";9223372036854776435u;214u];["kO\2";9223372036854776436u;214u];["qM\2";9223372036854776437u;215u];["sH\2";9223372036854776438u;215u];["iV\2";217u;216u];["rH\2";218u;216u];["dZ\2";9223372036854776441u;217u];["kT\2";9223372036854776442u;217u];["mK\2";9223372036854776443u;218u];["vE\2";9223372036854776444u;218u];["nH\2";220u;219u];["jS\2";221u;219u];["mJ\2";9223372036854776447u;220u];["rD\2";9223372036854776448u;220u];["fU\2";9223372036854776449u;221u];["oR\2";9223372036854776450u;221u];["jR\2";223u;222u];["sH\2";224u;222u];["mP\2";9223372036854776453u;223u];["fU\2";9223372036854776454u;223u];["vG\2";9223372036854776455u;224u];["pI\2";9223372036854776456u;224u];["nG\2";226u;225u];["jR\2";227u;225u];["uC\2";9223372036854776459u;226u];["lH\2";9223372036854776460u;226u];["gY\2";9223372036854776461u;227u];["kQ\2";9223372036854776462u;227u];["rF\2";229u;228u];["jS\2";230u;228u];["pG\2";9223372036854776465u;229u];["xB\2";9223372036854776466u;229u];["nP\2";9223372036854776467u;230u];["eV\2";9223372036854776468u;230u]] /Root/Vectors/vector_idx_prefixed/indexImplPostingTable: 
[[[20];9223372036854776411u];[[40];9223372036854776411u];[[0];9223372036854776412u];[[50];9223372036854776412u];[[60];9223372036854776412u];[[10];9223372036854776413u];[[70];9223372036854776413u];[[80];9223372036854776413u];[[30];9223372036854776414u];[[90];9223372036854776414u];[[1];9223372036854776417u];[[81];9223372036854776417u];[[41];9223372036854776418u];[[61];9223372036854776418u];[[21];9223372036854776419u];[[31];9223372036854776419u];[[91];9223372036854776419u];[[11];9223372036854776420u];[[51];9223372036854776420u];[[71];9223372036854776420u];[[2];9223372036854776423u];[[62];9223372036854776423u];[[12];9223372036854776424u];[[32];9223372036854776424u];[[82];9223372036854776424u];[[22];9223372036854776425u];[[42];9223372036854776425u];[[52];9223372036854776425u];[[72];9223372036854776426u];[[92];9223372036854776426u];[[63];9223372036854776429u];[[23];9223372036854776430u];[[43];9223372036854776430u];[[3];9223372036854776431u];[[53];9223372036854776431u];[[13];9223372036854776432u];[[33];9223372036854776432u];[[73];9223372036854776432u];[[83];9223372036854776432u];[[93];9223372036854776432u];[[4];9223372036854776435u];[[64];9223372036854776435u];[[84];9223372036854776435u];[[44];9223372036854776436u];[[54];9223372036854776436u];[[24];9223372036854776437u];[[34];9223372036854776437u];[[94];9223372036854776437u];[[14];9223372036854776438u];[[74];9223372036854776438u];[[55];9223372036854776441u];[[15];9223372036854776442u];[[35];9223372036854776442u];[[85];9223372036854776442u];[[5];9223372036854776443u];[[45];9223372036854776443u];[[65];9223372036854776443u];[[25];9223372036854776444u];[[75];9223372036854776444u];[[95];9223372036854776444u];[[16];9223372036854776447u];[[26];9223372036854776447u];[[76];9223372036854776447u];[[86];9223372036854776447u];[[46];9223372036854776448u];[[66];9223372036854776448u];[[6];9223372036854776449u];[[56];9223372036854776449u];[[36];9223372036854776450u];[[96];9223372036854776450u];[[7];9223372036854776453u];[[47];9223372036854776453u];[[57];9223372036854776453u];[[67];9223372036854776453u];[[27];9223372036854776454u];[[87];9223372036854776454u];[[17];9223372036854776455u];[[77];9223372036854776455u];[[37];9223372036854776456u];[[97];9223372036854776456u];[[98];9223372036854776459u];[[8];9223372036854776460u];[[48];9223372036854776460u];[[68];9223372036854776460u];[[78];9223372036854776460u];[[58];9223372036854776461u];[[18];9223372036854776462u];[[28];9223372036854776462u];[[38];9223372036854776462u];[[88];9223372036854776462u];[[19];9223372036854776465u];[[69];9223372036854776465u];[[89];9223372036854776465u];[[49];9223372036854776466u];[[39];9223372036854776467u];[[59];9223372036854776467u];[[99];9223372036854776467u];[[9];9223372036854776468u];[[29];9223372036854776468u];[[79];9223372036854776468u]] /Root/Vectors/vector_idx_prefixed/indexImplPrefixTable: [[[0];201u];[[1];204u];[[2];207u];[[3];210u];[[4];213u];[[5];216u];[[6];219u];[[7];222u];[[8];225u];[[9];228u]] >> KqpSystemView::NodesOrderByDesc [GOOD] |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |93.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TLocksTest::Range_GoodLock0 >> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD] 
>> TFlatTest::WriteSplitAndReadFromFollower >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::AdoptDropSolomon |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |93.4%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut >> SlowTopicAutopartitioning::CDC_Write [GOOD] >> TObjectStorageListingTest::CornerCases [GOOD] >> TObjectStorageListingTest::Decimal >> TCdcStreamTests::VirtualTimestamps >> TFlatTest::SelectRangeItemsLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit |93.4%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |93.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 >> TCancelTx::CrossShardReadOnly [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets >> KqpProxy::NodeDisconnectedTest [GOOD] >> KqpProxy::PingNotExistedSession |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay_yt/query_replay_yt |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt >> TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> TSchemeShardTest::CopyTableForBackup |93.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.4%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt >> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD] >> TSchemeShardTest::CreatePersQueueGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 61866, MsgBus: 3320 2025-12-23T18:37:53.973742Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124467318727150:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:53.973812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:54.126016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:54.245415Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124474475245070:2087];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:54.246493Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:54.309313Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124472706500992:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:54.309380Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:54.354481Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:54.528635Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:54.567139Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587124470959058124:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:54.567186Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:54.643650Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587124471343701149:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:54.644449Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:37:54.864728Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to 
scheme cache: ActorUnknown 2025-12-23T18:37:54.949459Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e7c/r3tmp/tmpLNWmF1/pdisk_1.dat 2025-12-23T18:37:55.039076Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:55.246862Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:55.410226Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:55.672636Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:55.727196Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:55.740422Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:55.740489Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:55.756705Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:55.756822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:55.763228Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:55.762856Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:55.810891Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:55.815115Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:55.854829Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:55.855404Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:55.954769Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:55.955144Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:56.011321Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:56.042735Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:56.055440Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:56.455996Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:56.615942Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:56.655240Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:56.655355Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:37:56.743483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:56.743596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:56.745579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:56.745625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:56.745708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:56.745733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:56.745788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:56.745814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-12-23T18:37:56.745878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:56.745919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:56.775288Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:37:56.775322Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:37:56.827051Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:37:56.827103Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-12-23T18:37:56.827271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:56.859875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:56.860073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:56.860161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:56.862804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:57.055775Z node 5 :META ... path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:59.244529Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587124474475245070:2087];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:59.244627Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:3320 2025-12-23T18:37:59.323036Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587124472706500992:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:59.343318Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:59.570985Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587124470959058124:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:59.586875Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:59.646898Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587124471343701149:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:59.646958Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:3320 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515077313 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:38:01.062551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:01.221544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:01.907403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:02.709635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:03.081990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:06.617035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124523153304201:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:06.617146Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:06.617824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124523153304211:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:06.617882Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:07.173157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:07.366789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:07.487880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:07.629903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:07.741889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:07.920270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:08.013922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:08.117517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:08.226207Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124531743239786:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:08.226313Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:08.226621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124531743239791:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:08.226686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124531743239792:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:08.226815Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:08.230602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:38:08.262911Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124531743239795:2414], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:38:08.366378Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124531743239871:4588] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:38:11.286917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:38:11.286947Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] >> TableCreation::ConcurrentTableCreationWithDifferentVersions >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> TFlatTest::Ls [GOOD] >> TFlatTest::LsPathId >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardCountersTest::PathsCounterDecrementsOnFail >> TCdcStreamTests::Basic >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet >> TObjectStorageListingTest::Split [GOOD] >> TObjectStorageListingTest::SuffixColumns >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |93.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 >> TObjectStorageListingTest::Decimal [GOOD] >> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction >> TSchemeShardCountersTest::PathsCounterDecrementsOnFail [FAIL] >> TSchemeShardDecimalTypesInTables::Parameterless >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write [GOOD] Test command err: 2025-12-23T18:35:32.884894Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123864238728838:2206];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:32.884999Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:35:32.974264Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:35:33.031457Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003137/r3tmp/tmpsEk83N/pdisk_1.dat 2025-12-23T18:35:33.571094Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:35:33.581118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:35:33.581240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:35:33.678522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:35:33.843299Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:35:33.854999Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123864238728669:2081] 1766514932826515 != 1766514932826518 2025-12-23T18:35:33.866052Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:35:33.872623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 15034, node 1 2025-12-23T18:35:34.108951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/003137/r3tmp/yandexR4OpKu.tmp 2025-12-23T18:35:34.108974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/003137/r3tmp/yandexR4OpKu.tmp 2025-12-23T18:35:34.109125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/003137/r3tmp/yandexR4OpKu.tmp 2025-12-23T18:35:34.109196Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:35:34.188373Z INFO: TTestServer started on Port 10527 GrpcPort 15034 TClient is connected to server localhost:10527 PQClient connected to localhost:15034 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514933792 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:35:35.087245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:35:35.138598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:35:37.884061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123864238728838:2206];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:35:37.884135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:35:38.832500Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123890008533492:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.832623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.838832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123890008533505:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.838887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123890008533506:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.839219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:35:38.843367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:35:38.867484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-12-23T18:35:38.867837Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123890008533510:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:35:38.958268Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123890008533578:2655] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:35:39.496320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:39.524389Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587123890008533586:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:35:39.529450Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=M2I4OGNmZTEtM2FjMDI0OTQtNDJkNDRhMTYtZTA4YjllYmU=, ActorId: [1:7587123890008533482:2331], ActorState: ExecuteState, TraceId: 01kd67xmydckj84pbpmkk676cc, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:35:39.532787Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:35:39.555473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:35:39.726378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7587123898598468465:2832] === CheckClustersList. Ok 2025-12-23T18:35:45.895957Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-12-23T18:35:46.184343Z node 1 :PERSQUEUE_READ_BALANC ... artitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.941003Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.941023Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.941036Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. 
Scale status: NORMAL 2025-12-23T18:38:14.941058Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.941068Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.956970Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957018Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957068Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957080Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957102Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957114Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957134Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957146Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957166Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957184Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957264Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957285Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957298Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957318Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957330Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. 
Scale status: NEED_SPLIT 2025-12-23T18:38:14.957346Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:41: [TPartitionScaleManager: streamImpl] ::HandleScaleStatusChange need to split partition 36 2025-12-23T18:38:14.957401Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:138: [TPartitionScaleManager: streamImpl] Scale request: #splits=0, #unprocessed=0, splitsLimit=64, #merges=0 2025-12-23T18:38:14.957413Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:62: [TPartitionScaleManager: streamImpl] splitMergeRequest empty 2025-12-23T18:38:14.957460Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957477Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957500Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NEED_SPLIT 2025-12-23T18:38:14.957508Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:41: [TPartitionScaleManager: streamImpl] ::HandleScaleStatusChange need to split partition 35 2025-12-23T18:38:14.957529Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:138: [TPartitionScaleManager: streamImpl] Scale request: #splits=0, #unprocessed=0, splitsLimit=64, #merges=0 2025-12-23T18:38:14.957538Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:62: [TPartitionScaleManager: streamImpl] splitMergeRequest empty 2025-12-23T18:38:14.957549Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NEED_SPLIT 2025-12-23T18:38:14.957558Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:41: [TPartitionScaleManager: streamImpl] ::HandleScaleStatusChange need to split partition 37 2025-12-23T18:38:14.957575Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:138: [TPartitionScaleManager: streamImpl] Scale request: #splits=0, #unprocessed=0, splitsLimit=64, #merges=0 2025-12-23T18:38:14.957582Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:62: [TPartitionScaleManager: streamImpl] splitMergeRequest empty 2025-12-23T18:38:14.957599Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957610Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957666Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957760Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957785Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. 
Scale status: NORMAL 2025-12-23T18:38:14.957796Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957817Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957830Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957850Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957861Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957880Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957892Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.957989Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958113Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958140Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958152Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958173Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958189Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958208Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958220Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958237Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958249Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958266Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. 
Scale status: NORMAL 2025-12-23T18:38:14.958277Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958373Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958385Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958403Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.958414Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: streamImpl] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-12-23T18:38:14.966066Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186224037894][streamImpl] Send TEvPeriodicTopicStats PathId: 51 Generation: 1 StatsReportRound: 155 DataSize: 18494968 UsedReserveSize: 0 2025-12-23T18:38:14.966121Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186224037894][streamImpl] ProcessPendingStats. PendingUpdates size 0 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::SchemaChanges >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] |93.4%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] >> TPQTest::TestWriteSplit >> TableCreation::SimpleUpdateTable [GOOD] >> TableCreation::RollbackTableAcl >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::DropMultipleStreams >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::AlterTopicOverDiskSpaceQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] Test command err: 2025-12-23T18:38:10.242162Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124540876367483:2238];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:10.242234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:10.273801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005802/r3tmp/tmputTPmz/pdisk_1.dat 2025-12-23T18:38:11.075881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:11.075997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:11.103625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:11.253374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:11.253776Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:11.429739Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124540876367273:2081] 1766515090198316 != 1766515090198319 2025-12-23T18:38:11.455903Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:11.527043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TServer::EnableGrpc on GrpcPort 16343, node 1 2025-12-23T18:38:11.839363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:11.839381Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:11.839392Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:11.839477Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13939 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515091523 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:38:12.531459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:12.547707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:12.585152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005802/r3tmp/tmpm1V4xX/pdisk_1.dat 2025-12-23T18:38:17.892891Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:17.893081Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:18.013380Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:18.014930Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124573751375037:2081] 1766515097787779 != 1766515097787782 2025-12-23T18:38:18.075909Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:18.075992Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:18.128747Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:18.134977Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TServer::EnableGrpc on GrpcPort 10167, node 2 2025-12-23T18:38:18.259496Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:18.259522Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:18.259529Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:18.259627Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23531 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515098180 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:18.560749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:18.567530Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:18.592649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:18.863498Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TCdcStreamTests::SchemaChanges [GOOD] >> TCdcStreamTests::RetentionPeriod |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |93.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 >> TPQTest::DirectReadOldPipe >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly >> TFlatTest::LsPathId [FAIL] |93.4%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] Test command err: 2025-12-23T18:36:18.316789Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124061564168893:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:18.316879Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003962/r3tmp/tmpL5W6Rl/pdisk_1.dat 2025-12-23T18:36:18.658225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:36:18.658357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:36:18.664366Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:36:18.667916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:36:18.781128Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:18.794881Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124061564168867:2081] 1766514978312799 != 1766514978312802 TServer::EnableGrpc on GrpcPort 15914, node 1 2025-12-23T18:36:18.905103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:36:18.908882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:36:18.908924Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:36:18.908933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:36:18.909051Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6568 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514978816 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:36:19.366274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:36:19.379160Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:36:19.384187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:36:21.775433Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:36:21.781712Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587124074449071618:2320], Start check tables existence, number paths: 2 2025-12-23T18:36:21.792682Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:36:21.792730Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:36:21.793044Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587124074449071618:2320], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:36:21.793106Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587124074449071618:2320], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:36:21.793157Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587124074449071618:2320], Successfully finished 2025-12-23T18:36:21.793340Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-12-23T18:36:21.796672Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=ZjliNTRmZTItOGFhYjk4OGUtNmJjY2ZkZjYtMjFlY2UyMDU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjliNTRmZTItOGFhYjk4OGUtNmJjY2ZkZjYtMjFlY2UyMDU= (tmp dir name: 7c438c15-4105-c62e-082b-489d4be5814f) 
2025-12-23T18:36:21.796967Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-12-23T18:36:21.797030Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=ZjliNTRmZTItOGFhYjk4OGUtNmJjY2ZkZjYtMjFlY2UyMDU=, ActorId: [1:7587124074449071645:2324], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:36:21.847119Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124074449071647:2501], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:36:21.851639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:36:21.855197Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124074449071647:2501], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-12-23T18:36:21.856112Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124074449071647:2501], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-12-23T18:36:21.869698Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124074449071647:2501], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:36:21.926067Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124074449071647:2501], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:36:21.930484Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124074449071699:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:36:21.930634Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124074449071647:2501], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-12-23T18:36:21.952755Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-12-23T18:36:21.952784Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2025-12-23T18:36:21.952826Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-12-23T18:36:21.953003Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=1&id=ZjliNTRmZTItOGFhYjk4OGUtNmJjY2ZkZjYtMjFlY2UyMDU=, ActorId: [1:7587124074449071645:2324], ActorState: ReadyState, TraceId: 01kd67yz1z10n1aqt085hmgzc4, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-12-23T18:36:21.953333Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124074449071709:2326], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-12-23T18:36:21.955178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124074449071709:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:21.955271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:21.955365Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-12-23T18:36:21.955397Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124074449071719:2327], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-12-23T18:36:21.955635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124074449071719:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:21.955688Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:22.283898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__op ... KQP_SESSION TRACE: kqp_session_actor.cpp:972: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: ExecuteState, TraceId: 01kd682gp3eqf3b8khnkq3w9ff, read snapshot result: UNAVAILABLE, step: 0, tx id: 0 2025-12-23T18:38:18.904375Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: ExecuteState, TraceId: 01kd682gp3eqf3b8khnkq3w9ff, Create QueryResponse for error on request, msg: , status: UNAVAILABLE, issues: { message: "Database coordinators are unavailable" severity: 1 } 2025-12-23T18:38:18.904482Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2383: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: ExecuteState, TraceId: 01kd682gp3eqf3b8khnkq3w9ff, txInfo Status: Aborted Kind: ReadOnly TotalDuration: 0 ServerDuration: 281.027 QueriesCount: 2 2025-12-23T18:38:18.904544Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1816: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: ExecuteState, TraceId: 01kd682gp3eqf3b8khnkq3w9ff, Sending to Executer TraceId: 0 8 2025-12-23T18:38:18.904642Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1880: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: ExecuteState, TraceId: 01kd682gp3eqf3b8khnkq3w9ff, Created new KQP executer: [8:7587124578093047355:2973] isRollback: 1 2025-12-23T18:38:18.904685Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: ExecuteState, TraceId: 01kd682gp3eqf3b8khnkq3w9ff, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:38:18.906526Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: CleanupState, TraceId: 01kd682gp3eqf3b8khnkq3w9ff, EndCleanup, isFinal: 0 2025-12-23T18:38:18.906644Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: CleanupState, TraceId: 01kd682gp3eqf3b8khnkq3w9ff, Sent query response back to proxy, proxyRequestId: 99, proxyId: [8:7587124453538992089:2257] 2025-12-23T18:38:18.907595Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Database coordinators are unavailable" severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01kd682gzzehd0c6kd0grheq02" } } } } ; 2025-12-23T18:38:18.907936Z node 8 :METADATA_PROVIDER ERROR: log.h:466: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Database coordinators are unavailable 2025-12-23T18:38:18.908001Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:38:18.908037Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:38:18.908065Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:38:18.908094Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:38:18.908170Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=8&id=YTkwZWJhOTktZGRkN2ZjOTgtMmFhOWVlMWMtMTcyZGJhOTI=, ActorId: [8:7587124578093047294:2973], ActorState: unknown state, Session actor destroyed 2025-12-23T18:38:19.138310Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY= (tmp dir name: ee3fdadc-48f3-0d23-b69a-1e9b4e42003e) 2025-12-23T18:38:19.138721Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:38:19.142964Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: ReadyState, TraceId: 01kd682hg65qhme528fvxjzmmw, received request, proxyRequestId: 102 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: /*UI-QUERY-EXCLUDE*/ SELECT * FROM `//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [8:7587124582388014658:2989] database: /Root/test-dedicated databaseId: /Root/test-dedicated pool id: default 2025-12-23T18:38:19.143018Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:268: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: ReadyState, TraceId: 01kd682hg65qhme528fvxjzmmw, request placed into pool from cache: default 2025-12-23T18:38:19.143155Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:630: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: ExecuteState, TraceId: 01kd682hg65qhme528fvxjzmmw, Sending CompileQuery request 2025-12-23T18:38:19.186381Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][8:7587124483603764036:2673][/Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is 
incomplete in one of the ring groups: cookie# 48 2025-12-23T18:38:19.186525Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][8:7587124483603764036:2673][/Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is incomplete in one of the ring groups: cookie# 49 2025-12-23T18:38:19.188091Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [8:7587124582388014660:2990], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:2:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-12-23T18:38:19.191247Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: ExecuteState, TraceId: 01kd682hg65qhme528fvxjzmmw, ReplyQueryCompileError, status: UNAVAILABLE, issues: { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 2 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:38:19.191302Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: ExecuteState, TraceId: 01kd682hg65qhme528fvxjzmmw, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:38:19.191341Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: ExecuteState, TraceId: 01kd682hg65qhme528fvxjzmmw, EndCleanup, isFinal: 0 2025-12-23T18:38:19.191540Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: ExecuteState, TraceId: 01kd682hg65qhme528fvxjzmmw, Sent query response back to proxy, proxyRequestId: 102, proxyId: [8:7587124453538992089:2257] 2025-12-23T18:38:19.192325Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 2 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-12-23T18:38:19.192656Z node 8 :METADATA_PROVIDER ERROR: log.h:466: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:2:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-12-23T18:38:19.192770Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:38:19.192804Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:38:19.192830Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:38:19.192857Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:38:19.192927Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=8&id=NTYyMjg4NzQtZWMxNzlkNGYtN2IzMzJiYWYtODA1ZWRjMDY=, ActorId: [8:7587124582388014657:2988], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] Test command err: 2025-12-23T18:38:10.287478Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124542908750911:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:10.287554Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005809/r3tmp/tmpWnSNHG/pdisk_1.dat 2025-12-23T18:38:11.167897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:11.233884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:11.233986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:11.387299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:11.484758Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:11.491033Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124542908750700:2081] 1766515090260362 != 1766515090260365 2025-12-23T18:38:11.504158Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:11.526967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: dc-1/.metadata/script_executions TClient is connected to server localhost:20172 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515091579 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:12.136087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:12.147414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:12.180162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:38:12.197977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005809/r3tmp/tmpaES72m/pdisk_1.dat 2025-12-23T18:38:18.139077Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124574343626886:2089];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:18.224181Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:18.375565Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:18.376103Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:18.382945Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124574343626813:2081] 1766515098088480 != 1766515098088483 2025-12-23T18:38:18.419311Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:18.419384Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:18.463828Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:18.569356Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:7382 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515098509 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:18.907792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:18.920901Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:38:18.935911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:18.940206Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:19.221726Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |93.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] Test command err: test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057ee/r3tmp/tmpGSDU9Y/pdisk_1.dat 2025-12-23T18:38:13.012663Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124555758784468:2255];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:13.012889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:13.540326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:13.540433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:13.600144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:13.622623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:13.633385Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:13.639142Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124551463816953:2081] 1766515092896298 != 1766515092896301 2025-12-23T18:38:13.813469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:26503 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515093672 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:13.958310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:13.971317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:14.003773Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:14.013825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515094141 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Key2" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "... 
(TRUNCATED) 2025-12-23T18:38:14.281669Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:38:14.283440Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:38:14.283495Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:38:14.444140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976710668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:38:14.444483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2025-12-23T18:38:14.444764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2025-12-23T18:38:14.444794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:38:14.445073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2025-12-23T18:38:14.445093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976710668:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2025-12-23T18:38:14.445310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2025-12-23T18:38:14.445358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:38:14.446331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710668, response: Status: StatusAccepted TxId: 281474976710668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-12-23T18:38:14.446464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710668, subject: , 
status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-12-23T18:38:14.446593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-12-23T18:38:14.446625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 waiting... 2025-12-23T18:38:14.448333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-12-23T18:38:14.448499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-12-23T18:38:14.448921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601 2025-12-23T18:38:14.448991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-12-23T18:38:14.449046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710668, partId: 0, tablet: 72057594037968897 2025-12-23T18:38:14.449061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2025-12-23T18:38:14.449070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2025-12-23T18:38:14.455237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6306: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-12-23T18:38:14.455267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2025-12-23T18:38:14.455381Z ... 
12499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:38:19.912509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:38:19.912544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:38:19.912555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:38:19.912589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:38:19.912600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:38:19.912628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2025-12-23T18:38:19.912642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-12-23T18:38:19.912665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2025-12-23T18:38:19.913380Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-12-23T18:38:19.914906Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-12-23T18:38:19.914935Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-12-23T18:38:19.918936Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-12-23T18:38:19.918974Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-12-23T18:38:19.919466Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2025-12-23T18:38:19.919560Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-12-23T18:38:19.920665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-12-23T18:38:19.920881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2025-12-23T18:38:19.921038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:38:19.921140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:38:19.921224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free 
tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-12-23T18:38:19.921317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2025-12-23T18:38:19.921402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-12-23T18:38:19.921490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2025-12-23T18:38:19.921577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:38:19.921595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2025-12-23T18:38:19.921629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:38:19.921789Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-12-23T18:38:19.921805Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-12-23T18:38:19.921819Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-12-23T18:38:19.923089Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2025-12-23T18:38:19.923910Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-12-23T18:38:19.923982Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7587124579896671117:3535], serverId# [2:7587124579896671120:3538], sessionId# [0:0:0] 2025-12-23T18:38:19.924293Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2025-12-23T18:38:19.925230Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2025-12-23T18:38:19.925348Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-12-23T18:38:19.925368Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-12-23T18:38:19.925385Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-12-23T18:38:19.925399Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-12-23T18:38:19.925414Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-12-23T18:38:19.926089Z node 2 :HIVE WARN: 
hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-12-23T18:38:19.926232Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2025-12-23T18:38:19.926289Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2025-12-23T18:38:19.926914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2025-12-23T18:38:19.926928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-12-23T18:38:19.926964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:38:19.926972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-12-23T18:38:19.926988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2025-12-23T18:38:19.926996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-12-23T18:38:19.927014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:38:19.927029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-12-23T18:38:19.927056Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:38:19.924419Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2025-12-23T18:38:19.925189Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-12-23T18:38:19.925441Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2025-12-23T18:38:19.926360Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2025-12-23T18:38:19.927236Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2025-12-23T18:38:19.928378Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-12-23T18:38:19.928674Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-12-23T18:38:19.928726Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7587124579896671118:3536], serverId# [3:7587124579988562909:2334], sessionId# [0:0:0] 2025-12-23T18:38:19.928967Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2025-12-23T18:38:19.929919Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2025-12-23T18:38:19.929971Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2025-12-23T18:38:19.932674Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-12-23T18:38:19.935417Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: 
OnTabletStop: 72075186224037892 reason = ReasonStop 2025-12-23T18:38:19.935479Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037892, clientId# [2:7587124579896671119:3537], serverId# [2:7587124579896671121:3539], sessionId# [0:0:0] 2025-12-23T18:38:19.935789Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-12-23T18:38:19.936196Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2025-12-23T18:38:19.936271Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2025-12-23T18:38:19.939180Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2025-12-23T18:38:19.939253Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2025-12-23T18:38:22.818963Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587124571306734504:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:22.819046Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateDropSolomon >> TCdcStreamTests::DropMultipleStreams [GOOD] >> TCdcStreamTests::Attributes >> TPQTest::TestWriteSplit [GOOD] >> TPQTest::TestWriteTimeStampEstimate >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 >> TPQTest::DirectReadOldPipe [GOOD] |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> TPQTest::Read_From_Different_Zones_What_Was_Written_With_Gaps |93.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> TObjectStorageListingTest::SuffixColumns [GOOD] >> TSchemeShardTest::AlterTopicOverDiskSpaceQuotas [GOOD] >> KqpProxy::PingNotExistedSession [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> ScriptExecutionsTest::AttemptToUpdateDeletedLease >> TPartitionTests::CorrectRange_Commit >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions >> TPQTest::TestWriteTimeStampEstimate [GOOD] >> TPQTest::TestWriteTimeLag |93.4%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit >> TPartitionTests::CorrectRange_Commit [GOOD] >> TSchemeShardTest::CreateDropSolomon [GOOD] >> TSchemeShardTest::CreateTableWithCompactionStrategies >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] >> ScriptExecutionsTest::TestSecureScriptExecutions >> TPartitionTests::CorrectRange_Multiple_Consumers >> TPQTest::TestReadRuleVersions >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi >> ScriptExecutionsTest::RestartQueryWithGetOperation [GOOD] >> ScriptExecutionsTest::BackgroundOperationRestart >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2025-12-23T18:38:14.531997Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124560594592178:2255];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:14.532231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:14.563944Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057eb/r3tmp/tmpIQ2tHY/pdisk_1.dat 2025-12-23T18:38:15.272560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:15.323331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:15.323434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:15.472248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:15.501872Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:15.522350Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124560594591951:2081] 1766515094429777 != 1766515094429780 2025-12-23T18:38:15.522461Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:15.566888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TServer::EnableGrpc on GrpcPort 25798, node 1 2025-12-23T18:38:15.851046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:15.851070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:15.851080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:15.851163Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3988 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515095611 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:16.403858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:16.411807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:16.439841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766515096717 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path... (TRUNCATED) waiting... TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766515096717 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path... (TRUNCATED) 2025-12-23T18:38:21.388492Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124587596925039:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:21.388571Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:21.428842Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057eb/r3tmp/tmpr17yON/pdisk_1.dat 2025-12-23T18:38:21.564867Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:21.701145Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124587596924927:2081] 1766515101372272 != 1766515101372275 2025-12-23T18:38:21.712965Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:21.715574Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:21.715646Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:21.814565Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 1535, node 2 2025-12-23T18:38:21.947901Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:38:22.255309Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:22.255330Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:22.255337Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:22.255408Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:38:22.418950Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:11983 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515101862 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:22.667293Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:22.675291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:22.693940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:22.706875Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:23.790174Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553163, Sender [2:7587124596186861071:2472], Recipient [2:7587124591891893118:2299]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 39 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2025-12-23T18:38:23.790210Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-12-23T18:38:23.790406Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-12-23T18:38:23.790665Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2025-12-23T18:38:23.790708Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2025-12-23T18:38:23.790751Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2025-12-23T18:38:23.790776Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2025-12-23T18:38:23.790803Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2025-12-23T18:38:23.790880Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 2025-12-23T18:38:23.814318Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553163, Sender [2:7587124596186861075:2473], Recipient [2:7587124591891893118:2299]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 39 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2025-12-23T18:38:23.814350Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-12-23T18:38:23.814491Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot 
to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-12-23T18:38:23.814703Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2025-12-23T18:38:23.814755Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2025-12-23T18:38:23.814837Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 >> TPartitionTests::ConflictingTxIsAborted >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> TPartitionTests::ConflictingTxIsAborted [GOOD] >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2025-12-23T18:38:16.166457Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124566767255613:2222];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:16.166551Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:16.251535Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e9/r3tmp/tmpyChDZe/pdisk_1.dat 2025-12-23T18:38:16.881419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:16.881541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:16.997445Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:17.001357Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:17.001690Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124566767255427:2081] 1766515096097308 != 1766515096097311 2025-12-23T18:38:17.039406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2025-12-23T18:38:17.177789Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:17.227347Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:61651 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515097109 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:17.850382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:17.874958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:17.903082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:17.912008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:18.271490Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.010s,wait=0.002s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-12-23T18:38:18.334630Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.055s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-12-23T18:38:18.396201Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.012s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-12-23T18:38:18.417398Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515098103 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name:... (TRUNCATED) Copy TableOld to Table 2025-12-23T18:38:18.669777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:38:18.670177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:431: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-12-23T18:38:18.670645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2025-12-23T18:38:18.670688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2025-12-23T18:38:18.670703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount 
reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:38:18.670722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976710676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2025-12-23T18:38:18.671056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2025-12-23T18:38:18.671080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2025-12-23T18:38:18.671196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2025-12-23T18:38:18.671295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:38:18.671936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2025-12-23T18:38:18.671972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2025-12-23T18:38:18.672552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2025-12-23T18:38:18.672731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-12-23T18:38:18.672888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:38:18.672920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T18:38:18.673036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2025-12-23T18:38:18.673094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:38:18.673116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7587124566767255959:2248], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 38 2025-12-23T18:38:18.673130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7587124566767255959:2248], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 40 2025-12-23T18:38:18.673159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-12-23T18:38:18.673186Z node 1 :FLAT_TX_SCHEME ... 5186224037890 Initiating switch from PreOffline to Offline state 2025-12-23T18:38:26.038945Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7587124603933444686:2678], serverId# [2:7587124603933444692:3646], sessionId# [0:0:0] 2025-12-23T18:38:26.039028Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:38:26.039078Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:38:26.039169Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:38:26.043232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587124595343508375 RawX2: 4503608217307374 } TabletId: 72075186224037889 State: 4 2025-12-23T18:38:26.043296Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:38:26.043683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:38:26.043713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:38:26.043861Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-12-23T18:38:26.044095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587124599638475975 RawX2: 4503608217307431 } TabletId: 72075186224037891 State: 4 2025-12-23T18:38:26.044133Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:38:26.044305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587124599638475975 RawX2: 4503608217307431 } TabletId: 72075186224037891 State: 4 2025-12-23T18:38:26.044338Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:38:26.044444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587124599638475973 RawX2: 4503608217307429 } TabletId: 72075186224037890 State: 4 2025-12-23T18:38:26.044469Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:38:26.044764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: 
SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:38:26.044780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:38:26.044933Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-12-23T18:38:26.045147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:38:26.045160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:38:26.045669Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-12-23T18:38:26.045831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:38:26.045845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:38:26.046043Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-12-23T18:38:26.051628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-12-23T18:38:26.051876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2025-12-23T18:38:26.052095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-12-23T18:38:26.052233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2025-12-23T18:38:26.052336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-12-23T18:38:26.052468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:38:26.052617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2025-12-23T18:38:26.052734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:38:26.052748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2025-12-23T18:38:26.052790Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2025-12-23T18:38:26.052806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2025-12-23T18:38:26.052821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:38:26.053373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2025-12-23T18:38:26.053391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-12-23T18:38:26.053423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:38:26.053462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-12-23T18:38:26.053555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:38:26.053579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:38:26.053595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-12-23T18:38:26.053637Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:38:26.054222Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-12-23T18:38:26.054277Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7587124595343508502:2599], serverId# [2:7587124595343508503:2600], sessionId# [0:0:0] 2025-12-23T18:38:26.054299Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-12-23T18:38:26.054317Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7587124603933444535:3510], serverId# [2:7587124603933444536:3511], sessionId# [0:0:0] 2025-12-23T18:38:26.054363Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-12-23T18:38:26.054379Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7587124599638476158:2834], serverId# [2:7587124599638476159:2835], sessionId# [0:0:0] 2025-12-23T18:38:26.054400Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7587124599638476053:2761], serverId# [2:7587124599638476056:2764], sessionId# [0:0:0] 2025-12-23T18:38:26.055083Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-12-23T18:38:26.055111Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 
not found 2025-12-23T18:38:26.055824Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-12-23T18:38:26.056338Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2025-12-23T18:38:26.056410Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2025-12-23T18:38:26.057867Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2025-12-23T18:38:26.057921Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2025-12-23T18:38:26.059634Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2025-12-23T18:38:26.059712Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2025-12-23T18:38:26.326886Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-12-23T18:38:26.331681Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-12-23T18:38:26.332676Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 >> TPQTest::TestWriteTimeLag [GOOD] >> TPQTest::The_Value_Of_CreationUnixTime_Must_Not_Decrease >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients >> TPartitionTests::ConflictingTxProceedAfterRollback >> TLocksTest::SetEraseSet [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [FAIL] >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 >> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::CreateSystemColumn >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] >> TPartitionTests::ShadowPartitionCounters >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] Test command err: 2025-12-23T18:37:55.418680Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124476427584035:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:55.469306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006019/r3tmp/tmpYCIoZi/pdisk_1.dat 2025-12-23T18:37:55.872150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:55.872255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:56.089495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:56.194204Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124476427583823:2081] 1766515075392043 != 1766515075392046 2025-12-23T18:37:56.242291Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:56.402236Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1101 TServer::EnableGrpc on GrpcPort 9995, node 1 2025-12-23T18:37:56.847104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:56.847125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:56.847133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:56.847227Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515076382 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:37:57.093260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:37:57.104115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:38:00.419241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124476427584035:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:00.419314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:38:00.602406Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:38:00.624459Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:38:00.624502Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:38:00.624521Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:38:00.626694Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7587124497902421145:2495] Owner: [1:7587124497902421144:2494]. Describe result: PathErrorUnknown 2025-12-23T18:38:00.626707Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7587124497902421145:2495] Owner: [1:7587124497902421144:2494]. Creating table 2025-12-23T18:38:00.626767Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7587124497902421145:2495] Owner: [1:7587124497902421144:2494]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2025-12-23T18:38:00.626916Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7587124497902421146:2496] Owner: [1:7587124497902421144:2494]. Describe result: PathErrorUnknown 2025-12-23T18:38:00.626926Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7587124497902421146:2496] Owner: [1:7587124497902421144:2494]. Creating table 2025-12-23T18:38:00.626939Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7587124497902421146:2496] Owner: [1:7587124497902421144:2494]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2025-12-23T18:38:00.627109Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7587124497902421147:2497] Owner: [1:7587124497902421144:2494]. Describe result: PathErrorUnknown 2025-12-23T18:38:00.627114Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7587124497902421147:2497] Owner: [1:7587124497902421144:2494]. Creating table 2025-12-23T18:38:00.627136Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7587124497902421147:2497] Owner: [1:7587124497902421144:2494]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2025-12-23T18:38:00.642257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:00.648629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:00.655982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:00.661741Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7587124497902421147:2497] Owner: [1:7587124497902421144:2494]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2025-12-23T18:38:00.669580Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7587124497902421145:2495] Owner: [1:7587124497902421144:2494]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2025-12-23T18:38:00.669690Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7587124497902421145:2495] Owner: [1:7587124497902421144:2494]. Subscribe on create table tx: 281474976710658 2025-12-23T18:38:00.670546Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7587124497902421146:2496] Owner: [1:7587124497902421144:2494]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2025-12-23T18:38:00.670564Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7587124497902421146:2496] Owner: [1:7587124497902421144:2494]. Subscribe on create table tx: 281474976710659 2025-12-23T18:38:00.672017Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7587124497902421147:2497] Owner: [1:7587124497902421144:2494]. Subscribe on create table tx: 281474976710660 2025-12-23T18:38:00.673124Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7587124497902421145:2495] Owner: [1:7587124497902421144:2494]. Subscribe on tx: 281474976710658 registered 2025-12-23T18:38:00.673295Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7587124497902421146:2496] Owner: [1:7587124497902421144:2494]. Subscribe on tx: 281474976710659 registered 2025-12-23T18:38:00.673309Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7587124497902421147:2497] Owner: [1:7587124497902421144:2494]. 
Subscribe on tx: 281474976710660 registered 2025-12-23T18:38:00.884344Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7587124497902421147:2497] Owner: [1:7587124497902421144:2494]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-12-23T18:38:00.942975Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7587124497902421145:2495] Owner: [1:7587124497902421144:2494]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-12-23T18:38:00.943252Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7587124497902421146:2496] Owner: [1:7587124497902421144:2494]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-12-23T18:38:00.955176Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7587124497902421147:2497] Owner: [1:7587124497902421144:2494]. Table already exists, number of columns: 7, has SecurityObject: true 2025-12-23T18:38:00.955221Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7587124497902421147:2497] Owner: [1:7587124497902421144:2494]. Column diff is empty, finishing 2025-12-23T18:38:00.956216Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7587124497902421147:2497] Owner: [1:7587124497902421144:2494]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2025-12-23T18:38:00.957236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: ( ... eCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 46 PathCreateTxId: 281474976710696 } 2025-12-23T18:38:27.692693Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7587124614753495916:2974] Owner: [3:7587124614753495915:2973]. Subscribe on create table tx: 281474976710696 2025-12-23T18:38:27.694721Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7587124614753495908:2966] Owner: [3:7587124614753495907:2965]. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710689 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateCreate)" SchemeShardTabletId: 72057594046644480 PathId: 46 PathCreateTxId: 281474976710696 } 2025-12-23T18:38:27.696333Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7587124614753495908:2966] Owner: [3:7587124614753495907:2965]. Subscribe on create table tx: 281474976710696 2025-12-23T18:38:27.707958Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587124614753495918:2976] Owner: [3:7587124614753495917:2975]. 
Subscribe on tx: 281474976710696 registered 2025-12-23T18:38:27.707981Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587124614753495920:2978] Owner: [3:7587124614753495919:2977]. Subscribe on tx: 281474976710696 registered 2025-12-23T18:38:27.707991Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587124614753495912:2970] Owner: [3:7587124614753495911:2969]. Subscribe on tx: 281474976710696 registered 2025-12-23T18:38:27.708001Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587124614753495914:2972] Owner: [3:7587124614753495913:2971]. Subscribe on tx: 281474976710696 registered 2025-12-23T18:38:27.708013Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587124614753495910:2968] Owner: [3:7587124614753495909:2967]. Subscribe on tx: 281474976710696 registered 2025-12-23T18:38:27.708054Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587124614753495926:2984] Owner: [3:7587124614753495925:2983]. Subscribe on tx: 281474976710696 registered 2025-12-23T18:38:27.708066Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587124614753495922:2980] Owner: [3:7587124614753495921:2979]. Subscribe on tx: 281474976710696 registered 2025-12-23T18:38:27.708078Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587124614753495916:2974] Owner: [3:7587124614753495915:2973]. Subscribe on tx: 281474976710696 registered 2025-12-23T18:38:27.708089Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587124614753495908:2966] Owner: [3:7587124614753495907:2965]. Subscribe on tx: 281474976710696 registered 2025-12-23T18:38:27.712360Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587124614753495924:2982] Owner: [3:7587124614753495923:2981]. Subscribe on tx: 281474976710696 registered 2025-12-23T18:38:27.782272Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587124614753495908:2966] Owner: [3:7587124614753495907:2965]. Request: create. Transaction completed: 281474976710696. Doublechecking... 2025-12-23T18:38:27.782313Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587124614753495914:2972] Owner: [3:7587124614753495913:2971]. Request: create. Transaction completed: 281474976710696. Doublechecking... 2025-12-23T18:38:27.782336Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587124614753495920:2978] Owner: [3:7587124614753495919:2977]. Request: create. Transaction completed: 281474976710696. Doublechecking... 2025-12-23T18:38:27.782356Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587124614753495926:2984] Owner: [3:7587124614753495925:2983]. Request: create. Transaction completed: 281474976710696. Doublechecking... 2025-12-23T18:38:27.782373Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587124614753495910:2968] Owner: [3:7587124614753495909:2967]. Request: create. Transaction completed: 281474976710696. Doublechecking... 2025-12-23T18:38:27.782392Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587124614753495922:2980] Owner: [3:7587124614753495921:2979]. Request: create. Transaction completed: 281474976710696. Doublechecking... 
2025-12-23T18:38:27.782410Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587124614753495916:2974] Owner: [3:7587124614753495915:2973]. Request: create. Transaction completed: 281474976710696. Doublechecking... 2025-12-23T18:38:27.782428Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587124614753495918:2976] Owner: [3:7587124614753495917:2975]. Request: create. Transaction completed: 281474976710696. Doublechecking... 2025-12-23T18:38:27.782451Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587124614753495912:2970] Owner: [3:7587124614753495911:2969]. Request: create. Transaction completed: 281474976710696. Doublechecking... 2025-12-23T18:38:27.782469Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587124614753495924:2982] Owner: [3:7587124614753495923:2981]. Request: create. Transaction completed: 281474976710696. Doublechecking... 2025-12-23T18:38:27.834447Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587124614753495910:2968] Owner: [3:7587124614753495909:2967]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:38:27.834482Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587124614753495910:2968] Owner: [3:7587124614753495909:2967]. Column diff is empty, finishing 2025-12-23T18:38:27.836823Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587124614753495914:2972] Owner: [3:7587124614753495913:2971]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:38:27.836856Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587124614753495914:2972] Owner: [3:7587124614753495913:2971]. Column diff is empty, finishing 2025-12-23T18:38:27.844643Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587124614753495926:2984] Owner: [3:7587124614753495925:2983]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:38:27.844679Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587124614753495926:2984] Owner: [3:7587124614753495925:2983]. Column diff is empty, finishing 2025-12-23T18:38:27.852040Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587124614753495920:2978] Owner: [3:7587124614753495919:2977]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:38:27.852077Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587124614753495920:2978] Owner: [3:7587124614753495919:2977]. Column diff is empty, finishing 2025-12-23T18:38:27.862365Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587124614753495918:2976] Owner: [3:7587124614753495917:2975]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:38:27.862402Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587124614753495918:2976] Owner: [3:7587124614753495917:2975]. Column diff is empty, finishing 2025-12-23T18:38:27.862490Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587124614753495922:2980] Owner: [3:7587124614753495921:2979]. 
Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:38:27.862507Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587124614753495922:2980] Owner: [3:7587124614753495921:2979]. Column diff is empty, finishing 2025-12-23T18:38:27.871992Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587124614753495916:2974] Owner: [3:7587124614753495915:2973]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:38:27.872028Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587124614753495916:2974] Owner: [3:7587124614753495915:2973]. Column diff is empty, finishing 2025-12-23T18:38:27.872093Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587124614753495908:2966] Owner: [3:7587124614753495907:2965]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:38:27.872108Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587124614753495908:2966] Owner: [3:7587124614753495907:2965]. Column diff is empty, finishing 2025-12-23T18:38:27.875081Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587124614753495912:2970] Owner: [3:7587124614753495911:2969]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:38:27.875112Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587124614753495912:2970] Owner: [3:7587124614753495911:2969]. Column diff is empty, finishing 2025-12-23T18:38:27.875195Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587124614753495924:2982] Owner: [3:7587124614753495923:2981]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:38:27.875207Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587124614753495924:2982] Owner: [3:7587124614753495923:2981]. 
Column diff is empty, finishing 2025-12-23T18:38:27.925546Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd682t2n0ygzzvp6jn1deahm", Request has 18444977558601.626092s seconds to be completed 2025-12-23T18:38:27.927977Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd682t2n0ygzzvp6jn1deahm", Created new session, sessionId: ydb://session/3?node_id=3&id=NzM3YzI5YzUtMjZkZjlhNjYtM2QzNTY0NTMtNjg4NjEwOTk=, workerId: [3:7587124614753496151:2540], database: /dc-1, longSession: 1, local sessions count: 2 2025-12-23T18:38:27.928195Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd682t2n0ygzzvp6jn1deahm 2025-12-23T18:38:27.984070Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YzA2NTQ1MmYtNzk4MTYyZjktOWU1NzEyNGMtYWZkN2JjNmM=, workerId: [3:7587124614753495894:2528], local sessions count: 1 2025-12-23T18:38:27.984453Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=NzM3YzI5YzUtMjZkZjlhNjYtM2QzNTY0NTMtNjg4NjEwOTk=, workerId: [3:7587124614753496151:2540], local sessions count: 0 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD] >> TPQTest::TestReadRuleVersions [GOOD] >> TPQTest::TestSeveralOwners >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2025-12-23T18:38:15.465346Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124564479289512:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:15.465433Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e8/r3tmp/tmp2URqTH/pdisk_1.dat 2025-12-23T18:38:15.617207Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:38:16.202937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:16.212432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:16.212535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:16.313001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:16.474861Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:16.500154Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:38:16.512616Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28222 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515096451 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:16.890127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:16.901440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:16.916088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:16.924318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:17.235045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:17.368841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:17.491601Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710662: Validate (783): Key validation status: 3 2025-12-23T18:38:17.492024Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7587124573069225177:2697] txid# 281474976710662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-12-23T18:38:17.492148Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7587124573069225177:2697] txid# 281474976710662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-12-23T18:38:17.492187Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7587124573069225177:2697] txid# 281474976710662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-12-23T18:38:17.503670Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710663: Validate (783): Key validation status: 3 2025-12-23T18:38:17.503809Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7587124573069225201:2705] txid# 281474976710663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-12-23T18:38:17.503868Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7587124573069225201:2705] txid# 281474976710663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-12-23T18:38:17.503891Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7587124573069225201:2705] txid# 281474976710663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-12-23T18:38:17.507103Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710664: Validate (783): Key validation status: 3 2025-12-23T18:38:17.510841Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7587124573069225208:2709] txid# 281474976710664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-12-23T18:38:17.510930Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7587124573069225208:2709] txid# 281474976710664 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-12-23T18:38:17.510981Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7587124573069225208:2709] txid# 281474976710664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-12-23T18:38:17.519598Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710665: Validate (783): Key validation status: 3 2025-12-23T18:38:17.519908Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7587124573069225214:2712] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-12-23T18:38:17.519988Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7587124573069225214:2712] txid# 281474976710665 HANDLE 
PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-12-23T18:38:17.520017Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7587124573069225214:2712] txid# 281474976710665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-12-23T18:38:21.120783Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124589641957825:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:21.120859Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e8/r3tmp/tmpzGj7Sk/pdisk_1.dat 2025-12-23T18:38:21.190449Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:38:21.458844Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:21.469901Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:21.498897Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124589641957800:2081] 1766515101115005 != 1766515101115008 2025-12-23T18:38:21.540638Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:21.540722Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:21.613754Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:21.725562Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:27891 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515101680 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:38:22.078622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:38:22.108000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:22.115955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:22.155879Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:22.214659Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:22.330312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:26.752420Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124610154485307:2084];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:26.753575Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e8/r3tmp/tmpOmr9uH/pdisk_1.dat 2025-12-23T18:38:26.805579Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:27.034823Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:27.045845Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:27.050949Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587124610154485251:2081] 1766515106744654 != 1766515106744657 2025-12-23T18:38:27.070978Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:27.071073Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:27.127868Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21903 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515107189 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-12-23T18:38:27.420148Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:38:27.431316Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:38:27.453184Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:27.461511Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:27.549315Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:27.596835Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:38:27.628444Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:27.783993Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TPQTestInternal::TestPartitionedBigTest >> TPQTabletTests::Read_TEvTxCommit_After_Restart >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 >> TPartitionTests::GetPartitionWriteInfoSuccess |93.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |93.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |93.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::Negative >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] >> TSchemeShardTest::CreateSystemColumn [GOOD] >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::RebootSchemeShard >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestReserveBytes >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet [GOOD] >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters >> 
TableCreation::RollbackTableAcl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [FAIL] Test command err: 2025-12-23T18:38:15.150649Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124562719273387:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:15.150718Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057ea/r3tmp/tmpWmPKjp/pdisk_1.dat 2025-12-23T18:38:15.746069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:15.870882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:15.871005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:15.960712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:16.098899Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124562719273263:2081] 1766515095047488 != 1766515095047491 2025-12-23T18:38:16.107784Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:16.129251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:38:16.178882Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18418 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515096031 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:16.646245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:38:16.660808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: // TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515096703 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515096031 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depr... (TRUNCATED) TClient::Ls request: /dc-11 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" TClient::Ls request: /dc-2 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" waiting... 
TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515096703 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515096031 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Berkanavt" PathI... (TRUNCATED) TClient::Ls request: /dc-1/Berkanavt TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Berkanavt" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766515096731 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 37 PathsLimit: 10000 Sha... (TRUNCATED) 2025-12-23T18:38:16.694790Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124567014241415:2529] txid# 281474976715659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515096703 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515096031 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Berkanavt" PathI... 
(TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" waiting... TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515096703 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515096031 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Berkanavt" Pat... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "arcadia" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1766515096752 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 Shard... 
(TRUNCATED) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057ea/r3tmp/tmprvsInt/pdisk_1.dat 2025-12-23T18:38:20.478895Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:20.542827Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:20.661624Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:20.661692Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:20.681991Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124586229073506:2081] 1766515100271125 != 1766515100271128 2025-12-23T18:38:20.738490Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:20.762822Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:20.783429Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8695 TClient::Ls request: / 2025-12-23T18:38:21.334981Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515100854 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
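The TFlatTest::LsPathId failure whose backtrace follows below comes down to one check: an Ls/describe on a PathId that should not exist must return a record without a PathDescription (the run reports "(res->Record.HasPathDescription() == false) failed: (1 != 0)"). The snippet below is a self-contained stand-in for that kind of guard, assuming a toy TLsRecord type; it is not the code in ydb/core/client/flat_ut.cpp.

    #include <cassert>
    #include <optional>
    #include <string>

    // Toy stand-in for the Ls response record; the real one is a protobuf with an
    // optional PathDescription message.
    struct TLsRecord {
        std::optional<std::string> PathDescription;
        bool HasPathDescription() const { return PathDescription.has_value(); }
    };

    int main() {
        // Describing an unknown PathId: the test expects no description at all.
        TLsRecord record;                              // empty record == path not found
        assert(record.HasPathDescription() == false);  // the failing run observed 'true' here
        return 0;
    }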
2025-12-23T18:38:21.383165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:21.395263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:38:21.423031Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; assertion failed at ydb/core/client/flat_ut.cpp:1060, void NKikimr::NFlatTests::NTestSuiteTFlatTest::TestLsUknownPathId(TFlatMsgBusClient &, ui64, ui64): (res->Record.HasPathDescription() == false) failed: (1 != 0) Unxepected description for pathId 2 TBackTrace::Capture()+28 (0x1B2B695C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B7A7BEC) NKikimr::NFlatTests::NTestSuiteTFlatTest::TestLsUknownPathId(NKikimr::NFlatTests::TFlatMsgBusClient&, unsigned long, unsigned long)+3557 (0x1AD1B6A5) NKikimr::NFlatTests::NTestSuiteTFlatTest::TTestCaseLsPathId::Execute_(NUnitTest::TTestContext&)+2545 (0x1AD1DFF1) std::__y1::__function::__func, void ()>::operator()()+280 (0x1ADEF6C8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B7E087A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B7AE8C8) NKikimr::NFlatTests::NTestSuiteTFlatTest::TCurrentTest::Execute()+1229 (0x1ADEEA1D) NUnitTest::TTestFactory::Execute()+2176 (0x1B7B0080) NUnitTest::RunMain(int, char**)+5805 (0x1B7DA6DD) ??+0 (0x7F0ADF9BBD90) __libc_start_main+128 (0x7F0ADF9BBE40) _start+41 (0x186AE029) |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> TPQTestInternal::TestAsInt [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTestInternal::StoreKeys [GOOD] >> TPartitionTests::ShadowPartitionCounters [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] >> TMeteringSink::UnusedStorageV1 [GOOD] >> TMultiBucketCounter::InsertAndUpdate [GOOD] >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] >> TCancelTx::ImmediateReadOnly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet [GOOD] Test command err: 2025-12-23T18:38:31.121315Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:31.257882Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:38:31.261859Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:38:31.262242Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:31.262298Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:31.262338Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:38:31.262393Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:38:31.262467Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:31.262537Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:31.282647Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2025-12-23T18:38:31.282877Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:38:31.309411Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:38:31.313007Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:38:31.313176Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:31.315284Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: 
"/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:38:31.315468Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:38:31.315558Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:38:31.316254Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:38:31.316921Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2025-12-23T18:38:31.318137Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:38:31.318190Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2025-12-23T18:38:31.318236Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:190:2142] 2025-12-23T18:38:31.318287Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:38:31.318350Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:38:31.319053Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:38:31.319126Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:31.319214Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:38:31.319311Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:31.319374Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:31.319436Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:38:31.319532Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-12-23T18:38:31.319607Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:38:31.319657Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:38:31.319712Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:38:31.319763Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:31.320120Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:38:31.320275Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:38:31.320539Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:38:31.320903Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:192:2142] 2025-12-23T18:38:31.321771Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:38:31.321832Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:1:Initializer] Initializing completed. 2025-12-23T18:38:31.321863Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:192:2142] 2025-12-23T18:38:31.321915Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:38:31.321974Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:38:31.322349Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2025-12-23T18:38:31.322387Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:38:31.322414Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:38:31.322467Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:31.322500Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:31.322533Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:38:31.322584Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-12-23T18:38:31.322616Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-12-23T18:38:31.322648Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:38:31.322675Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2025-12-23T18:38:31.322705Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2025-12-23T18:38:31.323003Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:38:31.323067Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:38:31.323308Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:38:31.323517Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:38:31.323705Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:38:31.323857Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2025-12-23T18:38:31.328957Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdl ... 
:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:35.455236Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:35.455273Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:35.455528Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3437: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2025-12-23T18:38:35.455572Z node 6 :PQ_TX DEBUG: transaction.cpp:244: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-12-23T18:38:35.455614Z node 6 :PQ_TX DEBUG: transaction.cpp:301: [TxId: 67890] Partition responses 1/1 2025-12-23T18:38:35.455656Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-12-23T18:38:35.455703Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-12-23T18:38:35.455742Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4310: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-12-23T18:38:35.455783Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4431: [PQ: 72057594037927937] Responses received from the partitions 1/1 2025-12-23T18:38:35.455824Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-12-23T18:38:35.455872Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-12-23T18:38:35.455930Z node 6 :PQ_TX INFO: pq_impl.cpp:3962: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-12-23T18:38:35.455979Z node 6 :PQ_TX INFO: pq_impl.cpp:3972: [PQ: 72057594037927937] Send TEvReadSet to tablet 72057594037950158 tx 67890 2025-12-23T18:38:35.456103Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4468: [PQ: 72057594037927937] HaveParticipantsDecision 0 2025-12-23T18:38:35.456303Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:38:35.457228Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2766: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-12-23T18:38:35.462763Z node 6 :PQ_TX INFO: pq_impl.cpp:2791: [PQ: 72057594037927937] Assume tablet 72057594037950158 dead, sending read set acks for tx 67890 2025-12-23T18:38:35.462838Z node 6 :PQ_TX DEBUG: transaction.cpp:358: [TxId: 67890] Predicate acks 1/1 2025-12-23T18:38:35.462918Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2025-12-23T18:38:35.462971Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2025-12-23T18:38:35.463026Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4310: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2025-12-23T18:38:35.463078Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4468: [PQ: 72057594037927937] HaveParticipantsDecision 0 2025-12-23T18:38:35.463127Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4331: [PQ: 72057594037927937] TxId 67890 status has not changed 2025-12-23T18:38:35.463557Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2766: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-12-23T18:38:35.463633Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2771: [PQ: 72057594037927937] Connected to tablet 72057594037950158 2025-12-23T18:38:35.463776Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [6:236:2229], now have 1 active actors on pipe 2025-12-23T18:38:35.464434Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3362: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 72057594037950158 TabletDest: 72057594037927937 TabletProducer: 72057594037950158 ReadSet: "\010\001" Seqno: 0 2025-12-23T18:38:35.464499Z node 6 :PQ_TX INFO: pq_impl.cpp:3372: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet tx 67890 tabletProducer 72057594037950158 2025-12-23T18:38:35.464577Z node 6 :PQ_TX DEBUG: transaction.cpp:308: [TxId: 67890] Handle TEvReadSet 67890 2025-12-23T18:38:35.464632Z node 6 :PQ_TX DEBUG: transaction.cpp:325: [TxId: 67890] Predicates 1/1 2025-12-23T18:38:35.464680Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2025-12-23T18:38:35.464726Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2025-12-23T18:38:35.464774Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4310: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2025-12-23T18:38:35.464816Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4468: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-12-23T18:38:35.464927Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-12-23T18:38:35.464981Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] Responses received from the partitions 0/1 2025-12-23T18:38:35.465079Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-12-23T18:38:35.465140Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2025-12-23T18:38:35.465207Z node 6 :PERSQUEUE DEBUG: pqtablet_mock.cpp:72: Connected to tablet 72057594037927937 from tablet 72057594037950158 2025-12-23T18:38:35.472079Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:38:35.472225Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:38:35.472294Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:35.472337Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:35.472384Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:35.472451Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2025-12-23T18:38:35.472552Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 67890 2025-12-23T18:38:35.472600Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2025-12-23T18:38:35.472650Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:38:35.472709Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:38:35.472795Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:35.473038Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:38:35.473132Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:38:35.479197Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:38:35.479368Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:38:35.479419Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:35.479474Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:35.479509Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:35.479547Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:35.479591Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:35.479640Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:38:35.479723Z node 6 :PQ_TX INFO: pq_impl.cpp:3483: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2025-12-23T18:38:35.479781Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-12-23T18:38:35.479827Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-12-23T18:38:35.479872Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4310: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-12-23T18:38:35.479942Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] Responses received from the partitions 1/1 2025-12-23T18:38:35.479998Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4196: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-12-23T18:38:35.480042Z node 6 :PQ_TX INFO: pq_impl.cpp:4504: [PQ: 72057594037927937] Complete TxId 67890 2025-12-23T18:38:35.480086Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-12-23T18:38:35.480178Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3981: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-12-23T18:38:35.480245Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 72057594037950158 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:38:35.480291Z node 6 :PQ_TX INFO: pq_impl.cpp:4535: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-12-23T18:38:35.480336Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-12-23T18:38:35.480391Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 1/1 2025-12-23T18:38:35.480424Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4544: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-12-23T18:38:35.480458Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 1/1 2025-12-23T18:38:35.480503Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4623: [PQ: 72057594037927937] add an TxId 67890 to the list for 
deletion 2025-12-23T18:38:35.480546Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS_ACKS to DELETING 2025-12-23T18:38:35.480590Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3835: [PQ: 72057594037927937] Delete key for TxId 67890 2025-12-23T18:38:35.480662Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:38:35.491105Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:38:35.491217Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state DELETING 2025-12-23T18:38:35.491297Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State DELETING 2025-12-23T18:38:35.491346Z node 6 :PQ_TX INFO: pq_impl.cpp:4568: [PQ: 72057594037927937] delete TxId 67890 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetPartitionWriteInfoError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:37:43.843151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:37:43.843224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:43.843253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:37:43.843278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:37:43.843329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:37:43.843353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:37:43.843409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:43.843494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:37:43.844217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:37:43.844508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:37:43.939331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console 
configs 2025-12-23T18:37:43.939396Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:43.950052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:37:43.950311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:37:43.950471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:37:43.953877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:37:43.954074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:37:43.954837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:37:43.955082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:37:43.957252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:43.957465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:37:43.958705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:37:43.958789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:43.958911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:37:43.958960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:37:43.959011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:37:43.959160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:37:43.969439Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:37:44.115645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:37:44.115910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:37:44.116107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:37:44.116162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:37:44.116372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:37:44.116451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:37:44.127765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:37:44.128001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:37:44.128217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:37:44.128280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:37:44.128332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:37:44.128369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:37:44.134462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:37:44.134544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:37:44.134587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:37:44.136847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:37:44.136931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:37:44.136981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:37:44.137045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:37:44.140659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
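The AlterSubDomain trace above logs the suboperation's progress as numeric state changes: "Change state for txid 1:0 2 -> 3" while TCreateParts finds no shards to create, "3 -> 128" when TConfigureParts finishes, after which TPropose sends the transaction to coordinator 72057594046316545; the plan step later moves it "128 -> 240". The snippet below merely replays those observed transitions; the numeric codes are copied from the log and the labels are informal annotations, not the schemeshard's own state names.

    #include <cstddef>
    #include <cstdio>
    #include <utility>
    #include <vector>

    // State codes taken verbatim from the trace above; labels are informal.
    int main() {
        const std::vector<std::pair<int, const char*>> states = {
            {2,   "parts created (TCreateParts)"},
            {3,   "configuring (TConfigureParts)"},
            {128, "proposed to coordinator (TPropose)"},
            {240, "done (after TEvOperationPlan)"},
        };
        for (std::size_t i = 0; i + 1 < states.size(); ++i) {
            std::printf("Change state for txid 1:0 %d -> %d  (%s -> %s)\n",
                        states[i].first, states[i + 1].first,
                        states[i].second, states[i + 1].second);
        }
        return 0;
    }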
2025-12-23T18:37:44.147979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:37:44.148223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:37:44.149450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:37:44.149603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:37:44.149657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:37:44.149926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:37:44.149974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:37:44.150150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:37:44.150225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:37:44.164026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:37:44.164093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
rget path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2025-12-23T18:38:34.542334Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:38:34.542416Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 103:0 type: TxCopyTable target path: [OwnerId: 72057594046678944, LocalPathId: 39] source path: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:38:34.542508Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:38:34.542727Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:38:34.550582Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:38:34.551513Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:38:34.551643Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:38:34.560487Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 39, at schemeshard: 72057594046678944 2025-12-23T18:38:34.561029Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2025-12-23T18:38:34.561559Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:38:34.561660Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:38:34.562027Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:38:34.562196Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:38:34.562285Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-12-23T18:38:34.562374Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2025-12-23T18:38:34.563389Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:38:34.563513Z node 17 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common.cpp:315: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2025-12-23T18:38:34.563979Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-12-23T18:38:34.570959Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:38:34.571210Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:38:34.571300Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:38:34.571387Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-12-23T18:38:34.571484Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:38:34.572770Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:38:34.572878Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:38:34.572914Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:38:34.572953Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 1 2025-12-23T18:38:34.572990Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:38:34.573115Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-12-23T18:38:34.582409Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2025-12-23T18:38:34.582778Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, 
partId: 0, tablet: 72057594037968897 2025-12-23T18:38:34.582883Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-12-23T18:38:34.585118Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:38:34.585619Z node 17 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-12-23T18:38:34.585964Z node 17 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2025-12-23T18:38:34.586336Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6306: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-12-23T18:38:34.586419Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-12-23T18:38:34.586668Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-12-23T18:38:34.586795Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-12-23T18:38:34.586938Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-12-23T18:38:34.587105Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 2 -> 3 2025-12-23T18:38:34.591896Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:38:34.601500Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:38:34.601959Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:38:34.602071Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2025-12-23T18:38:34.602190Z node 17 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2025-12-23T18:38:34.617664Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-12-23T18:38:34.617917Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-12-23T18:38:34.618035Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2025-12-23T18:38:34.618075Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::StoreKeys [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo >> TMeteringSink::FlushPutEventsV1 [GOOD] >> PQCountersSimple::Partition >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TMultiBucketCounter::InsertAndUpdate [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetPartitionWriteInfoError [GOOD] >> TPartitionTests::FailedTxsDontBlock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::RollbackTableAcl [GOOD] Test command err: 2025-12-23T18:37:55.212733Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124478840110117:2235];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:55.212926Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006011/r3tmp/tmpDYhsQy/pdisk_1.dat 2025-12-23T18:37:55.748073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:55.748221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:55.881741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:56.007747Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124478840109911:2081] 
1766515075153331 != 1766515075153334 2025-12-23T18:37:56.008810Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:56.213797Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7711 TServer::EnableGrpc on GrpcPort 8015, node 1 2025-12-23T18:37:56.507310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:56.507347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:56.507376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:56.507464Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515076116 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:37:56.765739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:00.215174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124478840110117:2235];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:00.224420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:38:00.707514Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:38:00.711972Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:38:00.712009Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:38:00.712031Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:38:00.716765Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7587124500314947240:2502] Owner: [1:7587124500314947231:2496]. Describe result: PathErrorUnknown 2025-12-23T18:38:00.716792Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. 
SelfId: [1:7587124500314947240:2502] Owner: [1:7587124500314947231:2496]. Creating table 2025-12-23T18:38:00.716853Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7587124500314947240:2502] Owner: [1:7587124500314947231:2496]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2025-12-23T18:38:00.724849Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7587124500314947238:2500] Owner: [1:7587124500314947231:2496]. Describe result: PathErrorUnknown 2025-12-23T18:38:00.724869Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7587124500314947238:2500] Owner: [1:7587124500314947231:2496]. Creating table 2025-12-23T18:38:00.724898Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7587124500314947238:2500] Owner: [1:7587124500314947231:2496]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2025-12-23T18:38:00.730619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:00.731707Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7587124500314947239:2501] Owner: [1:7587124500314947231:2496]. Describe result: PathErrorUnknown 2025-12-23T18:38:00.737203Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7587124500314947239:2501] Owner: [1:7587124500314947231:2496]. Creating table 2025-12-23T18:38:00.737661Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7587124500314947239:2501] Owner: [1:7587124500314947231:2496]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2025-12-23T18:38:00.741675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:00.748818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:00.767353Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7587124500314947238:2500] Owner: [1:7587124500314947231:2496]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2025-12-23T18:38:00.767415Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7587124500314947238:2500] Owner: [1:7587124500314947231:2496]. Subscribe on create table tx: 281474976710659 2025-12-23T18:38:00.768169Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. 
SelfId: [1:7587124500314947239:2501] Owner: [1:7587124500314947231:2496]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2025-12-23T18:38:00.768188Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7587124500314947239:2501] Owner: [1:7587124500314947231:2496]. Subscribe on create table tx: 281474976710660 2025-12-23T18:38:00.770257Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7587124500314947240:2502] Owner: [1:7587124500314947231:2496]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2025-12-23T18:38:00.770291Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7587124500314947240:2502] Owner: [1:7587124500314947231:2496]. Subscribe on create table tx: 281474976710658 2025-12-23T18:38:00.772238Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7587124500314947240:2502] Owner: [1:7587124500314947231:2496]. Subscribe on tx: 281474976710658 registered 2025-12-23T18:38:00.772251Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7587124500314947238:2500] Owner: [1:7587124500314947231:2496]. Subscribe on tx: 281474976710659 registered 2025-12-23T18:38:00.772259Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7587124500314947239:2501] Owner: [1:7587124500314947231:2496]. Subscribe on tx: 281474976710660 registered 2025-12-23T18:38:00.931638Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7587124500314947238:2500] Owner: [1:7587124500314947231:2496]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-12-23T18:38:01.018093Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7587124500314947239:2501] Owner: [1:7587124500314947231:2496]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-12-23T18:38:01.018364Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7587124500314947240:2502] Owner: [1:7587124500314947231:2496]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-12-23T18:38:01.026799Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7587124500314947238:2500] Owner: [1:7587124500314947231:2496]. Table already exists, number of columns: 33, has SecurityObject: true 2025-12-23T18:38:01.026981Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7587124500314947238:2500] Owner: [1:7587124500314947231:2496]. Column diff is empty, finishing 2025-12-23T18:38:01.028141Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7587124500314947238:2500] Owner: [1:7587124500314947231:2496]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2025-12-23T18:38:01.029493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:38:01.030709Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: ... (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-12-23T18:38:34.109579Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=OGVkZmY0OTgtNjYxMzQzMzAtY2ZlN2QzNjctYjVlMWIyZDg=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 38, targetId: [3:7587124645180419607:2508] 2025-12-23T18:38:34.109628Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 38 timeout: 300.000000s actor id: [3:7587124645180419609:2946] 2025-12-23T18:38:34.128272Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 38, sender: [3:7587124645180419608:2509], selfId: [3:7587124602230745060:2215], source: [3:7587124645180419607:2508] 2025-12-23T18:38:34.129353Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587124645180419604:2944], ActorId: [3:7587124645180419605:2945], TraceId: ExecutionId: 1e8d9085-48bf74d-efa07039-36521eb7, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=OGVkZmY0OTgtNjYxMzQzMzAtY2ZlN2QzNjctYjVlMWIyZDg=, TxId: 2025-12-23T18:38:34.129932Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587124645180419604:2944], ActorId: [3:7587124645180419605:2945], TraceId: ExecutionId: 1e8d9085-48bf74d-efa07039-36521eb7, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=OGVkZmY0OTgtNjYxMzQzMzAtY2ZlN2QzNjctYjVlMWIyZDg=, TxId: 2025-12-23T18:38:34.129959Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587124645180419604:2944], ActorId: [3:7587124645180419605:2945], TraceId: ExecutionId: 1e8d9085-48bf74d-efa07039-36521eb7, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2025-12-23T18:38:34.130065Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587124645180419603:2943], ActorId: [3:7587124645180419604:2944], TraceId: ExecutionId: 1e8d9085-48bf74d-efa07039-36521eb7, RequestDatabase: /dc-1, Got response [3:7587124645180419605:2945] SUCCESS 2025-12-23T18:38:34.130114Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7587124645180419602:2942] ActorId: [3:7587124645180419603:2943] Database: /dc-1 ExecutionId: 1e8d9085-48bf74d-efa07039-36521eb7. 
Extracted script execution operation [3:7587124645180419605:2945], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7587124623705582460:2674], LeaseGeneration: 0 2025-12-23T18:38:34.130136Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7587124645180419602:2942] ActorId: [3:7587124645180419603:2943] Database: /dc-1 ExecutionId: 1e8d9085-48bf74d-efa07039-36521eb7. Reply success 2025-12-23T18:38:34.130897Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=OGVkZmY0OTgtNjYxMzQzMzAtY2ZlN2QzNjctYjVlMWIyZDg=, workerId: [3:7587124645180419607:2508], local sessions count: 0 2025-12-23T18:38:34.177253Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd6830604try2drk7pfxwnge", Request has 18444977558595.374398s seconds to be completed 2025-12-23T18:38:34.179645Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd6830604try2drk7pfxwnge", Created new session, sessionId: ydb://session/3?node_id=3&id=OTY3NGE0NjktYjYxOGJkMmEtNmIzOTliMDMtYjU2MDIwODY=, workerId: [3:7587124645180419643:2522], database: /dc-1, longSession: 1, local sessions count: 1 2025-12-23T18:38:34.179864Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd6830604try2drk7pfxwnge 2025-12-23T18:38:34.203971Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kd68306vb98pz2yfvrv3rg25, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=OTY3NGE0NjktYjYxOGJkMmEtNmIzOTliMDMtYjU2MDIwODY=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 40, targetId: [3:7587124645180419643:2522] 2025-12-23T18:38:34.204031Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 40 timeout: 600.000000s actor id: [3:7587124645180419649:2956] 2025-12-23T18:38:34.222352Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:38:34.230536Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd68306vb98pz2yfvrv3rg25", Forwarded response to sender actor, requestId: 40, sender: [3:7587124645180419647:2523], selfId: [3:7587124602230745060:2215], source: [3:7587124645180419643:2522] --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:38:34.252400Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7587124645180419660:2963] Owner: [3:7587124645180419659:2962]. Describe result: PathErrorUnknown 2025-12-23T18:38:34.252429Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7587124645180419660:2963] Owner: [3:7587124645180419659:2962]. Creating table 2025-12-23T18:38:34.252486Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7587124645180419660:2963] Owner: [3:7587124645180419659:2962]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2025-12-23T18:38:34.272046Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:34.273716Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7587124645180419660:2963] Owner: [3:7587124645180419659:2962]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710686 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2025-12-23T18:38:34.273749Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7587124645180419660:2963] Owner: [3:7587124645180419659:2962]. Subscribe on create table tx: 281474976710686 2025-12-23T18:38:34.285352Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587124645180419660:2963] Owner: [3:7587124645180419659:2962]. Subscribe on tx: 281474976710686 registered 2025-12-23T18:38:34.325204Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587124645180419660:2963] Owner: [3:7587124645180419659:2962]. Request: create. Transaction completed: 281474976710686. Doublechecking... 2025-12-23T18:38:34.416488Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587124645180419660:2963] Owner: [3:7587124645180419659:2962]. Table already exists, number of columns: 3, has SecurityObject: true 2025-12-23T18:38:34.416522Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587124645180419660:2963] Owner: [3:7587124645180419659:2962]. Column diff is empty, finishing 2025-12-23T18:38:34.447293Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd6830ef4wzabqbs7hr744cr", Request has 18444977558595.104352s seconds to be completed 2025-12-23T18:38:34.449460Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd6830ef4wzabqbs7hr744cr", Created new session, sessionId: ydb://session/3?node_id=3&id=NThjOGFhNTktYWU4YzliNi03ODAzZDg1Mi00YTUwMjBhOA==, workerId: [3:7587124645180419789:2532], database: /dc-1, longSession: 1, local sessions count: 2 2025-12-23T18:38:34.449640Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd6830ef4wzabqbs7hr744cr 2025-12-23T18:38:34.485158Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587124645180419795:3067] Owner: [3:7587124645180419794:3066]. Table already exists, number of columns: 3, has SecurityObject: true 2025-12-23T18:38:34.485202Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587124645180419795:3067] Owner: [3:7587124645180419794:3066]. Column diff is empty, finishing 2025-12-23T18:38:34.485281Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7587124645180419795:3067] Owner: [3:7587124645180419794:3066]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2025-12-23T18:38:34.486307Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:38:34.488113Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7587124645180419795:3067] Owner: [3:7587124645180419794:3066]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710687 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2025-12-23T18:38:34.488126Z node 3 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [3:7587124645180419795:3067] Owner: [3:7587124645180419794:3066]. Successful alter request: ExecComplete 2025-12-23T18:38:34.489743Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=NThjOGFhNTktYWU4YzliNi03ODAzZDg1Mi00YTUwMjBhOA==, workerId: [3:7587124645180419789:2532], local sessions count: 1 2025-12-23T18:38:34.505373Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd6830g950v41tsk620ky210", Request has 18444977558595.046276s seconds to be completed 2025-12-23T18:38:34.507682Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd6830g950v41tsk620ky210", Created new session, sessionId: ydb://session/3?node_id=3&id=MzM4ZWYzYTItOGNjOTJjODAtOGFmMTQwMTctNWU4YTg1NDM=, workerId: [3:7587124645180419807:2536], database: /dc-1, longSession: 1, local sessions count: 2 2025-12-23T18:38:34.507874Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd6830g950v41tsk620ky210 2025-12-23T18:38:34.540561Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=MzM4ZWYzYTItOGNjOTJjODAtOGFmMTQwMTctNWU4YTg1NDM=, workerId: [3:7587124645180419807:2536], local sessions count: 1 2025-12-23T18:38:34.578363Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=OTY3NGE0NjktYjYxOGJkMmEtNmIzOTliMDMtYjU2MDIwODY=, workerId: [3:7587124645180419643:2522], local sessions count: 0 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> PQCountersSimple::Partition [GOOD] >> PQCountersSimple::PartitionLevelMetrics >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches [GOOD] >> TTicketParserTest::AuthenticationWithUserAccount >> TPartitionTests::ConflictingSrcIdForTxWithHead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD] Test command err: 2025-12-23T18:38:12.564806Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124550585342070:2172];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:12.565096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057f1/r3tmp/tmpPxbyvP/pdisk_1.dat 2025-12-23T18:38:13.209967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:13.232410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:13.232529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:13.449377Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:13.455024Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124550585341922:2081] 1766515092461213 != 1766515092461216 2025-12-23T18:38:13.469321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:13.470916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:38:13.547065Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25682 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515093567 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:13.903972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:13.914494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:38:13.923669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
TClient is connected to server localhost:25682 2025-12-23T18:38:14.804320Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7587124559175277469:2587] txid# 281474976715660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-12-23T18:38:14.804426Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7587124559175277469:2587] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:14.826182Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7587124559175277482:2597] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-12-23T18:38:14.826244Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7587124559175277482:2597] txid# 281474976715661 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:14.840006Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7587124559175277495:2607] txid# 281474976715662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-12-23T18:38:14.840088Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7587124559175277495:2607] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:14.882043Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7587124559175277521:2627] txid# 281474976715664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-12-23T18:38:14.882106Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7587124559175277521:2627] txid# 281474976715664 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:14.915755Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7587124559175277534:2637] txid# 281474976715665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-12-23T18:38:14.915851Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7587124559175277534:2637] txid# 281474976715665 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:14.948151Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7587124559175277547:2647] txid# 281474976715666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-12-23T18:38:14.948236Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7587124559175277547:2647] txid# 281474976715666 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:19.089335Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124580151566721:2259];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:19.089409Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:19.127094Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057f1/r3tmp/tmptJptet/pdisk_1.dat 2025-12-23T18:38:19.437067Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:19.437170Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:19.454892Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:19.485483Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:19.504330Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:2510 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515099552 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:19.794161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:19.806227Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:38:19.810093Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:19.815151Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions waiting... 
2025-12-23T18:38:20.083148Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:2510 2025-12-23T18:38:25.865479Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124605336455826:2169];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:25.866116Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057f1/r3tmp/tmpb1IA3R/pdisk_1.dat 2025-12-23T18:38:25.923437Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:26.139153Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587124605336455693:2081] 1766515105844549 != 1766515105844552 2025-12-23T18:38:26.152126Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:26.161453Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:26.161572Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:26.235486Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:26.248609Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3373 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515106307 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:26.532741Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:38:26.539292Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:38:26.543018Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:26.814281Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:38:26.844673Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3373 2025-12-23T18:38:27.236731Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7587124613926391241:2583] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-12-23T18:38:27.236801Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7587124613926391241:2583] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:27.264029Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7587124613926391258:2597] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-12-23T18:38:27.264106Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7587124613926391258:2597] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:27.282285Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7587124613926391272:2608] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-12-23T18:38:27.282365Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7587124613926391272:2608] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:27.319946Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7587124613926391300:2630] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-12-23T18:38:27.320027Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7587124613926391300:2630] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:27.350263Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7587124613926391314:2641] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-12-23T18:38:27.350343Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7587124613926391314:2641] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:27.366940Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7587124613926391328:2652] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-12-23T18:38:27.367018Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7587124613926391328:2652] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:31.054506Z node 4 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587124630096657835:2206];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:31.059335Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:31.151224Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057f1/r3tmp/tmplTakis/pdisk_1.dat 2025-12-23T18:38:31.362862Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:31.419908Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:31.422978Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587124630096657667:2081] 1766515111021379 != 1766515111021382 2025-12-23T18:38:31.450382Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:31.450503Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:31.518607Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31365 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515111599 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:31.893076Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:38:31.905146Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:38:31.905546Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:38:31.913527Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:32.048855Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:31365 2025-12-23T18:38:32.533421Z node 4 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710660 at tablet 72075186224037888 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-12-23T18:38:32.535152Z node 4 :TX_PROXY ERROR: datareq.cpp:883: Actor# [4:7587124634391625923:2586] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2025-12-23T18:38:32.555213Z node 4 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-12-23T18:38:32.555492Z node 4 :TX_PROXY ERROR: datareq.cpp:883: Actor# [4:7587124634391625937:2594] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot1 [GOOD] >> TLocksTest::Range_IncorrectDot2 >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TCdcStreamTests::CreateStream >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> TSchemeShardInfoTypesTest::IndexBuildInfoAddParent >> TSchemeShardInfoTypesTest::IndexBuildInfoAddParent [GOOD] >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck >> TestSqsTopicHttpProxy::TestSendMessageTooBig >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume >> TLocksTest::GoodLock [GOOD] >> TLocksTest::GoodNullLock >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 >> IncrementalRestoreScan::ChangeSenderEmpty >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 >> TLocksTest::BrokenLockErase [GOOD] >> TLocksTest::BrokenDupLock >> IncrementalRestoreScan::ChangeSenderSimple >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] >> 
IncrementalRestoreScan::Empty >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable >> TPartitionTests::ShadowPartitionCountersRestore |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest [GOOD] >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] >> TPartitionTests::TEvTxCalcPredicate_Without_Conflicts >> TPQTest::The_Value_Of_CreationUnixTime_Must_Not_Decrease [GOOD] >> TPQTest::The_Keys_Are_Loaded_In_Several_Iterations >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 >> TSchemeShardTest::DropPQAbort [GOOD] >> TSchemeShardTest::ManyDirs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:37:44.797188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:37:44.797284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:44.797354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:37:44.797396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:37:44.797439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:37:44.797474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:37:44.797555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:44.797650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:37:44.798520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-12-23T18:37:44.798849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:37:44.890030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:37:44.890099Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:44.896057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:37:44.896281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:37:44.896445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:37:44.901909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:37:44.902151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:37:44.902922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:37:44.903154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:37:44.905321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:44.905554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:37:44.907252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:37:44.907319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:44.907462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:37:44.907528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:37:44.907572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:37:44.907745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:37:45.084807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.085790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.085918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.086011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.086123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.086192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.086262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.086323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.086430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.086878Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.087001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.087091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.087182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.087275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.087368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
46 ParentPathId: 80 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 } ChildrenExist: true } Children { Name: "DirB" PathId: 86 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table2" PathId: 91 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 85 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:38:39.610949Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:38:39.611477Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" took 8.67ms result status StatusSuccess 2025-12-23T18:38:39.612116Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 91 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 
Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 91 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:38:39.613832Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:38:39.614176Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" took 366us result status StatusSuccess 2025-12-23T18:38:39.614812Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 86 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Table3" PathId: 94 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 
CreateStep: 5000046 ParentPathId: 86 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 86 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:38:39.616138Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:38:39.616562Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" took 448us result status StatusSuccess 2025-12-23T18:38:39.617126Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 94 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 86 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false 
LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 94 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions >> TPartitionTests::FailedTxsDontBlock [GOOD] >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidQueueUrl >> TPartitionTests::EndWriteTimestamp_DataKeysBody >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] Test command err: 2025-12-23T18:38:28.495622Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:28.778951Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:28.779032Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:28.779094Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:28.779191Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:28.829713Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:183:2196] 2025-12-23T18:38:28.835978Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-12-23T18:38:28.000000Z 2025-12-23T18:38:28.836318Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:183:2196] 2025-12-23T18:38:28.863027Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:28.910984Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:28.935093Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:28.949564Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:28.999820Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:29.050990Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:29.086941Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\240\321\205\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:38:29.275017Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\240\321\205\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:38:29.299014Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\240\321\205\344\26438\001" StorageChannel: INLINE } 
2025-12-23T18:38:30.153637Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:30.201157Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:30.201213Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:30.201259Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:30.201331Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:30.220997Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:183:2196] 2025-12-23T18:38:30.223165Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-12-23T18:38:30.000000Z 2025-12-23T18:38:30.223410Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:183:2196] 2025-12-23T18:38:30.245552Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:30.295130Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:30.319096Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:30.329861Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:30.379159Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:30.438252Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:30.475233Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\340\205\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:38:30.649041Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-2" Value: 
"\010\000\020\001\030\001\"\tsession-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\340\205\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:38:30.674673Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\340\205\344\26438\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\340\205\344\26438\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\340\205\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:38:31.283174Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:31.341944Z node 3 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:31.342014Z node 3 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:31.342062Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:31.342114Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:31.915673Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candida ... 
RSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:42.192625Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:38:42.192672Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 4, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:42.192710Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:42.192750Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 4, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:42.192783Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:42.218430Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:38:42.218645Z node 6 :PERSQUEUE DEBUG: partition.cpp:1672: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-12-23T18:38:42.218711Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:42.218781Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:38:42.218845Z node 6 :PERSQUEUE DEBUG: partition.cpp:1616: [72057594037927937][Partition][0][StateIdle] TxId 0 affect SourceId src1 2025-12-23T18:38:42.218936Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:38:42.218993Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:42.219039Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:42.219093Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:42.219136Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Wait 1st KV request Wait kv request 2025-12-23T18:38:42.219313Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-12-23T18:38:42.219363Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:42.219402Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:38:42.219448Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:42.219493Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:42.219548Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2025-12-23T18:38:42.219598Z node 6 
:PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-12-23T18:38:42.219667Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2025-12-23T18:38:42.219734Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 0 2025-12-23T18:38:42.219781Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:38:42.219824Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:38:42.219874Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request Wait tx committed for tx 0 2025-12-23T18:38:42.220312Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:38:42.241027Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:38:42.241175Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2025-12-23T18:38:42.241309Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2025-12-23T18:38:42.241368Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:42.241417Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:38:42.241469Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:38:42.241500Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:38:42.241534Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 3, PendingWrites: 0 2025-12-23T18:38:42.241578Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:42.241625Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:38:42.241704Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1227: [72057594037927937][Partition][0][StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 8. InitialSeqNo: (NULL). Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 1. CurOffset: 1. 
Offset: 20 2025-12-23T18:38:42.241759Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:38:42.241794Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1227: [72057594037927937][Partition][0][StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 10. InitialSeqNo: (NULL). Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 1. CurOffset: 1. Offset: 30 2025-12-23T18:38:42.241821Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:38:42.241927Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 11 partNo 0 2025-12-23T18:38:42.242822Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 40 PartNo 0 PackedSize 84 count 1 nextOffset 41 batches 1 2025-12-23T18:38:42.242922Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 3 2025-12-23T18:38:42.242965Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (3) 2025-12-23T18:38:42.243023Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:42.243606Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1704: [72057594037927937][Partition][0][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 40,1 HeadOffset 1 endOffset 1 curOffset 41 d0000000000_00000000000000000040_00000_0000000001_00000? size 70 WTime 10239 2025-12-23T18:38:42.243861Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 3 Got KV request Got KV request Wait 2nd KV request Wait kv request 2025-12-23T18:38:42.244147Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:38:42.267482Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:38:42.267562Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2025-12-23T18:38:42.267646Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 17 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:38:42.267691Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-12-23T18:38:42.267751Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][0][StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 1 is already written 2025-12-23T18:38:42.267786Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2025-12-23T18:38:42.267814Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][0][StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 1 is already written 2025-12-23T18:38:42.267851Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2025-12-23T18:38:42.267880Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][0][StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 40 is stored on disk 2025-12-23T18:38:42.268075Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:42.268122Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:42.268170Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:42.268207Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:42.268240Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:42.268286Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_DataKeysBody [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds >> TPartitionTests::EndWriteTimestamp_FromMeta >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 |93.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |93.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |93.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> TTicketParserTest::AuthenticationUnavailable >> KqpLocksTricky::TestSnapshotWithDependentReads-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 >> TPartitionTests::EndWriteTimestamp_FromMeta [GOOD] >> TPartitionTests::EndWriteTimestamp_HeadKeys >> 
TPartitionTests::TEvTxCalcPredicate_Without_Conflicts [GOOD] >> TPartitionTests::TEvTxCalcPredicate_With_Conflicts >> IncrementalRestoreScan::Empty [GOOD] >> TLocksTest::Range_GoodLock0 [GOOD] >> TLocksTest::Range_GoodLock1 >> TLocksTest::UpdateLockedKey [GOOD] >> TLocksTest::SetLockNothing >> KqpTx::LocksAbortOnCommit >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TSchemeShardTest::CreateDropKesus >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2025-12-23T18:37:53.597641Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124468127769069:2259];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:53.597694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006023/r3tmp/tmp3BLOUI/pdisk_1.dat 2025-12-23T18:37:54.402937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:37:54.418682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:54.431028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:54.521457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:54.656562Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:37:54.663982Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124468127768835:2081] 1766515073526394 != 1766515073526397 2025-12-23T18:37:54.676175Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:54.711747Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:2964 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515074646 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:37:55.359053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:55.367542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:58.601437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124468127769069:2259];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:58.601517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:59.495865Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:37:59.513351Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=1&id=NTg5NjI3OWQtMjI4OGNlMTktZjZkMjRmMjItYzc3NDQ4ODU=, workerId: [1:7587124493897573450:2302], database: , longSession: 0, local sessions count: 1 2025-12-23T18:37:59.513689Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=1&id=NTg5NjI3OWQtMjI4OGNlMTktZjZkMjRmMjItYzc3NDQ4ODU=, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [1:7587124493897573450:2302] 2025-12-23T18:37:59.513716Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2025-12-23T18:37:59.513845Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:37:59.513873Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:37:59.513890Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:37:59.514253Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2616: SessionId: ydb://session/3?node_id=1&id=NTg5NjI3OWQtMjI4OGNlMTktZjZkMjRmMjItYzc3NDQ4ODU=, ActorId: [1:7587124493897573450:2302], ActorState: ReadyState, Reply query error, msg:
: Error: SomeUniqTextForUt proxyRequestId: 2 2025-12-23T18:37:59.520146Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 2, sender: [1:7587124476717704235:2489], selfId: [1:7587124468127769102:2265], source: [1:7587124493897573450:2302] 2025-12-23T18:37:59.525336Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-12-23T18:37:59.525364Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1215: Invalid request info while on request timeout handle. RequestId: 2 2025-12-23T18:37:59.525997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124493897573476:2305], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:59.526082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:59.526700Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124493897573486:2306], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:59.526768Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... waiting for SysViewsRoster update finished 2025-12-23T18:38:11.759055Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:38:11.759380Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:38:11.770338Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:38:11.771669Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:38:11.773577Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:496:2402], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:38:11.773795Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:38:11.773975Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:11.777131Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:489:2166], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:38:11.777520Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:38:11.777830Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:12.387171Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:12.561696Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:12.561846Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:12.563516Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:12.563637Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:12.615714Z node 2 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:38:12.616992Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:12.617388Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:12.690222Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:38:12.751996Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006023/r3tmp/tmpTA657m/pdisk_1.dat TClient is connected to server localhost:22349 KQP PROXY1 [2:8678280833929343339:121] KQP PROXY2 [3:8678280833929343339:121] SENDER [2:1489:2973] 2025-12-23T18:38:13.973032Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=3&id=NjA3NGRlZTQtOTE2 ... 2025-12-23T18:38:41.706102Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=OWU1ZTAxNDYtMzc2NzhhYTktODQ1NmNjMDktYmQzM2ZhMg==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 46, targetId: [7:7587124673167532187:2559] 2025-12-23T18:38:41.706131Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 46 timeout: 300.000000s actor id: [7:7587124673167532212:3058] 2025-12-23T18:38:41.727770Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 46, sender: [7:7587124673167532211:2566], selfId: [7:7587124617332955555:2225], source: [7:7587124673167532187:2559] 2025-12-23T18:38:41.728606Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7587124673167532184:2556], ActorId: [7:7587124673167532185:2557], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=7&id=OWU1ZTAxNDYtMzc2NzhhYTktODQ1NmNjMDktYmQzM2ZhMg==, TxId: 2025-12-23T18:38:41.728705Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7587124673167532184:2556], ActorId: [7:7587124673167532185:2557], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=OWU1ZTAxNDYtMzc2NzhhYTktODQ1NmNjMDktYmQzM2ZhMg==, TxId: 2025-12-23T18:38:41.728765Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4311: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7587124673167532184:2556], ActorId: [7:7587124673167532185:2557], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: UNAVAILABLE. Issues: {
: Error: Lease expired } 2025-12-23T18:38:41.728978Z node 7 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [7:7587124673167532182:2555], ActorId: [7:7587124673167532184:2556], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [7:7587124673167532185:2557] SUCCESS 2025-12-23T18:38:41.729416Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=7&id=OWU1ZTAxNDYtMzc2NzhhYTktODQ1NmNjMDktYmQzM2ZhMg==, workerId: [7:7587124673167532187:2559], local sessions count: 1 2025-12-23T18:38:41.729726Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1443: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [7:7587124668872564836:3029] ActorId: [7:7587124668872564837:3030] Database: /dc-1 ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3. Successfully finalized script execution operation, WaitingRetry: 0 2025-12-23T18:38:41.729780Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1789: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [7:7587124668872564836:3029] ActorId: [7:7587124668872564837:3030] Database: /dc-1 ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3. Reply success 2025-12-23T18:38:41.745442Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kd6837jg6xxqnztkce129f3x, Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=MWU3MjZlNzYtZmJjMTMwMTktNjE4Zjk5MTItODdiNWEwN2Q=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 47, targetId: [7:7587124660282630105:2512] 2025-12-23T18:38:41.745491Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 47 timeout: 300.000000s actor id: [7:7587124673167532238:3065] 2025-12-23T18:38:42.558654Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd6837jg6xxqnztkce129f3x", Forwarded response to sender actor, requestId: 47, sender: [7:7587124673167532237:2571], selfId: [7:7587124617332955555:2225], source: [7:7587124660282630105:2512] 2025-12-23T18:38:42.562763Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:833: [ScriptExecutions] [TScriptLeaseUpdateActor] OwnerId: [7:7587124677462499572:3080] ActorId: [7:7587124677462499573:3081] Database: /dc-1 ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3. Bootstrap. Start TLeaseUpdateRetryActor [7:7587124677462499574:3082] 2025-12-23T18:38:42.562857Z node 7 :KQP_PROXY DEBUG: query_actor.h:292: [TQueryRetryActor] [TScriptLeaseUpdater] OwnerId: [7:7587124677462499573:3081], ActorId: [7:7587124677462499574:3082], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, Starting query actor #1 [7:7587124677462499575:3083] 2025-12-23T18:38:42.562891Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7587124677462499574:3082], ActorId: [7:7587124677462499575:3083], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, Bootstrap. 
Database: /dc-1, IsSystemUser: 1, run create session 2025-12-23T18:38:42.563059Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977558586.988570s seconds to be completed 2025-12-23T18:38:42.565282Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=7&id=ZWRmMWY5NDAtMjIyMjA3OC0yZGMwOTM3Ny02MjdiY2QyMw==, workerId: [7:7587124677462499577:2583], database: /dc-1, longSession: 1, local sessions count: 2 2025-12-23T18:38:42.565484Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T18:38:42.566981Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:695: [ScriptExecutions] [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7587124677462499574:3082], ActorId: [7:7587124677462499575:3083], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, Update lease on duration: 1.000000s 2025-12-23T18:38:42.567115Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7587124677462499574:3082], ActorId: [7:7587124677462499575:3083], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, RunDataQuery with SessionId: ydb://session/3?node_id=7&id=ZWRmMWY5NDAtMjIyMjA3OC0yZGMwOTM3Ny02MjdiY2QyMw==, TxId: , text: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-12-23T18:38:42.569362Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=ZWRmMWY5NDAtMjIyMjA3OC0yZGMwOTM3Ny02MjdiY2QyMw==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 49, targetId: [7:7587124677462499577:2583] 2025-12-23T18:38:42.569409Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 49 timeout: 300.000000s actor id: [7:7587124677462499579:3084] 2025-12-23T18:38:42.919533Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 49, sender: [7:7587124677462499578:2584], selfId: [7:7587124617332955555:2225], source: [7:7587124677462499577:2583] 2025-12-23T18:38:42.920115Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7587124677462499574:3082], ActorId: [7:7587124677462499575:3083], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=ZWRmMWY5NDAtMjIyMjA3OC0yZGMwOTM3Ny02MjdiY2QyMw==, TxId: 01kd6838pv2scwg6hwvdycrg4r 2025-12-23T18:38:42.920278Z node 7 :KQP_PROXY WARN: query_actor.cpp:376: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7587124677462499574:3082], ActorId: [7:7587124677462499575:3083], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=7&id=ZWRmMWY5NDAtMjIyMjA3OC0yZGMwOTM3Ny02MjdiY2QyMw==, TxId: 01kd6838pv2scwg6hwvdycrg4r 2025-12-23T18:38:42.920327Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:432: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7587124677462499574:3082], ActorId: [7:7587124677462499575:3083], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, Rollback transaction: 01kd6838pv2scwg6hwvdycrg4r in session: ydb://session/3?node_id=7&id=ZWRmMWY5NDAtMjIyMjA3OC0yZGMwOTM3Ny02MjdiY2QyMw== 2025-12-23T18:38:42.920471Z node 7 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TScriptLeaseUpdater] OwnerId: [7:7587124677462499573:3081], ActorId: [7:7587124677462499574:3082], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [7:7587124677462499575:3083] NOT_FOUND 2025-12-23T18:38:42.920551Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:843: [ScriptExecutions] [TScriptLeaseUpdateActor] OwnerId: [7:7587124677462499572:3080] ActorId: [7:7587124677462499573:3081] Database: /dc-1 ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3. Lease update [7:7587124677462499575:3083] finished NOT_FOUND, issues: {
: Error: No such execution } 2025-12-23T18:38:42.921036Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=ZWRmMWY5NDAtMjIyMjA3OC0yZGMwOTM3Ny02MjdiY2QyMw==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 50, targetId: [7:7587124677462499577:2583] 2025-12-23T18:38:42.921074Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 50 timeout: 600.000000s actor id: [7:7587124677462499607:3096] 2025-12-23T18:38:42.922060Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 50, sender: [7:7587124677462499606:2593], selfId: [7:7587124617332955555:2225], source: [7:7587124677462499577:2583] 2025-12-23T18:38:42.922459Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:441: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7587124677462499574:3082], ActorId: [7:7587124677462499575:3083], TraceId: ExecutionId: d7b5801f-a889a1ef-8d9c299b-712f8bc3, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2025-12-23T18:38:42.922983Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=7&id=ZWRmMWY5NDAtMjIyMjA3OC0yZGMwOTM3Ny02MjdiY2QyMw==, workerId: [7:7587124677462499577:2583], local sessions count: 1 2025-12-23T18:38:42.933508Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=7&id=MWU3MjZlNzYtZmJjMTMwMTktNjE4Zjk5MTItODdiNWEwN2Q=, workerId: [7:7587124660282630105:2512], local sessions count: 0 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TLocksTest::GoodDupLock [GOOD] >> TLocksTest::CK_Range_GoodLock >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] |93.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |93.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |93.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:38:43.697363Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:38:43.811796Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:38:43.823686Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:38:43.824041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:38:43.824111Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:44.132083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:44.132231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:44.233810Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515120384072 != 1766515120384076 2025-12-23T18:38:44.249855Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:44.308042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:44.432154Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032db/r3tmp/tmpfOiGIz/pdisk_1.dat 2025-12-23T18:38:44.839514Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:182: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:832:2726] Exhausted 2025-12-23T18:38:44.839637Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:131: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:832:2726] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2025-12-23T18:38:44.839697Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:195: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:832:2726] Finish Done |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled >> TPQTest::TestReserveBytes [GOOD] >> TPQTest::TestSetClientOffset >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] >> KqpSinkTx::DeferredEffects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2025-12-23T18:38:34.996776Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:35.152149Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:35.152258Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:35.152331Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 
2025-12-23T18:38:35.152430Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:35.198461Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:38:35.198733Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:38:35.199045Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] bootstrapping {2, {0, 10}, 100001} [1:182:2195] 2025-12-23T18:38:35.199897Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:38:35.199952Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Initializing completed. 2025-12-23T18:38:35.199998Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [1:182:2195] 2025-12-23T18:38:35.200067Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:38:35.200138Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:38:35.200217Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process pending events. Count 0 2025-12-23T18:38:35.200262Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:38:35.200313Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:35.200347Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:35.200384Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:35.200415Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2025-12-23T18:38:35.200503Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:38:35.200656Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|65d47ab9-9e88608e-d2d92e5f-80d65be5_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-12-23T18:38:35.200744Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:38:35.200788Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:38:35.200851Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:35.200886Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:35.212362Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:38:35.212496Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:38:35.212548Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Batch completed (1) 2025-12-23T18:38:35.212603Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2025-12-23T18:38:35.212680Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ReplyOwnerOk. 
Partition: {2, {0, 10}, 100001} 2025-12-23T18:38:35.212952Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] No data for blobs compaction 2025-12-23T18:38:35.213142Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Received TPartition::TEvWrite 2025-12-23T18:38:35.213211Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:38:35.213257Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:38:35.213330Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:35.213377Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:35.213415Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:38:35.213530Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2025-12-23T18:38:35.214389Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-12-23T18:38:35.214492Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:38:35.214532Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Batch completed (1) 2025-12-23T18:38:35.214569Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2025-12-23T18:38:35.215097Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1704: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000? size 104 WTime 128 2025-12-23T18:38:35.215317Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:38:35.238948Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:35.276660Z node 1 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:38:35.276750Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
2025-12-23T18:38:35.276839Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:38:35.276927Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ReplyWrite. Partition: {2, {0, 10}, 100001} 2025-12-23T18:38:35.277025Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk 2025-12-23T18:38:35.277247Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:38:35.277299Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:35.277356Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:35.277398Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:35.277434Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2025-12-23T18:38:35.277484Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] No data for blobs compaction 2025-12-23T18:38:35.291057Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:35.314305Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:38:35.314383Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:35.314421Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:35.314464Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:35.314501Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2025-12-23T18:38:35.314584Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:35.327266Z nod ... 
t(TTransaction[Tx]) 2025-12-23T18:38:42.746062Z node 3 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 12 2025-12-23T18:38:42.746140Z node 3 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 80 PartNo 0 PackedSize 0 count 0 nextOffset 80 batches 0, NewHead=Offset 80 PartNo 0 PackedSize 0 count 0 nextOffset 80 batches 0 2025-12-23T18:38:42.746202Z node 3 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 12 2025-12-23T18:38:42.746250Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:38:42.746295Z node 3 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:38:42.746344Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 Wait tx rollback for tx 8 Wait immediate tx complete 10 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "[TxId: (empty maybe), Topic: \'Root/PQ/rt3.dc1--account--topic\', Partition 0, SourceId \'src2\', SeqNo 10] MinSeqNo violation failure. SeqNo 5" } Wait tx committed for tx 12 2025-12-23T18:38:42.746995Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:38:42.770987Z node 3 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:38:42.781504Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
2025-12-23T18:38:42.781662Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2025-12-23T18:38:42.781724Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:42.781765Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:42.781800Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:42.781843Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:42.781877Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:42.781954Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:38:43.365948Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:43.434215Z node 4 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:43.434285Z node 4 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:43.434359Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:43.434418Z node 4 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:43.454519Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [4:183:2196] 2025-12-23T18:38:43.456802Z node 4 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-12-23T18:38:43.000000Z 2025-12-23T18:38:43.457125Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [4:183:2196] 2025-12-23T18:38:43.480179Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:43.526994Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:43.548872Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:43.561033Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:43.605088Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:43.647112Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:43.680517Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:44.293671Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:44.369673Z node 5 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:44.369749Z node 5 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:44.369803Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:44.369888Z node 5 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:44.390900Z node 5 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [5:183:2196] 2025-12-23T18:38:44.393191Z node 5 :PERSQUEUE INFO: partition_init.cpp:1016: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-12-23T18:38:44.393487Z node 5 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [5:183:2196] 2025-12-23T18:38:44.416237Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:44.469980Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:44.494606Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:44.506195Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:44.549586Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:44.602172Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:44.639024Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:45.291154Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:45.390666Z node 6 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:45.390734Z node 6 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:45.390804Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:45.390863Z node 6 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:45.421556Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [6:184:2196] >>>> ADD BLOB 0 writeTimestamp=2025-12-23T18:38:45.413748Z >>>> ADD BLOB 1 writeTimestamp=2025-12-23T18:38:45.413780Z >>>> ADD BLOB 2 writeTimestamp=2025-12-23T18:38:45.413798Z >>>> ADD BLOB 3 writeTimestamp=2025-12-23T18:38:45.413815Z >>>> ADD BLOB 4 writeTimestamp=2025-12-23T18:38:45.413830Z >>>> ADD BLOB 5 writeTimestamp=2025-12-23T18:38:45.413847Z >>>> ADD BLOB 6 writeTimestamp=2025-12-23T18:38:45.413860Z >>>> ADD BLOB 7 writeTimestamp=2025-12-23T18:38:45.413875Z >>>> ADD BLOB 8 writeTimestamp=2025-12-23T18:38:45.413889Z >>>> ADD BLOB 9 writeTimestamp=2025-12-23T18:38:45.413905Z 2025-12-23T18:38:45.425504Z node 6 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-12-23T18:38:45.000000Z 2025-12-23T18:38:45.425777Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [6:184:2196] 2025-12-23T18:38:45.445049Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:45.478329Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:45.501363Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:45.533104Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:45.568437Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:45.589937Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:45.653841Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:45.747059Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TSchemeShardTest::CreateDropKesus [GOOD] >> TSchemeShardTest::CreateAlterKesus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:38:43.782990Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:38:43.919931Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:38:43.932283Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:38:43.932680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:38:43.932777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:44.215748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:44.215865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:44.305424Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515120255850 != 1766515120255854 2025-12-23T18:38:44.322159Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:44.369485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:44.485199Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032eb/r3tmp/tmpQgzzXZ/pdisk_1.dat 2025-12-23T18:38:45.029774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-12-23T18:38:45.030046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:38:45.030326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:38:45.030399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:38:45.030614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:38:45.030699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:38:45.031520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:38:45.031683Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:38:45.031892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:38:45.031967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:38:45.032020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:38:45.032061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:38:45.032676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:38:45.032734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:38:45.032774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:38:45.033342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:38:45.033402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:38:45.033446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-12-23T18:38:45.033511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:38:45.040281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:38:45.041008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:38:45.041153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:38:45.041744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-12-23T18:38:45.041793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-12-23T18:38:45.041955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-12-23T18:38:45.053966Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:45.161345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:38:45.161528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-12-23T18:38:45.161580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-12-23T18:38:45.161834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:38:45.161891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-12-23T18:38:45.162069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-12-23T18:38:45.162134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-12-23T18:38:45.162963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:38:45.163031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:38:45.163220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:38:45.163274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:558:2492], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-12-23T18:38:45.163755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:38:45.163805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 1:0 ProgressState 2025-12-23T18:38:45.163908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:38:45.163946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:38:45.163984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:38:45.164027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:38:45.164071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:38:45.164116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:38:45.164168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:38:45.164200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:38:45.164259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2025-12-23T18:38:45.164308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-12-23T18:38:45.164346Z node 1 :FLAT_TX_S ... ountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 
32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 2025-12-23T18:38:46.076040Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:67:2114] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-12-23T18:38:46.076156Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:1023:2864] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-12-23T18:38:46.076652Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:1023:2864] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:46.076750Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:1023:2864] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2025-12-23T18:38:46.078833Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:1023:2864] Handle TEvDescribeSchemeResult Forward to# [1:828:2723] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 2500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } 
Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2025-12-23T18:38:46.079820Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:65: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1025:2866] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:38:46.080134Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:131: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1025:2866] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:38:46.080498Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:227: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1025:2866] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:38:46.080688Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:176: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1025:2866] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> ScriptExecutionsTest::TestSecureScriptExecutions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:38:44.218992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:38:44.379020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:38:44.395731Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:38:44.396168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:38:44.396267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:44.781250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:44.781382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:44.909045Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515120337097 != 1766515120337101 2025-12-23T18:38:44.934595Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:44.988109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:45.118977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032d4/r3tmp/tmpsonST0/pdisk_1.dat 2025-12-23T18:38:45.679755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-12-23T18:38:45.680003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:38:45.680256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:38:45.680308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:38:45.680622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:38:45.680704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:38:45.681548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:38:45.681761Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:38:45.681978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:38:45.682054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:38:45.682097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:38:45.682129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:38:45.782498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:38:45.782582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:38:45.782633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:38:45.783245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:38:45.783305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:38:45.783358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-12-23T18:38:45.783429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:38:45.787213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:38:45.787767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:38:45.787930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:38:45.788590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-12-23T18:38:45.788641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-12-23T18:38:45.788687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-12-23T18:38:45.800224Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:45.906207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:38:45.906392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-12-23T18:38:45.906446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-12-23T18:38:45.906803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:38:45.906949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-12-23T18:38:45.907133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-12-23T18:38:45.907200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-12-23T18:38:45.908039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:38:45.908098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:38:45.908324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:38:45.908388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:558:2492], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-12-23T18:38:45.911467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-12-23T18:38:45.911531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 1:0 ProgressState 2025-12-23T18:38:45.911680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:38:45.911744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:38:45.911789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:38:45.911827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:38:45.911868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:38:45.911911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:38:45.911976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:38:45.912007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:38:45.912077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2025-12-23T18:38:45.912152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-12-23T18:38:45.912187Z node 1 :FLAT_TX_S ... ration id: 281474976715658:0 2025-12-23T18:38:46.596557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976715658:0 2025-12-23T18:38:46.596681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:38:46.597415Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:67:2114] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-12-23T18:38:46.597519Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:1011:2858] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-12-23T18:38:46.600365Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:1011:2858] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:46.600481Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:1011:2858] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2025-12-23T18:38:46.601679Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:1011:2858] Handle TEvDescribeSchemeResult Forward to# [1:828:2723] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 2500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 
2025-12-23T18:38:46.603008Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:1021:2862], serverId# [1:1022:2863], sessionId# [0:0:0] 2025-12-23T18:38:46.603948Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:65: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1023:2864] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:38:46.604257Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:131: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1023:2864] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:38:46.604545Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:227: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1023:2864] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:38:46.604756Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:139: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1023:2864] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] BodySize: 18 }] } 2025-12-23T18:38:46.604882Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:144: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1023:2864] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 39] 
SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-12-23T18:38:46.605050Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:67:2114] Handle TEvGetProxyServicesRequest 2025-12-23T18:38:46.605138Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:40: [TableChangeSenderShard][0:0][72075186224037888][1:1027:2864] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-12-23T18:38:46.605518Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1028:2868], serverId# [1:1029:2869], sessionId# [0:0:0] 2025-12-23T18:38:46.657602Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][0:0][72075186224037888][1:1027:2864] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-12-23T18:38:46.657751Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:154: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1023:2864] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:38:46.657906Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][0:0][72075186224037888][1:1027:2864] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 39] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-12-23T18:38:46.657979Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:154: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1023:2864] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:38:46.658143Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:176: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1023:2864] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink >> PQCountersSimple::PartitionLevelMetrics [GOOD] >> PQCountersSimple::PartitionWriteQuota >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled >> TestSqsTopicHttpProxy::TestSendMessageTooBig [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 >> TTicketParserTest::AuthenticationUnavailable [GOOD] >> TTicketParserTest::AuthenticationUnsupported |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 |93.5%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::CreateAlterKesus [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon >> TestSqsTopicHttpProxy::TestSendMessageBatchLong >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::TestSecureScriptExecutions [GOOD] Test command err: 2025-12-23T18:37:53.847434Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124468640820489:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:53.847491Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00601e/r3tmp/tmpMUNOMA/pdisk_1.dat 2025-12-23T18:37:54.321216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:54.321313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:54.550906Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124468640820463:2081] 1766515073833767 != 1766515073833770 2025-12-23T18:37:54.711972Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:54.731857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:54.932934Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:15226 TServer::EnableGrpc on GrpcPort 63838, node 1 2025-12-23T18:37:55.539804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:55.539821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:55.539838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:55.539924Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515074940 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:37:55.728116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:55.735305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:58.851753Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124468640820489:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:58.851844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:38:00.038021Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:38:00.046350Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:38:00.046393Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:38:00.046410Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:38:00.058410Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7587124498705592383:2500] Owner: [1:7587124498705592380:2497]. Describe result: PathErrorUnknown 2025-12-23T18:38:00.058421Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7587124498705592383:2500] Owner: [1:7587124498705592380:2497]. Creating table 2025-12-23T18:38:00.058473Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7587124498705592383:2500] Owner: [1:7587124498705592380:2497]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2025-12-23T18:38:00.058596Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7587124498705592381:2498] Owner: [1:7587124498705592380:2497]. Describe result: PathErrorUnknown 2025-12-23T18:38:00.058613Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7587124498705592381:2498] Owner: [1:7587124498705592380:2497]. 
Creating table 2025-12-23T18:38:00.058634Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7587124498705592381:2498] Owner: [1:7587124498705592380:2497]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2025-12-23T18:38:00.062986Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7587124498705592382:2499] Owner: [1:7587124498705592380:2497]. Describe result: PathErrorUnknown 2025-12-23T18:38:00.063001Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7587124498705592382:2499] Owner: [1:7587124498705592380:2497]. Creating table 2025-12-23T18:38:00.063031Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7587124498705592382:2499] Owner: [1:7587124498705592380:2497]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2025-12-23T18:38:00.073172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:00.076444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:00.079122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:00.086352Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7587124498705592382:2499] Owner: [1:7587124498705592380:2497]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2025-12-23T18:38:00.086407Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7587124498705592382:2499] Owner: [1:7587124498705592380:2497]. Subscribe on create table tx: 281474976710660 2025-12-23T18:38:00.088262Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7587124498705592381:2498] Owner: [1:7587124498705592380:2497]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2025-12-23T18:38:00.088282Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7587124498705592381:2498] Owner: [1:7587124498705592380:2497]. Subscribe on create table tx: 281474976710659 2025-12-23T18:38:00.089693Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7587124498705592382:2499] Owner: [1:7587124498705592380:2497]. 
Subscribe on tx: 281474976710660 registered 2025-12-23T18:38:00.093699Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7587124498705592381:2498] Owner: [1:7587124498705592380:2497]. Subscribe on tx: 281474976710659 registered 2025-12-23T18:38:00.094987Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7587124498705592383:2500] Owner: [1:7587124498705592380:2497]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2025-12-23T18:38:00.095020Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7587124498705592383:2500] Owner: [1:7587124498705592380:2497]. Subscribe on create table tx: 281474976710658 2025-12-23T18:38:00.097230Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7587124498705592383:2500] Owner: [1:7587124498705592380:2497]. Subscribe on tx: 281474976710658 registered 2025-12-23T18:38:00.250250Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7587124498705592383:2500] Owner: [1:7587124498705592380:2497]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-12-23T18:38:00.304827Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7587124498705592383:2500] Owner: [1:7587124498705592380:2497]. Table already exists, number of columns: 7, has SecurityObject: true 2025-12-23T18:38:00.304880Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7587124498705592383:2500] Owner: [1:7587124498705592380:2497]. Column diff is empty, finishing 2025-12-23T18:38:00.305943Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7587124498705592383:2500] Owner: [1:7587124498705592380:2497]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2025-12-23T18:38:00.306015Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7587124498705592381:2498] Owner: [1:7587124498705592380:2497]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-12-23T18:38:00.307596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:38:00.309047Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7587124498705592383:2500] Owner: [1:7587124498705592380:249 ... user_token FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-12-23T18:38:45.933610Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=NzI2NGU5NTktOTAyNzkzOTMtMzc2N2VmNjQtNGEwOTRmZDY=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 95, targetId: [3:7587124690365927272:2727] 2025-12-23T18:38:45.933641Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 95 timeout: 300.000000s actor id: [3:7587124690365927274:3305] 2025-12-23T18:38:45.948290Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 95, sender: [3:7587124690365927273:2728], selfId: [3:7587124621646447948:2079], source: [3:7587124690365927272:2727] 2025-12-23T18:38:45.949278Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587124690365927269:3303], ActorId: [3:7587124690365927270:3304], TraceId: ExecutionId: f3d81820-c32d2139-8058753c-648da4b, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=NzI2NGU5NTktOTAyNzkzOTMtMzc2N2VmNjQtNGEwOTRmZDY=, TxId: 2025-12-23T18:38:45.949902Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587124690365927269:3303], ActorId: [3:7587124690365927270:3304], TraceId: ExecutionId: f3d81820-c32d2139-8058753c-648da4b, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=NzI2NGU5NTktOTAyNzkzOTMtMzc2N2VmNjQtNGEwOTRmZDY=, TxId: 2025-12-23T18:38:45.949930Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587124690365927269:3303], ActorId: [3:7587124690365927270:3304], TraceId: ExecutionId: f3d81820-c32d2139-8058753c-648da4b, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2025-12-23T18:38:45.950051Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587124690365927268:3302], ActorId: [3:7587124690365927269:3303], TraceId: ExecutionId: f3d81820-c32d2139-8058753c-648da4b, RequestDatabase: /dc-1, Got response [3:7587124690365927270:3304] SUCCESS 2025-12-23T18:38:45.950115Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7587124690365927267:3301] ActorId: [3:7587124690365927268:3302] Database: /dc-1 ExecutionId: f3d81820-c32d2139-8058753c-648da4b. Extracted script execution operation [3:7587124690365927270:3304], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7587124681775992420:3213], LeaseGeneration: 0 2025-12-23T18:38:45.950136Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7587124690365927267:3301] ActorId: [3:7587124690365927268:3302] Database: /dc-1 ExecutionId: f3d81820-c32d2139-8058753c-648da4b. Reply success 2025-12-23T18:38:45.951416Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=NzI2NGU5NTktOTAyNzkzOTMtMzc2N2VmNjQtNGEwOTRmZDY=, workerId: [3:7587124690365927272:2727], local sessions count: 1 2025-12-23T18:38:46.975860Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kd683cnz0t94n7qx07kqqmt6, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=OTEzYmU0ODItMTA3ZjljOWMtZTgwOTc2NzktZTY3OTU2OGI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 96, targetId: [3:7587124668891090006:2539] 2025-12-23T18:38:46.975911Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 96 timeout: 300.000000s actor id: [3:7587124694660894604:3318] 2025-12-23T18:38:47.007616Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7587124698955861904:3320], for# root@builtin, access# DescribeSchema 2025-12-23T18:38:47.007660Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7587124698955861904:3320], for# root@builtin, access# DescribeSchema 2025-12-23T18:38:47.010270Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7587124694660894605:2738], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:38:47.013531Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=3&id=OTEzYmU0ODItMTA3ZjljOWMtZTgwOTc2NzktZTY3OTU2OGI=, ActorId: [3:7587124668891090006:2539], ActorState: ExecuteState, TraceId: 01kd683cnz0t94n7qx07kqqmt6, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/script_executions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:38:47.013778Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd683cnz0t94n7qx07kqqmt6", Forwarded response to sender actor, requestId: 96, sender: [3:7587124694660894603:2737], selfId: [3:7587124621646447948:2079], source: [3:7587124668891090006:2539] 2025-12-23T18:38:47.051608Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kd683cra6kxqmefdhvn2tzt9, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=OTEzYmU0ODItMTA3ZjljOWMtZTgwOTc2NzktZTY3OTU2OGI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 97, targetId: [3:7587124668891090006:2539] 2025-12-23T18:38:47.051662Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 97 timeout: 300.000000s actor id: [3:7587124698955861907:3321] 2025-12-23T18:38:47.099227Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7587124698955861911:3323], for# root@builtin, access# DescribeSchema 2025-12-23T18:38:47.099259Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7587124698955861911:3323], for# root@builtin, access# DescribeSchema 2025-12-23T18:38:47.101909Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7587124698955861908:2741], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/script_execution_leases]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:38:47.103155Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=3&id=OTEzYmU0ODItMTA3ZjljOWMtZTgwOTc2NzktZTY3OTU2OGI=, ActorId: [3:7587124668891090006:2539], ActorState: ExecuteState, TraceId: 01kd683cra6kxqmefdhvn2tzt9, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/script_execution_leases]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:38:47.103315Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd683cra6kxqmefdhvn2tzt9", Forwarded response to sender actor, requestId: 97, sender: [3:7587124698955861906:2740], selfId: [3:7587124621646447948:2079], source: [3:7587124668891090006:2539] 2025-12-23T18:38:47.121749Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kd683cth9kjsbrqrae9kdp4k, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=OTEzYmU0ODItMTA3ZjljOWMtZTgwOTc2NzktZTY3OTU2OGI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 98, targetId: [3:7587124668891090006:2539] 2025-12-23T18:38:47.121802Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 98 timeout: 300.000000s actor id: [3:7587124698955861914:3324] 2025-12-23T18:38:47.164842Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7587124698955861918:3326], for# root@builtin, access# DescribeSchema 2025-12-23T18:38:47.164874Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7587124698955861918:3326], for# root@builtin, access# DescribeSchema 2025-12-23T18:38:47.167422Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7587124698955861915:2744], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/result_sets]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:38:47.170732Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=3&id=OTEzYmU0ODItMTA3ZjljOWMtZTgwOTc2NzktZTY3OTU2OGI=, ActorId: [3:7587124668891090006:2539], ActorState: ExecuteState, TraceId: 01kd683cth9kjsbrqrae9kdp4k, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/result_sets]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:38:47.170915Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd683cth9kjsbrqrae9kdp4k", Forwarded response to sender actor, requestId: 98, sender: [3:7587124698955861913:2743], selfId: [3:7587124621646447948:2079], source: [3:7587124668891090006:2539] 2025-12-23T18:38:47.189258Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=OTEzYmU0ODItMTA3ZjljOWMtZTgwOTc2NzktZTY3OTU2OGI=, workerId: [3:7587124668891090006:2539], local sessions count: 0 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TPartitionTests::TEvTxCalcPredicate_With_Conflicts [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidQueueUrl [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::PartitionFirstClass |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TEvTxCalcPredicate_With_Conflicts [GOOD] Test command err: 2025-12-23T18:38:31.672147Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:31.825363Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:31.825439Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:31.825498Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:31.825585Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:31.866371Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: 
[72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:183:2196] 2025-12-23T18:38:31.868857Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-12-23T18:38:31.000000Z 2025-12-23T18:38:31.869113Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:183:2196] 2025-12-23T18:38:31.893565Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:31.941574Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:31.965822Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:31.978983Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:32.025944Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:32.073344Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:32.106777Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003Sclient-1" IncludeFrom: true To: "c0000000003Sclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003wclient-1|0000000000000000" IncludeFrom: true To: "c0000000003wclient-1|FFFFFFFFFFFFFFFF" IncludeTo: true } } CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\000\030\000\"\000(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\014\n\010client-2@\000" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\330\350\205\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:38:32.747393Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:32.810662Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:32.810715Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 
2025-12-23T18:38:32.810777Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:32.810852Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:32.826806Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] bootstrapping {0, {0, 1111}, 123} [2:184:2196] 2025-12-23T18:38:32.830662Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-12-23T18:38:32.000000Z 2025-12-23T18:38:32.830781Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:184:2196] 2025-12-23T18:38:32.841279Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:32.882954Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:32.906949Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:32.945680Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:32.983018Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:33.007034Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:33.070764Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:33.154949Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:33.227299Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|3ba6e18c-f8b58e1-89680fe-20575ffa_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Send write: 0 2025-12-23T18:38:33.242995Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:33.446172Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:33.485537Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 1 2025-12-23T18:38:33.770957Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 2 2025-12-23T18:38:34.051616Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 
1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:34.090074Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 3 Send write: 4 2025-12-23T18:38:34.438995Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 5 2025-12-23T18:38:34.736298Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:35.015677Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 6 2025-12-23T18:38:35.202964Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 7 2025-12-23T18:38:35.346017Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 8 2025-12-23T18:38:35.615001Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 9 2025-12-23T18:38:36.040121Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:36.808580Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:36.896888Z node 3 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:36.896955Z node 3 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:36.897023Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:36.897079Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:36.920095Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] bootstrapping {0, {0, 1111}, 123} [3:183:2196] 2025-12-23T18:38:36.921792Z node 3 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-12-23T18:38:36.000000Z 2025-12-23T18:38:36.921853Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [3:183:2196] 2025-12-23T18:38:36.946639Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:36.990789Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:37.015224Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 self ... 
[StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.098285Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:50.123285Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:50.123391Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:38:50.123443Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.123493Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:50.123536Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.123582Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:50.146092Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:50.146224Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:38:50.146278Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.146313Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:50.146358Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.146403Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:50.167140Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:50.167243Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:38:50.167311Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.167350Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:50.167391Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.167440Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:50.189313Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:50.189401Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:38:50.189450Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 
2025-12-23T18:38:50.189503Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:50.189546Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.189578Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:50.216781Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:50.216880Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:38:50.216934Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.216968Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:50.217026Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.217062Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:50.227449Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:50.248543Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:50.248632Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:38:50.248682Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.248717Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:50.248792Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.248836Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:50.270991Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:50.271092Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:38:50.271171Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.271209Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:50.271252Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.271286Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:50.293828Z node 6 :PERSQUEUE DEBUG: 
partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:50.293912Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:38:50.293962Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.294009Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:50.294058Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.294091Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:50.297084Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-12-23T18:38:50.297257Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:50.297386Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:38:50.297479Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.297560Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:50.297657Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2025-12-23T18:38:50.297732Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-12-23T18:38:50.297918Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-12-23T18:38:50.298042Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 0 2025-12-23T18:38:50.298143Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:38:50.298244Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:38:50.298342Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request 2025-12-23T18:38:50.299076Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:38:50.320545Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:38:50.333654Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
2025-12-23T18:38:50.333896Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2025-12-23T18:38:50.333984Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:50.334063Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:38:50.334138Z node 6 :PERSQUEUE DEBUG: partition.cpp:1616: [72057594037927937][Partition][0][StateIdle] TxId 2 affect SourceId sourceid 2025-12-23T18:38:50.334260Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.334328Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:50.334379Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:50.334437Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:50.334511Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> PQCountersSimple::PartitionFirstClass [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop |93.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |93.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |93.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.5%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:37:44.567447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:37:44.567583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:44.567628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:37:44.567665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:37:44.567699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:37:44.567728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:37:44.567803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:44.567918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:37:44.568835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:37:44.569164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:37:44.658627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:37:44.658711Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:44.664930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:37:44.665194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:37:44.665335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:37:44.668773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:37:44.668972Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:37:44.669753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:37:44.669989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:37:44.672142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:44.672382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:37:44.673936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:37:44.674040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:44.674181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:37:44.674225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:37:44.674271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:37:44.674412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:37:44.836379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.837433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.837557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.837670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.837766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.837830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.837897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.837960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.838040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.838123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.838210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.838293Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.838355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.838427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:44.838529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
SHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:38:50.526365Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:38:50.526398Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:38:50.527497Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-12-23T18:38:50.528899Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-12-23T18:38:50.529177Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:38:50.529582Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2025-12-23T18:38:50.529992Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:38:50.530232Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:38:50.532083Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2025-12-23T18:38:50.532354Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409546 2025-12-23T18:38:50.534121Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:38:50.535020Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-12-23T18:38:50.535183Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:38:50.535431Z node 17 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-12-23T18:38:50.535806Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:38:50.535998Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:38:50.537615Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:38:50.537707Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:38:50.537833Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 Forgetting tablet 72075186233409547 2025-12-23T18:38:50.540555Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:38:50.540663Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-12-23T18:38:50.548588Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:38:50.548660Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:38:50.549158Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:38:50.549199Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-12-23T18:38:50.549488Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:38:50.549554Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:38:50.550890Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:38:50.551344Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:38:50.551432Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:38:50.552132Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:38:50.552301Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: 
got EvNotifyTxCompletionResult 2025-12-23T18:38:50.552374Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [17:544:2497] TestWaitNotification: OK eventTxId 103 2025-12-23T18:38:50.553147Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:38:50.553453Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 356us result status StatusPathDoesNotExist 2025-12-23T18:38:50.553684Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2025-12-23T18:38:50.554325Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-12-23T18:38:50.554426Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-12-23T18:38:50.554473Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-12-23T18:38:50.554528Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2025-12-23T18:38:50.555392Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:38:50.555667Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 324us result status StatusSuccess 2025-12-23T18:38:50.556290Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> TCdcStreamWithInitialScanTests::WithoutPqTransactions |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TTicketParserTest::AuthenticationUnsupported [GOOD] >> TTicketParserTest::AuthenticationUnknown >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> ScriptExecutionsTest::BackgroundOperationRestart [GOOD] >> ScriptExecutionsTest::BackgroundOperationFinalization |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream >> KqpErrors::ProposeResultLost_RwTx+UseSink >> DataShardVolatile::DistributedWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] Test command err: 
2025-12-23T18:38:37.595161Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:37.675952Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:37.676031Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:37.676091Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:37.676166Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:37.695673Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:37.716296Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:204:2217] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:38:37.717591Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:212:2165] 2025-12-23T18:38:37.720268Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:212:2165] 2025-12-23T18:38:37.722455Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:213:2165] 2025-12-23T18:38:37.724364Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:213:2165] 2025-12-23T18:38:37.735892Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:37.736454Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3371567-e8aaba5d-75b13734-9765ca20_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:38:37.747141Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:37.747613Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7614cf50-f70a44b9-46c03ba1-d5845476_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:38:37.755948Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:37.756438Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1d925f22-b33a8e29-68ed165b-805a6664_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
2025-12-23T18:38:37.763466Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:37.763871Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1a5f67e4-1ab43acd-491e28f8-5f730a8e_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:38:37.765386Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:37.765755Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d40b5255-87825ce2-3ea208bf-4744d96_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 ACTUAL:
subsystem=SLI:
    Account=asdfgs:
        sensor=WriteBigLatency: 0
        sensor=WritesTotal: 5
    Account=total:
        sensor=WriteBigLatency: 0
        sensor=WritesTotal: 5
    sensor=Write:
        Account=asdfgs:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
        Account=total:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
subsystem=partitionWriteQuotaWait:
    Account=asdfgs:
        Producer=asdfgs:
            Topic=asdfgs--topic:
                TopicPath=asdfgs/topic:
                    OriginDC=Dc1:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
    Account=total:
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
subsystem=readSession:
    Account=asdfgs:
        Producer=asdfgs:
            Topic=asdfgs--topic:
                TopicPath=asdfgs/topic:
                    OriginDC=Dc1:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
    Account=total:
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
subsystem=readTimeLag:
    Account=asdfgs:
        Producer=asdfgs:
            Topic=asdfgs--topic:
                TopicPath=asdfgs/topic:
                    OriginDC=Dc1:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=TimeLags:
                                    Interval=10000ms: 0
                                    Interval=1000ms: 0
                                    Interval=100ms: 0
                                    Interval=10ms: 0
                                    Interval=180000ms: 0
                                    Interval=2000ms: 0
                                    Interval=200ms: 0
                                    Interval=20ms: 0
                                    Interval=30000ms: 0
                                    Interval=5000ms: 0
                                    Interval=500ms: 0
                                    Interval=50ms: 0
                                    Interval=60000ms: 0
                                    Interval=9999999ms: 0
                    OriginDC=cluster:
                        Client=user:
 
...
5:203:2215] txId 12345 config:
CacheSize: 10485760
PartitionConfig {
  MaxCountInPartition: 20000000
  MaxSizeInPartition: 104857600
  LifetimeSeconds: 0
  LowWatermark: 6291456
  SourceIdLifetimeSeconds: 3600
  MaxWriteInflightSize: 90000000
}
PartitionIds: 0
PartitionIds: 1
TopicName: "topic"
Version: 39
LocalDC: true
Topic: "topic"
TopicPath: "/topic"
YcCloudId: "somecloud"
YcFolderId: "somefolder"
YdbDatabaseId: "PQ"
YdbDatabasePath: "/Root/PQ"
Partitions {
  PartitionId: 0
}
Partitions {
  PartitionId: 1
}
FederationAccount: "federationAccount"
MeteringMode: METERING_MODE_RESERVED_CAPACITY
AllPartitions {
  PartitionId: 0
}
AllPartitions {
  PartitionId: 1
}
Consumers {
  Name: "user"
  ReadFromTimestampsMs: 0
  Generation: 39
}
2025-12-23T18:38:51.659267Z node 15 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [15:211:2165]
2025-12-23T18:38:51.667042Z node 15 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [15:211:2165]
2025-12-23T18:38:51.667942Z node 15 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [15:212:2165]
2025-12-23T18:38:51.668892Z node 15 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [15:212:2165]
2025-12-23T18:38:51.675196Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'topic' partition: 0 messageNo: 0 requestId:  error: new GetOwnership request needed for owner 
2025-12-23T18:38:51.675627Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c86a7058-6d59e7a6-6252fade-dd18d270_0 generated for partition 0 topic 'topic' owner default
2025-12-23T18:38:51.684616Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId:  error: new GetOwnership request needed for owner 
2025-12-23T18:38:51.685114Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b81caf2c-fa6ab99-5c26fbb8-cf11ad4e_1 generated for partition 0 topic 'topic' owner default
2025-12-23T18:38:51.693080Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId:  error: new GetOwnership request needed for owner 
2025-12-23T18:38:51.693512Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f840c1bc-84f15eef-f3f18e3d-ac0f7224_2 generated for partition 0 topic 'topic' owner default
2025-12-23T18:38:51.700478Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId:  error: new GetOwnership request needed for owner 
2025-12-23T18:38:51.700937Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|60e4f3aa-dc309cce-50248c7f-db85d608_3 generated for partition 0 topic 'topic' owner default
Got start offset = 0
COUNTERS: 
subsystem=SLI:
    Account=federationAccount:
        name=WriteBigLatency: 0
        name=WritesTotal: 5
    Account=total:
        name=WriteBigLatency: 0
        name=WritesTotal: 5
    sensor=Write:
        Account=federationAccount:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
        Account=total:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
2025-12-23T18:38:52.330492Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:52.395956Z node 16 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:52.396026Z node 16 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:52.396106Z node 16 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:52.396193Z node 16 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:52.419367Z node 16 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:52.420205Z node 16 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 40 actor [16:204:2217] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 40 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 40 } 2025-12-23T18:38:52.420905Z node 16 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [16:213:2165] 2025-12-23T18:38:52.428547Z node 16 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [16:213:2165] 2025-12-23T18:38:52.433627Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:52.434004Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5ea68860-6839953c-5e449373-43b79807_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:229:2165] Captured TEvRequest, cmd write size: 4 2025-12-23T18:38:52.445793Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:52.446215Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|feb37993-e3e93a0e-c91f60a1-29b6d0a6_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:38:52.470983Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:52.520924Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:52.548421Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:52.560883Z node 16 :TABLET_RESOLVER INFO: 
tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:52.610965Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:52.659707Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:52.706959Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:229:2165] Captured TEvRequest, cmd write size: 4 2025-12-23T18:38:52.843846Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:52.844261Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|29c3da9f-52f45bd-f456ffbe-f1785340_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:38:52.878089Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:52.922302Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:229:2165] Captured TEvRequest, cmd write size: 4 2025-12-23T18:38:53.136612Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:53.136962Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ff674b3-31c75318-b4b075f-c7fa16f7_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:38:53.190150Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:53.200836Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:229:2165] Captured TEvRequest, cmd write size: 4 2025-12-23T18:38:53.447792Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:53.448178Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ca22ed52-2494a690-3068a5d5-6477dc22_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:38:53.544959Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:229:2165] Captured TEvRequest, cmd write size: 4 2025-12-23T18:38:53.736911Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 
2025-12-23T18:38:53.737309Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ac05d5e2-5c12c93c-16391b01-c31ef4a7_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:38:53.798561Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:38:53.882786Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:229:2165] Captured TEvRequest, cmd write size: 4 Got start offset = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:37:45.357203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:37:45.357311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:45.357352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:37:45.357387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:37:45.357430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:37:45.357456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:37:45.357568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:45.357674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:37:45.358652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:37:45.358966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:37:45.462202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:37:45.462272Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:45.480442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:37:45.480762Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:37:45.480909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:37:45.493634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:37:45.493910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:37:45.494937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:37:45.495213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:37:45.502506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:45.502827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:37:45.504189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:37:45.504263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:45.504447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:37:45.504499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:37:45.504546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:37:45.504697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:37:45.730363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.737521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.737708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.737826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.737950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.738024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.738100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.738173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.738253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.738363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.738478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.738564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.738635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.738722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.738842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
shard: 72057594046678944 Send TEvRosterUpdateFinished 2025-12-23T18:38:40.801863Z node 12 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2025-12-23T18:38:40.813447Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:38:40.813666Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:38:40.813873Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:38:40.813924Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:38:40.814088Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:38:40.814158Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:38:40.816430Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:38:40.816597Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:38:40.816805Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:38:40.816887Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:38:40.816940Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:38:40.816981Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:38:40.819208Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:38:40.819282Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:38:40.819348Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:38:40.821339Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-12-23T18:38:40.821394Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:38:40.821461Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:38:40.821534Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:38:40.821704Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:38:40.823526Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:38:40.823685Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:38:40.824082Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:38:40.824291Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 51539609708 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:38:40.824357Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:38:40.824684Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:38:40.824760Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:38:40.825016Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:38:40.825105Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:38:40.827160Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:38:40.827209Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-12-23T18:38:40.827427Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:38:40.827471Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [12:212:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:38:40.827747Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:38:40.827799Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:38:40.827936Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:38:40.827977Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:38:40.828017Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:38:40.828059Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:38:40.828108Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:38:40.828191Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:38:40.828234Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:38:40.828268Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:38:40.828334Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:38:40.828377Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:38:40.828412Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:38:40.829092Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:38:40.829199Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:38:40.829244Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:38:40.829286Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:38:40.829335Z node 12 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:38:40.829417Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:38:40.831643Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:38:40.832078Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-12-23T18:38:40.834910Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [12:675:2664] Bootstrap 2025-12-23T18:38:40.836095Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [12:675:2664] Become StateWork (SchemeCache [12:681:2670]) 2025-12-23T18:38:40.836679Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [12:675:2664] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:38:40.838754Z node 12 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpTx::LocksAbortOnCommit [GOOD] >> KqpTx::RollbackByIdle |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> DataShardVolatile::DistributedWriteThenDropTable >> TLocksTest::SetLockNothing [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> KqpErrors::ProposeError >> KqpErrors::ResolveTableError >> TCdcStreamWithInitialScanTests::WithoutPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions >> KqpSinkTx::DeferredEffects [GOOD] >> KqpSinkTx::ExplicitTcl |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchLong [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [FAIL] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] Test command err: 2025-12-23T18:38:16.587250Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124568565457063:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:16.592017Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e6/r3tmp/tmpIpsMM8/pdisk_1.dat 2025-12-23T18:38:17.276971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:17.286451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:17.286544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:17.308009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:17.453910Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124568565457028:2081] 1766515096558411 != 1766515096558414 2025-12-23T18:38:17.497163Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:17.522859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:17.594473Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:17.912818Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:13617 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515097585 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:18.118315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:18.127631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:38:18.171639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:38:18.182990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:18.438230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:18.541788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:23.705480Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124597144334187:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:23.705522Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e6/r3tmp/tmpZo8yZJ/pdisk_1.dat 2025-12-23T18:38:23.858883Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:23.898322Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:23.906931Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:24.009675Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:24.014947Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124597144333959:2081] 1766515103663739 != 1766515103663742 2025-12-23T18:38:24.033715Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:24.157845Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:18262 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515104158 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:24.588945Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:24.594663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:24.604422Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:24.608766Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:24.705573Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:24.744706Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:24.801218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:29.828106Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124624722791042:2137];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:29.828166Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e6/r3tmp/tmpe0pWxF/pdisk_1.dat 2025-12-23T18:38:29.911274Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:30.070933Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587124624722790943:2081] 1766515109812688 != 1766515109812691 2025-12-23T18:38:30.146301Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:30.153336Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:30.159231Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:30.162006Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Conn ... CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515120496 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:40.845061Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:40.857580Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:40.890604Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:41.081605Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:41.221521Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:41.242081Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e6/r3tmp/tmplS55xs/pdisk_1.dat 2025-12-23T18:38:45.824087Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:45.860851Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:45.962376Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:46.002310Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:46.002646Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:46.031420Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:46.124096Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:25724 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515126082 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:38:46.431090Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:38:46.466269Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:38:46.518861Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:46.616735Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:46.684524Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:46.742188Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:50.639863Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7587124713580713443:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:50.639999Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e6/r3tmp/tmpzTNHKN/pdisk_1.dat 2025-12-23T18:38:50.687577Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:50.786670Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:50.786910Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:50.803166Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7587124713580713418:2081] 1766515130634332 != 1766515130634335 2025-12-23T18:38:50.832560Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:50.835484Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:50.887875Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:23905 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515130891 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-12-23T18:38:51.159009Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:38:51.175996Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:51.182256Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:51.276411Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:51.356453Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize [GOOD] >> TTicketParserTest::AuthenticationUnknown [GOOD] >> TTicketParserTest::Authorization >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [FAIL] >> KqpLocksTricky::TestSnapshotWithDependentReads-UseSink [GOOD] >> KqpRollback::DoubleUpdate >> TCdcStreamWithInitialScanTests::WithPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchLong [GOOD] Test command err: 2025-12-23T18:38:39.917588Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124666907867460:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:39.930940Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0058df/r3tmp/tmp5zvlM2/pdisk_1.dat 2025-12-23T18:38:40.336911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:40.337077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:40.363957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:40.586722Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:38:40.598185Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124666907867433:2081] 1766515119915710 != 1766515119915713 2025-12-23T18:38:40.608637Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22008, node 1 2025-12-23T18:38:40.702853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:40.702879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:40.702890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:40.702998Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:38:40.785004Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61573 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:38:40.994240Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515120601 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:38:41.068873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:41.080091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:61573 2025-12-23T18:38:41.544308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:38:41.557768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:41.579296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:38:41.641506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:41.904095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:38:41.988361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:38:42.041171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-12-23T18:38:42.048146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:42.090569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:42.141368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:42.192254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:38:42.250346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:42.297664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:42.342119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:44.309643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124688382705576:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:44.310187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124688382705568:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:44.310292Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:44.317429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:38:44.317765Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124688382705583:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:44.317938Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:44.337610Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124688382705582:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:38:44.442027Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124688382705635:3104] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, sta ... 57.377195Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:38:57.377458Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:38:57.380512Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:38:57.380713Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2025-12-23T18:38:57.380727Z node 2 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037907][Partition][0][StateIdle] Updateing MLP consumer 'consumer' config 2025-12-23T18:38:57.380848Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:38:57.380871Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:57.380883Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:57.380893Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:57.380918Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:57.380929Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:38:57.380959Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:38:57.381347Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic1" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/topic1" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 0 DefaultProcessingTimeoutSeconds: 20 } MonitoringProjectId: "" 2025-12-23T18:38:57.381404Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:946: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS 2025-12-23T18:38:57.381521Z node 2 
:PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:38:57.386453Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) { status: SUCCESS, issues: }ct.IsSuccess() = 1, ct.IsTransportError() = 0, ct.GetEndpoint() = [::]:65338 { status: SUCCESS, issues: }consumer 2025-12-23T18:38:57.414574Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:60288) incoming connection opened 2025-12-23T18:38:57.414644Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:60288) -> (POST /Root, 1406 bytes) 2025-12-23T18:38:57.414813Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [183a:bb97:ee7b:0:3a:bb97:ee7b:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: 83e1e1ec-61ed3d16-2bf50c10-4f994773 2025-12-23T18:38:57.415851Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessageBatch] requestId [83e1e1ec-61ed3d16-2bf50c10-4f994773] got new request from [183a:bb97:ee7b:0:3a:bb97:ee7b:0] database '/Root' stream '' 2025-12-23T18:38:57.416497Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2025-12-23T18:38:57.416570Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2025-12-23T18:38:57.416600Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2025-12-23T18:38:57.416629Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2025-12-23T18:38:57.416658Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2025-12-23T18:38:57.416684Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2025-12-23T18:38:57.416711Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) 2025-12-23T18:38:57.445905Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:38:57.446007Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 2025-12-23T18:38:57.446411Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:38:57.446449Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:38:57.452947Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:38:57.453006Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:38:57.454873Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission 
ydb.databases.create now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:38:57.454978Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2025-12-23T18:38:57.455127Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessageBatch] requestId [83e1e1ec-61ed3d16-2bf50c10-4f994773] [auth] Authorized successfully 2025-12-23T18:38:57.455193Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessageBatch] requestId [83e1e1ec-61ed3d16-2bf50c10-4f994773] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"__type":"AWS.SimpleQueueService.TooManyEntriesInBatchRequest","message":"The batch request contains more entries than permissible."} 2025-12-23T18:38:57.456042Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessageBatch] requestId [83e1e1ec-61ed3d16-2bf50c10-4f994773] Not retrying GRPC response. Code: 400, Error: AWS.SimpleQueueService.TooManyEntriesInBatchRequest 2025-12-23T18:38:57.456168Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessageBatch] requestId [83e1e1ec-61ed3d16-2bf50c10-4f994773] reply with status: STATUS_UNDEFINED message: The batch request contains more entries than permissible. 2025-12-23T18:38:57.456441Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:60288) <- (400 AWS.SimpleQueueService.TooManyEntriesInBatchRequest, 134 bytes) 2025-12-23T18:38:57.456492Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#38,[::1]:60288) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"/v1/5//Root/6/topic1/8/consumer", "Entries": [ { "Id":"Id-1", "MessageGroupId":"MessageGroupId-1", "MessageBody":"MessageBody-1" }, { "Id":"Id-2", "MessageGroupId":"MessageGroupId-2", "MessageBody":"MessageBody-2" }, { "Id":"Id-3", "MessageGroupId":"MessageGroupId-3", "MessageBody":"MessageBody-3" }, { "Id":"Id-4", "MessageGroupId":"MessageGroupId-4", "MessageBody":"MessageBody-4" }, { "Id":"Id-5", "MessageGroupId":"MessageGroupId-5", "MessageBody":"MessageBody-5" }, { "Id":"Id-6", "MessageGroupId":"MessageGroupId-6", "MessageBody":"MessageBody-6" 2025-12-23T18:38:57.456518Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#38,[::1]:60288) Response: HTTP/1.1 400 AWS.SimpleQueueService.TooManyEntriesInBatchRequest Connection: close x-amzn-requestid: 83e1e1ec-61ed3d16-2bf50c10-4f994773 Content-Type: application/x-amz-json-1.1 Content-Length: 134 2025-12-23T18:38:57.456613Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:60288) connection closed 2025-12-23T18:38:57.475019Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:57.475048Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:57.475058Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:57.475072Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:57.475082Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:38:57.574839Z 
node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:57.574872Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:57.574885Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:57.574904Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:57.574917Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:38:57.681960Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:57.682003Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:57.682014Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:57.682032Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:57.682044Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits >> BasicStatistics::DedicatedTimeIntervals [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize [GOOD] Test command err: 2025-12-23T18:38:43.095976Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124681229110633:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:43.107974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:43.199901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0058de/r3tmp/tmpqgodXv/pdisk_1.dat 2025-12-23T18:38:43.552308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-12-23T18:38:43.552402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:43.586572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14985, node 1 2025-12-23T18:38:43.855482Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:38:43.856577Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:43.872661Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:43.872690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:43.872712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:43.872781Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:38:44.128894Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515123695 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:38:44.153734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:44.168762Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:38:44.172478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:1077 waiting... 
2025-12-23T18:38:44.464629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2025-12-23T18:38:44.492287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:38:44.526998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-12-23T18:38:44.535690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:44.713735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:38:44.800979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-12-23T18:38:44.809213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:38:44.881746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:44.926841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:44.970225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:45.015469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:45.078825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:45.127903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:45.174730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:47.700522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124698408981431:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:47.700680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:47.706514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124698408981443:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:47.706899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124698408981445:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:47.707017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:47.711444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:38:47.733234Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124698408981446:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:38:47.810692Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124698408981499:3106] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type ... :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:46730) incoming connection opened 2025-12-23T18:38:58.931662Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:46730) -> (POST /Root, 100 bytes) 2025-12-23T18:38:58.931793Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d84e:a56d:e7c:0:c04e:a56d:e7c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 9c1c9bb4-d1b73bf1-b6280d71-f6fb0411 2025-12-23T18:38:58.932153Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [9c1c9bb4-d1b73bf1-b6280d71-f6fb0411] got new request from [d84e:a56d:e7c:0:c04e:a56d:e7c:0] database '/Root' stream '' 2025-12-23T18:38:58.934949Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [9c1c9bb4-d1b73bf1-b6280d71-f6fb0411] [auth] Authorized successfully 2025-12-23T18:38:58.935011Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [9c1c9bb4-d1b73bf1-b6280d71-f6fb0411] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:38:58.935413Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [9c1c9bb4-d1b73bf1-b6280d71-f6fb0411] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is less than 1"} 2025-12-23T18:38:58.935476Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [9c1c9bb4-d1b73bf1-b6280d71-f6fb0411] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is less than 1 2025-12-23T18:38:58.935676Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:46730) <- (400 InvalidParameterValue, 81 bytes) 2025-12-23T18:38:58.935719Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:46730) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":0, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2025-12-23T18:38:58.935744Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:46730) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 9c1c9bb4-d1b73bf1-b6280d71-f6fb0411 Content-Type: application/x-amz-json-1.1 Content-Length: 81 2025-12-23T18:38:58.935811Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:46730) connection closed Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is greater than 10"} 2025-12-23T18:38:58.943028Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:46736) incoming connection opened 2025-12-23T18:38:58.943108Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:46736) -> (POST /Root, 101 bytes) 2025-12-23T18:38:58.943245Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f890:a66d:e7c:0:e090:a66d:e7c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: f4f4213d-8abee159-bac4430-322e50d8 
2025-12-23T18:38:58.943657Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [f4f4213d-8abee159-bac4430-322e50d8] got new request from [f890:a66d:e7c:0:e090:a66d:e7c:0] database '/Root' stream '' 2025-12-23T18:38:58.944417Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [f4f4213d-8abee159-bac4430-322e50d8] [auth] Authorized successfully 2025-12-23T18:38:58.944456Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [f4f4213d-8abee159-bac4430-322e50d8] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:38:58.944943Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [f4f4213d-8abee159-bac4430-322e50d8] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue 2025-12-23T18:38:58.945006Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [f4f4213d-8abee159-bac4430-322e50d8] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is greater than 10 2025-12-23T18:38:58.945190Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:46736) <- (400 InvalidParameterValue, 85 bytes) 2025-12-23T18:38:58.945233Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:46736) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":50, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2025-12-23T18:38:58.945272Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:46736) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: f4f4213d-8abee159-bac4430-322e50d8 Content-Type: application/x-amz-json-1.1 Content-Length: 85 2025-12-23T18:38:58.945338Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:46736) connection closed 2025-12-23T18:38:58.946407Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:46744) incoming connection opened 2025-12-23T18:38:58.946485Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:46744) -> (POST /Root, 109 bytes) 2025-12-23T18:38:58.946583Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f8a8:a66d:e7c:0:e0a8:a66d:e7c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 812e65e6-2b562717-2ca00d85-527817b7 Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is greater than 10"} Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is less than 1"} 2025-12-23T18:38:58.946871Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [812e65e6-2b562717-2ca00d85-527817b7] got new request from [f8a8:a66d:e7c:0:e0a8:a66d:e7c:0] database '/Root' stream '' 2025-12-23T18:38:58.947225Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [812e65e6-2b562717-2ca00d85-527817b7] [auth] Authorized successfully 2025-12-23T18:38:58.947243Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [812e65e6-2b562717-2ca00d85-527817b7] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:38:58.947641Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [812e65e6-2b562717-2ca00d85-527817b7] Not retrying GRPC response. 
Code: 400, Error: InvalidParameterValue 2025-12-23T18:38:58.947701Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [812e65e6-2b562717-2ca00d85-527817b7] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is greater than 10 2025-12-23T18:38:58.947893Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:46744) <- (400 InvalidParameterValue, 85 bytes) 2025-12-23T18:38:58.947932Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:46744) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":2147483647, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2025-12-23T18:38:58.947956Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:46744) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 812e65e6-2b562717-2ca00d85-527817b7 Content-Type: application/x-amz-json-1.1 Content-Length: 85 2025-12-23T18:38:58.948033Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:46744) connection closed 2025-12-23T18:38:58.949119Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:46752) incoming connection opened 2025-12-23T18:38:58.949193Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:46752) -> (POST /Root, 110 bytes) 2025-12-23T18:38:58.949309Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [58b8:a66d:e7c:0:40b8:a66d:e7c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: c2ac8b2a-b3b2d9a7-593992e3-67d8d7bf 2025-12-23T18:38:58.949581Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [c2ac8b2a-b3b2d9a7-593992e3-67d8d7bf] got new request from [58b8:a66d:e7c:0:40b8:a66d:e7c:0] database '/Root' stream '' 2025-12-23T18:38:58.949925Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [c2ac8b2a-b3b2d9a7-593992e3-67d8d7bf] [auth] Authorized successfully 2025-12-23T18:38:58.949957Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [c2ac8b2a-b3b2d9a7-593992e3-67d8d7bf] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:38:58.950181Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [c2ac8b2a-b3b2d9a7-593992e3-67d8d7bf] Not retrying GRPC response. 
Code: 400, Error: InvalidParameterValue 2025-12-23T18:38:58.950229Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [c2ac8b2a-b3b2d9a7-593992e3-67d8d7bf] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is less than 1 2025-12-23T18:38:58.950324Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:46752) <- (400 InvalidParameterValue, 81 bytes) 2025-12-23T18:38:58.950361Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:46752) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":-2147483648, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2025-12-23T18:38:58.950386Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:46752) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: c2ac8b2a-b3b2d9a7-593992e3-67d8d7bf Content-Type: application/x-amz-json-1.1 Content-Length: 81 2025-12-23T18:38:58.950463Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:46752) connection closed 2025-12-23T18:38:58.954232Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:58.954262Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:58.954289Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:58.954307Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:58.954319Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:38:59.054691Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:59.054731Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:59.054821Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:59.054838Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:59.054865Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TCdcStreamWithInitialScanTests::DropStream >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::DedicatedTimeIntervals [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:37:47.937208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:48.278806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:37:48.312440Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:527:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:37:48.312961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:48.313046Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:37:49.054274Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:49.210243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:49.210371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:49.250865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:49.376248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003267/r3tmp/tmpYP22nt/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17434, node 1 2025-12-23T18:37:50.537435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:37:50.543161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:50.543227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:50.543260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:50.543737Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:50.639390Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:4840 2025-12-23T18:37:51.424673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:37:57.025392Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:57.032907Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2025-12-23T18:37:57.037937Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:37:57.094325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:57.094457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:57.145943Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:37:57.153558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:57.472891Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:57.473683Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:57.475044Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:57.476016Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:57.476234Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:57.476486Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:57.476589Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:57.476728Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:57.476852Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:57.689892Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database1/.metadata/script_executions 2025-12-23T18:37:57.745250Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:57.745368Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:57.760906Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:58.014037Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:58.080922Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:37:58.081027Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:37:58.182580Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:37:58.182932Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:37:58.183162Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:37:58.183226Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:37:58.183310Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:37:58.183368Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:37:58.183423Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:37:58.183481Z node 3 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:37:58.184335Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:37:58.380310Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2288:2610] 2025-12-23T18:37:58.381876Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2025-12-23T18:37:58.427704Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [3:2292:2614] Owner: [3:2291:2613]. Describe result: PathErrorUnknown 2025-12-23T18:37:58.427780Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2292:2614] Owner: [3:2291:2613]. Creating table 2025-12-23T18:37:58.427878Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2292:2614] Owner: [3:2291:2613]. Created ESchemeOpCreateTable transaction for path: /Root/Database1/.metadata/_statistics 2025-12-23T18:37:58.448456Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2338:2634], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:37:58.469182Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:2326:2631] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-12-23T18:37:58.488305Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2292:2614] Owner: [3:2291:2613]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-12-23T18:37:58.525124Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:37:58.525240Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [3:2348:2641], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:37:58.620518Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2376:2666] 2025-12-23T18:37:58.628278Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2376:2666], schemeshard id = 72075186224037897 2025-12-23T18:37:58.771965Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:37:59.351330Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2292:2614] Owner: [3:2291:2613]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database1/.metadata/_statistics 2025-12-23T18:37:59.353705Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2681:2888], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:37:59.357525Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:59.380779Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2292:2614] Owner: [3:2291:2613]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075 ... 3T18:38:46.053424Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:6061:3435], StatRequests.size() = 1 2025-12-23T18:38:47.382780Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:6101:3445]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:38:47.383182Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2025-12-23T18:38:47.383231Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:6101:3445], StatRequests.size() = 1 2025-12-23T18:38:47.495717Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-12-23T18:38:47.496435Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 10 2025-12-23T18:38:47.496933Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2025-12-23T18:38:47.497015Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 10 2025-12-23T18:38:47.646409Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:38:47.646520Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224037897 2025-12-23T18:38:47.646926Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:38:47.668446Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:38:48.836022Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6149:3459]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:38:48.836439Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-12-23T18:38:48.836483Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:6149:3459], StatRequests.size() = 1 2025-12-23T18:38:49.479579Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 
72075186224038898 2025-12-23T18:38:49.479650Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224038898 2025-12-23T18:38:49.479908Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:38:49.496296Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2025-12-23T18:38:49.866934Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:38:50.250442Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6190:3469]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:38:50.250809Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-12-23T18:38:50.250859Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:6190:3469], StatRequests.size() = 1 2025-12-23T18:38:51.544265Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6233:3479]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:38:51.544677Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-12-23T18:38:51.544728Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [3:6233:3479], StatRequests.size() = 1 2025-12-23T18:38:51.573880Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-12-23T18:38:51.574131Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 11 2025-12-23T18:38:51.574760Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:38:51.575014Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2025-12-23T18:38:51.703723Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:38:51.703805Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224037897 2025-12-23T18:38:51.704288Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:38:51.718903Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:38:52.982576Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [3:6280:3495]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:38:52.983075Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-12-23T18:38:52.983128Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [3:6280:3495], StatRequests.size() = 1 2025-12-23T18:38:52.983685Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6282:3389]], 
StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:38:52.996673Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:38:52.996804Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:6292:3393] 2025-12-23T18:38:52.996868Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:6292:3393] 2025-12-23T18:38:53.004263Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6292:3393], server id = [2:6298:3394], tablet id = 72075186224038895, status = OK 2025-12-23T18:38:53.004387Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224038895] EvServerConnected, pipe server id = [2:6298:3394] 2025-12-23T18:38:53.004800Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224038895] EvConnectNode, pipe server id = [2:6298:3394], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:38:53.004874Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:38:53.005413Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:38:53.005525Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6282:3389], StatRequests.size() = 1 2025-12-23T18:38:53.005632Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:38:53.088954Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224038895] EvFastPropagateCheck 2025-12-23T18:38:53.089066Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:38:53.159765Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:6292:3393], schemeshard count = 1 2025-12-23T18:38:53.643308Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224038895] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:38:53.643604Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 1 2025-12-23T18:38:53.644348Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 1 2025-12-23T18:38:53.655903Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2025-12-23T18:38:53.656003Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224038898 2025-12-23T18:38:53.656379Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:38:53.680834Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2025-12-23T18:38:53.961143Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:38:55.754301Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-12-23T18:38:55.756871Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 
cookie: 12 2025-12-23T18:38:55.757660Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2025-12-23T18:38:55.757799Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2025-12-23T18:38:55.851917Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:38:55.852007Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224037897 2025-12-23T18:38:55.852451Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:38:55.872625Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:38:56.080623Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224038895] EvPropagateTimeout 2025-12-23T18:38:57.655447Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224038895] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:38:57.655661Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 2 2025-12-23T18:38:57.656279Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 2 2025-12-23T18:38:57.667746Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2025-12-23T18:38:57.667821Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224038898 2025-12-23T18:38:57.668177Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:38:57.692550Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2025-12-23T18:38:57.991942Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> LocalPartitionReader::Retries |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Retries [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> DataShardVolatile::DistributedWriteThenImmediateUpsert |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> KqpTx::RollbackByIdle [GOOD] >> KqpTx::MixEnginesOldNew >> 
TPQTest::TestSetClientOffset [GOOD] >> TPQTest::TestReadSessions |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Retries [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock >> LocalPartitionReader::Simple [GOOD] >> TTicketParserTest::Authorization [GOOD] >> TTicketParserTest::AuthorizationModify >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless >> LocalPartitionReader::Booting >> LocalPartitionReader::Booting [GOOD] |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpSinkTx::ExplicitTcl [GOOD] >> KqpSinkTx::Interactive >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [FAIL] Test command err: Starting YDB, grpc: 61184, msgbus: 27951 2025-12-23T18:34:43.636068Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123652994041129:2091];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:43.636133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0047fe/r3tmp/tmpT9WDT2/pdisk_1.dat 2025-12-23T18:34:43.957892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.014478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:44.014585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:44.067885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:44.110879Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123652994041063:2082] 1766514883622788 != 1766514883622791 2025-12-23T18:34:44.112554Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.161047Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TServer::EnableGrpc on GrpcPort 61184, node 1 2025-12-23T18:34:44.336974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:44.336996Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:44.337004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:44.337096Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27951 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:34:44.624348Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587123652994041326:2116] Handle TEvNavigate describe path dc-1 2025-12-23T18:34:44.624389Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587123657289009452:2733] HANDLE EvNavigateScheme dc-1 2025-12-23T18:34:44.624745Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587123657289009452:2733] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:44.656928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:34:44.732313Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587123657289009452:2733] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-12-23T18:34:44.750069Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587123657289009452:2733] Handle TEvDescribeSchemeResult Forward to# [1:7587123657289009451:2732] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 
MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514884169 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:34:44.781184Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123652994041326:2116] Handle TEvProposeTransaction 2025-12-23T18:34:44.781215Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123652994041326:2116] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:34:44.781283Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123652994041326:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7587123657289009470:2741] 2025-12-23T18:34:44.927464Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123657289009470:2741] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:44.927559Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123657289009470:2741] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:44.927577Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123657289009470:2741] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:44.927641Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123657289009470:2741] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:44.927912Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123657289009470:2741] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:44.928029Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123657289009470:2741] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 
2025-12-23T18:34:44.928092Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7587123657289009470:2741] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:34:44.928202Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7587123657289009470:2741] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:34:44.928772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:44.933468Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7587123657289009470:2741] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:34:44.933543Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7587123657289009470:2741] txid# 281474976715657 SEND to# [1:7587123657289009469:2740] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-12-23T18:34:44.957891Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123652994041326:2116] Handle TEvProposeTransaction 2025-12-23T18:34:44.957912Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123652994041326:2116] TxId# 281474976715658 ProcessProposeTransaction 2025-12-23T18:34:44.957949Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123652994041326:2116] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7587123657289009506:2774] 2025-12-23T18:34:44.960587Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123657289009506:2774] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:44.960624Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123657289009506:2774] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:44.960636Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123657289009506:2774] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:44.960681Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123657289009506:2774] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:44.960941Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123657289009506:2774] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:44.961022Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123657289009506:2774] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 Time ... 
200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515130289 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:50.575610Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124707102434202:2113] Handle TEvProposeTransaction 2025-12-23T18:38:50.575640Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124707102434202:2113] TxId# 281474976710657 ProcessProposeTransaction 2025-12-23T18:38:50.575694Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124707102434202:2113] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [59:7587124711397402513:2738] 2025-12-23T18:38:50.578585Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124711397402513:2738] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:38:50.578645Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124711397402513:2738] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:50.578669Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124711397402513:2738] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:50.578728Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124711397402513:2738] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:50.583127Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124711397402513:2738] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:50.583249Z node 59 :TX_PROXY DEBUG: 
schemereq.cpp:1666: Actor# [59:7587124711397402513:2738] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-12-23T18:38:50.583354Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124711397402513:2738] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-12-23T18:38:50.583543Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124711397402513:2738] txid# 281474976710657 HANDLE EvClientConnected 2025-12-23T18:38:50.584163Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:38:50.589340Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124711397402513:2738] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-12-23T18:38:50.589403Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124711397402513:2738] txid# 281474976710657 SEND to# [59:7587124711397402512:2737] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-12-23T18:38:50.612884Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124707102434202:2113] Handle TEvProposeTransaction 2025-12-23T18:38:50.612909Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124707102434202:2113] TxId# 281474976710658 ProcessProposeTransaction 2025-12-23T18:38:50.612942Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124707102434202:2113] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [59:7587124711397402546:2768] 2025-12-23T18:38:50.615355Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124711397402546:2768] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:38:50.615412Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124711397402546:2768] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:50.615428Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124711397402546:2768] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:50.615469Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124711397402546:2768] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:50.615728Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124711397402546:2768] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:50.615817Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124711397402546:2768] HANDLE 
EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:50.615890Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124711397402546:2768] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-12-23T18:38:50.616000Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124711397402546:2768] txid# 281474976710658 HANDLE EvClientConnected 2025-12-23T18:38:50.616594Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:38:50.620149Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124711397402546:2768] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-12-23T18:38:50.620219Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124711397402546:2768] txid# 281474976710658 SEND to# [59:7587124711397402545:2767] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-12-23T18:38:50.910137Z node 59 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:54.866858Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7587124707102434210:2094];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:54.866948Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:38:55.826868Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kd683g9abgja1gve8a81eqtw", Request deadline has expired for 0.192085s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22925 TBackTrace::Capture()+28 (0x1B03950C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B5289DC) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1825 (0x1AC12391) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+1758 (0x1AC29A9E) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AC6D066) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AC5AB08) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B56166A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B52F6B8) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AC59C0D) NUnitTest::TTestFactory::Execute()+2176 (0x1B530E70) NUnitTest::RunMain(int, char**)+5805 (0x1B55B4CD) ??+0 (0x7FF8411EED90) __libc_start_main+128 (0x7FF8411EEE40) _start+41 (0x18614029) |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] Test command err: Starting YDB, grpc: 2481, msgbus: 7119 2025-12-23T18:34:43.707965Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123650751127431:2089];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:43.708623Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:34:43.736690Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0047ff/r3tmp/tmpwlLw2N/pdisk_1.dat 2025-12-23T18:34:44.076346Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.130592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:44.130808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:44.165130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:44.196011Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.242318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions TServer::EnableGrpc on GrpcPort 2481, node 1 2025-12-23T18:34:44.474784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:44.474811Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:44.474824Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:44.474936Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:34:44.707853Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7119 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:34:44.748162Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587123650751127625:2117] Handle TEvNavigate describe path dc-1 2025-12-23T18:34:44.748233Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587123655046095735:2725] HANDLE EvNavigateScheme dc-1 2025-12-23T18:34:44.748651Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587123655046095735:2725] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:44.818774Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587123655046095735:2725] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-12-23T18:34:44.833169Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587123655046095735:2725] Handle TEvDescribeSchemeResult Forward to# [1:7587123655046095734:2724] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: 
Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514884288 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:34:44.882602Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123650751127625:2117] Handle TEvProposeTransaction 2025-12-23T18:34:44.882631Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123650751127625:2117] TxId# 281474976710657 ProcessProposeTransaction 2025-12-23T18:34:44.882724Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123650751127625:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7587123655046095744:2731] 2025-12-23T18:34:45.022910Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123655046095744:2731] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.022998Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123655046095744:2731] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.023016Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123655046095744:2731] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.023094Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123655046095744:2731] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.023412Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123655046095744:2731] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.023511Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123655046095744:2731] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-12-23T18:34:45.023604Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7587123655046095744:2731] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-12-23T18:34:45.023764Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7587123655046095744:2731] txid# 281474976710657 HANDLE EvClientConnected 2025-12-23T18:34:45.024313Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:45.032730Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7587123655046095744:2731] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-12-23T18:34:45.032804Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7587123655046095744:2731] txid# 281474976710657 SEND to# [1:7587123655046095743:2730] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-12-23T18:34:45.045295Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123650751127625:2117] Handle TEvProposeTransaction 2025-12-23T18:34:45.045321Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123650751127625:2117] TxId# 281474976710658 ProcessProposeTransaction 2025-12-23T18:34:45.045355Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123650751127625:2117] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7587123659341063077:2765] 2025-12-23T18:34:45.048193Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123659341063077:2765] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.048247Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123659341063077:2765] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.048279Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123659341063077:2765] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.048328Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123659341063077:2765] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.048604Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123659341063077:2765] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.048725Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123659341063077:2765] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinato ... 
Request# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:37.731947Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124658371995470:2993] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-12-23T18:38:37.732121Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124658371995470:2993] txid# 281474976715661 HANDLE EvClientConnected 2025-12-23T18:38:37.740112Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124658371995470:2993] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-12-23T18:38:37.740277Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7587124658371995470:2993] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:38:37.740308Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124658371995470:2993] txid# 281474976715661 SEND to# [59:7587124658371995392:2336] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-12-23T18:38:37.766335Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124632602190419:2105] Handle TEvProposeTransaction 2025-12-23T18:38:37.766363Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124632602190419:2105] TxId# 281474976715662 ProcessProposeTransaction 2025-12-23T18:38:37.766411Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124632602190419:2105] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7587124658371995494:3005] 2025-12-23T18:38:37.769422Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124658371995494:3005] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47238" 2025-12-23T18:38:37.769490Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124658371995494:3005] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:37.769507Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124658371995494:3005] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:37.769565Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124658371995494:3005] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:37.769891Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124658371995494:3005] txid# 281474976715662 HANDLE EvNavigateKeySetResult 
TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:37.769964Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124658371995494:3005] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:37.814305Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124658371995494:3005] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-12-23T18:38:37.814529Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124658371995494:3005] txid# 281474976715662 HANDLE EvClientConnected 2025-12-23T18:38:37.818690Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124658371995494:3005] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-12-23T18:38:37.818782Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124658371995494:3005] txid# 281474976715662 SEND to# [59:7587124658371995493:2328] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-12-23T18:38:37.838823Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124632602190419:2105] Handle TEvProposeTransaction 2025-12-23T18:38:37.838864Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124632602190419:2105] TxId# 281474976715663 ProcessProposeTransaction 2025-12-23T18:38:37.838916Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124632602190419:2105] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7587124658371995508:3015] 2025-12-23T18:38:37.842036Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124658371995508:3015] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "db_admin@builtin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47238" 2025-12-23T18:38:37.842129Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124658371995508:3015] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:37.842155Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124658371995508:3015] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:37.842220Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124658371995508:3015] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:37.842649Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124658371995508:3015] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:37.842767Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124658371995508:3015] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 
2025-12-23T18:38:37.842864Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124658371995508:3015] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-12-23T18:38:37.843021Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124658371995508:3015] txid# 281474976715663 HANDLE EvClientConnected 2025-12-23T18:38:37.843498Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:38:37.848069Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124658371995508:3015] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-12-23T18:38:37.848139Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124658371995508:3015] txid# 281474976715663 SEND to# [59:7587124658371995507:2343] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-12-23T18:38:37.940095Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124632602190419:2105] Handle TEvProposeTransaction 2025-12-23T18:38:37.940123Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124632602190419:2105] TxId# 281474976715664 ProcessProposeTransaction 2025-12-23T18:38:37.940167Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124632602190419:2105] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7587124658371995542:3029] 2025-12-23T18:38:37.942422Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124658371995542:3029] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\020db_admin@builtin\022\030\022\026\n\024all-users@well-known\032\020db_admin@builtin\"\007Builtin*\027db_a****ltin (DEFA2CD5)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:47272" 2025-12-23T18:38:37.942473Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124658371995542:3029] txid# 281474976715664 Bootstrap, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:37.942489Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124658371995542:3029] txid# 281474976715664 Bootstrap, UserSID: db_admin@builtin IsClusterAdministrator: 0 2025-12-23T18:38:37.942651Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7587124658371995542:3029] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-12-23T18:38:37.942680Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7587124658371995542:3029] txid# 281474976715664 HandleResolveDatabase, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: db_admin@builtin 2025-12-23T18:38:37.942712Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124658371995542:3029] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:37.943154Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124658371995542:3029] txid# 281474976715664 HANDLE EvNavigateKeySetResult 
TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:37.943251Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124658371995542:3029] HANDLE EvNavigateKeySetResult, txid# 281474976715664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:37.943341Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124658371995542:3029] txid# 281474976715664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715664 TabletId# 72057594046644480} 2025-12-23T18:38:37.943507Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124658371995542:3029] txid# 281474976715664 HANDLE EvClientConnected 2025-12-23T18:38:37.946849Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124658371995542:3029] txid# 281474976715664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715664} 2025-12-23T18:38:37.946901Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124658371995542:3029] txid# 281474976715664 SEND to# [59:7587124658371995541:2348] Source {TEvProposeTransactionStatus txid# 281474976715664 Status# 48}
|93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest
|93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest
|93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest
|93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD]
Test command err: Starting YDB, grpc: 31587, msgbus: 2893 2025-12-23T18:34:43.537656Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123650537564343:2088];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:43.561831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00485d/r3tmp/tmplnbUcj/pdisk_1.dat 2025-12-23T18:34:43.947337Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:34:43.994843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:43.996938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:44.023958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:44.094951Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.130830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TServer::EnableGrpc on 
GrpcPort 31587, node 1 2025-12-23T18:34:44.472304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:44.472357Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:44.472366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:44.472463Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:34:44.569542Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:2893 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:34:44.953482Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587123650537564538:2118] Handle TEvNavigate describe path dc-1 2025-12-23T18:34:44.953525Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587123654832532678:2755] HANDLE EvNavigateScheme dc-1 2025-12-23T18:34:44.953808Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587123654832532678:2755] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:44.988233Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587123654832532678:2755] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-12-23T18:34:44.997779Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587123654832532678:2755] Handle TEvDescribeSchemeResult Forward to# [1:7587123654832532674:2751] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription 
{ Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514884218 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:34:45.073082Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123650537564538:2118] Handle TEvProposeTransaction 2025-12-23T18:34:45.073115Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123650537564538:2118] TxId# 281474976710657 ProcessProposeTransaction 2025-12-23T18:34:45.073205Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123650537564538:2118] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7587123659127499979:2760] 2025-12-23T18:34:45.230791Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123659127499979:2760] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.230890Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123659127499979:2760] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.230920Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123659127499979:2760] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.231012Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123659127499979:2760] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.231358Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123659127499979:2760] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.231479Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123659127499979:2760] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-12-23T18:34:45.231562Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7587123659127499979:2760] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-12-23T18:34:45.231712Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7587123659127499979:2760] txid# 281474976710657 HANDLE EvClientConnected 2025-12-23T18:34:45.232381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:45.236400Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7587123659127499979:2760] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-12-23T18:34:45.236472Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7587123659127499979:2760] txid# 281474976710657 SEND to# [1:7587123659127499978:2759] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-12-23T18:34:45.274992Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123650537564538:2118] Handle TEvProposeTransaction 2025-12-23T18:34:45.275028Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123650537564538:2118] TxId# 281474976710658 ProcessProposeTransaction 2025-12-23T18:34:45.275068Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123650537564538:2118] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7587123659127500020:2795] 2025-12-23T18:34:45.277871Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123659127500020:2795] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.277929Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123659127500020:2795] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.277968Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123659127500020:2795] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.278029Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123659127500020:2795] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.278343Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123659127500020:2795] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.278452Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123659127500020:2795] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:34:45.278535Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7587123659127500020:2795] txid# 28147497671065 ... 
803804951:2116] Handle TEvProposeTransaction 2025-12-23T18:38:50.604588Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124685803804951:2116] TxId# 281474976710661 ProcessProposeTransaction 2025-12-23T18:38:50.604656Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124685803804951:2116] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7587124711573610110:3017] 2025-12-23T18:38:50.608460Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124711573610110:3017] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\317\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-12-23T18:38:50.608515Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124711573610110:3017] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:50.608531Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124711573610110:3017] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-12-23T18:38:50.608695Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7587124711573610110:3017] txid# 281474976710661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-12-23T18:38:50.608727Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7587124711573610110:3017] txid# 281474976710661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-12-23T18:38:50.609294Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [59:7587124711573610110:3017] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-12-23T18:38:50.609430Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124711573610110:3017] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:50.609764Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124711573610110:3017] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:50.609876Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124711573610110:3017] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:50.610026Z node 59 :TX_PROXY DEBUG: 
schemereq.cpp:117: Actor# [59:7587124711573610110:3017] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-12-23T18:38:50.610179Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124711573610110:3017] txid# 281474976710661 HANDLE EvClientConnected 2025-12-23T18:38:50.614608Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124711573610110:3017] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-12-23T18:38:50.614783Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7587124711573610110:3017] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:38:50.614830Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124711573610110:3017] txid# 281474976710661 SEND to# [59:7587124711573610028:2335] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-12-23T18:38:50.638931Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124685803804951:2116] Handle TEvProposeTransaction 2025-12-23T18:38:50.638979Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124685803804951:2116] TxId# 281474976710662 ProcessProposeTransaction 2025-12-23T18:38:50.639034Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124685803804951:2116] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7587124711573610134:3029] 2025-12-23T18:38:50.641664Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124711573610134:3029] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:45082" 2025-12-23T18:38:50.641736Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124711573610134:3029] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:50.641760Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124711573610134:3029] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:50.641818Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124711573610134:3029] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:50.642174Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124711573610134:3029] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:50.642266Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124711573610134:3029] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] 
DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:50.699201Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124711573610134:3029] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-12-23T18:38:50.699390Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124711573610134:3029] txid# 281474976710662 HANDLE EvClientConnected 2025-12-23T18:38:50.704208Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124711573610134:3029] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-12-23T18:38:50.704274Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124711573610134:3029] txid# 281474976710662 SEND to# [59:7587124711573610133:2327] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-12-23T18:38:50.791464Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124685803804951:2116] Handle TEvProposeTransaction 2025-12-23T18:38:50.791500Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124685803804951:2116] TxId# 281474976710663 ProcessProposeTransaction 2025-12-23T18:38:50.791551Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124685803804951:2116] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7587124711573610170:3044] 2025-12-23T18:38:50.794675Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124711573610170:3044] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:34492" 2025-12-23T18:38:50.794883Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124711573610170:3044] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:50.794908Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124711573610170:3044] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-12-23T18:38:50.795076Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7587124711573610170:3044] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-12-23T18:38:50.795116Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7587124711573610170:3044] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-12-23T18:38:50.795165Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124711573610170:3044] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:50.795444Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124711573610170:3044] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:50.795480Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7587124711573610170:3044] txid# 281474976710663, Access denied for 
ordinaryuser@builtin, attempt to manage user 2025-12-23T18:38:50.795575Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7587124711573610170:3044] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-12-23T18:38:50.795602Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124711573610170:3044] txid# 281474976710663 SEND to# [59:7587124711573610169:2347] Source {TEvProposeTransactionStatus Status# 5} 2025-12-23T18:38:50.796469Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=59&id=YTJjMmEwNmEtN2MyOTllNWUtYzQ0M2RkZDItZGJlMmRlOWU=, ActorId: [59:7587124711573610155:2347], ActorState: ExecuteState, TraceId: 01kd683gcg4vyfpr8d819a9cgd, Create QueryResponse for error on request, msg: , status: UNAUTHORIZED, issues: { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } 2025-12-23T18:38:50.797042Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7587124685803804951:2116] Handle TEvExecuteKqpTransaction 2025-12-23T18:38:50.797062Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7587124685803804951:2116] TxId# 281474976710664 ProcessProposeKqpTransaction
|93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest
|93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest
|93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest
|93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD]
Test command err: Starting YDB, grpc: 20360, msgbus: 14436 2025-12-23T18:34:43.901888Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123652515598438:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:43.912860Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004810/r3tmp/tmpftmfjo/pdisk_1.dat 2025-12-23T18:34:44.148132Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.367206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:44.367338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:44.381064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:44.481633Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.486875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions TServer::EnableGrpc on GrpcPort 20360, 
node 1 2025-12-23T18:34:44.520152Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /dc-1 Strong=0 2025-12-23T18:34:44.592529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:44.592553Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:44.592565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:44.592642Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14436 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:34:44.852245Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587123652515598632:2116] Handle TEvNavigate describe path dc-1 2025-12-23T18:34:44.852315Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587123656810566753:2750] HANDLE EvNavigateScheme dc-1 2025-12-23T18:34:44.852684Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587123656810566753:2750] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:44.913495Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587123656810566753:2750] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-12-23T18:34:44.929594Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587123656810566753:2750] Handle TEvDescribeSchemeResult Forward to# [1:7587123656810566752:2749] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: 2025-12-23T18:34:44.942666Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514884638 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:34:44.956721Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123652515598632:2116] Handle TEvProposeTransaction 2025-12-23T18:34:44.956747Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123652515598632:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-12-23T18:34:44.956816Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123652515598632:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7587123656810566770:2760] 2025-12-23T18:34:45.048747Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:34:45.081240Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123656810566770:2760] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.081346Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123656810566770:2760] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.081367Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123656810566770:2760] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.081431Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123656810566770:2760] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.081750Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123656810566770:2760] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.081859Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123656810566770:2760] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-12-23T18:34:45.081963Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7587123656810566770:2760] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest 
{TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-12-23T18:34:45.082090Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7587123656810566770:2760] txid# 281474976710657 HANDLE EvClientConnected 2025-12-23T18:34:45.082734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:45.086283Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7587123656810566770:2760] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-12-23T18:34:45.086337Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7587123656810566770:2760] txid# 281474976710657 SEND to# [1:7587123656810566769:2759] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-12-23T18:34:45.108856Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123652515598632:2116] Handle TEvProposeTransaction 2025-12-23T18:34:45.108882Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123652515598632:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-12-23T18:34:45.108910Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123652515598632:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7587123661105534126:2796] 2025-12-23T18:34:45.111432Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123661105534126:2796] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.111488Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123661105534126:2796] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.111502Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123661105534126:2796] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.111551Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123661105534126:2796] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.111808Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123661105534126:2796] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.111905Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123661105534126:2796] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToR ... 
eadyExists txid# 281474976715660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-12-23T18:38:55.760367Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7587124733165846685:2929] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:38:55.760412Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124733165846685:2929] txid# 281474976715660 SEND to# [59:7587124733165846609:2335] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 48} 2025-12-23T18:38:55.791698Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124707396041567:2106] Handle TEvProposeTransaction 2025-12-23T18:38:55.791739Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124707396041567:2106] TxId# 281474976715661 ProcessProposeTransaction 2025-12-23T18:38:55.791798Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124707396041567:2106] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7587124733165846709:2941] 2025-12-23T18:38:55.795402Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124733165846709:2941] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:51386" 2025-12-23T18:38:55.795498Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124733165846709:2941] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:55.795523Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124733165846709:2941] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:55.795583Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124733165846709:2941] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:55.795992Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124733165846709:2941] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:55.796103Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124733165846709:2941] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:55.849934Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124733165846709:2941] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-12-23T18:38:55.850149Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124733165846709:2941] txid# 281474976715661 HANDLE EvClientConnected 
2025-12-23T18:38:55.857774Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124733165846709:2941] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-12-23T18:38:55.857844Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124733165846709:2941] txid# 281474976715661 SEND to# [59:7587124733165846708:2327] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-12-23T18:38:55.970866Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124707396041567:2106] Handle TEvProposeTransaction 2025-12-23T18:38:55.970900Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124707396041567:2106] TxId# 281474976715662 ProcessProposeTransaction 2025-12-23T18:38:55.970945Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124707396041567:2106] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7587124733165846739:2962] 2025-12-23T18:38:55.974202Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124733165846739:2962] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:38830" 2025-12-23T18:38:55.974288Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124733165846739:2962] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:55.974313Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124733165846739:2962] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:55.974376Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124733165846739:2962] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:55.974958Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124733165846739:2962] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:55.975107Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124733165846739:2962] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:55.975194Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124733165846739:2962] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-12-23T18:38:55.975342Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124733165846739:2962] txid# 281474976715662 HANDLE EvClientConnected 2025-12-23T18:38:55.976177Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 
2025-12-23T18:38:55.982915Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124733165846739:2962] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-12-23T18:38:55.982988Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124733165846739:2962] txid# 281474976715662 SEND to# [59:7587124733165846738:2343] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-12-23T18:38:56.079248Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124707396041567:2106] Handle TEvProposeTransaction 2025-12-23T18:38:56.079280Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124707396041567:2106] TxId# 281474976715663 ProcessProposeTransaction 2025-12-23T18:38:56.079332Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124707396041567:2106] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7587124737460814124:3034] 2025-12-23T18:38:56.082580Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124737460814124:3034] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2NjU1ODMzNSwiaWF0IjoxNzY2NTE1MTM1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.YtjBeti-yAwmifSpu5HmqkIuyABYuiYpJ8JsjVnUnl5LZC8v5Oh19TQlPU1VMC0xZHimcwgUE7E-ZwlC5vH1wK3lPm6ZYjDy_gIQTuiINgXC9NzjmnzKBycAbkRehouKhzYX-a_1LrAQYlMbb1Tg7LIPtsK8Hus8PUxCYhzD9njJq7E6Ni55-x2QC3r7-jnjy6K-x0csr7rXrlWnZ4ywXTbpyxjxVqTIhwWG-Uhe5y2u66R4fhzSMRXIuw1qNTqCE7yUuzms9FPoKKkAFF0ebUtQJdqKTG6nPaFFC3vOwXC5GizTA5C2K_vnd8_VbW_YvIFPQbISj76aKmMNg5764w\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2NjU1ODMzNSwiaWF0IjoxNzY2NTE1MTM1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:38864" 2025-12-23T18:38:56.082665Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124737460814124:3034] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:56.082690Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124737460814124:3034] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-12-23T18:38:56.083227Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7587124737460814124:3034] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-12-23T18:38:56.083286Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7587124737460814124:3034] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-12-23T18:38:56.083343Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124737460814124:3034] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:56.083661Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124737460814124:3034] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:56.083698Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7587124737460814124:3034] txid# 281474976715663, Access 
denied for ordinaryuser, attempt to manage user 2025-12-23T18:38:56.083825Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7587124737460814124:3034] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-12-23T18:38:56.083862Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124737460814124:3034] txid# 281474976715663 SEND to# [59:7587124737460814123:2348] Source {TEvProposeTransactionStatus Status# 5} 2025-12-23T18:38:56.084726Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=59&id=MWNlYTM0ZGQtZGY4ZDNjYTUtNzgwNzg1NTgtNWQ1N2RmMg==, ActorId: [59:7587124737460814109:2348], ActorState: ExecuteState, TraceId: 01kd683nhmf4fayzte7tr4a1hg, Create QueryResponse for error on request, msg: , status: UNAUTHORIZED, issues: { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } 2025-12-23T18:38:56.085225Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7587124707396041567:2106] Handle TEvExecuteKqpTransaction 2025-12-23T18:38:56.085250Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7587124707396041567:2106] TxId# 281474976715664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 26903, msgbus: 22142 2025-12-23T18:34:43.973258Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123654160524774:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:43.973536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0047f2/r3tmp/tmpLnNFN0/pdisk_1.dat 2025-12-23T18:34:44.391372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.424938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:44.425041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:44.489091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:44.538144Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26903, node 1 2025-12-23T18:34:44.660588Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.638940Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.011946s 2025-12-23T18:34:44.941177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-12-23T18:34:44.941206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:44.941229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:44.941332Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:34:44.983013Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22142 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:34:45.279694Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587123658455492232:2137] Handle TEvNavigate describe path dc-1 2025-12-23T18:34:45.279775Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587123662750460344:2752] HANDLE EvNavigateScheme dc-1 2025-12-23T18:34:45.280221Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587123662750460344:2752] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.337130Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587123662750460344:2752] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-12-23T18:34:45.350617Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587123662750460344:2752] Handle TEvDescribeSchemeResult Forward to# [1:7587123662750460343:2751] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514884596 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:34:45.378900Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123658455492232:2137] Handle TEvProposeTransaction 2025-12-23T18:34:45.378940Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123658455492232:2137] TxId# 281474976710657 ProcessProposeTransaction 2025-12-23T18:34:45.379055Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123658455492232:2137] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7587123662750460350:2758] 2025-12-23T18:34:45.479680Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123662750460350:2758] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.479800Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123662750460350:2758] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.479818Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123662750460350:2758] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.479884Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123662750460350:2758] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.480278Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123662750460350:2758] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.480457Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123662750460350:2758] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-12-23T18:34:45.480577Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7587123662750460350:2758] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-12-23T18:34:45.480720Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7587123662750460350:2758] txid# 281474976710657 HANDLE EvClientConnected 2025-12-23T18:34:45.481584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:45.486991Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7587123662750460350:2758] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-12-23T18:34:45.487252Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7587123662750460350:2758] txid# 281474976710657 SEND to# [1:7587123662750460349:2757] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-12-23T18:34:45.501307Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123658455492232:2137] Handle TEvProposeTransaction 2025-12-23T18:34:45.501335Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123658455492232:2137] TxId# 281474976710658 ProcessProposeTransaction 2025-12-23T18:34:45.501371Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123658455492232:2137] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7587123662750460386:2791] 2025-12-23T18:34:45.503488Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123662750460386:2791] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.503550Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123662750460386:2791] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.503575Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123662750460386:2791] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.503646Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123662750460386:2791] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.504012Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123662750460386:2791] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.504172Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123662750460386:2791] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# ... 
4689413127765:2109] Handle TEvProposeTransaction 2025-12-23T18:38:50.250388Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124689413127765:2109] TxId# 281474976715661 ProcessProposeTransaction 2025-12-23T18:38:50.250448Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124689413127765:2109] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7587124715182932950:2995] 2025-12-23T18:38:50.254105Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124715182932950:2995] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\317\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-12-23T18:38:50.254180Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124715182932950:2995] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:50.254206Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124715182932950:2995] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-12-23T18:38:50.254412Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7587124715182932950:2995] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-12-23T18:38:50.254447Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7587124715182932950:2995] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-12-23T18:38:50.255862Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [59:7587124715182932950:2995] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-12-23T18:38:50.255992Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124715182932950:2995] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:50.256268Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124715182932950:2995] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:50.256379Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124715182932950:2995] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:50.256519Z node 59 :TX_PROXY DEBUG: 
schemereq.cpp:117: Actor# [59:7587124715182932950:2995] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-12-23T18:38:50.256695Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124715182932950:2995] txid# 281474976715661 HANDLE EvClientConnected 2025-12-23T18:38:50.262353Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124715182932950:2995] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-12-23T18:38:50.262534Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7587124715182932950:2995] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:38:50.262576Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124715182932950:2995] txid# 281474976715661 SEND to# [59:7587124715182932874:2335] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-12-23T18:38:50.287496Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124689413127765:2109] Handle TEvProposeTransaction 2025-12-23T18:38:50.287531Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124689413127765:2109] TxId# 281474976715662 ProcessProposeTransaction 2025-12-23T18:38:50.287578Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124689413127765:2109] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7587124715182932974:3007] 2025-12-23T18:38:50.290659Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124715182932974:3007] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:33568" 2025-12-23T18:38:50.290735Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124715182932974:3007] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:50.292551Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124715182932974:3007] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:50.292621Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124715182932974:3007] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:50.293074Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124715182932974:3007] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:50.293194Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124715182932974:3007] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] 
DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:50.350814Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124715182932974:3007] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-12-23T18:38:50.351013Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124715182932974:3007] txid# 281474976715662 HANDLE EvClientConnected 2025-12-23T18:38:50.362833Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124715182932974:3007] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-12-23T18:38:50.362906Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124715182932974:3007] txid# 281474976715662 SEND to# [59:7587124715182932973:2328] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-12-23T18:38:50.450834Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124689413127765:2109] Handle TEvProposeTransaction 2025-12-23T18:38:50.450873Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124689413127765:2109] TxId# 281474976715663 ProcessProposeTransaction 2025-12-23T18:38:50.450941Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124689413127765:2109] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7587124715182933007:3022] 2025-12-23T18:38:50.453990Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124715182933007:3022] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:33578" 2025-12-23T18:38:50.454072Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124715182933007:3022] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:50.454094Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124715182933007:3022] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-12-23T18:38:50.454300Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7587124715182933007:3022] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-12-23T18:38:50.454341Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7587124715182933007:3022] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-12-23T18:38:50.454391Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124715182933007:3022] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:50.454802Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124715182933007:3022] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:50.454838Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7587124715182933007:3022] txid# 281474976715663, Access denied for 
ordinaryuser@builtin, attempt to manage user 2025-12-23T18:38:50.454946Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7587124715182933007:3022] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-12-23T18:38:50.454979Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124715182933007:3022] txid# 281474976715663 SEND to# [59:7587124715182933006:2345] Source {TEvProposeTransactionStatus Status# 5} 2025-12-23T18:38:50.455499Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=59&id=Nzg2YjMwM2YtZDFiNGUzZTAtOTRiMmY1YjAtOGE5ZmJkYmE=, ActorId: [59:7587124715182932992:2345], ActorState: ExecuteState, TraceId: 01kd683g1w19bgern7t4eha0tv, Create QueryResponse for error on request, msg: , status: UNAUTHORIZED, issues: { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } 2025-12-23T18:38:50.455989Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7587124689413127765:2109] Handle TEvExecuteKqpTransaction 2025-12-23T18:38:50.456018Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7587124689413127765:2109] TxId# 281474976715664 ProcessProposeKqpTransaction |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest |93.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |93.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |93.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest |93.5%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |93.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 29699, msgbus: 21737 2025-12-23T18:34:43.894509Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123653530318943:2177];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:43.894630Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0047fd/r3tmp/tmpIDPVQL/pdisk_1.dat 2025-12-23T18:34:44.205951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.374806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:44.374913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:44.443021Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.462865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.468536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29699, node 1 2025-12-23T18:34:44.669910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:44.669952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:44.669961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:44.670089Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:34:44.797627Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.895334Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21737 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:34:44.997128Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587123653530319039:2117] Handle TEvNavigate describe path dc-1 2025-12-23T18:34:44.997215Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587123657825287207:2782] HANDLE EvNavigateScheme dc-1 2025-12-23T18:34:44.997742Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587123657825287207:2782] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.092086Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587123657825287207:2782] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-12-23T18:34:45.097522Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587123657825287207:2782] Handle TEvDescribeSchemeResult Forward to# [1:7587123657825287206:2781] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514884561 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:34:45.121640Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123653530319039:2117] Handle TEvProposeTransaction 2025-12-23T18:34:45.121671Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123653530319039:2117] TxId# 281474976710657 ProcessProposeTransaction 2025-12-23T18:34:45.121746Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123653530319039:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7587123662120254512:2788] 2025-12-23T18:34:45.217715Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123662120254512:2788] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.217822Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123662120254512:2788] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.217841Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123662120254512:2788] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.217910Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123662120254512:2788] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.218272Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123662120254512:2788] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.218427Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123662120254512:2788] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-12-23T18:34:45.218520Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7587123662120254512:2788] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-12-23T18:34:45.218718Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7587123662120254512:2788] txid# 281474976710657 HANDLE EvClientConnected 2025-12-23T18:34:45.219861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:45.223250Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7587123662120254512:2788] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-12-23T18:34:45.223329Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7587123662120254512:2788] txid# 281474976710657 SEND to# [1:7587123662120254511:2787] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-12-23T18:34:45.247720Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123653530319039:2117] Handle TEvProposeTransaction 2025-12-23T18:34:45.247747Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123653530319039:2117] TxId# 281474976710658 ProcessProposeTransaction 2025-12-23T18:34:45.247789Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123653530319039:2117] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7587123662120254547:2820] 2025-12-23T18:34:45.250652Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123662120254547:2820] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.250720Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123662120254547:2820] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.250759Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123662120254547:2820] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.250816Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123662120254547:2820] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.251149Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123662120254547:2820] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.251296Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123662120254547:2820] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 Pla ... 
94046382081 RedirectRequired# true 2025-12-23T18:38:56.167277Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124741091887612:2918] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-12-23T18:38:56.167472Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124741091887612:2918] txid# 281474976710661 HANDLE EvClientConnected 2025-12-23T18:38:56.170884Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124741091887612:2918] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-12-23T18:38:56.170954Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124741091887612:2918] txid# 281474976710661 SEND to# [59:7587124741091887611:2327] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-12-23T18:38:56.300773Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124715322082516:2114] Handle TEvProposeTransaction 2025-12-23T18:38:56.300807Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124715322082516:2114] TxId# 281474976710662 ProcessProposeTransaction 2025-12-23T18:38:56.300858Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124715322082516:2114] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7587124741091887636:2936] 2025-12-23T18:38:56.304033Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124741091887636:2936] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:48492" 2025-12-23T18:38:56.304151Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124741091887636:2936] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:56.304176Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124741091887636:2936] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:56.304242Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124741091887636:2936] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:56.304640Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124741091887636:2936] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:56.304791Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124741091887636:2936] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:56.304875Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124741091887636:2936] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-12-23T18:38:56.305037Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# 
[59:7587124741091887636:2936] txid# 281474976710662 HANDLE EvClientConnected 2025-12-23T18:38:56.305851Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:38:56.314419Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124741091887636:2936] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-12-23T18:38:56.314497Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124741091887636:2936] txid# 281474976710662 SEND to# [59:7587124741091887635:2341] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-12-23T18:38:56.402570Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124715322082516:2114] Handle TEvProposeTransaction 2025-12-23T18:38:56.402608Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124715322082516:2114] TxId# 281474976710663 ProcessProposeTransaction 2025-12-23T18:38:56.402664Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124715322082516:2114] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7587124741091887717:3003] 2025-12-23T18:38:56.405652Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124741091887717:3003] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:48522" 2025-12-23T18:38:56.405727Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124741091887717:3003] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:56.405750Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124741091887717:3003] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:56.405805Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124741091887717:3003] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:56.406206Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124741091887717:3003] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:56.406286Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124741091887717:3003] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:56.456421Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124741091887717:3003] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-12-23T18:38:56.456610Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124741091887717:3003] txid# 
281474976710663 HANDLE EvClientConnected 2025-12-23T18:38:56.465750Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124741091887717:3003] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-12-23T18:38:56.465828Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124741091887717:3003] txid# 281474976710663 SEND to# [59:7587124741091887716:2343] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-12-23T18:38:56.554434Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124715322082516:2114] Handle TEvProposeTransaction 2025-12-23T18:38:56.554473Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124715322082516:2114] TxId# 281474976710664 ProcessProposeTransaction 2025-12-23T18:38:56.554543Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124715322082516:2114] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7587124741091887746:3017] 2025-12-23T18:38:56.557974Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124741091887746:3017] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2NjU1ODMzNiwiaWF0IjoxNzY2NTE1MTM2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.p_acaQ1ypQH1LA8JESS0t3jWDQk5oW_UVnkqUAh255i6nGjyiD1k6bGlKWP3B1c_MJROOzd1Q5QWp0ig6BqIBYkLnX-_1-JF-yTPRFXT4eFfUbxp9xbj-eS6frJGPf-hEpA4Zk7ziGdYI8HBtRL54D9wEE-YDnaiObE-9gGLx4PMwKMJSZdGYzqLq0J81RAw8xd4GwNJxsM3g9TxIxLsmmH9zgyr-q8qd4DceK-QD2X6nRJ5fgJ65Urf0PACgVoM9b7kDvwDnXt3Z0K1_u04xkbZ8zYi5odhDb3fnCYU6sZx1kFxhxykcl6TKek6HHfTVxvGpjn6iUf72FaG0Wb14A\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2NjU1ODMzNiwiaWF0IjoxNzY2NTE1MTM2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:55762" 2025-12-23T18:38:56.558053Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124741091887746:3017] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:56.558083Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124741091887746:3017] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-12-23T18:38:56.558246Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7587124741091887746:3017] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-12-23T18:38:56.558296Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7587124741091887746:3017] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-12-23T18:38:56.558348Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124741091887746:3017] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:56.558652Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124741091887746:3017] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:56.558683Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7587124741091887746:3017] txid# 281474976710664, 
Access denied for ordinaryuser, attempt to manage user 2025-12-23T18:38:56.560179Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7587124741091887746:3017] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-12-23T18:38:56.560226Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124741091887746:3017] txid# 281474976710664 SEND to# [59:7587124741091887745:2354] Source {TEvProposeTransactionStatus Status# 5} 2025-12-23T18:38:56.560864Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=59&id=Zjk0NDMwNWUtN2ZiN2MzNS01NWQwNjQyMC1lMmIxY2YxZA==, ActorId: [59:7587124741091887736:2354], ActorState: ExecuteState, TraceId: 01kd683p0ker8kphv2hqmb4ba3, Create QueryResponse for error on request, msg: , status: UNAUTHORIZED, issues: { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } 2025-12-23T18:38:56.561618Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7587124715322082516:2114] Handle TEvExecuteKqpTransaction 2025-12-23T18:38:56.561639Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7587124715322082516:2114] TxId# 281474976710665 ProcessProposeKqpTransaction |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest |93.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots >> TPQTest::The_Keys_Are_Loaded_In_Several_Iterations [GOOD] >> TPQTestInternal::RestoreKeys [GOOD] |93.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |93.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [FAIL] Test command err: Starting YDB, grpc: 9016, msgbus: 28754 2025-12-23T18:34:43.563101Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123654296465002:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:43.564695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00484b/r3tmp/tmp6zhmri/pdisk_1.dat 2025-12-23T18:34:43.944668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:34:43.995884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:43.996001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:44.023987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:44.069798Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 9016, node 1 2025-12-23T18:34:44.196652Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.284321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:44.284343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:44.284359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:44.284493Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:34:44.590956Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28754 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:34:44.638043Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587123654296465220:2117] Handle TEvNavigate describe path dc-1 2025-12-23T18:34:44.638099Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587123658591433329:2723] HANDLE EvNavigateScheme dc-1 2025-12-23T18:34:44.638420Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587123658591433329:2723] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:44.693261Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587123658591433329:2723] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-12-23T18:34:44.714789Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587123658591433329:2723] Handle TEvDescribeSchemeResult Forward to# [1:7587123658591433328:2722] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 
ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514884141 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:34:44.764285Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123654296465220:2117] Handle TEvProposeTransaction 2025-12-23T18:34:44.764321Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123654296465220:2117] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:34:44.764391Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123654296465220:2117] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7587123658591433338:2729] 2025-12-23T18:34:44.957520Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123658591433338:2729] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:44.957626Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123658591433338:2729] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:34:44.957652Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123658591433338:2729] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:44.957718Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123658591433338:2729] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:44.958030Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123658591433338:2729] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:44.958195Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123658591433338:2729] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-12-23T18:34:44.958327Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7587123658591433338:2729] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 
2025-12-23T18:34:44.958474Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7587123658591433338:2729] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:34:44.959112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:44.971688Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7587123658591433338:2729] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:34:44.971747Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7587123658591433338:2729] txid# 281474976715657 SEND to# [1:7587123658591433337:2728] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-12-23T18:34:45.005917Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123654296465220:2117] Handle TEvProposeTransaction 2025-12-23T18:34:45.005945Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123654296465220:2117] TxId# 281474976715658 ProcessProposeTransaction 2025-12-23T18:34:45.005974Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123654296465220:2117] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7587123662886400673:2765] 2025-12-23T18:34:45.008611Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123662886400673:2765] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.008673Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123662886400673:2765] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:34:45.008708Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123662886400673:2765] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.008758Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123662886400673:2765] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.009017Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123662886400673:2765] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.009133Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123662886400673:2765] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:34:45.009187Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7587123662886400673:2765] txid# 28147497671565 ... 
ldrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515131591 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:38:52.063198Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124718352101248:2112] Handle TEvProposeTransaction 2025-12-23T18:38:52.063233Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124718352101248:2112] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:38:52.063305Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124718352101248:2112] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [59:7587124722647069577:2754] 2025-12-23T18:38:52.066424Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124722647069577:2754] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:38:52.066498Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124722647069577:2754] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:52.066518Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124722647069577:2754] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:52.066580Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124722647069577:2754] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:52.067256Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124722647069577:2754] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:52.067347Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124722647069577:2754] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-12-23T18:38:52.067439Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124722647069577:2754] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:38:52.067580Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124722647069577:2754] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:38:52.068257Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:38:52.076180Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124722647069577:2754] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:38:52.076243Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124722647069577:2754] txid# 281474976715657 SEND to# [59:7587124722647069576:2753] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 
2025-12-23T18:38:52.122575Z node 59 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:52.135618Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124718352101248:2112] Handle TEvProposeTransaction 2025-12-23T18:38:52.135649Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124718352101248:2112] TxId# 281474976715658 ProcessProposeTransaction 2025-12-23T18:38:52.135691Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124718352101248:2112] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [59:7587124722647069612:2786] 2025-12-23T18:38:52.138679Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124722647069612:2786] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:38:52.140448Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124722647069612:2786] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:38:52.140485Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124722647069612:2786] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:52.140549Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124722647069612:2786] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:52.140838Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124722647069612:2786] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:52.140974Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124722647069612:2786] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:52.141056Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124722647069612:2786] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-12-23T18:38:52.141205Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124722647069612:2786] txid# 281474976715658 HANDLE EvClientConnected 2025-12-23T18:38:52.141906Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:38:52.158454Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124722647069612:2786] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-12-23T18:38:52.168534Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124722647069612:2786] txid# 281474976715658 SEND to# [59:7587124722647069611:2785] 
Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-12-23T18:38:57.496267Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kd683hsxfsfk3ysqjw2h4yd9", Request deadline has expired for 0.316497s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14878 TBackTrace::Capture()+28 (0x1B03950C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B5289DC) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1825 (0x1AC12391) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+1758 (0x1AC29A9E) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AC6D066) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AC5AB08) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B56166A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B52F6B8) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AC59C0D) NUnitTest::TTestFactory::Execute()+2176 (0x1B530E70) NUnitTest::RunMain(int, char**)+5805 (0x1B55B4CD) ??+0 (0x7F179E283D90) __libc_start_main+128 (0x7F179E283E40) _start+41 (0x18614029) |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> KqpErrors::ProposeResultLost_RwTx+UseSink [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TLocksTest::BrokenDupLock [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] >> TSchemeShardTest::AlterMixedStorageConfigAndChannelProfileIdTable >> KqpQueryService::CloseSessionsWithLoad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD] Test command err: 2025-12-23T18:38:24.136069Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2025-12-23T18:38:24.270215Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:24.270293Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:24.270355Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:24.270470Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] 
recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:182:2057] recipient: [1:14:2061] 2025-12-23T18:38:24.315725Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:24.362228Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2025-12-23T18:38:24.363835Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2025-12-23T18:38:24.366671Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:188:2142] 2025-12-23T18:38:24.377695Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:189:2142] 2025-12-23T18:38:24.379825Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:189:2142] 2025-12-23T18:38:24.418366Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:24.419098Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|72182df7-95171cf9-12bbd221-6546e3ec_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:235:2057] recipient: [1:104:2137] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:238:2057] recipient: [1:237:2232] Leader for TabletID 72057594037927937 is [1:239:2233] sender: [1:240:2057] recipient: [1:237:2232] 2025-12-23T18:38:24.696511Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:24.696599Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:24.697735Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:24.697796Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:24.698872Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:288:2233] 2025-12-23T18:38:24.701138Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:289:2233] 
2025-12-23T18:38:24.713010Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:38:24.713733Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:289:2233] 2025-12-23T18:38:24.736760Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:38:24.738640Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:288:2233] 2025-12-23T18:38:24.739604Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [1:239:2233] sender: [1:317:2057] recipient: [1:14:2061] Got start offset = 40000 2025-12-23T18:38:25.497752Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:113:2057] recipient: [2:106:2138] 2025-12-23T18:38:25.604319Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:25.604401Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:25.604460Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:25.604528Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927938 is [2:158:2176] sender: [2:159:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:184:2057] recipient: [2:14:2061] 2025-12-23T18:38:25.648164Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:25.649181Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 2 actor [2:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } Consumers { Name: "user1" Generation: 2 Important: true } 2025-12-23T18:38:25.650013Z node 2 :PERSQUEUE INFO: 
partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:190:2142] 2025-12-23T18:38:25.661026Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:190:2142] 2025-12-23T18:38:25.668533Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:191:2142] 2025-12-23T18:38:25.670689Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:191:2142] 2025-12-23T18:38:25.701846Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:25.702436Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|87b10bb8-b5a74931-74eb1ffd-ae18f9fa_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:237:2057] recipient: [2:104:2137] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:240:2057] recipient: [2:239:2234] Leader for TabletID 72057594037927937 is [2:241:2235] sender: [2:242:2057] recipient: [2:239:2234] 2025-12-23T18:38:25.930020Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:25.930116Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:25.931043Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:25.931111Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:38:25.931871Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:290:2235] 2025-12-23T18:38:25.934507Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:291:2235] 2025-12-23T18:38:25.944273Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:38:25.945263Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:291:2235] 2025-12-23T18:38:25.965957Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-12-23T18:38:25.967406Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:290:2235] 2025-12-23T18:38:25.968352Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:241:2235] sender: [2:319:2057] recipient: [2:14:2061] Got start offset = 40000 2025-12-23T18:38:26.562352Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:108:2057] recipient: [3:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:108:2057] recipient: [3:106:2138] L ... 0000000000000000234_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000235_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000236_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000237_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000238_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000239_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000240_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000241_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000242_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000243_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000244_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000245_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000246_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000247_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000248_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000249_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000250_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000251_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000252_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000253_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000254_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000255_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000256_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000257_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000258_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000259_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000260_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000261_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000262_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000263_00000_0000000001_00000? 
size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000264_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000265_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000266_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000267_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000268_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000269_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000270_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000271_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000272_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000273_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000274_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000275_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000276_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000277_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000278_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000279_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000280_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000281_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000282_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000283_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000284_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000285_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000286_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000287_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000288_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000289_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000290_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000291_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000292_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000293_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000294_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000295_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000296_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000297_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000298_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000299_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000300_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000301_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000302_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000303_00000_0000000001_00000? 
size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000304_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000305_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000306_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000307_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000308_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000309_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000310_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000311_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000312_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000313_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000314_00000_0000000001_00000? size 28702 2025-12-23T18:39:06.750824Z node 9 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:39:06.750899Z node 9 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:39:06.750937Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:39:06.750976Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:39:06.751050Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:39:06.751093Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:39:06.751131Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:39:06.751202Z node 9 :PERSQUEUE DEBUG: partition.cpp:757: [72057594037927937][Partition][0][StateIdle] Init complete for topic 'rt3.dc1--asdfgs--topic' Partition: 0 SourceId: sourceid SeqNo: 315 offset: 314 MaxOffset: 315 2025-12-23T18:39:06.751249Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 13 2025-12-23T18:39:06.751295Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:976: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 13 2025-12-23T18:39:06.751372Z node 9 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:39:06.751887Z node 9 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:39:06.753310Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 3 Topic 'rt3.dc1--asdfgs--topic' partition 0 user user offset 0 partno 0 count 1 size 1024000 endOffset 315 max time lag 0ms effective offset 0 2025-12-23T18:39:06.753587Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 8352628 count 168 last offset 0, current partition end offset: 315 2025-12-23T18:39:06.753636Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2025-12-23T18:39:06.753869Z node 9 :PERSQUEUE DEBUG: cache_eviction.h:464: No blob in L1. Partition 0 offset 0 partno 0 count 167 parts_count 0 actorID [9:3737:5380] 2025-12-23T18:39:06.753932Z node 9 :PERSQUEUE DEBUG: read.h:142: [72057594037927937][PQCacheProxy]Reading cookie 3. Have to read 1 of 1 from KV 2025-12-23T18:39:06.754048Z node 9 :PERSQUEUE DEBUG: pq_l2_cache.cpp:223: PQ Cache (L2). Missed blob. tabletId '72057594037927937' partition 0 offset 0 partno 0 count 167 parts_count 0 2025-12-23T18:39:06.811864Z node 9 :PERSQUEUE DEBUG: read.h:178: [72057594037927937][PQCacheProxy]Got results. 1 of 1 from KV. Status 1 2025-12-23T18:39:06.811951Z node 9 :PERSQUEUE DEBUG: read.h:195: [72057594037927937][PQCacheProxy]Got results. result 0 from KV. Status 0 2025-12-23T18:39:06.812081Z node 9 :PERSQUEUE DEBUG: cache_eviction.h:398: Prefetched blob in L1. Partition 0 offset 0 count 167 size 8352628 actorID [9:3737:5380] 2025-12-23T18:39:06.812380Z node 9 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 167 parts 0 suffix '0' size 8352628 2025-12-23T18:39:06.812611Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:39:06.832278Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 167 count 11 size 550146 from pos 0 cbcount 11 2025-12-23T18:39:06.843448Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:1026: Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp done, result 130 queuesize 0 startOffset 0 2025-12-23T18:39:06.845012Z node 9 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 9 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [9:3696:5380] sender: [9:3914:2057] recipient: [9:14:2061] 2025-12-23T18:39:06.846057Z node 9 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [9:3913:5430], now have 1 active actors on pipe Got start offset = 0 |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> KqpErrors::ResolveTableError [GOOD] >> ScriptExecutionsTest::BackgroundOperationFinalization [GOOD] >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TLocksTest::GoodNullLock [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:37:02.920181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:37:02.920271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:02.920306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:37:02.920342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:37:02.920379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:37:02.920405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:37:02.920481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:02.920587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:37:02.921591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:37:02.921885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:37:03.032033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:37:03.032115Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:03.043801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:37:03.044051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:37:03.044277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:37:03.057197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:37:03.057501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:37:03.058297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:37:03.062220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:37:03.078559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:03.078817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:37:03.080241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:37:03.080336Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:03.080548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:37:03.080600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:37:03.080761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:37:03.080896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:37:03.235077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.236101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.236239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.236335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.236401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.236489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.236575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.236679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.236750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.236829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.236904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.237001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.237087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.237163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:03.237249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Reads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 6963 Memory: 124368 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 133 TableOwnerId: 72057594046678944 FollowerId: 0 2025-12-23T18:39:06.792677Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5284: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-12-23T18:39:06.792758Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.6963 2025-12-23T18:39:06.792897Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:39:06.792962Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-12-23T18:39:06.831240Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:39:06.831340Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:39:06.831385Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-12-23T18:39:06.831475Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2025-12-23T18:39:06.831523Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-12-23T18:39:06.831684Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2025-12-23T18:39:06.831786Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-12-23T18:39:06.831848Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2025-12-23T18:39:06.832011Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 1, Rows# 100, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-12-23T18:39:06.832239Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 30 seconds 2025-12-23T18:39:06.832303Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:227: [BackgroundCompaction] [Update] Enqueued shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2025-12-23T18:39:06.832395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2025-12-23T18:39:06.832512Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:39:06.832921Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [3:128:2152], Recipient [3:720:2705]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 38 } CompactSinglePartedShards: true 2025-12-23T18:39:06.833120Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186233409546 tableId# 38 localTid# 1001, requested from [3:128:2152], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-12-23T18:39:06.835002Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T18:39:06.835064Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 0, front# 1 2025-12-23T18:39:06.863821Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:710:2697], Recipient [3:720:2705]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-12-23T18:39:06.882615Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 1, ts 1970-01-01T00:00:30.244000Z 2025-12-23T18:39:06.882986Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 1, front# 1 2025-12-23T18:39:06.883069Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: 
ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:128:2152]pathId# [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:39:06.884157Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269553211, Sender [3:720:2705], Recipient [3:128:2152]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 38 } Status: OK 2025-12-23T18:39:06.884224Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-12-23T18:39:06.884356Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 0, Rows# 0, Deletes# 0, Compaction# 1970-01-01T00:00:30.000000Z}, next wakeup in# 29.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-12-23T18:39:06.884513Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 29.996000s, rate# 1, in queue# 0 shards, waiting after compaction# 0 shards, running# 1 shards at schemeshard 72057594046678944 2025-12-23T18:39:06.885105Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [3:128:2152], Recipient [3:720:2705]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 38 } CompactSinglePartedShards: true 2025-12-23T18:39:06.885268Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 2 of 72075186233409546 tableId# 38 localTid# 1001, requested from [3:128:2152], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-12-23T18:39:06.887526Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 1, ts 1970-01-01T00:00:30.244000Z 2025-12-23T18:39:06.887576Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 1, front# 2 2025-12-23T18:39:06.912264Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:710:2697], Recipient [3:720:2705]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-12-23T18:39:06.912585Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:189: Updated last full compaction of tablet# 72075186233409546, tableId# 38, last full compaction# 1970-01-01T00:00:30.244000Z 2025-12-23T18:39:06.930321Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:710:2697], Recipient [3:720:2705]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-12-23T18:39:06.930919Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 2, ts 1970-01-01T00:00:30.248000Z 2025-12-23T18:39:06.931003Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 2, front# 2 2025-12-23T18:39:06.931050Z node 3 :TX_DATASHARD DEBUG: 
datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:128:2152]pathId# [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:39:06.931477Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269553211, Sender [3:720:2705], Recipient [3:128:2152]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 38 } Status: OK 2025-12-23T18:39:06.931528Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5323: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-12-23T18:39:06.931650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 0 seconds 2025-12-23T18:39:06.931725Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.992000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-12-23T18:39:06.940204Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:710:2697], Recipient [3:720:2705]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-12-23T18:39:06.951268Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:39:06.951362Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:39:06.951399Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:39:06.983895Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:189: Updated last full compaction of tablet# 72075186233409546, tableId# 38, last full compaction# 1970-01-01T00:00:30.248000Z |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> TTicketParserTest::AuthorizationModify [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] Test command err: Starting YDB, grpc: 30775, msgbus: 17015 2025-12-23T18:34:43.889630Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123652926110369:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:43.889686Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:34:43.951220Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.008200s 2025-12-23T18:34:43.993954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:34:44.007937Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005730s 2025-12-23T18:34:44.039477Z node 1 :BS_NODE WARN: 
{NWDC01@distconf.cpp:423} StateFunc too long Type# 268639240 Duration# 0.011610s test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0047fc/r3tmp/tmpil5wWP/pdisk_1.dat 2025-12-23T18:34:44.348563Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.408585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:44.408684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:44.439931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:44.449836Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30775, node 1 2025-12-23T18:34:44.589613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.601675Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:44.601697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:44.601712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:44.601815Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17015 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:34:44.895698Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587123652926110571:2116] Handle TEvNavigate describe path dc-1 2025-12-23T18:34:44.895777Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587123657221078702:2738] HANDLE EvNavigateScheme dc-1 2025-12-23T18:34:44.896229Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587123657221078702:2738] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:44.934853Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:34:44.941543Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587123657221078702:2738] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-12-23T18:34:44.949942Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587123657221078702:2738] Handle TEvDescribeSchemeResult Forward to# [1:7587123657221078699:2736] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514884533 
ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:34:44.978999Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123652926110571:2116] Handle TEvProposeTransaction 2025-12-23T18:34:44.979028Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123652926110571:2116] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:34:44.979105Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123652926110571:2116] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7587123657221078718:2748] 2025-12-23T18:34:45.114551Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123657221078718:2748] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.114649Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123657221078718:2748] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.114669Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123657221078718:2748] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.115010Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123657221078718:2748] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.115361Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123657221078718:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.115508Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123657221078718:2748] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-12-23T18:34:45.115612Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7587123657221078718:2748] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:34:45.115770Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7587123657221078718:2748] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:34:45.116660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:45.120187Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7587123657221078718:2748] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:34:45.120254Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7587123657221078718:2748] txid# 281474976715657 SEND to# 
[1:7587123657221078717:2747] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-12-23T18:34:45.203294Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123652926110571:2116] Handle TEvProposeTransaction 2025-12-23T18:34:45.203326Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123652926110571:2116] TxId# 281474976715658 ProcessProposeTransaction 2025-12-23T18:34:45.203362Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123652926110571:2116] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7587123661516046053:2781] 2025-12-23T18:34:45.206278Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123661516046053:2781] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.206330Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123661516046053:2781] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.206346Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123661516046053:2781] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.206402Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123661516046053:2781] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.206780Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123661516046053:2781] txid ... rd__operation_create_resource_pool.cpp:179) 2025-12-23T18:38:59.624513Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124750555845698:2930] txid# 281474976710660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-12-23T18:38:59.624583Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124750555845698:2930] txid# 281474976710660 SEND to# [59:7587124750555845696:2335] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 53} 2025-12-23T18:38:59.653939Z node 59 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7587124750555845696:2335], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:38:59.721122Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124729081007884:2102] Handle TEvProposeTransaction 2025-12-23T18:38:59.721165Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124729081007884:2102] TxId# 281474976710661 ProcessProposeTransaction 2025-12-23T18:38:59.721230Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124729081007884:2102] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7587124750555845777:2989] 2025-12-23T18:38:59.724671Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124750555845777:2989] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\317\003\032\014root@builtin \003\n#\010\000\022\037\010\001\020\377\317\003\032\025cluster_admin@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-12-23T18:38:59.724728Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124750555845777:2989] txid# 281474976710661 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:38:59.724751Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124750555845777:2989] txid# 281474976710661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-12-23T18:38:59.725688Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [59:7587124750555845777:2989] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-12-23T18:38:59.725800Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124750555845777:2989] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:59.726039Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124750555845777:2989] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:59.726130Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124750555845777:2989] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:59.726252Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124750555845777:2989] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-12-23T18:38:59.726397Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# 
[59:7587124750555845777:2989] txid# 281474976710661 HANDLE EvClientConnected 2025-12-23T18:38:59.736207Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124750555845777:2989] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-12-23T18:38:59.736330Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7587124750555845777:2989] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:38:59.736360Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124750555845777:2989] txid# 281474976710661 SEND to# [59:7587124750555845696:2335] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-12-23T18:38:59.765877Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124729081007884:2102] Handle TEvProposeTransaction 2025-12-23T18:38:59.765912Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124729081007884:2102] TxId# 281474976710662 ProcessProposeTransaction 2025-12-23T18:38:59.765965Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124729081007884:2102] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7587124750555845800:3000] 2025-12-23T18:38:59.769081Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124750555845800:3000] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:57070" 2025-12-23T18:38:59.769170Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124750555845800:3000] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:38:59.769196Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124750555845800:3000] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:38:59.769253Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124750555845800:3000] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:59.769636Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124750555845800:3000] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:59.769739Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124750555845800:3000] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:38:59.821347Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124750555845800:3000] txid# 
281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-12-23T18:38:59.821522Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124750555845800:3000] txid# 281474976710662 HANDLE EvClientConnected 2025-12-23T18:38:59.828916Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124750555845800:3000] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-12-23T18:38:59.828994Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124750555845800:3000] txid# 281474976710662 SEND to# [59:7587124750555845799:2328] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-12-23T18:38:59.903260Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124729081007884:2102] Handle TEvProposeTransaction 2025-12-23T18:38:59.903295Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124729081007884:2102] TxId# 281474976710663 ProcessProposeTransaction 2025-12-23T18:38:59.903353Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124729081007884:2102] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7587124750555845833:3015] 2025-12-23T18:38:59.906478Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124750555845833:3015] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:57094" 2025-12-23T18:38:59.906570Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124750555845833:3015] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:38:59.906595Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124750555845833:3015] txid# 281474976710663 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2025-12-23T18:38:59.906658Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124750555845833:3015] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:38:59.907101Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124750555845833:3015] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:38:59.907166Z node 59 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [59:7587124750555845833:3015] txid# 281474976710663, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2025-12-23T18:38:59.907277Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7587124750555845833:3015] txid# 281474976710663, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2025-12-23T18:38:59.907316Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124750555845833:3015] txid# 281474976710663 SEND to# [59:7587124750555845832:2345] Source {TEvProposeTransactionStatus Status# 5} 2025-12-23T18:38:59.907803Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=59&id=YWU3NTJhMTAtZDc1M2MwMS0zNTgzMjZlOS00MmZhM2QyOA==, ActorId: [59:7587124750555845818:2345], ActorState: ExecuteState, TraceId: 
01kd683s961vnhhtrnk9ann099, Create QueryResponse for error on request, msg: , status: UNAUTHORIZED, issues: { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } 2025-12-23T18:38:59.908146Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7587124729081007884:2102] Handle TEvExecuteKqpTransaction 2025-12-23T18:38:59.908162Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7587124729081007884:2102] TxId# 281474976710664 ProcessProposeKqpTransaction |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] Test command err: Starting YDB, grpc: 3631, msgbus: 1394 2025-12-23T18:34:43.999465Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123651492118517:2197];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:43.999623Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:34:44.037414Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.009664s test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0047f5/r3tmp/tmp5XwSnb/pdisk_1.dat 2025-12-23T18:34:44.681938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.786714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:44.787162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:44.806845Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.843391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3631, node 1 2025-12-23T18:34:44.917376Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007271s 2025-12-23T18:34:44.961901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:34:45.000198Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:34:45.005985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:45.006009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:45.006017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2025-12-23T18:34:45.006089Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1394 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:34:45.322475Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587123655787085908:2118] Handle TEvNavigate describe path dc-1 2025-12-23T18:34:45.322530Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587123660082054051:2759] HANDLE EvNavigateScheme dc-1 2025-12-23T18:34:45.323166Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587123660082054051:2759] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.370085Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587123660082054051:2759] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-12-23T18:34:45.376414Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587123660082054051:2759] Handle TEvDescribeSchemeResult Forward to# [1:7587123660082054050:2758] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 
1766514884953 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:34:45.407697Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123655787085908:2118] Handle TEvProposeTransaction 2025-12-23T18:34:45.407736Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123655787085908:2118] TxId# 281474976710657 ProcessProposeTransaction 2025-12-23T18:34:45.407877Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123655787085908:2118] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7587123660082054056:2764] 2025-12-23T18:34:45.539384Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123660082054056:2764] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.539488Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123660082054056:2764] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.539535Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123660082054056:2764] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.539613Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123660082054056:2764] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.539967Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123660082054056:2764] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.540115Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123660082054056:2764] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-12-23T18:34:45.540199Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7587123660082054056:2764] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-12-23T18:34:45.540387Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7587123660082054056:2764] txid# 281474976710657 HANDLE EvClientConnected 2025-12-23T18:34:45.541203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:45.554702Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7587123660082054056:2764] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-12-23T18:34:45.554795Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7587123660082054056:2764] txid# 
281474976710657 SEND to# [1:7587123660082054055:2763] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-12-23T18:34:45.609131Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123655787085908:2118] Handle TEvProposeTransaction 2025-12-23T18:34:45.609159Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123655787085908:2118] TxId# 281474976710658 ProcessProposeTransaction 2025-12-23T18:34:45.609199Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123655787085908:2118] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7587123660082054092:2797] 2025-12-23T18:34:45.612066Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123660082054092:2797] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.612126Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123660082054092:2797] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.612151Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123660082054092:2797] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.612232Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123660082054092:2797] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.612561Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123660082054092:2797] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.612671Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123660082054092:2797] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 ... 
046382081 RedirectRequired# true 2025-12-23T18:39:00.070405Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124758373969449:2931] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-12-23T18:39:00.070541Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124758373969449:2931] txid# 281474976710661 HANDLE EvClientConnected 2025-12-23T18:39:00.076277Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124758373969449:2931] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-12-23T18:39:00.076348Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124758373969449:2931] txid# 281474976710661 SEND to# [59:7587124758373969448:2327] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-12-23T18:39:00.158624Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124732604164519:2116] Handle TEvProposeTransaction 2025-12-23T18:39:00.158668Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124732604164519:2116] TxId# 281474976710662 ProcessProposeTransaction 2025-12-23T18:39:00.158732Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124732604164519:2116] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7587124758373969473:2949] 2025-12-23T18:39:00.162258Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124758373969473:2949] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44234" 2025-12-23T18:39:00.162712Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124758373969473:2949] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:39:00.162773Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124758373969473:2949] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:39:00.162845Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124758373969473:2949] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:39:00.163316Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124758373969473:2949] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:00.163488Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124758373969473:2949] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:39:00.163589Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124758373969473:2949] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-12-23T18:39:00.163856Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# 
[59:7587124758373969473:2949] txid# 281474976710662 HANDLE EvClientConnected 2025-12-23T18:39:00.164674Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:39:00.169466Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124758373969473:2949] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-12-23T18:39:00.169545Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124758373969473:2949] txid# 281474976710662 SEND to# [59:7587124758373969472:2341] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-12-23T18:39:00.267404Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124732604164519:2116] Handle TEvProposeTransaction 2025-12-23T18:39:00.267429Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124732604164519:2116] TxId# 281474976710663 ProcessProposeTransaction 2025-12-23T18:39:00.267468Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124732604164519:2116] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7587124758373969558:3020] 2025-12-23T18:39:00.269654Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124758373969558:3020] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44256" 2025-12-23T18:39:00.269720Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124758373969558:3020] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:39:00.269740Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124758373969558:3020] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:39:00.269780Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124758373969558:3020] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:39:00.270079Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124758373969558:3020] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:00.270144Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7587124758373969558:3020] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:39:00.318034Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7587124758373969558:3020] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-12-23T18:39:00.318231Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7587124758373969558:3020] txid# 
281474976710663 HANDLE EvClientConnected 2025-12-23T18:39:00.324898Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7587124758373969558:3020] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-12-23T18:39:00.324971Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124758373969558:3020] txid# 281474976710663 SEND to# [59:7587124758373969557:2343] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-12-23T18:39:00.409664Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7587124732604164519:2116] Handle TEvProposeTransaction 2025-12-23T18:39:00.409708Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7587124732604164519:2116] TxId# 281474976710664 ProcessProposeTransaction 2025-12-23T18:39:00.409779Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7587124732604164519:2116] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7587124758373969587:3034] 2025-12-23T18:39:00.413113Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7587124758373969587:3034] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "targetuser" Password: "passwd" } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2NjU1ODM0MCwiaWF0IjoxNzY2NTE1MTQwLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.Sfj_73JOSe2gOb6SGds9plxSOgeK6sDRBoqV8x-IU-Y5IxEmol54LKcvGrtUSjq_Z6zWzv72SvW9FjqLDiBQbp-l_gVjf05jaWjciS8ueJOOggjk6AaH0n7XgsGj-jNSY47aXq4Z-mKAdAawKHNvJVR8L_qaunMz0Y41Pw4xN58qiGegoDf5ikves5OO8VY4hcDaEnUe8v_2Tvb30ddHyULWmHo83j6pLfbTD70FHNaTj14hIBx-nYueadV20aCMYAWJOq0lBrLWQdQOvCDbCEP7TlK223qOreAzRKryJBoU-K4jHQ6VYn5566j7zkcdEZFNcWAYAAPQoOTBlF1nzQ\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2NjU1ODM0MCwiaWF0IjoxNzY2NTE1MTQwLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:38002" 2025-12-23T18:39:00.413210Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7587124758373969587:3034] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-12-23T18:39:00.413236Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7587124758373969587:3034] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-12-23T18:39:00.413410Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7587124758373969587:3034] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-12-23T18:39:00.413459Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7587124758373969587:3034] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-12-23T18:39:00.413511Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7587124758373969587:3034] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:39:00.413802Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7587124758373969587:3034] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:00.413833Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7587124758373969587:3034] txid# 281474976710664, 
Access denied for ordinaryuser, attempt to manage user 2025-12-23T18:39:00.413935Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7587124758373969587:3034] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-12-23T18:39:00.413965Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7587124758373969587:3034] txid# 281474976710664 SEND to# [59:7587124758373969586:2354] Source {TEvProposeTransactionStatus Status# 5} 2025-12-23T18:39:00.414947Z node 59 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=59&id=YzUyMGMzYTctYTA2YTZjMTAtNDdmNzJjYzYtMWMwMzUxMGQ=, ActorId: [59:7587124758373969577:2354], ActorState: ExecuteState, TraceId: 01kd683srwah87pdtq4e7bb7s8, Create QueryResponse for error on request, msg: , status: UNAUTHORIZED, issues: { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } 2025-12-23T18:39:00.415338Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7587124732604164519:2116] Handle TEvExecuteKqpTransaction 2025-12-23T18:39:00.415360Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7587124732604164519:2116] TxId# 281474976710665 ProcessProposeKqpTransaction |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:39:03.444023Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:03.445287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:03.626169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:03.627731Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:04.174577Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:04.329193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:04.329361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:04.329877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:04.329967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:04.411878Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:39:04.412425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:04.412874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:04.453699Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:04.557248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005fde/r3tmp/tmpo7g0sr/pdisk_1.dat 2025-12-23T18:39:05.309515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:39:05.374608Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:05.374789Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:05.765660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:07.082788Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-12-23T18:39:07.082919Z node 1 :KQP_EXECUTER TRACE: kqp_executer_impl.h:187: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState 2025-12-23T18:39:07.083197Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:797: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Executing physical tx, type: 2, stages: 1 2025-12-23T18:39:07.083286Z node 1 :KQP_EXECUTER TRACE: kqp_executer_impl.h:811: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Got request, become WaitResolveState 2025-12-23T18:39:07.083585Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710658. Resolved key sets: 1 2025-12-23T18:39:07.083764Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-12-23T18:39:07.084189Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:580: TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. 
Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-12-23T18:39:07.084610Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:867: TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1855:3211] 2025-12-23T18:39:07.084668Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:859: TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1855:3211], channels: 0 2025-12-23T18:39:07.084754Z node 1 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:39:07.084804Z node 1 :KQP_EXECUTER TRACE: kqp_data_executer.cpp:2623: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors 2025-12-23T18:39:07.084859Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:867: TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1855:3211] 2025-12-23T18:39:07.084907Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:859: TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1855:3211], channels: 0 2025-12-23T18:39:07.084991Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [1:1855:3211], 2025-12-23T18:39:07.085068Z node 1 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1855:3211], 2025-12-23T18:39:07.085118Z node 1 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:2227: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-12-23T18:39:07.097677Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [1:1855:3211], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-12-23T18:39:07.097796Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [1:1855:3211], 2025-12-23T18:39:07.097894Z node 1 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1855:3211], 2025-12-23T18:39:07.099328Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [1:1855:3211], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1833 Tasks { TaskId: 1 CpuTimeUs: 1051 FinishTimeMs: 1766515147098 EgressBytes: 30 EgressRows: 3 ComputeCpuTimeUs: 44 BuildCpuTimeUs: 1007 HostName: "ghrun-ou4bsyjxme" NodeId: 1 CreateTimeMs: 1766515147085 UpdateTimeMs: 1766515147098 } MaxMemoryUsage: 1048576 } 2025-12-23T18:39:07.099475Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1855:3211] 2025-12-23T18:39:07.099547Z node 1 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:214: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor=[1:1851:3211] 2025-12-23T18:39:07.099597Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval: ComputeTime: 0.001833s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-12-23T18:39:07.191845Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 
2025-12-23T18:39:07.191941Z node 1 :KQP_EXECUTER TRACE: kqp_executer_impl.h:1400: ActorId: [1:1852:3211] TxId: 281474976710658. Ctx: { TraceId: 01kd683zyhcn443fw95yerwwyz, Database: , SessionId: ydb://session/3?node_id=1&id=OTNmYzBhYzctZGU0NmNjMjktNTFjNGNkYTctZDk1OGQ0N2U=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState 2025-12-23T18:39:07.250318Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:1871:3229], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2025-12-23T18:39:07.254498Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=NDI5ZDVlMzItNGY2YWVmMWYtY2M3Yjk4YzctM2FkZWI3NmQ=, ActorId: [1:1869:3227], ActorState: ExecuteState, TraceId: 01kd6840dya15jrg98gq738ge6, ReplyQueryCompileError, status: UNAVAILABLE, issues: { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[/Root/table-1]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } }, remove tx with tx_id: |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_errors/unittest |93.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs |93.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs |93.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_phantom_blobs/blobstorage-ut_blobstorage-ut_phantom_blobs >> TSchemeShardTest::AlterMixedStorageConfigAndChannelProfileIdTable [GOOD] >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenDupLock [GOOD] Test command err: 2025-12-23T18:38:16.104829Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124567956582370:2187];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:16.105221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:16.153824Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e7/r3tmp/tmppoj1sF/pdisk_1.dat 2025-12-23T18:38:16.450862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:16.483106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:16.483254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:16.523676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:16.618614Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:16.665986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:62433 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515096612 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:16.874247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:16.887588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:16.906191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:17.041915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:17.116347Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:17.119074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e7/r3tmp/tmplbPcvC/pdisk_1.dat 2025-12-23T18:38:20.246862Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:20.247030Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:20.276777Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:20.276879Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:20.307006Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124582578508142:2081] 1766515100030491 != 1766515100030494 2025-12-23T18:38:20.335633Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:20.351465Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:20.480652Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:15198 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515100413 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:20.587826Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:20.594565Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:38:20.605489Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:20.609912Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:20.720267Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:20.794777Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:21.126711Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:24.667571Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124602356357532:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:24.667635Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e7/r3tmp/tmp9ULmxo/pdisk_1.dat 2025-12-23T18:38:24.734643Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:24.819782Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:24.821074Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587124602356357507:2081] 1766515104666408 != 1766515104666411 2025-12-23T18:38:24.839115Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:24.839196Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:24.857095Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:24.976947Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:29877 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ... 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515131710 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:51.997157Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:52.016994Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:38:52.020627Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions waiting... 2025-12-23T18:38:52.040049Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:52.131124Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:52.234862Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:52.338120Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:56.339148Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587124738802634950:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:56.339209Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e7/r3tmp/tmpqKO6RO/pdisk_1.dat 2025-12-23T18:38:56.397625Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:56.489900Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:56.491344Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7587124738802634919:2081] 1766515136338569 != 1766515136338572 2025-12-23T18:38:56.501880Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:56.501978Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:56.530923Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:56.664641Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:24972 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515136582 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:56.808505Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:38:56.841027Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:56.937474Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:57.026720Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:02.040857Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587124763103220027:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:02.041238Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e7/r3tmp/tmpK9U3Oo/pdisk_1.dat 2025-12-23T18:39:02.070629Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:02.173589Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:02.190083Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:02.190204Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:02.221490Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:02.322077Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:62559 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515142287 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:39:02.640918Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:02.665552Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:39:02.693649Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:39:02.705960Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:02.823620Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:02.960283Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:03.048920Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |93.6%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut >> KqpService::CloseSessionsWithLoad |93.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenSplit |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> KqpErrors::ProposeError [GOOD] >> KqpErrors::ProposeErrorEvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] Test command err: 2025-12-23T18:38:38.914039Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124663300668136:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:38.914083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:39.057663Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003343/r3tmp/tmpTdr8WY/pdisk_1.dat 2025-12-23T18:38:39.642584Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:38:39.652549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:39.652688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:39.797637Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:39.798590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:39.802032Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124663300668114:2081] 1766515118911980 != 1766515118911983 TServer::EnableGrpc on GrpcPort 19405, node 1 2025-12-23T18:38:39.859740Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:38:39.975704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:39.975730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:39.975743Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:39.975900Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:38:40.022938Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6465 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515119866 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:38:40.328845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:38:40.335407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:38:40.349364Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-12-23T18:38:40.349478Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d3cf73f0450] Connect to grpc://localhost:19987 2025-12-23T18:38:40.352320Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3cf73f0450] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-12-23T18:38:40.391600Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3cf73f0450] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:38:40.395060Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-12-23T18:38:40.396376Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d3cf73f0b50] Connect to grpc://localhost:10564 2025-12-23T18:38:40.403427Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3cf73f0b50] Request GetUserAccountRequest { user_account_id: "user1" } 2025-12-23T18:38:40.421237Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3cf73f0b50] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-12-23T18:38:40.421669Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2025-12-23T18:38:44.388187Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124688140111016:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:44.388236Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003343/r3tmp/tmpM3C319/pdisk_1.dat 2025-12-23T18:38:44.666873Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:38:44.672213Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:44.678901Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124688140110975:2081] 1766515124380756 != 1766515124380759 2025-12-23T18:38:44.686304Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:44.686404Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:44.729711Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16729, node 2 2025-12-23T18:38:45.035405Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:45.035428Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:45.035435Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2025-12-23T18:38:45.035517Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:38:45.074143Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:38:45.419011Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6026 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515124780 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:38:45.492646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:38:45.516639Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:38:45.519411Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-12-23T18:38:45.519493Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d3cf74b1dd0] Connect to grpc://localhost:2999 2025-12-23T18:38:45.520496Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3cf74b1dd0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-12-23T18:38:45.538589Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d3cf74b1dd0] Status 14 Service Unavailable 2025-12-23T18:38:45.540064Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-12-23T18:38:45.540101Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-12-23T18:38:45.540283Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3cf74b1dd0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-12-23T18:38:45.543219Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d3cf74b1dd0] Status 1 CANCELLED 2025-12-23T18:38:45.552014Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2025-12-23T18:38:49.458299Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124707137301701:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:49.460253Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath fo ... 
_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:38:59.936746Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d3cf74b6e50] Status 16 Access Denied 2025-12-23T18:38:59.936908Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2025-12-23T18:38:59.936965Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2025-12-23T18:38:59.937651Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-12-23T18:38:59.937847Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3cf74b6e50] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-12-23T18:38:59.940355Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d3cf74b6e50] Status 16 Access Denied 2025-12-23T18:38:59.940449Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-12-23T18:38:59.940471Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-12-23T18:38:59.941046Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-12-23T18:38:59.941286Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3cf74b6e50] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:38:59.943118Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3cf74b6e50] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:38:59.943221Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-12-23T18:38:59.943303Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:38:59.943833Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-12-23T18:38:59.943990Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3cf74b6e50] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-12-23T18:38:59.945409Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3cf74b6e50] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:38:59.945530Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-12-23T18:38:59.945639Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:38:59.946250Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 
2025-12-23T18:38:59.946417Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3cf74b6e50] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2025-12-23T18:38:59.948155Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3cf74b6e50] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:38:59.948402Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2025-12-23T18:38:59.948496Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:38:59.949204Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (6968D2E8) asking for AccessServiceAuthorization(something.write) 2025-12-23T18:38:59.949389Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3cf74b6e50] Request AuthorizeRequest { iam_token: "**** (6968D2E8)" permission: "something.write" resource_path { id: "123" type: "ydb.database" } resource_path { id: "folder" type: "resource-manager.folder" } } 2025-12-23T18:38:59.952414Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3cf74b6e50] Response AuthorizeResponse { subject { service_account { id: "service1" } } } 2025-12-23T18:38:59.952662Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (6968D2E8) permission something.write now has a valid subject "service1@as" 2025-12-23T18:38:59.952806Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (6968D2E8) () has now valid token of service1@as 2025-12-23T18:39:04.445448Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587124772141028739:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:04.445639Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003343/r3tmp/tmphAMNxI/pdisk_1.dat 2025-12-23T18:39:04.513065Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:04.615673Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:04.615767Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:04.618153Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:04.633020Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7587124772141028707:2081] 1766515144444723 != 1766515144444726 2025-12-23T18:39:04.648333Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:04.678090Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 20525, node 6 
2025-12-23T18:39:04.741478Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:04.741508Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:04.741515Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:04.741612Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21263 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515144723 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:04.996380Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:39:05.004260Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-12-23T18:39:05.004346Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d3cf73ec8d0] Connect to grpc://localhost:63187 2025-12-23T18:39:05.005247Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3cf73ec8d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:39:05.014179Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3cf73ec8d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:39:05.014417Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-12-23T18:39:05.014515Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:39:05.015116Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-12-23T18:39:05.015175Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-12-23T18:39:05.015342Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3cf73ec8d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:39:05.015922Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d3cf73ec8d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:39:05.018580Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3cf73ec8d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:39:05.018772Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-12-23T18:39:05.019940Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d3cf73ec8d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:39:05.020058Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-12-23T18:39:05.020171Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> KqpRollback::DoubleUpdate [GOOD] >> KqpSinkLocks::DifferentKeyUpdate |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} 
ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2025-12-23T18:38:11.920000Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124544785011866:2171];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:11.920300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057fb/r3tmp/tmpyFauXT/pdisk_1.dat 2025-12-23T18:38:12.573785Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:12.591085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:12.591177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:12.655771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:12.817874Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124544785011733:2081] 1766515091913398 != 1766515091913401 2025-12-23T18:38:12.832415Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:12.886957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:38:12.975052Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:4315 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515092930 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:13.452813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:38:13.468344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:13.491128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:13.879378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:13.993224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057fb/r3tmp/tmp00pYEu/pdisk_1.dat 2025-12-23T18:38:18.631191Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:18.631321Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:18.811783Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:18.815549Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:18.818928Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124576787652734:2081] 1766515098508775 != 1766515098508778 2025-12-23T18:38:18.851708Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:18.851777Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:18.921919Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4033 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:38:19.339864Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515098971 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:19.346097Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:19.353234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:19.371857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:19.454478Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:19.518117Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:19.590876Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:23.791240Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124599076241290:2084];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:23.791318Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057fb/r3tmp/tmpUzQrxg/pdisk_1.dat 2025-12-23T18:38:24.093331Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:24.096433Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:24.116590Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:24.116683Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:24.144672Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:24.269953Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:23720 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Ve ... is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:51.783191Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:51.796557Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:38:51.808742Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:51.971471Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:52.111435Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:52.167131Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057fb/r3tmp/tmpj1Kw48/pdisk_1.dat 2025-12-23T18:38:57.300391Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:57.300897Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:57.433888Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:57.433981Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:57.456733Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:57.460516Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7587124744421314221:2081] 1766515137236306 != 1766515137236309 2025-12-23T18:38:57.480785Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:57.484397Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13555 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515137534 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:38:57.892523Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:57.899433Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:57.909449Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:57.915465Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:58.008308Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:58.065359Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:38:58.095919Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:58.280164Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:02.971678Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057fb/r3tmp/tmpSHmwjE/pdisk_1.dat 2025-12-23T18:39:03.007680Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:03.201530Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:03.207291Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7587124764715350469:2081] 1766515142841746 != 1766515142841749 2025-12-23T18:39:03.239336Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:03.239442Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:03.263057Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:03.274777Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:1304 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515143316 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:39:03.596323Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:03.612724Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:39:03.636419Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:39:03.647874Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:03.750152Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:03.915497Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:03.968989Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] Test command err: 2025-12-23T18:38:09.516394Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124538331818936:2255];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:09.516633Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:09.594179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005801/r3tmp/tmpkwZe9b/pdisk_1.dat 2025-12-23T18:38:10.479169Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:10.543060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:10.543166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:10.555122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:10.793367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2025-12-23T18:38:10.809722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:10.874027Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:10.881275Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124538331818718:2081] 1766515089171659 != 1766515089171662 2025-12-23T18:38:11.000817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:62534 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515091131 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:11.776468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:11.799458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:11.809915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:11.821345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:12.043326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:12.146559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:17.539781Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124570110215416:2087];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:17.540377Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:17.556179Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005801/r3tmp/tmpQ6ML6W/pdisk_1.dat 2025-12-23T18:38:17.652738Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:17.652823Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:17.653961Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:17.655146Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:17.666398Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5741 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:38:17.935895Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515097746 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:38:17.979510Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:17.996569Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:18.023190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:38:18.038034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:18.148967Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:18.252992Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:18.554969Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:21.665121Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124587584857634:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:21.665180Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:21.690568Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005801/r3tmp/tmpaIH6SN/pdisk_1.dat 2025-12-23T18:38:21.906808Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:21.910272Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:21.918906Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587124 ... 
2057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:52.311182Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:52.375710Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:52.446775Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:52.522761Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:52.742900Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:57.342998Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587124744354638866:2161];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:57.354943Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:57.361874Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005801/r3tmp/tmpbTYB09/pdisk_1.dat 2025-12-23T18:38:57.534860Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:57.542000Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:57.542120Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:57.550677Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7587124744354638743:2081] 1766515137327617 != 1766515137327620 2025-12-23T18:38:57.562967Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:57.571452Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:57.783392Z node 9 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:18632 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515137660 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:57.911317Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... 2025-12-23T18:38:57.939266Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:58.035719Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:58.101500Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:58.351705Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:03.169449Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587124769198866255:2243];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:03.169541Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005801/r3tmp/tmpl6keYg/pdisk_1.dat 2025-12-23T18:39:03.230950Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:03.354813Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:03.383163Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7587124769198866025:2081] 1766515143151885 != 1766515143151888 2025-12-23T18:39:03.392700Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:03.392826Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:03.406861Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:03.410261Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18812 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515143470 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:39:03.836806Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:39:03.859271Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:39:03.889200Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:39:03.908074Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:04.014051Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:39:04.063503Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:04.218396Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:04.269439Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
|93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink >> KqpTx::MixEnginesOldNew [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap-UpdateAfterInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 32304, MsgBus: 9843 2025-12-23T18:33:56.631895Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123452722592013:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:56.633168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:56.672342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003c67/r3tmp/tmpYwXWRR/pdisk_1.dat 2025-12-23T18:33:57.127107Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:57.163739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:57.163867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:57.237010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32304, node 1 2025-12-23T18:33:57.353768Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:57.378920Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123452722591964:2081] 1766514836526304 != 1766514836526307 2025-12-23T18:33:57.522827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:57.523800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:57.523810Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:57.523824Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:57.523987Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:33:57.643049Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:9843 TClient is connected to server localhost:9843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514837339 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:58.539531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:33:58.584662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:33:58.601493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:58.830335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:59.177542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:59.419289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:34:01.618873Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123452722592013:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:01.618959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:34:02.656985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123478492397621:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.657088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.657492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123478492397631:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.657534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:03.320303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.374779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.435138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.473160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.515394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.594714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.645706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.729193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.883067Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123482787365811:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:03.883147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:03.883293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123482787365816:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:03.883314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123482787365817:2492], DatabaseId: /Root, PoolId: default, ... IER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:36:18.998374Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:36:19.038845Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9918 2025-12-23T18:36:19.403258Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:9918 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514978830 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:36:19.884416Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:36:19.899639Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:36:19.910242Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:20.025229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:36:20.275149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:20.406206Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:23.454871Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587124061003591815:2195];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:23.454941Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:36:23.858914Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124082478430009:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:23.859036Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:23.860332Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124082478430019:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:23.860418Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:24.022447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:24.087280Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:24.173583Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:24.278698Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:24.366034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:24.501472Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:24.613319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:24.700157Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:24.881686Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124086773398202:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:24.881823Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:24.883142Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124086773398207:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:24.883208Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124086773398208:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:24.883520Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:24.888851Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:36:24.909983Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587124086773398211:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:36:24.990796Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587124086773398262:3780] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:36:28.004159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:33.696011Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:36:33.696049Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools [GOOD] >> KqpSinkLocks::OlapVisibleUncommittedRows >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite >> KqpSinkLocks::EmptyRange >> KqpSinkLocks::InvalidateOnCommit >> KqpSinkLocks::OlapUncommittedRead >> KqpSnapshotIsolation::TPragmaSettingOltp+IsSnapshotIsolation >> KqpSinkMvcc::DirtyReads+IsOlap |93.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |93.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |93.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::MixEnginesOldNew [GOOD] Test command err: Trying to start YDB, gRPC: 3029, MsgBus: 3479 2025-12-23T18:38:45.840852Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124693644003496:2195];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:45.841882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00414a/r3tmp/tmpIXhVKH/pdisk_1.dat 2025-12-23T18:38:46.254927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:38:46.266364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:46.266493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:46.373335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:46.449016Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded TServer::EnableGrpc on GrpcPort 3029, node 1 2025-12-23T18:38:46.458990Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:38:46.599298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:46.599319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:46.599327Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:46.599432Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:38:46.841139Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3479 TClient is connected to server localhost:3479 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515126509 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:38:47.378463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:47.390040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:47.414693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:47.674666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:47.899231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:47.993409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:49.945268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124710823874377:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:49.945388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:49.945736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124710823874387:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:49.945783Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:50.344528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:50.396212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:50.436941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:50.534085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:50.573753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:50.610962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:50.657032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:50.709868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:50.808506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124715118842555:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:50.808590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:50.809281Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124715118842561:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:50.809336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124715118842560:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:50.809370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:50.815032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:38:50.831472Z node 1 :METADATA_PROVIDER ... ons 2025-12-23T18:39:03.410114Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:03.420285Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:03.420383Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:03.453275Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25083, node 3 2025-12-23T18:39:03.649200Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:03.662833Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:03.662877Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:03.662887Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:03.663015Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25931 2025-12-23T18:39:04.299297Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25931 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515143505 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:04.416942Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:39:04.439846Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:04.512748Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:04.710342Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:04.782629Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:07.484564Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124787894844018:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:07.484666Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:07.484981Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124787894844028:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:07.485060Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:07.578074Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:07.633504Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:07.680119Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:07.729614Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:07.787194Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:07.893683Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:07.956356Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:08.025075Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:08.137762Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124792189812195:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:08.137882Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:08.138274Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124792189812200:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:08.138303Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124792189812201:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:08.138361Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:08.144664Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:08.157004Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587124792189812204:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:39:08.258674Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124792189812255:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:08.293279Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587124770714973092:2148];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:08.293367Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpTx::DeferredEffects >> TLocksTest::Range_GoodLock1 [GOOD] |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |93.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |93.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |93.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator >> KqpSinkMvcc::OltpNamedStatementNoSink >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink >> KqpSinkMvcc::TxReadsItsOwnWrites-IsOlap >> KqpSnapshotIsolation::TReadOnlyOltp >> TProxyActorTest::TestCreateSemaphoreInterrupted >> KqpTx::RollbackTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:37:45.154313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:37:45.154388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:45.154435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:37:45.154466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:37:45.154500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:37:45.154546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:37:45.154605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-12-23T18:37:45.154677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:37:45.155496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:37:45.155813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:37:45.353012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:37:45.353062Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:45.366469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:37:45.366736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:37:45.366957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:37:45.380113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:37:45.380373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:37:45.381222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:37:45.381467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:37:45.386199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:45.386433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:37:45.387886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:37:45.387960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:45.388146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:37:45.388221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:37:45.388268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:37:45.388427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:37:45.609579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.610588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.610733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.611245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.611349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.611446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.611511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.611622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.611736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.611820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.611904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.611987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.612053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.612120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.612203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rd__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-12-23T18:39:12.232660Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:39:12.233522Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:39:12.233641Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:39:12.236091Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:39:12.236278Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:39:12.236359Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:39:12.236438Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-12-23T18:39:12.236544Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:39:12.238169Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:39:12.238269Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:39:12.238307Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:39:12.238345Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:39:12.238389Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:39:12.238495Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:39:12.238964Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 
OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2095 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:39:12.239011Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:39:12.239170Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2095 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:39:12.239386Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2095 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2025-12-23T18:39:12.240637Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 745 RawX2: 73014446763 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:39:12.240722Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:39:12.240956Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 745 RawX2: 73014446763 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:39:12.241075Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:39:12.241267Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 745 RawX2: 73014446763 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:39:12.241411Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:39:12.241482Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:39:12.241570Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 
2025-12-23T18:39:12.241656Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:39:12.249742Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:39:12.250132Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:39:12.250258Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:39:12.250531Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:39:12.251029Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:39:12.251102Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:39:12.251367Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:39:12.251454Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:39:12.251535Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:39:12.251612Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:39:12.251697Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:39:12.251826Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [17:711:2701] message: TxId: 102 2025-12-23T18:39:12.251933Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:39:12.252032Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:39:12.252098Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:39:12.252347Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:39:12.254819Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:39:12.254927Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [17:771:2749] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-12-23T18:39:12.259803Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table1" Columns { Name: "value" FamilyName: "new_family" } } } TxId: 103 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2025-12-23T18:39:12.260240Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table1, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:39:12.260680Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Column families require StorageConfig specification, at schemeshard: 72057594046678944 2025-12-23T18:39:12.264402Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Column families require StorageConfig specification" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:39:12.264893Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column families require StorageConfig specification, operation: ALTER TABLE, path: /MyRoot/Table1 TestModificationResult got TxId: 103, wait until txId: 103 >> KqpSnapshotIsolation::TSnapshotTwoInsertOlap >> KqpLocksTricky::TestNoLocksIssue+withSink >> KqpSinkTx::SnapshotROInteractive2 |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] >> KqpSinkMvcc::SnapshotExpiration |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> KqpSnapshotIsolation::TSimpleOltp >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated >> KqpSinkTx::Interactive [GOOD] >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD] Test command err: 2025-12-23T18:38:17.331247Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124572917458935:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:17.331293Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e4/r3tmp/tmpWYWQ1T/pdisk_1.dat 2025-12-23T18:38:17.927415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:17.927519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:17.967671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:18.064030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:18.069086Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124572917458913:2081] 1766515097303647 != 1766515097303650 2025-12-23T18:38:18.071636Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:18.236241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:1201 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:38:18.362513Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515098138 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:18.530396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:18.540197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:18.550533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:18.557613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:18.729327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:18.809102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e4/r3tmp/tmpq2h7hQ/pdisk_1.dat 2025-12-23T18:38:23.855109Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:23.855290Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:23.984168Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:23.984292Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:24.001739Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:24.020364Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124599286874181:2081] 1766515103742288 != 1766515103742291 2025-12-23T18:38:24.038365Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:24.097698Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:18457 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515104116 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:24.346734Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:38:24.359943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:24.369073Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:24.374560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:24.461662Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:24.532793Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:28.428853Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124617306411552:2131];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:28.428912Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:28.522435Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e4/r3tmp/tmpD8yVYf/pdisk_1.dat 2025-12-23T18:38:28.670672Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:28.874934Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587124617306411461:2081] 1766515108409752 != 1766515108409755 2025-12-23T18:38:28.883014Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:28.919311Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:28.940863Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:28.940985Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:29.016496Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:29.483787Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev ... e, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:57.330960Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:57.344245Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:57.352452Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:57.444702Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:57.570949Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:57.794418Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:02.089673Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587124766458466154:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:02.089750Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e4/r3tmp/tmp9f8U8d/pdisk_1.dat 2025-12-23T18:39:02.158966Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:02.286293Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:02.286414Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:02.290956Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:02.293616Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7587124766458466118:2081] 1766515142088380 != 1766515142088383 2025-12-23T18:39:02.314543Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:02.380426Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:14950 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515142392 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:39:02.737681Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:39:02.746044Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:39:02.756414Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:39:02.762462Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:02.869810Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:02.973987Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:03.076673Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:07.525960Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587124785517900668:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:07.526955Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e4/r3tmp/tmpWKzRrD/pdisk_1.dat 2025-12-23T18:39:07.549195Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:07.658567Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:07.660973Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7587124785517900644:2081] 1766515147514184 != 1766515147514187 2025-12-23T18:39:07.677959Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:07.678064Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:07.712544Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:07.723379Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 
TClient is connected to server localhost:5949 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515147761 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-12-23T18:39:07.992987Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:39:08.006223Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:39:08.023212Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:39:08.055069Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:08.168677Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:08.242253Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
|93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] Test command err: 2025-12-23T18:38:16.616733Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124566107822868:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:16.616863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e5/r3tmp/tmpj9Kulr/pdisk_1.dat 2025-12-23T18:38:16.709977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:38:17.586599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:17.586726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:17.672899Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:17.673272Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:17.754881Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124566107822847:2081] 1766515096611419 != 1766515096611422 2025-12-23T18:38:17.781374Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:17.813039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:17.931557Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:13138 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515097900 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:18.420950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:18.435477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:18.455171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:18.460738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:18.727290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:18.821784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:23.311279Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124599320759787:2208];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:23.311583Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:23.342046Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e5/r3tmp/tmp95Lw06/pdisk_1.dat 2025-12-23T18:38:23.425521Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:23.551946Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:23.561163Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124599320759616:2081] 1766515103274333 != 1766515103274336 2025-12-23T18:38:23.566334Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:23.566420Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:23.591930Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:23.694802Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:7141 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515103647 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:23.981986Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:38:23.987846Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:38:24.005606Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:24.095114Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:24.183075Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:24.329504Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:38:30.119350Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124625948668643:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:30.119421Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:30.198907Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e5/r3tmp/tmpnoshCl/pdisk_1.dat 2025-12-23T18:38:30.345603Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:30.345708Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:30.345995Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:30.350933Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587124625948668596:2081] 1766515110091468 != 1766515110091471 2025-12-23T18:38:30.386946Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0 ... OpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:58.193860Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:38:58.204915Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:38:58.216113Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:58.297980Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:58.364661Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:58.598757Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:03.218787Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587124770389376628:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:03.218950Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e5/r3tmp/tmpIjCKEv/pdisk_1.dat 2025-12-23T18:39:03.254595Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:03.424155Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:03.453277Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7587124770389376601:2081] 1766515143181608 != 1766515143181611 2025-12-23T18:39:03.468235Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:03.468329Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:03.478807Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:03.548758Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:27433 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515143526 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:39:03.917468Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:03.931806Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:39:03.954068Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:39:03.961119Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:04.050282Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:04.140138Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:04.235474Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:09.062516Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587124793155769321:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:09.063128Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057e5/r3tmp/tmp53zsmE/pdisk_1.dat 2025-12-23T18:39:09.114844Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:09.236469Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:09.263856Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:09.263978Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:09.314026Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:09.318319Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions TClient is connected to server localhost:64618 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:39:09.690818Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515149364 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:39:09.697323Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:39:09.726419Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:39:09.737591Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:09.867761Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:09.944059Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:10.067891Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 9072, MsgBus: 2624 2025-12-23T18:38:47.360286Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124699577699764:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:47.360322Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00412a/r3tmp/tmpd02x1j/pdisk_1.dat 2025-12-23T18:38:47.967274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:47.967379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:48.020606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:48.030032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:38:48.046087Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124699577699741:2081] 1766515127357250 != 1766515127357253 2025-12-23T18:38:48.047800Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9072, node 1 2025-12-23T18:38:48.244795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:48.244820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try 
to initialize from file: (empty maybe) 2025-12-23T18:38:48.244827Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:48.244925Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:38:48.311847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:38:48.486891Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:2624 TClient is connected to server localhost:2624 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515128112 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:38:48.946386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:51.757112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124716757569794:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:51.757214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124716757569830:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:51.757274Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:51.758861Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124716757569834:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:51.758925Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:51.761757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:38:51.792441Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124716757569833:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:38:51.851930Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124716757569886:2540] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:38:52.212784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:52.360655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124699577699764:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:52.360726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:38:52.374167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:53.746488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 64631, MsgBus: 16316 2025-12-23T18:38:56.986130Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124741181112925:2083];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:56.986585Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00412a/r3tmp/tmpTF2Cfq/pdisk_1.dat 2025-12-23T18:38:57.054727Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:38:57.101578Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:57.111722Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:57.111785Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:57.136162Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64631, node 2 2025-12-23T18:38:57.197893Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:57.197919Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:57.197926Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:57.197999Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:38:57.269208Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16316 TClient is connected to server localhost:16316 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515137184 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:38:57.761753Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but prop ... status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:00.494952Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:00.510497Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587124758360982945:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:00.569681Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587124758360982998:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:00.625613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:00.673831Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:01.920412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:02.002238Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587124741181112925:2083];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:02.041798Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:03.892714Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=ZDdlZjE5NDQtZDY4M2RjYTItOTcwMzU3NjUtOWZmOWM4ZTQ=, ActorId: [2:7587124771245892888:2962], ActorState: ReadyState, TraceId: 01kd683x675y9v7r9e0mb0w9np, Create QueryResponse for error on request, msg: , status: NOT_FOUND, issues: { message: "Transaction not found: 01kd683wvk7119d92fjfdbp4jy" issue_code: 2015 severity: 1 } Trying to start YDB, gRPC: 5219, MsgBus: 7931 2025-12-23T18:39:05.275016Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124776654687362:2082];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:05.276102Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:05.318381Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00412a/r3tmp/tmpnc4KRG/pdisk_1.dat 2025-12-23T18:39:05.470171Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:05.474839Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:05.475226Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587124776654687305:2081] 1766515145263703 != 1766515145263706 2025-12-23T18:39:05.488414Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:05.488504Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:05.503455Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5219, node 3 2025-12-23T18:39:05.678772Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:05.699411Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:05.699441Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:05.699448Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:05.699535Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7931 2025-12-23T18:39:06.263616Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7931 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515145577 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:06.504738Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:39:06.513891Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:39:09.926283Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124793834557351:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:09.926400Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:09.926826Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124793834557365:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:09.926867Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124793834557388:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:09.926928Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:09.931238Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:09.948773Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587124793834557390:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:39:10.018361Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124798129524737:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:10.111711Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:10.244137Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:10.273370Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587124776654687362:2082];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:10.278694Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:11.769449Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |93.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |93.7%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |93.7%| [LD] {RESULT} 
$(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |93.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] Test command err: ... waiting for blocked registrations ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR cookie 0 ... waiting for blocked registrations (done) 2025-12-23T18:39:18.689452Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 2 ... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |93.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink >> KqpSinkLocks::DifferentKeyUpdate [GOOD] >> KqpSinkLocks::DifferentKeyUpdateOlap |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TProxyActorTest::TestAttachSession [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::TopicAutoPartitioning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:39:01.392772Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:01.393839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:01.394714Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:678:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:39:01.513365Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:39:01.515233Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:39:01.520925Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:683:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:39:01.521379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:01.521720Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:39:01.523505Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:01.523661Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:39:02.007891Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:02.148765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:02.148956Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:02.149997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:02.150099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:02.207676Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:39:02.208745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:02.209153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:02.338074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:02.406607Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005fe1/r3tmp/tmpGTjXeh/pdisk_1.dat 2025-12-23T18:39:03.168856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:39:03.235729Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:03.235913Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:03.668768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:04.884742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1914:3231], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:04.884906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1924:3236], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:04.885048Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:04.886188Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1929:3240], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:04.886370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:04.892743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:05.570206Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1928:3239], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:05.845323Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2039:3317] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:06.348714Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-12-23T18:39:06.348861Z node 1 :KQP_EXECUTER TRACE: kqp_executer_impl.h:187: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kd683y5g38taj5etazadyanq, Database: , SessionId: ydb://session/3?node_id=1&id=ODViMDA1ZmUtMmFlNjQ2MWEtZWNlMTAzYjQtYjExZmIwYjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState 2025-12-23T18:39:06.349205Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:797: ActorId: [1:2065:3229] TxId: 281474976710660. Ctx: { TraceId: 01kd683y5g38taj5etazadyanq, Database: , SessionId: ydb://session/3?node_id=1&id=ODViMDA1ZmUtMmFlNjQ2MWEtZWNlMTAzYjQtYjExZmIwYjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Executing physical tx, type: 2, stages: 1 2025-12-23T18:39:06.349314Z node 1 :KQP_EXECUTER TRACE: kqp_executer_impl.h:811: ActorId: [1:2065:3229] TxId: 281474976710660. Ctx: { TraceId: 01kd683y5g38taj5etazadyanq, Database: , SessionId: ydb://session/3?node_id=1&id=ODViMDA1ZmUtMmFlNjQ2MWEtZWNlMTAzYjQtYjExZmIwYjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got request, become WaitResolveState 2025-12-23T18:39:06.349628Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710660. Resolved key sets: 1 2025-12-23T18:39:06.349815Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-12-23T18:39:06.350255Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:580: TxId: 281474976710660. Ctx: { TraceId: 01kd683y5g38taj5etazadyanq, Database: , SessionId: ydb://session/3?node_id=1&id=ODViMDA1ZmUtMmFlNjQ2MWEtZWNlMTAzYjQtYjExZmIwYjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-12-23T18:39:06.350707Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:867: TxId: 281474976710660. Ctx: { TraceId: 01kd683y5g38taj5etazadyanq, Database: , SessionId: ydb://session/3?node_id=1&id=ODViMDA1ZmUtMmFlNjQ2MWEtZWNlMTAzYjQtYjExZmIwYjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:2068:3229] 2025-12-23T18:39:06.350838Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:859: TxId: 281474976710660. Ctx: { TraceId: 01kd683y5g38taj5etazadyanq, Database: , SessionId: ydb://session/3?node_id=1&id=ODViMDA1ZmUtMmFlNjQ2MWEtZWNlMTAzYjQtYjExZmIwYjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Sending channels info to compute actor: [1:2068:3229], channels: 0 2025-12-23T18:39:06.350926Z node 1 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [1:2065:3229] TxId: 281474976710660. Ctx: { TraceId: 01kd683y5g38taj5etazadyanq, Database: , SessionId: ydb://session/3?node_id=1&id=ODViMDA1ZmUtMmFlNjQ2MWEtZWNlMTAzYjQtYjExZmIwYjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:39:06.350968Z node 1 :KQP_EXECUTER TRACE: kqp_data_executer.cpp:2623: ActorId: [1:2065:3229] TxId: 281474976710660. Ctx: { TraceId: 01kd683y5g38taj5etazadyanq, Database: , SessionId: ydb://session/3?node_id=1&id=ODViMDA1ZmUtMmFlNjQ2MWEtZWNlMTAzYjQtYjExZmIwYjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors 2025-12-23T18:39:06.351013Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:867: TxId: 281474976710660. Ctx: { TraceId: 01kd683y5g38taj5etazadyanq, Database: , SessionId: ydb://session/3?node_id=1&id=ODViMDA1ZmUtMmFlNjQ2MWEtZWNlMTAz ... r.cpp:867: TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [4:2121:2469] 2025-12-23T18:39:19.747489Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:903: TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task: 1, output channelId: 1, dst task: 2, at actor [3:2119:3363] 2025-12-23T18:39:19.747560Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:859: TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [4:2121:2469], channels: 1 2025-12-23T18:39:19.747634Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:859: TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [3:2119:3363], channels: 1 2025-12-23T18:39:19.747998Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [4:2121:2469], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-12-23T18:39:19.748061Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:2121:2469], CA [3:2119:3363], 2025-12-23T18:39:19.748122Z node 3 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [3:2111:3363] TxId: 281474976710663. 
Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:2121:2469], CA [3:2119:3363], 2025-12-23T18:39:19.748819Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [4:2121:2469], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 863 Tasks { TaskId: 1 CpuTimeUs: 538 ComputeCpuTimeUs: 13 BuildCpuTimeUs: 525 HostName: "ghrun-ou4bsyjxme" NodeId: 4 CreateTimeMs: 1766515159744 CurrentWaitInputTimeUs: 40 UpdateTimeMs: 1766515159746 } MaxMemoryUsage: 1048576 } 2025-12-23T18:39:19.748946Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:2121:2469], CA [3:2119:3363], 2025-12-23T18:39:19.748987Z node 3 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:2121:2469], CA [3:2119:3363], 2025-12-23T18:39:19.759424Z node 3 :KQP_EXECUTER TRACE: kqp_executer_impl.h:475: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got result, channelId: 2, inputIndex: 0, from: [3:2120:3363], finished: 0 2025-12-23T18:39:19.759572Z node 3 :KQP_EXECUTER TRACE: kqp_executer_impl.h:478: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send ack to channelId: 2, seqNo: 1, to: [3:2120:3363] 2025-12-23T18:39:19.770847Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, got execution state from compute actor: [4:2121:2469], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1775 Tasks { TaskId: 1 CpuTimeUs: 724 OutputRows: 3 OutputBytes: 12 IngressRows: 3 ComputeCpuTimeUs: 199 BuildCpuTimeUs: 525 WaitInputTimeUs: 11768 HostName: "ghrun-ou4bsyjxme" NodeId: 4 StartTimeMs: 1766515159757 CreateTimeMs: 1766515159744 CurrentWaitInputTimeUs: 11879 UpdateTimeMs: 1766515159769 } MaxMemoryUsage: 1048576 } 2025-12-23T18:39:19.770989Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:2121:2469], CA [3:2119:3363], 2025-12-23T18:39:19.771089Z node 3 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:2121:2469], CA [3:2119:3363], 2025-12-23T18:39:19.773508Z node 3 :KQP_EXECUTER TRACE: kqp_executer_impl.h:475: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got result, channelId: 2, inputIndex: 0, from: [3:2120:3363], finished: 1 2025-12-23T18:39:19.773594Z node 3 :KQP_EXECUTER TRACE: kqp_executer_impl.h:478: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send ack to channelId: 2, seqNo: 2, to: [3:2120:3363] 2025-12-23T18:39:19.774985Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [3:2119:3363], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2229 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 1349 FinishTimeMs: 1766515159774 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 628 BuildCpuTimeUs: 721 HostName: "ghrun-ou4bsyjxme" NodeId: 3 CreateTimeMs: 1766515159743 UpdateTimeMs: 1766515159774 } MaxMemoryUsage: 1048576 } 2025-12-23T18:39:19.775160Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [3:2119:3363] 2025-12-23T18:39:19.775260Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [3:2111:3363] TxId: 281474976710663. 
Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:2121:2469], 2025-12-23T18:39:19.775323Z node 3 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:2121:2469], 2025-12-23T18:39:19.775751Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [4:2121:2469], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2246 DurationUs: 16000 Tasks { TaskId: 1 CpuTimeUs: 755 FinishTimeMs: 1766515159773 OutputRows: 3 OutputBytes: 12 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } IngressRows: 3 ComputeCpuTimeUs: 230 BuildCpuTimeUs: 525 WaitInputTimeUs: 14920 HostName: "ghrun-ou4bsyjxme" NodeId: 4 StartTimeMs: 1766515159757 CreateTimeMs: 1766515159744 UpdateTimeMs: 1766515159773 } MaxMemoryUsage: 1048576 } 2025-12-23T18:39:19.775832Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [4:2121:2469] 2025-12-23T18:39:19.776065Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:39:19.776167Z node 3 :KQP_EXECUTER TRACE: kqp_executer_impl.h:1400: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState 2025-12-23T18:39:19.776239Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [3:2111:3363] TxId: 281474976710663. Ctx: { TraceId: 01kd684cdserd2s403qp4ntnz5, Database: , SessionId: ydb://session/3?node_id=3&id=NTFkYWJhZGYtN2VjYzY1YjQtZTNiOWM1NDEtOWE1NWFjYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval: ComputeTime: 0.004475s ReadRows: 3 ReadBytes: 24 ru: 3 rate limiter was not found force flag: 1 { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } } |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_errors/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |93.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |93.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow >> KqpSinkLocks::InvalidateOnCommit [GOOD] >> KqpSinkLocks::InvalidateOlapOnCommit |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> KqpSinkLocks::EmptyRange [GOOD] >> KqpSinkLocks::EmptyRangeOlap >> KqpTx::DeferredEffects [GOOD] >> KqpTx::EmptyTxOnCommit |93.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |93.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |93.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps >> StreamCreator::Basic |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> KqpSinkTx::SnapshotROInteractive2 [GOOD] >> KqpSinkTx::TIsolationSettingTest+IsOlap-UsePragma >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart [GOOD] >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] >> KqpSinkMvcc::OltpNamedStatement >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink >> KqpTx::RollbackTx [GOOD] >> KqpTx::RollbackTx2 >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] >> KqpSinkMvcc::TxReadsItsOwnWrites-IsOlap [GOOD] >> KqpSinkMvcc::UpdateColumns+IsOlap >> KqpSnapshotIsolation::TReadOnlyOltp [GOOD] >> KqpSnapshotIsolation::TReadOnlyOlap >> TProxyActorTest::TestCreateSemaphore |93.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/large/ydb-tests-olap-large |93.7%| [LD] {RESULT} $(B)/ydb/tests/olap/large/ydb-tests-olap-large |93.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/large/ydb-tests-olap-large |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> 
TProxyActorTest::TestCreateSemaphore [GOOD] |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] >> KqpSnapshotIsolation::TSimpleOltp [GOOD] >> KqpSnapshotIsolation::TSimpleOlap |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |93.7%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |93.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart [GOOD] Test command err: 2025-12-23T18:38:03.671368Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124512101351682:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:03.672401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006010/r3tmp/tmpnVqcsf/pdisk_1.dat 2025-12-23T18:38:04.274928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:04.275030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:04.360518Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:04.450989Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124512101351603:2081] 1766515083573329 != 1766515083573332 2025-12-23T18:38:04.528243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:04.694934Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:26304 TServer::EnableGrpc on GrpcPort 1128, node 1 2025-12-23T18:38:05.171248Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:05.171268Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:05.171275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:05.171346Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515084915 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:05.390571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:08.666951Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124512101351682:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:08.667023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:38:09.088692Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:38:09.092727Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:38:09.092780Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:38:09.092806Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:38:09.107321Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7587124537871156222:2497] Owner: [1:7587124537871156217:2494]. Describe result: PathErrorUnknown 2025-12-23T18:38:09.107334Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7587124537871156222:2497] Owner: [1:7587124537871156217:2494]. Creating table 2025-12-23T18:38:09.107396Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7587124537871156222:2497] Owner: [1:7587124537871156217:2494]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2025-12-23T18:38:09.107547Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7587124537871156220:2495] Owner: [1:7587124537871156217:2494]. Describe result: PathErrorUnknown 2025-12-23T18:38:09.107552Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7587124537871156220:2495] Owner: [1:7587124537871156217:2494]. Creating table 2025-12-23T18:38:09.107565Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7587124537871156220:2495] Owner: [1:7587124537871156217:2494]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2025-12-23T18:38:09.107669Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7587124537871156221:2496] Owner: [1:7587124537871156217:2494]. Describe result: PathErrorUnknown 2025-12-23T18:38:09.107683Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7587124537871156221:2496] Owner: [1:7587124537871156217:2494]. Creating table 2025-12-23T18:38:09.107721Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7587124537871156221:2496] Owner: [1:7587124537871156217:2494]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2025-12-23T18:38:09.121255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:09.123923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:09.126635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:09.134404Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7587124537871156222:2497] Owner: [1:7587124537871156217:2494]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2025-12-23T18:38:09.134482Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7587124537871156222:2497] Owner: [1:7587124537871156217:2494]. Subscribe on create table tx: 281474976710658 2025-12-23T18:38:09.136432Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7587124537871156220:2495] Owner: [1:7587124537871156217:2494]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2025-12-23T18:38:09.136455Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7587124537871156220:2495] Owner: [1:7587124537871156217:2494]. Subscribe on create table tx: 281474976710659 2025-12-23T18:38:09.138001Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7587124537871156221:2496] Owner: [1:7587124537871156217:2494]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2025-12-23T18:38:09.138029Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7587124537871156221:2496] Owner: [1:7587124537871156217:2494]. 
Subscribe on create table tx: 281474976710660 2025-12-23T18:38:09.144095Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7587124537871156222:2497] Owner: [1:7587124537871156217:2494]. Subscribe on tx: 281474976710658 registered 2025-12-23T18:38:09.144135Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7587124537871156220:2495] Owner: [1:7587124537871156217:2494]. Subscribe on tx: 281474976710659 registered 2025-12-23T18:38:09.144303Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7587124537871156221:2496] Owner: [1:7587124537871156217:2494]. Subscribe on tx: 281474976710660 registered 2025-12-23T18:38:09.498369Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7587124537871156222:2497] Owner: [1:7587124537871156217:2494]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-12-23T18:38:09.559001Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7587124537871156220:2495] Owner: [1:7587124537871156217:2494]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-12-23T18:38:09.571118Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7587124537871156222:2497] Owner: [1:7587124537871156217:2494]. Table already exists, number of columns: 7, has SecurityObject: true 2025-12-23T18:38:09.571164Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7587124537871156222:2497] Owner: [1:7587124537871156217:2494]. Column diff is empty, finishing 2025-12-23T18:38:09.572250Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7587124537871156222:2497] Owner: [1:7587124537871156217:2494]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2025-12-23T18:38:09.573628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:38:09.578407Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7587124537871156222:2497] Owner: [1:7587124537871156217:2494]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2025-12-23T18:38:09.578432Z node 1 :KQP_PROXY INFO: table_cr ... customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL), retry_state = $retry_state WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-12-23T18:39:23.967333Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=NGNiNTg0YTktMWJhODZhMmMtMjVlMmRhOGQtMTdkYjNmNjM=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 62, targetId: [4:7587124854481660167:2635] 2025-12-23T18:39:23.967372Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 62 timeout: 300.000000s actor id: [4:7587124854481660192:3166] 2025-12-23T18:39:23.985401Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 62, sender: [4:7587124854481660191:2642], selfId: [4:7587124790057148538:2084], source: [4:7587124854481660167:2635] 2025-12-23T18:39:23.986126Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7587124854481660164:2632], ActorId: [4:7587124854481660165:2633], TraceId: ExecutionId: b7b39f5b-fcf7f367-6fd1d54a-c2a35549, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=4&id=NGNiNTg0YTktMWJhODZhMmMtMjVlMmRhOGQtMTdkYjNmNjM=, TxId: 2025-12-23T18:39:23.986273Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7587124854481660164:2632], ActorId: [4:7587124854481660165:2633], TraceId: ExecutionId: b7b39f5b-fcf7f367-6fd1d54a-c2a35549, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NGNiNTg0YTktMWJhODZhMmMtMjVlMmRhOGQtMTdkYjNmNjM=, TxId: 2025-12-23T18:39:23.986326Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4311: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7587124854481660164:2632], ActorId: [4:7587124854481660165:2633], TraceId: ExecutionId: b7b39f5b-fcf7f367-6fd1d54a-c2a35549, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: UNAVAILABLE. Issues: {
: Error: Lease expired } 2025-12-23T18:39:23.986542Z node 4 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [4:7587124854481660162:2631], ActorId: [4:7587124854481660164:2632], TraceId: ExecutionId: b7b39f5b-fcf7f367-6fd1d54a-c2a35549, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [4:7587124854481660165:2633] SUCCESS 2025-12-23T18:39:23.987856Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=NGNiNTg0YTktMWJhODZhMmMtMjVlMmRhOGQtMTdkYjNmNjM=, workerId: [4:7587124854481660167:2635], local sessions count: 1 2025-12-23T18:39:23.987944Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1443: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [4:7587124850186692730:3103] ActorId: [4:7587124854481660064:3118] Database: /dc-1 ExecutionId: b7b39f5b-fcf7f367-6fd1d54a-c2a35549. Successfully finalized script execution operation, WaitingRetry: 0 2025-12-23T18:39:23.988005Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1789: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [4:7587124850186692730:3103] ActorId: [4:7587124854481660064:3118] Database: /dc-1 ExecutionId: b7b39f5b-fcf7f367-6fd1d54a-c2a35549. Reply success 2025-12-23T18:39:23.988270Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4833: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7587124845891725384:3080] ActorId: [4:7587124850186692730:3103]. Lease check #0 [4:7587124854481660067:3120] successfully completed, OperationsToCheck: 0 2025-12-23T18:39:23.988297Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4845: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7587124845891725384:3080] ActorId: [4:7587124850186692730:3103]. Finish, success: 1, issues: 2025-12-23T18:39:23.988359Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:78: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Refresh successfully completed 2025-12-23T18:39:24.059893Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd684gndfmnh2c7tvrd1hefp", Forwarded response to sender actor, requestId: 59, sender: [4:7587124854481660131:2621], selfId: [4:7587124790057148538:2084], source: [4:7587124837301790653:2550] 2025-12-23T18:39:24.078357Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kd684gxd3pz8xg867mdqnknm, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=MmZkOWE3MDktNTZkN2Q2YWYtYzJhNmNlNWItY2IxN2IxYTI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 63, targetId: [4:7587124837301790653:2550] 2025-12-23T18:39:24.078419Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 63 timeout: 300.000000s actor id: [4:7587124858776627523:3176] 2025-12-23T18:39:24.581683Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:94: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Do ScheduleRefreshScriptExecutions (WaitRefreshScriptExecutions: 0), next refresh after 1.000000s 2025-12-23T18:39:24.581736Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:109: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Schedule lease check after 0.542349s 2025-12-23T18:39:25.085176Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd684gxd3pz8xg867mdqnknm", Forwarded response to sender actor, requestId: 63, sender: [4:7587124858776627522:2647], selfId: [4:7587124790057148538:2084], source: [4:7587124837301790653:2550] 2025-12-23T18:39:25.102820Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=MmZkOWE3MDktNTZkN2Q2YWYtYzJhNmNlNWItY2IxN2IxYTI=, workerId: [4:7587124837301790653:2550], local sessions count: 0 2025-12-23T18:39:25.128164Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:53: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Start lease checker: [4:7587124863071594863:3192] 2025-12-23T18:39:25.128260Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4782: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7587124845891725384:3080] ActorId: [4:7587124863071594863:3192]. Bootstrap. Started TListExpiredLeasesQueryActor: [4:7587124863071594864:3193] 2025-12-23T18:39:25.128303Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] [TListExpiredLeasesQueryActor] OwnerId: [4:7587124863071594863:3192], ActorId: [4:7587124863071594864:3193], Bootstrap. Database: /dc-1, IsSystemUser: 1, run create session 2025-12-23T18:39:25.131132Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977558544.420512s seconds to be completed 2025-12-23T18:39:25.134445Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=4&id=NGQ0Y2E0OWQtMjYxNTIzYzItMzc1OGI5YzItYjE2OTlhNjU=, workerId: [4:7587124863071594866:2662], database: /dc-1, longSession: 1, local sessions count: 1 2025-12-23T18:39:25.134728Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T18:39:25.135293Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TListExpiredLeasesQueryActor] OwnerId: [4:7587124863071594863:3192], ActorId: [4:7587124863071594864:3193], TraceId: [4:7587124863071594863:3192], RunDataQuery with SessionId: ydb://session/3?node_id=4&id=NGQ0Y2E0OWQtMjYxNTIzYzItMzc1OGI5YzItYjE2OTlhNjU=, TxId: , text: -- TListExpiredLeasesQueryActor::OnRunQuery DECLARE $max_lease_deadline AS Timestamp; DECLARE $max_listed_leases AS Uint64; SELECT database, execution_id FROM `.metadata/script_execution_leases` WHERE lease_deadline < $max_lease_deadline AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL) LIMIT $max_listed_leases; 2025-12-23T18:39:25.135624Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=NGQ0Y2E0OWQtMjYxNTIzYzItMzc1OGI5YzItYjE2OTlhNjU=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 65, targetId: [4:7587124863071594866:2662] 2025-12-23T18:39:25.135664Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 65 timeout: 300.000000s actor id: [4:7587124863071594868:3194] 2025-12-23T18:39:25.141308Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 65, sender: [4:7587124863071594867:2663], selfId: [4:7587124790057148538:2084], source: [4:7587124863071594866:2662] 2025-12-23T18:39:25.142084Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TListExpiredLeasesQueryActor] OwnerId: [4:7587124863071594863:3192], ActorId: [4:7587124863071594864:3193], TraceId: [4:7587124863071594863:3192], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=NGQ0Y2E0OWQtMjYxNTIzYzItMzc1OGI5YzItYjE2OTlhNjU=, TxId: 2025-12-23T18:39:25.142159Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4759: [ScriptExecutions] [TQueryBase] [TListExpiredLeasesQueryActor] OwnerId: [4:7587124863071594863:3192], ActorId: [4:7587124863071594864:3193], TraceId: [4:7587124863071594863:3192], Found 0 expired leases (fetched rows 0) 2025-12-23T18:39:25.142186Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TListExpiredLeasesQueryActor] OwnerId: [4:7587124863071594863:3192], ActorId: [4:7587124863071594864:3193], TraceId: [4:7587124863071594863:3192], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NGQ0Y2E0OWQtMjYxNTIzYzItMzc1OGI5YzItYjE2OTlhNjU=, TxId: 2025-12-23T18:39:25.142305Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4794: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7587124845891725384:3080] ActorId: [4:7587124863071594863:3192]. Got list expired leases response [4:7587124863071594864:3193], found 0 expired leases 2025-12-23T18:39:25.142330Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4812: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7587124845891725384:3080] ActorId: [4:7587124863071594863:3192]. List expired leases successfully completed 2025-12-23T18:39:25.142355Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4845: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7587124845891725384:3080] ActorId: [4:7587124863071594863:3192]. 
Finish, success: 1, issues: 2025-12-23T18:39:25.142400Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:78: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Refresh successfully completed 2025-12-23T18:39:25.143326Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=NGQ0Y2E0OWQtMjYxNTIzYzItMzc1OGI5YzItYjE2OTlhNjU=, workerId: [4:7587124863071594866:2662], local sessions count: 0 |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] >> KqpErrors::ProposeErrorEvWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 21355, MsgBus: 4562 2025-12-23T18:33:56.004525Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123452554564732:2214];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:56.004580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003c6a/r3tmp/tmpr1v3o0/pdisk_1.dat 2025-12-23T18:33:56.590901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:56.591031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:56.664354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:56.839617Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:56.847278Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123448259597258:2081] 1766514835981096 != 1766514835981099 2025-12-23T18:33:56.934962Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21355, node 1 2025-12-23T18:33:57.087263Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:57.124540Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:57.155459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:57.155496Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:57.155503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:57.155593Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4562 TClient is connected to server localhost:4562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514836877 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:58.351678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:33:58.396749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:58.734569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:59.037475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:59.179411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:34:01.010860Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123452554564732:2214];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:01.014363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:34:02.252808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123478324370243:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.252912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.253373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123478324370253:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.253409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.900855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.959269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.001691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.070372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.139063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.222008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.333268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.399293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:03.518208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123482619338435:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:03.518296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:03.518345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123482619338440:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:03.518447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123482619338442:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:03.518478Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 20 ... R WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:14.283588Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:14.470014Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:37:14.506997Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:31790 TClient is connected to server localhost:31790 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515034081 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:37:14.972160Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:37:14.978905Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:37:14.992672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:15.085070Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:15.367675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:37:15.504390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:37:18.482224Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587124294832987487:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:37:18.482376Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:37:20.252274Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124324897760395:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:20.252398Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:20.253045Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124324897760405:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:20.253122Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:20.401572Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:20.549088Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:20.647406Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:20.741833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:20.833527Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:20.947850Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:21.046178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:21.145169Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:21.317378Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124329192728607:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:21.317522Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:21.318231Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124329192728612:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:21.318282Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124329192728613:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:21.318426Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:37:21.322554Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:37:21.346697Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587124329192728616:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:37:21.419016Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587124329192728667:3783] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:37:24.912336Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:28.818950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:37:28.818985Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> TSubDomainTest::StartAndStopTenanNode >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 4022, MsgBus: 7350 2025-12-23T18:38:48.927366Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124703982477375:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:48.927498Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:38:48.964929Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004127/r3tmp/tmpi7yjnZ/pdisk_1.dat 2025-12-23T18:38:49.234311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:49.234463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:49.242026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:49.293233Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:38:49.318431Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:49.320459Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124703982477330:2081] 1766515128920933 != 1766515128920936 TServer::EnableGrpc on GrpcPort 4022, node 1 
2025-12-23T18:38:49.436830Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:49.436872Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:49.436882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:49.436971Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:38:49.521377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7350 2025-12-23T18:38:49.939710Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7350 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515129386 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:38:50.145355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:38:50.157320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:50.329611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:50.530218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:50.626560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:52.842339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124721162348389:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:52.842508Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:52.843060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124721162348399:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:52.843147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:53.225589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:53.264960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:53.300568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:53.337647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:53.386262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:53.425667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:53.462674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:53.509156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:53.591538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124725457316567:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:53.591623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:53.591934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124725457316572:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:53.591972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124725457316573:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:53.592133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:53.596063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResou ... rs::TEvents::TEvWakeup; TClient is connected to server localhost:7340 TClient is connected to server localhost:7340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515151142 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:12.138809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:12.153411Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:12.166033Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:12.266675Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:12.560025Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:12.669125Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:15.686041Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587124797576413909:2137];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:15.686099Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:16.678945Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124823346219466:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:16.679076Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:16.682935Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124823346219476:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:16.683057Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:16.747431Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:16.805997Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:16.871832Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:17.005365Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:17.064415Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:17.144137Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:17.189294Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:17.256071Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:17.354349Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124827641187649:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.354450Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.354925Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124827641187654:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.354976Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124827641187655:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.355016Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.360647Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:17.379248Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587124827641187658:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:39:17.433030Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124827641187709:3782] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:19.643923Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:19.725665Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:19.810412Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:25.982482Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:39:25.982518Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |93.8%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |93.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:38:18.248368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:38:18.248478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:38:18.248522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:38:18.248562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:38:18.248604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:38:18.248637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:38:18.248695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:38:18.248809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:38:18.249749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:38:18.250129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:38:18.351143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:38:18.351217Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:18.358012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:38:18.358241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:38:18.358411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:38:18.361655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:38:18.361826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:38:18.362502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2025-12-23T18:38:18.362759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:38:18.365003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:38:18.365136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:38:18.365957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:38:18.366024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:38:18.366115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:38:18.366152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:38:18.366201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:38:18.366309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:38:18.543540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.544609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.544781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.544871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.544965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.545063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.545140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.545219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.545283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.545345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.545435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.545518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.545633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.545719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:18.545814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 1120: NTableState::TProposedWaitParts operationId# 281474976715657:1 ProgressState at tablet: 72075186233409546 2025-12-23T18:39:20.669540Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72075186233409546 2025-12-23T18:39:20.669593Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976715657:0 ProgressState 2025-12-23T18:39:20.669694Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:0 progress is 2/3 2025-12-23T18:39:20.669730Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2025-12-23T18:39:20.669764Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:0 progress is 2/3 2025-12-23T18:39:20.669794Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2025-12-23T18:39:20.669826Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2025-12-23T18:39:20.670414Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 33 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-12-23T18:39:20.670652Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 33 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-12-23T18:39:20.670784Z node 20 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2025-12-23T18:39:20.670877Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 33], version: 5 2025-12-23T18:39:20.670983Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 33] was 4 2025-12-23T18:39:20.672520Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-12-23T18:39:20.672616Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-12-23T18:39:20.672658Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2025-12-23T18:39:20.672705Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 5 2025-12-23T18:39:20.672773Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 6 2025-12-23T18:39:20.672874Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2025-12-23T18:39:20.693459Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2025-12-23T18:39:20.696163Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2025-12-23T18:39:20.716286Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 7517 } } CommitVersion { Step: 400 TxId: 281474976715657 } 2025-12-23T18:39:20.716370Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2025-12-23T18:39:20.716563Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 7517 } } CommitVersion { 
Step: 400 TxId: 281474976715657 } 2025-12-23T18:39:20.716774Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 7517 } } CommitVersion { Step: 400 TxId: 281474976715657 } debug: NTableState::TProposedWaitParts operationId# 281474976715657:1 2025-12-23T18:39:20.718299Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 1303 RawX2: 85899349110 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-12-23T18:39:20.718394Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2025-12-23T18:39:20.718680Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 1303 RawX2: 85899349110 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-12-23T18:39:20.719045Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409546 2025-12-23T18:39:20.719285Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 1303 RawX2: 85899349110 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-12-23T18:39:20.719431Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546 2025-12-23T18:39:20.719527Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-12-23T18:39:20.719617Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2025-12-23T18:39:20.719696Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:1 129 -> 240 2025-12-23T18:39:20.732920Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-12-23T18:39:20.733510Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-12-23T18:39:20.733766Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:1, at 
schemeshard: 72075186233409546 2025-12-23T18:39:20.733854Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2025-12-23T18:39:20.734117Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:1 progress is 3/3 2025-12-23T18:39:20.734188Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-12-23T18:39:20.734268Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:1 progress is 3/3 2025-12-23T18:39:20.734338Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-12-23T18:39:20.734412Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2025-12-23T18:39:20.734495Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-12-23T18:39:20.734568Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2025-12-23T18:39:20.734630Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976715657:0 2025-12-23T18:39:20.734803Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 33] was 3 2025-12-23T18:39:20.734861Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:1 2025-12-23T18:39:20.734886Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976715657:1 2025-12-23T18:39:20.735023Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 5 2025-12-23T18:39:20.735078Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:2 2025-12-23T18:39:20.735105Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976715657:2 2025-12-23T18:39:20.735158Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable >> TSubDomainTest::Boot-EnableRealSystemViewPaths-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeErrorEvWrite [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:39:02.770027Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:02.770301Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:02.932651Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:39:02.934501Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:39:02.941137Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:505:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:39:02.941741Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:02.942076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:39:02.949486Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:500:2171], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:39:02.950053Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:39:02.950279Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:03.470310Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:03.660514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:03.660622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:03.661202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:03.661270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:03.720288Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:39:03.721276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:03.721597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:03.816490Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:03.836877Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005fdf/r3tmp/tmpRuDRpo/pdisk_1.dat 2025-12-23T18:39:04.626901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:39:04.689920Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:04.690030Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:05.060576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:06.234323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1916:3228], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:06.234514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1927:3233], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:06.235250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:06.236354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1932:3238], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:06.236694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:06.242235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:06.923114Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1930:3236], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:07.168646Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2041:3314] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:07.631601Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-12-23T18:39:07.631736Z node 1 :KQP_EXECUTER DEBUG: kqp_literal_executer.cpp:96: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2025-12-23T18:39:07.631792Z node 1 :KQP_EXECUTER DEBUG: kqp_literal_executer.cpp:125: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution, txs: 1 2025-12-23T18:39:07.631853Z node 1 :KQP_EXECUTER DEBUG: kqp_literal_executer.cpp:133: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-12-23T18:39:07.631914Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2025-12-23T18:39:07.635209Z node 1 :KQP_EXECUTER DEBUG: kqp_literal_executer.cpp:280: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Execution is complete, results: 1 2025-12-23T18:39:07.647845Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-12-23T18:39:07.647965Z node 1 :KQP_EXECUTER DEBUG: kqp_literal_executer.cpp:96: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kd683zfq6cc2d2fsq86jhc9c, Database: , SessionId: ydb://session/3?node_id=1&id=ZTg5OGE4YzUtMWJjYmUzMi1iNzc5OTAwMy1hZWFlMzUxZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Begin literal execution. Operation timeout: 299.426780s, cancelAfter: (empty maybe) 2025-12-23T18:39:07.648015Z node 1 :KQP_EXECUTER DEBUG: kqp_literal_executer.cpp:125: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kd683zfq6cc2d2fsq86jhc9c, Database: , SessionId: ydb://session/3?node_id=1&id=ZTg5OGE4YzUtMWJjYmUzMi1iNzc5OTAwMy1hZWFlMzUxZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Begin literal execution, txs: 1 2025-12-23T18:39:07.648094Z node 1 :KQP_EXECUTER DEBUG: kqp_literal_executer.cpp:133: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kd683zfq6cc2d2fsq86jhc9c, Database: , SessionId: ydb://session/3?node_id=1&id=ZTg5OGE4YzUtMWJjYmUzMi1iNzc5OTAwMy1hZWFlMzUxZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-12-23T18:39:07.648150Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2025-12-23T18:39:07.648920Z node 1 :KQP_EXECUTER DEBUG: kqp_literal_executer.cpp:280: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kd683zfq6cc2d2fsq86jhc9c, Database: , SessionId: ydb://session/3?node_id=1&id=ZTg5OGE4YzUtMWJjYmUzMi1iNzc5OTAwMy1hZWFlMzUxZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Execution is complete, results: 1 2025-12-23T18:39:07.649200Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-12-23T18:39:07.649282Z node 1 :KQP_EXECUTER TRACE: kqp_executer_impl.h:187: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kd683zfq6cc2d2fsq86jhc9c, Database: , SessionId: ydb://session/3?node_id=1&id=ZTg5OGE4YzUtMWJjYmUzMi1iNzc5OTAwMy1hZWFlMzUxZQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState 2025-12-23T18:39:07.649621Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:797: ActorId: [1:2067:3226] TxId: 281474976710660. Ctx: { TraceId: 01kd683zfq6cc2d2fsq86jhc9c, Database: , SessionId: ydb://session/3?node_id=1&id=ZTg5OGE4YzUtMWJjYmUzMi1iNzc5OTAwMy1hZWFlMz ... Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [3:2341:3467], channels: 0 2025-12-23T18:39:26.743165Z node 3 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:39:26.743219Z node 3 :KQP_EXECUTER TRACE: kqp_data_executer.cpp:2623: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors 2025-12-23T18:39:26.743268Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:867: TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [3:2341:3467] 2025-12-23T18:39:26.743337Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:859: TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [3:2341:3467], channels: 0 2025-12-23T18:39:26.743416Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [3:2338:3467] TxId: 281474976710683. 
Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2341:3467], 2025-12-23T18:39:26.743496Z node 3 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2341:3467], 2025-12-23T18:39:26.743558Z node 3 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:2227: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-12-23T18:39:26.744687Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [3:2341:3467], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-12-23T18:39:26.744793Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2341:3467], 2025-12-23T18:39:26.744891Z node 3 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2341:3467], 2025-12-23T18:39:26.796735Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [3:2341:3467], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 829 Tasks { TaskId: 1 CpuTimeUs: 130 FinishTimeMs: 1766515166796 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 28 BuildCpuTimeUs: 102 HostName: "ghrun-ou4bsyjxme" NodeId: 3 CreateTimeMs: 1766515166743 UpdateTimeMs: 1766515166796 } MaxMemoryUsage: 1048576 } 2025-12-23T18:39:26.796923Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [3:2341:3467] 2025-12-23T18:39:26.797022Z node 3 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:214: ActorId: [3:2338:3467] TxId: 281474976710683. 
Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor=[3:2337:3467] 2025-12-23T18:39:26.797104Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval: ComputeTime: 0.000829s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-12-23T18:39:26.821111Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:856: SelfId: [3:2344:3467], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [3:2328:3467]Got DISK_GROUP_OUT_OF_SPACE for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:2344:3467]. Ignored this error. 2025-12-23T18:39:26.821294Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [3:2337:3467], SessionActorId: [3:2328:3467], statusCode=OVERLOADED. Issue=
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 . sessionActorId=[3:2328:3467]. 2025-12-23T18:39:26.821780Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, ActorId: [3:2328:3467], ActorState: ExecuteState, TraceId: 01kd684kda249mav6xf2jeadbq, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [3:2338:3467] from: [3:2337:3467] 2025-12-23T18:39:26.822005Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1014: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got EvAbortExecution, status: OVERLOADED, message: {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 } 2025-12-23T18:39:26.822114Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. OVERLOADED: {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 } 2025-12-23T18:39:26.822209Z node 3 :KQP_EXECUTER INFO: kqp_executer_impl.h:1143: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. task: 1, does not have the CA id yet or is already complete 2025-12-23T18:39:26.822460Z node 3 :KQP_EXECUTER TRACE: kqp_executer_impl.h:1254: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ReplyErrorAndDie. Response: Status: OVERLOADED Issues { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2006 severity: 1 } Result { Stats { CpuTimeUs: 829 Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 AffectedPartitions: 1 } } } , to ActorId: [3:2328:3467] 2025-12-23T18:39:26.822547Z node 3 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2691: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shutdown immediately - nothing to wait 2025-12-23T18:39:26.823428Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:39:26.823525Z node 3 :KQP_EXECUTER TRACE: kqp_executer_impl.h:1400: ActorId: [3:2338:3467] TxId: 281474976710683. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState 2025-12-23T18:39:26.823935Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, ActorId: [3:2328:3467], ActorState: ExecuteState, TraceId: 01kd684kda249mav6xf2jeadbq, Create QueryResponse for error on request, msg: , status: OVERLOADED, issues: { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2006 severity: 1 } 2025-12-23T18:39:26.824289Z node 3 :KQP_EXECUTER TRACE: kqp_executer_impl.h:187: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState 2025-12-23T18:39:26.824977Z node 3 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:228: ActorId: [3:2347:3467] TxId: 281474976710684. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send Rollback to BufferActor=[3:2337:3467] 2025-12-23T18:39:26.825383Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [3:2347:3467] TxId: 281474976710684. 
Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:39:26.825463Z node 3 :KQP_EXECUTER TRACE: kqp_executer_impl.h:1400: ActorId: [3:2347:3467] TxId: 281474976710684. Ctx: { TraceId: 01kd684kda249mav6xf2jeadbq, Database: , SessionId: ydb://session/3?node_id=3&id=YzQwMmU1MjEtZGFiMzRmMzEtNDEwNjc0YzEtZTQ2MWQ0ZTc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_errors/unittest >> TSubDomainTest::UserAttributes >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink >> TModifyUserTest::ModifyUser >> TSubDomainTest::CreateDummyTabletsInDifferentDomains >> TSubDomainTest::LsLs |93.8%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} >> TSubDomainTest::FailIfAffectedSetNotInterior >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssue-withSink >> TSubDomainTest::CreateTablet >> StreamCreator::WithResolvedTimestamps [GOOD] >> StreamCreator::Basic [GOOD] |93.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |93.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |93.8%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant >> KqpResultSetFormats::ValueFormat_Simple >> KqpSinkLocks::OlapUncommittedRead [GOOD] >> KqpSinkLocks::OlapInsertWithBulkUpsert+UseBulkUpsert >> KqpResultSetFormats::DefaultFormat >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] Test command err: 2025-12-23T18:39:25.880083Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124863515313796:2128];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:25.880123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005731/r3tmp/tmppRB5Li/pdisk_1.dat 2025-12-23T18:39:26.443218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:26.443322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:26.452676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:26.559637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:26.745125Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:26.746888Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124863515313708:2081] 1766515165876373 != 1766515165876376 2025-12-23T18:39:26.805101Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:26.863329Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1597 TServer::EnableGrpc on GrpcPort 23302, node 1 2025-12-23T18:39:27.667587Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:27.667614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:27.667621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:27.667729Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1597 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515166654 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:28.662859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:28.684117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515168810 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515168712 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515166654 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 3... 
(TRUNCATED) 2025-12-23T18:39:28.860883Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:39:28.860913Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:39:28.861622Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:39:30.880348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124863515313796:2128];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:30.880453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:31.574771Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766515168810, tx_id: 281474976710658 } } } 2025-12-23T18:39:31.575243Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:39:31.577107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:31.578693Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-12-23T18:39:31.578708Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-12-23T18:39:31.642257Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-12-23T18:39:31.642298Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2025-12-23T18:39:31.649833Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-12-23T18:39:31.866234Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7587124889285118634:2338] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:41:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: 
true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-12-23T18:39:31.877269Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-12-23T18:39:31.877299Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2025-12-23T18:39:31.925447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:39:31.950993Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-12-23T18:39:31.951035Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515168810 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> KqpService::CloseSessionsWithLoad [GOOD] |93.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |93.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |93.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic [GOOD] Test command err: 2025-12-23T18:39:25.989291Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124864300110740:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:25.989336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:26.087861Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005733/r3tmp/tmpYnnupL/pdisk_1.dat 2025-12-23T18:39:26.935431Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:26.946629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:26.955079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:26.988775Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124864300110719:2081] 1766515165970390 != 1766515165970393 2025-12-23T18:39:27.035149Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:27.036162Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:27.039534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:27.125116Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30969 TServer::EnableGrpc on GrpcPort 15917, node 1 2025-12-23T18:39:27.691464Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:27.691488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:27.691495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:27.691598Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration TClient is connected to server localhost:30969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515167102 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:28.374203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:28.400695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:39:28.414430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766515168579 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515168425 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515167102 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 3... (TRUNCATED) 2025-12-23T18:39:28.680000Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:39:28.680029Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:39:28.680602Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:39:30.990906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124864300110740:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:30.990972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:32.210216Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766515168579, tx_id: 281474976715658 } } } 2025-12-23T18:39:32.210640Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:39:32.212309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:32.213798Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-12-23T18:39:32.213822Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 
281474976715659 2025-12-23T18:39:32.294079Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-12-23T18:39:32.294109Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2025-12-23T18:39:32.295195Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-12-23T18:39:32.478957Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7587124894364882949:2340] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:41:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-12-23T18:39:32.487344Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-12-23T18:39:32.487368Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2025-12-23T18:39:32.507366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) TClient::Ls request: /Root/Table 2025-12-23T18:39:32.532971Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-12-23T18:39:32.532998Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766515168579 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> KqpTx::EmptyTxOnCommit [GOOD] >> KqpTx::CommitStats >> KqpResultSetFormats::ArrowFormat_Simple >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadSubscription |93.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |93.8%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |93.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 >> KqpSinkMvcc::SnapshotExpiration [GOOD] >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit+IsOlap >> KqpTx::RollbackTx2 [GOOD] >> KqpTx::SnapshotRO >> TSubDomainTest::UserAttributes [GOOD] >> TSubDomainTest::UserAttributesApplyIf >> TSubDomainTest::LsLs [GOOD] >> TSubDomainTest::LsAltered >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink >> KqpSinkLocks::OlapVisibleUncommittedRows [GOOD] >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpService::CloseSessionsWithLoad [GOOD] Test command err: Trying to start YDB, gRPC: 15802, MsgBus: 4598 2025-12-23T18:39:10.959610Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124799926475928:2159];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:10.959859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00616a/r3tmp/tmppWGyKj/pdisk_1.dat 2025-12-23T18:39:11.479152Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:11.630353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:11.630488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:11.676914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:11.678887Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124799926475806:2081] 1766515150950847 != 1766515150950850 2025-12-23T18:39:11.706897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:11.710200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15802, node 1 2025-12-23T18:39:11.962872Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:12.067762Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:12.067784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:12.067789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:12.067863Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:12.174205Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4598 TClient is connected to server localhost:4598 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515151758 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:13.085863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:13.092647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:13.105050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:13.307518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:13.484231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:13.567298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:15.958898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124799926475928:2159];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:15.958966Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:16.236749Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124825696281449:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:16.236863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:16.237312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124825696281459:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:16.237356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:16.717390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:16.765088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:16.852538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:16.906882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:16.957484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:17.026912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:17.084433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:17.172537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:17.281888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124829991249631:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.281980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.282411Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124829991249636:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.282452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124829991249637:2490], DatabaseId: /R ... edSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":21}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":23}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":20,\"Plans\":[{\"PlanNodeId\":19,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":16,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":16}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":18}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":14,\"Plans\":[{\"PlanNodeId\":13,\"Plans\":[{\"PlanNodeId\":12,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":11,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":11}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":13}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":10,\"Plans\":[{\"PlanNodeId\":9,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":7,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":6,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":6}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node 
Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/EightShard\",\"reads\":[{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"}],\"writes\":[{\"columns\":[\"Key\",\"Text\"],\"type\":\"MultiUpsert\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Upsert\"},{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":7,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":10,\"Plans\":[{\"PlanNodeId\":11,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":13,\"Plans\":[{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":16,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node 
Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":17,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":20,\"Plans\":[{\"PlanNodeId\":21,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":22,\"Plans\":[{\"PlanNodeId\":23,\"Plans\":[{\"PlanNodeId\":25,\"Plans\":[{\"PlanNodeId\":26,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":27,\"Plans\":[{\"PlanNodeId\":28,\"Plans\":[{\"PlanNodeId\":30,\"Plans\":[{\"PlanNodeId\":31,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"c1b6243a-50a8f9f2-98e51632-18aa4f6e","version":"1.0"} 2025-12-23T18:39:27.997935Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7587124855761054169:2759], duration: 3.778382s 2025-12-23T18:39:27.997963Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7587124855761054169:2759], owner: [1:7587124825696281409:2385], status: SUCCESS, issues: , uid: c1b6243a-50a8f9f2-98e51632-18aa4f6e 2025-12-23T18:39:27.998197Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7587124842876152003:2624], status: SUCCESS, compileActor: [1:7587124855761054169:2759] 2025-12-23T18:39:27.998247Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7587124842876152003:2624], queryUid: c1b6243a-50a8f9f2-98e51632-18aa4f6e, status:SUCCESS received non-success status for session 1 received non-success status for session 15 received non-success status for session 2 received non-success status for session 6 received non-success status for session 7 received non-success status for session 4 received non-success status for session 9 received non-success status for session 12 received non-success status for session 18 received non-success status for session 
19 received non-success status for session 0 received non-success status for session 8 still compiling... 0 still active sessions ... 0 received non-success status for session 3 received non-success status for session 23 received non-success status for session 25 received non-success status for session 24 received non-success status for session 11 received non-success status for session 13 received non-success status for session 29 received non-success status for session 30 received non-success status for session 31 received non-success status for session 16 received non-success status for session 33 received non-success status for session 36 received non-success status for session 28 received non-success status for session 37 received non-success status for session 17 received non-success status for session 40 received non-success status for session 22 received non-success status for session 20 received non-success status for session 41 received non-success status for session 10 received non-success status for session 27 received non-success status for session 45 received non-success status for session 35 received non-success status for session 38 received non-success status for session 21 received non-success status for session 43 received non-success status for session 5 received non-success status for session 39 received non-success status for session 34 received non-success status for session 44 received non-success status for session 49 received non-success status for session 32 received non-success status for session 46 received non-success status for session 48 received non-success status for session 47 received non-success status for session received non-success status for session 14 received non-success status for session 26 42 |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TSubDomainTest::Boot-EnableRealSystemViewPaths-false [GOOD] >> TSubDomainTest::Boot-EnableRealSystemViewPaths-true >> StreamCreator::TopicAutoPartitioning [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec |93.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |93.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |93.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |93.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |93.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |93.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots >> KqpSinkMvcc::DirtyReads+IsOlap [GOOD] >> KqpSinkMvcc::DirtyReads-IsOlap >> TModifyUserTest::ModifyUser [GOOD] >> TModifyUserTest::ModifyLdapUser >> KqpResultSetFormats::ArrowFormat_EmptyBatch >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink >> TSubDomainTest::LsAltered [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::TopicAutoPartitioning [GOOD] Test command err: 2025-12-23T18:39:23.611443Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124854780447554:2084];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:23.612539Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057bc/r3tmp/tmpM8uVRC/pdisk_1.dat 2025-12-23T18:39:24.346878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:24.380624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:24.380733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:24.594954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:24.625459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:24.626292Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:24.626906Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:25.176197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2035 TServer::EnableGrpc on GrpcPort 4350, node 1 2025-12-23T18:39:25.527481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:25.527506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:25.527513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:25.527597Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515164722 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:26.054928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:26.062896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:26.067631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:26.239128Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-12-23T18:39:28.612357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124854780447554:2084];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:28.612425Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:29.846695Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7587124880550252351:2336] Failed entry at 'ResolveCdcStream': entry# { Path: TableId: [72057594046644480:39:0] RequestType: ByTableId Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-12-23T18:39:29.867318Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-12-23T18:39:29.867355Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2025-12-23T18:39:29.890282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:39:29.912240Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-12-23T18:39:29.912274Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table/Stream/streamImpl TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "streamImpl" PathId: 40 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515169881 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "streamImpl" PathId: 40 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/Root/Table/Strea... (TRUNCATED) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057bc/r3tmp/tmpwBGi7d/pdisk_1.dat 2025-12-23T18:39:31.094987Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:31.095118Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:31.133109Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124885314735121:2081] 1766515170968949 != 1766515170968952 2025-12-23T18:39:31.223793Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:31.223874Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:31.224195Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:31.233513Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:31.364913Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19749 TServer::EnableGrpc on GrpcPort 2041, node 2 2025-12-23T18:39:32.047531Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:32.047556Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:32.047565Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:32.047650Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:32.083015Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19749 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515171393 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:32.753214Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:32.767346Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:32.774084Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:32.850842Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-12-23T18:39:38.631261Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-12-23T18:39:38.631286Z node 2 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2025-12-23T18:39:38.660040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:39:38.699119Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-12-23T18:39:38.699143Z node 2 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table/Stream/streamImpl TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "streamImpl" PathId: 40 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515178582 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "streamImpl" PathId: 40 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "Stream" TopicPath: "/Root/Table/Strea... (TRUNCATED) |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery |93.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |93.8%| [LD] {RESULT} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |93.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |93.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |93.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |93.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |93.8%| [BN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/statistics_workload |93.8%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/statistics_workload |93.8%| [TA] $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSinkMvcc::OltpNamedStatement [GOOD] >> KqpSinkMvcc::OltpMultiSinksNoSinks |93.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |93.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |93.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD] Test command err: 2025-12-23T18:39:32.734447Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124891801834380:2152];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:32.734504Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003618/r3tmp/tmpAv3sb5/pdisk_1.dat 2025-12-23T18:39:33.218487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:33.218602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:33.225609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:33.273859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:33.363013Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:33.366898Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124891801834248:2081] 1766515172686853 != 1766515172686856 2025-12-23T18:39:33.467003Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:4919 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:39:33.562222Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587124891801834511:2105] Handle TEvNavigate describe path dc-1 2025-12-23T18:39:33.562283Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587124896096802111:2267] HANDLE EvNavigateScheme dc-1 2025-12-23T18:39:33.562396Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124891801834534:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:33.562520Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587124896096802024:2221][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587124891801834534:2118], cookie# 1 2025-12-23T18:39:33.564578Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124896096802052:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124896096802049:2221], cookie# 1 2025-12-23T18:39:33.564626Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124896096802053:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124896096802050:2221], cookie# 1 2025-12-23T18:39:33.564648Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124896096802054:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124896096802051:2221], cookie# 1 2025-12-23T18:39:33.564690Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124891801834216:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124896096802052:2221], cookie# 1 2025-12-23T18:39:33.564735Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124891801834222:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124896096802054:2221], cookie# 1 2025-12-23T18:39:33.567122Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124891801834219:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124896096802053:2221], cookie# 1 2025-12-23T18:39:33.567128Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124896096802052:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124891801834216:2049], cookie# 1 2025-12-23T18:39:33.567190Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124896096802054:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124891801834222:2055], cookie# 1 2025-12-23T18:39:33.567216Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124896096802053:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124891801834219:2052], cookie# 1 2025-12-23T18:39:33.567262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124896096802024:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124896096802049:2221], cookie# 1 2025-12-23T18:39:33.567815Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587124896096802024:2221][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:39:33.567881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124896096802024:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124896096802051:2221], cookie# 1 2025-12-23T18:39:33.567932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587124896096802024:2221][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:39:33.567991Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124896096802024:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124896096802050:2221], cookie# 1 2025-12-23T18:39:33.568006Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587124896096802024:2221][/dc-1] Sync cookie mismatch: sender# [1:7587124896096802050:2221], cookie# 1, current cookie# 0 2025-12-23T18:39:33.568098Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587124891801834534:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:39:33.574830Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587124891801834534:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587124896096802024:2221] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:33.574988Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587124891801834534:2118], cacheItem# { Subscriber: { Subscriber: [1:7587124896096802024:2221] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:39:33.577872Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587124896096802112:2268], recipient# [1:7587124896096802111:2267], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:39:33.577939Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587124896096802111:2267] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:33.631425Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587124896096802111:2267] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:39:33.635244Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587124896096802111:2267] Handle TEvDescribeSchemeResult Forward to# [1:7587124896096802110:2266] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ... 
ableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515178939 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... (TRUNCATED) TClient::Ls request: /dc-1 2025-12-23T18:39:39.446939Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7587124917739373907:2106] Handle TEvNavigate describe path /dc-1 2025-12-23T18:39:39.446981Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7587124922034341624:2338] HANDLE EvNavigateScheme /dc-1 2025-12-23T18:39:39.447089Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7587124917739373914:2109], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:39.447188Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7587124917739373939:2118][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7587124917739373914:2109], cookie# 4 2025-12-23T18:39:39.447246Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7587124917739373948:2118][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587124917739373945:2118], cookie# 4 2025-12-23T18:39:39.447263Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7587124917739373949:2118][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587124917739373946:2118], cookie# 4 2025-12-23T18:39:39.447277Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7587124917739373950:2118][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587124917739373947:2118], cookie# 4 2025-12-23T18:39:39.447303Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7587124917739373615:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587124917739373948:2118], cookie# 4 2025-12-23T18:39:39.447328Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7587124917739373618:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587124917739373949:2118], cookie# 4 2025-12-23T18:39:39.447345Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7587124917739373621:2055] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587124917739373950:2118], cookie# 4 2025-12-23T18:39:39.447393Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7587124917739373948:2118][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7587124917739373615:2049], cookie# 4 2025-12-23T18:39:39.447410Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7587124917739373949:2118][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7587124917739373618:2052], cookie# 4 2025-12-23T18:39:39.447425Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7587124917739373950:2118][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7587124917739373621:2055], cookie# 4 2025-12-23T18:39:39.447474Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7587124917739373939:2118][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7587124917739373945:2118], cookie# 4 2025-12-23T18:39:39.447499Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7587124917739373939:2118][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:39:39.447530Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7587124917739373939:2118][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7587124917739373946:2118], cookie# 4 2025-12-23T18:39:39.447552Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7587124917739373939:2118][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:39:39.447574Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7587124917739373939:2118][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7587124917739373947:2118], cookie# 4 2025-12-23T18:39:39.447588Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7587124917739373939:2118][/dc-1] Sync cookie mismatch: sender# [2:7587124917739373947:2118], cookie# 4, current cookie# 0 2025-12-23T18:39:39.447640Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7587124917739373914:2109], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:39:39.447724Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7587124917739373914:2109], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7587124917739373939:2118] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515178890 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:39.447809Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7587124917739373914:2109], cacheItem# { Subscriber: { Subscriber: [2:7587124917739373939:2118] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } 
Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515178890 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-12-23T18:39:39.448063Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7587124922034341625:2339], recipient# [2:7587124922034341624:2338], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:39:39.448110Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7587124922034341624:2338] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:39.448180Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7587124922034341624:2338] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:39:39.448832Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7587124922034341624:2338] Handle TEvDescribeSchemeResult Forward to# [2:7587124922034341623:2337] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515178890 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 
MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515178890 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515178939 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 ... (TRUNCATED) >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain |93.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |93.8%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |93.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts >> TPQTest::TestReadSubscription [GOOD] >> TSubDomainTest::Boot-EnableRealSystemViewPaths-true [GOOD] >> TSubDomainTest::CheckAccessCopyTable >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOltp >> TSubDomainTest::UserAttributesApplyIf [GOOD] >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] >> KqpSinkLocks::InsertWithBulkUpsert+UseBulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadSubscription [GOOD] Test command err: 2025-12-23T18:38:30.475337Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2025-12-23T18:38:30.559169Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:30.559264Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:30.559360Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:30.559461Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:182:2057] recipient: [1:14:2061] 
2025-12-23T18:38:30.607858Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:30.657763Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "test" Generation: 1 Important: false } Consumers { Name: "another-user" Generation: 1 Important: false } 2025-12-23T18:38:30.659414Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2025-12-23T18:38:30.671013Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:188:2142] 2025-12-23T18:38:30.677738Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:189:2142] 2025-12-23T18:38:30.679945Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:189:2142] 2025-12-23T18:38:30.686399Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [1:190:2142] 2025-12-23T18:38:30.688453Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:190:2142] 2025-12-23T18:38:30.708101Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:210:2207], now have 1 active actors on pipe 2025-12-23T18:38:30.708245Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:38:30.708294Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-12-23T18:38:30.708606Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2023: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2025-12-23T18:38:30.708653Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2023: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2025-12-23T18:38:30.708739Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][0][StateIdle] Received TPartition::TEvWrite 2025-12-23T18:38:30.708788Z node 1 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-12-23T18:38:30.708838Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:30.708898Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:30.708939Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:30.708990Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:30.709033Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:30.709106Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1252: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-12-23T18:38:30.709155Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:38:30.709443Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:212:2209], now have 1 active actors on pipe 2025-12-23T18:38:30.709580Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:38:30.709631Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-12-23T18:38:30.709733Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8ed48a36-afcc59b5-a182f819-ab182c7c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:38:30.709782Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:30.709815Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:38:30.709857Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:38:30.709891Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:30.709944Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:38:30.709998Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:38:30.710035Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:38:30.710072Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:30.710127Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-12-23T18:38:30.710251Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-12-23T18:38:30.710520Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:214:2211], now have 1 active actors on pipe 2025-12-23T18:38:30.710603Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:38:30.710695Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-12-23T18:38:30.710875Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2023: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2025-12-23T18:38:30.710915Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2023: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2025-12-23T18:38:30.710973Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][0][StateIdle] Received TPartition::TEvWrite 2025-12-23T18:38:30.711026Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:30.711056Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:38:30.711110Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:38:30.711136Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2025-12-23T18:38:30.711159Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:30.711194Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:38:30.711333Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 1 partNo 0 2025-12-23T18:38:30.712304Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 72 count 1 nextOffset 1 batches 1 2025-12-23T18:38:30.712412Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:38:30.712461Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 2 partNo 0 2025-12-23T18:38:30.712507Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 112 count 2 nextOffset 2 batches 1 2025-12-23T18:38:30.712533Z node 1 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2025-12-23T18:38:30.712563Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2025-12-23T18:38:30.712603Z node 1 :PERSQU ... n][4][StateInit] bootstrapping 4 [56:193:2142] 2025-12-23T18:39:41.397285Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][4][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [56:193:2142] 2025-12-23T18:39:41.453444Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:39:41.454054Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|60367393-2782ff03-d97f4089-3f119c7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:39:41.507078Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:41.609534Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:41.635228Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:41.735019Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:41.795002Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:41.855014Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:41.963052Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:41.979026Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:42.139019Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:42.220925Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:42.414372Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:42.519035Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:42.620520Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:39:42.621138Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|904702f4-63cf09e4-38d535e-30871b20_1 generated for partition 0 topic 
'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:39:42.667636Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:39:42.668350Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|60c45fdd-ffef5287-c16b9e21-4c066712_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:39:43.754321Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:108:2057] recipient: [57:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:108:2057] recipient: [57:106:2138] Leader for TabletID 72057594037927937 is [57:112:2142] sender: [57:113:2057] recipient: [57:106:2138] 2025-12-23T18:39:43.898897Z node 57 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:39:43.898966Z node 57 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:39:43.899015Z node 57 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:39:43.899074Z node 57 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [57:154:2057] recipient: [57:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [57:154:2057] recipient: [57:152:2172] Leader for TabletID 72057594037927938 is [57:158:2176] sender: [57:159:2057] recipient: [57:152:2172] Leader for TabletID 72057594037927937 is [57:112:2142] sender: [57:184:2057] recipient: [57:14:2061] 2025-12-23T18:39:43.946721Z node 57 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:39:43.948778Z node 57 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 60 actor [57:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 60 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 } Consumers { Name: "user1" Generation: 60 Important: true } 2025-12-23T18:39:43.950140Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [57:190:2142] 2025-12-23T18:39:43.960917Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [57:190:2142] 2025-12-23T18:39:43.968663Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: 
[72057594037927937][Partition][1][StateInit] bootstrapping 1 [57:191:2142] 2025-12-23T18:39:43.975343Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [57:191:2142] 2025-12-23T18:39:43.983391Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [57:192:2142] 2025-12-23T18:39:43.986028Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [57:192:2142] 2025-12-23T18:39:43.993416Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [57:193:2142] 2025-12-23T18:39:43.999551Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [57:193:2142] 2025-12-23T18:39:44.002664Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][4][StateInit] bootstrapping 4 [57:194:2142] 2025-12-23T18:39:44.009425Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][4][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [57:194:2142] 2025-12-23T18:39:44.034288Z node 57 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:39:44.034715Z node 57 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|88c301d4-423d7dfd-4ef60f21-420871e1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:39:44.055083Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:44.091108Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:44.167044Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:44.210628Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:44.290652Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:44.389549Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:44.410887Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:44.507056Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:44.651196Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:44.673669Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 
selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:44.895092Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:44.931226Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:39:45.123941Z node 57 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:39:45.124435Z node 57 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|91fd8f6d-957e01cd-c4a3cfab-470be0cf_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:39:45.140874Z node 57 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:39:45.141298Z node 57 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|20cc61e3-f8303cc5-6075f541-ef2c339_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap-UpdateAfterInsert [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp+UpdateAfterInsert |93.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |93.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |93.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] Test command err: 2025-12-23T18:39:30.052545Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124883231487617:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:30.052611Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003621/r3tmp/tmpdiDp5P/pdisk_1.dat 2025-12-23T18:39:30.470995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:30.655442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:30.655606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:30.663630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:30.770712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:39:30.779757Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:20377 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:39:31.086813Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587124883231487840:2119] Handle TEvNavigate describe path dc-1 2025-12-23T18:39:31.086868Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587124887526455646:2452] HANDLE EvNavigateScheme dc-1 2025-12-23T18:39:31.086979Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124883231487850:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:31.087073Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587124883231488093:2282][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587124883231487850:2124], cookie# 1 2025-12-23T18:39:31.089377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124883231488103:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124883231488100:2282], cookie# 1 2025-12-23T18:39:31.089445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124883231488104:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124883231488101:2282], cookie# 1 2025-12-23T18:39:31.089468Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124883231488105:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124883231488102:2282], cookie# 1 2025-12-23T18:39:31.089524Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124878936520229:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124883231488103:2282], cookie# 1 2025-12-23T18:39:31.089564Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124878936520232:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124883231488104:2282], cookie# 1 2025-12-23T18:39:31.089579Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124878936520235:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124883231488105:2282], cookie# 1 2025-12-23T18:39:31.089631Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124883231488103:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124878936520229:2050], cookie# 1 2025-12-23T18:39:31.089647Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124883231488104:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124878936520232:2053], cookie# 1 2025-12-23T18:39:31.089673Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124883231488105:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124878936520235:2056], cookie# 1 2025-12-23T18:39:31.089740Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124883231488093:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { 
Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124883231488100:2282], cookie# 1 2025-12-23T18:39:31.089789Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587124883231488093:2282][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:39:31.089825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124883231488093:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124883231488101:2282], cookie# 1 2025-12-23T18:39:31.089866Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587124883231488093:2282][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:39:31.089899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124883231488093:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124883231488102:2282], cookie# 1 2025-12-23T18:39:31.089912Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587124883231488093:2282][/dc-1] Sync cookie mismatch: sender# [1:7587124883231488102:2282], cookie# 1, current cookie# 0 2025-12-23T18:39:31.089964Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587124883231487850:2124], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:39:31.101703Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587124883231487850:2124], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587124883231488093:2282] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:31.101869Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587124883231487850:2124], cacheItem# { Subscriber: { Subscriber: [1:7587124883231488093:2282] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:39:31.111475Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:31.117116Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587124887526455647:2453], recipient# [1:7587124887526455646:2452], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 
72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:39:31.117224Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587124887526455646:2452] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:31.203614Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587124887526455646:2452] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:39:31.212613Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587124887526455646:2452] Handle TEvDescribeSchemeResult Forward to# [1:7587124887526455645:2451] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Childr ... 
rsion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:43.001044Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7587124936401368537:2552][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587124919221498372:2050] 2025-12-23T18:39:43.001062Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7587124936401368538:2552][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587124919221498375:2053] 2025-12-23T18:39:43.001079Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7587124936401368539:2552][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587124919221498378:2056] 2025-12-23T18:39:43.001108Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587124936401368526:2552][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587124936401368534:2552] 2025-12-23T18:39:43.001132Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587124936401368526:2552][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587124936401368535:2552] 2025-12-23T18:39:43.001153Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7587124936401368526:2552][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7587124923516465977:2118], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:43.001171Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587124936401368526:2552][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587124936401368536:2552] 2025-12-23T18:39:43.001191Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7587124936401368526:2552][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7587124923516465977:2118], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:43.001940Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7587124923516465977:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-12-23T18:39:43.001983Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7587124923516465977:2118], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { 
Subscriber: [3:7587124936401368526:2552] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:43.002038Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587124923516465977:2118], cacheItem# { Subscriber: { Subscriber: [3:7587124936401368526:2552] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:43.002101Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587124936401368540:2554], recipient# [3:7587124936401368513:2302], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:43.002154Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587124919221498372:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587124936401368537:2552] 2025-12-23T18:39:43.002167Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587124919221498375:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587124936401368538:2552] 2025-12-23T18:39:43.002177Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587124919221498378:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587124936401368539:2552] 2025-12-23T18:39:43.018934Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587124923516465977:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:43.019041Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587124923516465977:2118], cacheItem# { Subscriber: { Subscriber: [3:7587124927811433868:2535] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: 
Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:43.019125Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587124940696335838:2555], recipient# [3:7587124940696335837:2307], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:43.982874Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587124919221498663:2263];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:43.982967Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:44.003178Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587124923516465977:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:44.003302Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587124923516465977:2118], cacheItem# { Subscriber: { Subscriber: [3:7587124936401368517:2550] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:44.003379Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587124944991303145:2559], recipient# [3:7587124944991303144:2308], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:44.023235Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587124923516465977:2118], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:44.023361Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587124923516465977:2118], cacheItem# { Subscriber: { 
Subscriber: [3:7587124927811433868:2535] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:44.023475Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587124944991303147:2560], recipient# [3:7587124944991303146:2309], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyLdapUser [GOOD] >> TModifyUserTest::ModifyUserIsEnabled >> KqpTx::CommitStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] Test command err: 2025-12-23T18:39:31.719359Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124890589360500:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:31.766265Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00361d/r3tmp/tmpXU4iY2/pdisk_1.dat 2025-12-23T18:39:32.299984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:32.300081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:32.302808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:32.324696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:32.465699Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:32.470878Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124890589360463:2081] 1766515171601312 != 1766515171601315 2025-12-23T18:39:32.536895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:39:32.731098Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:26891 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:39:32.816673Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587124890589360596:2091] Handle TEvNavigate describe path dc-1 2025-12-23T18:39:32.816741Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587124894884328334:2270] HANDLE EvNavigateScheme dc-1 2025-12-23T18:39:32.816845Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124890589360732:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:32.816956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587124894884328227:2217][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587124890589360732:2117], cookie# 1 2025-12-23T18:39:32.818661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124894884328262:2217][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124894884328259:2217], cookie# 1 2025-12-23T18:39:32.818702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124894884328263:2217][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124894884328260:2217], cookie# 1 2025-12-23T18:39:32.818716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124894884328264:2217][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124894884328261:2217], cookie# 1 2025-12-23T18:39:32.818760Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124890589360431:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124894884328262:2217], cookie# 1 2025-12-23T18:39:32.818786Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124890589360434:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124894884328263:2217], cookie# 1 2025-12-23T18:39:32.818811Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124890589360437:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124894884328264:2217], cookie# 1 2025-12-23T18:39:32.818870Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124894884328262:2217][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124890589360431:2049], cookie# 1 2025-12-23T18:39:32.818893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124894884328263:2217][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124890589360434:2052], cookie# 1 2025-12-23T18:39:32.818907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124894884328264:2217][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124890589360437:2055], cookie# 1 2025-12-23T18:39:32.818946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124894884328227:2217][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124894884328259:2217], cookie# 1 2025-12-23T18:39:32.818968Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587124894884328227:2217][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:39:32.818996Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124894884328227:2217][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124894884328260:2217], cookie# 1 2025-12-23T18:39:32.819073Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587124894884328227:2217][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:39:32.819109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124894884328227:2217][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124894884328261:2217], cookie# 1 2025-12-23T18:39:32.819120Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587124894884328227:2217][/dc-1] Sync cookie mismatch: sender# [1:7587124894884328261:2217], cookie# 1, current cookie# 0 2025-12-23T18:39:32.819169Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587124890589360732:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:39:32.825700Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587124890589360732:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587124894884328227:2217] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:32.831179Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587124890589360732:2117], cacheItem# { Subscriber: { Subscriber: [1:7587124894884328227:2217] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:39:32.833825Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587124894884328335:2271], recipient# [1:7587124894884328334:2270], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:39:32.833894Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587124894884328334:2270] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:32.911107Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587124894884328334:2270] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:39:32.921705Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587124894884328334:2270] Handle TEvDescribeSchemeResult Forward to# [1:7587124894884328333:2269] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion ... 
:39:39.394199Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710662 2025-12-23T18:39:39.394212Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710662, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-12-23T18:39:39.394225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-12-23T18:39:39.394269Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710662, subscribers: 0 2025-12-23T18:39:39.394641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710662 waiting... 2025-12-23T18:39:39.419389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1/USER_0 2025-12-23T18:39:39.421542Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7587124920204487327:2090] Handle TEvNavigate describe path /dc-1/USER_0 2025-12-23T18:39:39.421583Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7587124924499455281:2367] HANDLE EvNavigateScheme /dc-1/USER_0 2025-12-23T18:39:39.421681Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7587124920204487562:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:39.421759Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7587124924499455203:2312][/dc-1/USER_0] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7587124920204487562:2114], cookie# 10 2025-12-23T18:39:39.421814Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7587124924499455207:2312][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7587124924499455204:2312], cookie# 10 2025-12-23T18:39:39.421838Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7587124924499455208:2312][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7587124924499455205:2312], cookie# 10 2025-12-23T18:39:39.421861Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7587124924499455209:2312][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7587124924499455206:2312], cookie# 10 2025-12-23T18:39:39.421887Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7587124920204487262:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7587124924499455207:2312], cookie# 10 2025-12-23T18:39:39.421909Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7587124920204487265:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7587124924499455208:2312], cookie# 10 
2025-12-23T18:39:39.421930Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7587124920204487268:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7587124924499455209:2312], cookie# 10 2025-12-23T18:39:39.421975Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7587124924499455207:2312][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7587124920204487262:2049], cookie# 10 2025-12-23T18:39:39.421993Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7587124924499455208:2312][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7587124920204487265:2052], cookie# 10 2025-12-23T18:39:39.422009Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7587124924499455209:2312][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7587124920204487268:2055], cookie# 10 2025-12-23T18:39:39.422042Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7587124924499455203:2312][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7587124924499455204:2312], cookie# 10 2025-12-23T18:39:39.422071Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7587124924499455203:2312][/dc-1/USER_0] Sync is in progress: cookie# 10, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:39:39.422098Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7587124924499455203:2312][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7587124924499455205:2312], cookie# 10 2025-12-23T18:39:39.422121Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7587124924499455203:2312][/dc-1/USER_0] Sync is done in the ring group: cookie# 10, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:39:39.422145Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7587124924499455203:2312][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7587124924499455206:2312], cookie# 10 2025-12-23T18:39:39.422157Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7587124924499455203:2312][/dc-1/USER_0] Sync cookie mismatch: sender# [2:7587124924499455206:2312], cookie# 10, current cookie# 0 2025-12-23T18:39:39.422197Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7587124920204487562:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-12-23T18:39:39.422275Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7587124920204487562:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7587124924499455203:2312] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1766515179184 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:39.422338Z node 2 :TX_PROXY_SCHEME_CACHE 
DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7587124920204487562:2114], cacheItem# { Subscriber: { Subscriber: [2:7587124924499455203:2312] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1766515179184 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2025-12-23T18:39:39.422479Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7587124924499455282:2368], recipient# [2:7587124924499455281:2367], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:39:39.422510Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7587124924499455281:2367] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:39.422565Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7587124924499455281:2367] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2025-12-23T18:39:39.423128Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7587124924499455281:2367] Handle TEvDescribeSchemeResult Forward to# [2:7587124924499455280:2366] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515179184 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 
ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515179184 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1... (TRUNCATED) |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 >> TSubDomainTest::CreateTabletForUnknownDomain [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain >> KqpSnapshotIsolation::ConflictWrite-IsOlap-FillTables >> KqpResultSetFormats::ValueFormat_Simple [GOOD] >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [FAIL] >> KqpResultSetFormats::DefaultFormat [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always >> KqpResultSetFormats::ArrowFormat_Simple [GOOD] >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitStats [GOOD] Test command err: Trying to start YDB, gRPC: 26888, MsgBus: 14629 2025-12-23T18:39:14.375542Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124815908984317:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:14.375625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00411a/r3tmp/tmpYcsUK0/pdisk_1.dat 2025-12-23T18:39:14.971649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:14.971743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:15.139475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:15.230456Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:15.231294Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26888, node 1 2025-12-23T18:39:15.407968Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:15.450807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:15.475065Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:15.478997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:15.479034Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:15.483099Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14629 TClient is connected to server localhost:14629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515155230 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:16.440178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:16.447739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:16.523704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:16.787117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:17.013910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:17.117839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:19.379029Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124815908984317:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:19.379106Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:19.668567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124837383822630:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.668665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.669148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124837383822640:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.669204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.135044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:20.207245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:20.264698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:20.303539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:20.333966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:20.372539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:20.449976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:20.512341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:20.623136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124841678790819:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.623235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.623626Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124841678790824:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.623675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124841678790825:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.624036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18 ... Notification cookie mismatch for subscription [3:7587124915347789957:2081] 1766515177378894 != 1766515177378897 2025-12-23T18:39:37.773121Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:37.784729Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64338, node 3 2025-12-23T18:39:38.100174Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:38.161545Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:38.161575Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:38.161582Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:38.161658Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:38.405235Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:9676 TClient is connected to server localhost:9676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515178169 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:39:39.080057Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:39:39.087442Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:39.095556Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:39.265072Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:39.585574Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:39.687606Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:42.402193Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587124915347790019:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:42.402260Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:44.287643Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124945412562903:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.287734Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.288234Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124945412562913:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.288284Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.486149Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.585329Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.653252Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.730068Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.785972Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.883950Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.942057Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:45.040335Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:45.197404Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124949707531104:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:45.197517Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:45.198045Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124949707531109:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:45.198094Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124949707531110:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:45.198232Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:45.202705Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:45.216742Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587124949707531113:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:39:45.307056Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124949707531166:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpTx::CommitRequired >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] >> KqpSinkMvcc::UpdateColumns+IsOlap [GOOD] >> KqpSinkMvcc::UpdateColumns-IsOlap >> KqpSinkLocks::EmptyRangeOlap [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBroken >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked >> TSubDomainTest::GenericCases [GOOD] >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] >> KqpSinkMvcc::TxDeleteOwnUncommitted+IsOlap [GOOD] >> KqpSinkMvcc::TxDeleteOwnUncommitted-IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] Test command err: Trying to start YDB, gRPC: 15772, MsgBus: 14881 2025-12-23T18:39:13.047013Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124814233649288:2255];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:13.047078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004122/r3tmp/tmpTEZWeY/pdisk_1.dat 2025-12-23T18:39:13.467202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:13.487870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:13.487950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:13.588201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:13.648328Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15772, node 1 2025-12-23T18:39:13.654868Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124809938681773:2081] 1766515152989919 != 1766515152989922 2025-12-23T18:39:13.828930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:13.895388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:13.895409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:13.895416Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:13.895510Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:14.045655Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:14881 TClient is connected to server localhost:14881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515153669 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:39:14.977258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:39:14.985081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:17.500368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124831413519140:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.500471Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.501764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124831413519150:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.501857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.502144Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124831413519154:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.506547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:17.521129Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124831413519156:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:17.598466Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124831413519207:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:18.048543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124814233649288:2255];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:18.069490Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:18.084245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:18.273729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:19.751136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 22096, MsgBus: 9174 2025-12-23T18:39:24.904464Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124858084430725:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:24.904529Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004122/r3tmp/tmpKfj4Tu/pdisk_1.dat 2025-12-23T18:39:25.047257Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:25.193887Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:25.193972Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:25.213080Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:25.214889Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124858084430702:2081] 1766515164885826 != 1766515164885829 2025-12-23T18:39:25.267628Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:25.322201Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 22096, node 2 2025-12-23T18:39:25.553865Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:25.553887Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:25.553894Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:25.553968Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9174 2025-12-23T18:39:26.002600Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:9174 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist ... 25-12-23T18:39:35.615209Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710666; 2025-12-23T18:39:35.627630Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [2:7587124905329079952:2965], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [2:7587124901034112584:2965]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7587124905329079952:2965].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-12-23T18:39:35.628497Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [2:7587124905329079945:2965], SessionActorId: [2:7587124901034112584:2965], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7587124901034112584:2965]. 2025-12-23T18:39:35.628759Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=2&id=N2M0ZjJhOGEtNjQwYWJjMjEtMWU0M2JlZWEtOWQ3OWU5ODA=, ActorId: [2:7587124901034112584:2965], ActorState: ExecuteState, TraceId: 01kd684vya11jx5j91egc9xnyh, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7587124905329079946:2965] from: [2:7587124905329079945:2965] 2025-12-23T18:39:35.628913Z node 2 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [2:7587124905329079946:2965] TxId: 281474976710666. Ctx: { TraceId: 01kd684vya11jx5j91egc9xnyh, Database: /Root, SessionId: ydb://session/3?node_id=2&id=N2M0ZjJhOGEtNjQwYWJjMjEtMWU0M2JlZWEtOWQ3OWU5ODA=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-12-23T18:39:35.629329Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=N2M0ZjJhOGEtNjQwYWJjMjEtMWU0M2JlZWEtOWQ3OWU5ODA=, ActorId: [2:7587124901034112584:2965], ActorState: ExecuteState, TraceId: 01kd684vya11jx5j91egc9xnyh, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } Trying to start YDB, gRPC: 23350, MsgBus: 3518 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004122/r3tmp/tmpmSwR5H/pdisk_1.dat 2025-12-23T18:39:37.965907Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:37.978998Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:38.076082Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:38.078841Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587124915971175949:2081] 1766515177906028 != 1766515177906031 2025-12-23T18:39:38.097868Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:38.097969Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:38.126540Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23350, node 3 2025-12-23T18:39:38.227227Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:38.252571Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:38.252598Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:38.252607Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:38.252725Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3518 TClient is connected to server localhost:3518 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515178197 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:38.845190Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:38.853826Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:38.953388Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:42.468044Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124937446013297:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:42.468170Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:42.470490Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124937446013324:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:42.470555Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124937446013325:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:42.470635Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:42.475036Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:42.490080Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587124937446013328:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:42.554904Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124937446013379:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:42.701249Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:42.837024Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.500881Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:49.374285Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [3:7587124967510792565:2963], SessionActorId: [3:7587124954625890493:2963], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 . sessionActorId=[3:7587124954625890493:2963]. 2025-12-23T18:39:49.374830Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=3&id=OGE0NDYyZjQtMzUyZGJhYzItNmZiMTJjZDMtYTA1N2JkY2E=, ActorId: [3:7587124954625890493:2963], ActorState: ExecuteState, TraceId: 01kd68597b04qwpd3sj5nyf5qt, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7587124967510792566:2963] from: [3:7587124967510792565:2963] 2025-12-23T18:39:49.374914Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [3:7587124967510792566:2963] TxId: 281474976710666. Ctx: { TraceId: 01kd68597b04qwpd3sj5nyf5qt, Database: /Root, SessionId: ydb://session/3?node_id=3&id=OGE0NDYyZjQtMzUyZGJhYzItNmZiMTJjZDMtYTA1N2JkY2E=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 } 2025-12-23T18:39:49.375306Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=OGE0NDYyZjQtMzUyZGJhYzItNmZiMTJjZDMtYTA1N2JkY2E=, ActorId: [3:7587124954625890493:2963], ActorState: ExecuteState, TraceId: 01kd68597b04qwpd3sj5nyf5qt, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV2`" issue_code: 2001 severity: 1 } |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration-withSink >> KqpTx::SnapshotRO [GOOD] >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoInsertOlap [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoInsertOltp ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 16160, MsgBus: 24058 2025-12-23T18:39:14.752736Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124816373825904:2175];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:14.753071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:14.796512Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639248 Duration# 0.005561s 2025-12-23T18:39:14.799548Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004117/r3tmp/tmpXIDOhz/pdisk_1.dat 2025-12-23T18:39:15.330850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:15.371283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:15.371385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:15.529394Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:15.542096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:15.642856Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 16160, node 1 2025-12-23T18:39:15.807114Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:15.939471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:15.939507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:15.939516Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:15.940140Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24058 TClient is connected to server localhost:24058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515155601 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:39:17.070185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:39:17.104568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:17.296336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:17.508802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:17.652148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:19.753437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124816373825904:2175];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:19.753526Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:20.494302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124842143631414:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.494431Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.494973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124842143631424:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.495033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:21.099412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.188991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.255184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.305294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.356417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.452782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.556030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.690133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.869706Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124846438599595:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:21.869806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:21.870301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124846438599600:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:21.870404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124846438599601:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:21.870466Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Fa ... butable configuration TClient is connected to server localhost:1521 2025-12-23T18:39:39.365086Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1521 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515178652 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:39.444499Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:39.459300Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:39.477646Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:39.556266Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:39.751480Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:39.960389Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:42.720450Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124936304532054:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:42.720548Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:42.730954Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124936304532064:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:42.731075Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:42.945667Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:43.029339Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:43.137695Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:43.211905Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:43.290073Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:43.391686Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:43.475645Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:43.562364Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:43.703301Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124940599500239:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:43.703421Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:43.703890Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124940599500244:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:43.703935Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124940599500245:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:43.703969Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:43.708854Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:43.738748Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587124940599500248:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:39:43.808722Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124940599500299:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:49.809103Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [3:7587124966369304527:2535], SessionActorId: [3:7587124953484402519:2535], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/EightShard`, code: 2001 . sessionActorId=[3:7587124953484402519:2535]. 2025-12-23T18:39:49.809340Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=3&id=MzBmNDg5NmMtNGQ3ZjM1MTUtZTE1OWZmNmQtM2Q3NjkwOTY=, ActorId: [3:7587124953484402519:2535], ActorState: ExecuteState, TraceId: 01kd6859jz16yq0pyaervd2kw3, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7587124966369304528:2535] from: [3:7587124966369304527:2535] 2025-12-23T18:39:49.809410Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [3:7587124966369304528:2535] TxId: 281474976710675. Ctx: { TraceId: 01kd6859jz16yq0pyaervd2kw3, Database: /Root, SessionId: ydb://session/3?node_id=3&id=MzBmNDg5NmMtNGQ3ZjM1MTUtZTE1OWZmNmQtM2Q3NjkwOTY=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/EightShard`, code: 2001 } 2025-12-23T18:39:49.809762Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=MzBmNDg5NmMtNGQ3ZjM1MTUtZTE1OWZmNmQtM2Q3NjkwOTY=, ActorId: [3:7587124953484402519:2535], ActorState: ExecuteState, TraceId: 01kd6859jz16yq0pyaervd2kw3, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/EightShard`" issue_code: 2001 severity: 1 } |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpQueryService::CloseSessionsWithLoad [GOOD] >> KqpTx::ExplicitTcl >> KqpSinkMvcc::DirtyReads-IsOlap [GOOD] >> KqpSinkMvcc::ChangeFromTheFuture+IsOlap >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit+IsOlap [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD] Test command err: 2025-12-23T18:39:33.434853Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124900015961911:2083];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:33.459665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:33.566969Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.006447s test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003616/r3tmp/tmpYMJCJu/pdisk_1.dat 2025-12-23T18:39:33.882883Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:33.933540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:33.933667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:33.940739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:34.064114Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:34.149546Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:2794 WaitRootIsUp 'dc-1'... 
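The ABORTED status a few records above ("Transaction locks invalidated. Table: `/Root/EightShard`, code: 2001") is YDB's optimistic-lock conflict: a concurrent write changed rows the transaction depended on, so the commit is rejected and the client is expected to retry. Below is a minimal, hedged sketch of retrying such a transaction with the ydb Python SDK; the column names are assumptions made for the illustration, and the exact set of statuses retried by the helper depends on the SDK version and retry settings.

    # Hedged sketch: retrying a transaction that may be ABORTED by a lock conflict.
    import ydb

    def upsert_with_retry(endpoint: str, database: str) -> None:
        driver = ydb.Driver(endpoint=endpoint, database=database)
        driver.wait(timeout=5)
        pool = ydb.SessionPool(driver)

        def callee(session):
            tx = session.transaction(ydb.SerializableReadWrite())
            # Column names below are assumed for the example, not taken from the test.
            tx.execute("UPSERT INTO EightShard (Key, Data) VALUES (1, 1);",
                       commit_tx=True)

        # retry_operation_sync re-runs callee on retriable statuses such as ABORTED.
        pool.retry_operation_sync(callee)
        driver.stop()
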
TClient::Ls request: dc-1 2025-12-23T18:39:34.468357Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124900015962146:2134], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:34.468441Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7587124900015962146:2134], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-12-23T18:39:34.468661Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7587124904310929915:2449][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:39:34.470710Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587124900015961803:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7587124904310929919:2449] 2025-12-23T18:39:34.470732Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587124900015961803:2051] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-12-23T18:39:34.470827Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587124900015961803:2051] Subscribe: subscriber# [1:7587124904310929919:2449], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:39:34.470895Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587124900015961806:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7587124904310929920:2449] 2025-12-23T18:39:34.470905Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587124900015961806:2054] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-12-23T18:39:34.470927Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587124900015961806:2054] Subscribe: subscriber# [1:7587124904310929920:2449], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:39:34.470966Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587124900015961809:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7587124904310929921:2449] 2025-12-23T18:39:34.470981Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587124900015961809:2057] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-12-23T18:39:34.471007Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587124900015961809:2057] Subscribe: subscriber# [1:7587124904310929921:2449], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:39:34.471052Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7587124904310929919:2449][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# 
[1:7587124900015961803:2051] 2025-12-23T18:39:34.471102Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7587124904310929920:2449][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7587124900015961806:2054] 2025-12-23T18:39:34.471121Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7587124904310929921:2449][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7587124900015961809:2057] 2025-12-23T18:39:34.471220Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587124900015961803:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587124904310929919:2449] 2025-12-23T18:39:34.471242Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587124900015961806:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587124904310929920:2449] 2025-12-23T18:39:34.471257Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587124900015961809:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587124904310929921:2449] 2025-12-23T18:39:34.471479Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7587124904310929915:2449][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7587124904310929916:2449] 2025-12-23T18:39:34.484783Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7587124904310929915:2449][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7587124904310929917:2449] 2025-12-23T18:39:34.484879Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587124904310929915:2449][/dc-1/.metadata/initialization/migrations] Set up state: owner# [1:7587124900015962146:2134], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:34.484997Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587124900015962146:2134], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-12-23T18:39:34.485099Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587124900015962146:2134], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7587124904310929915:2449] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:34.487035Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7587124904310929915:2449][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7587124904310929918:2449] 2025-12-23T18:39:34.487132Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7587124904310929915:2449][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [1:7587124900015962146:2134], state# { Deleted: 1 Strong: 1 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:34.496173Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587124900015962146:2134], cacheItem# { Subscriber: { Subscriber: [1:7587124904310929915:2449] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:34.496351Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587124904310929922:2450], recipient# [1:7587124904310929914:2286], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:34.497742Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587124900015962128:2125] Handle TEvNavigate describe path dc-1 2025-12-23T18:39:34.497787Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587124904310929924:2452] HANDLE EvNavigateScheme dc-1 2025-12-23T18:39:34.497868Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124900015962146:2134], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:34.497956Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587124900015962401:2302][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587124900015962146:2134], cookie# 1 2025-12-23T18:39:34.498079Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124900015962444:2302][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124900015962441:2302], cookie# 1 2025-12-23T18:39:34.498102Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124900015962445:2302][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124900015962442:2302], cookie# 1 2025-12-23T18:39:34.498119Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [re ... 
: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:48.989718Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [4:7587124945199639602:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-12-23T18:39:48.989777Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [4:7587124945199639602:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7587124962379510036:3017] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:48.989867Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587124945199639602:2127], cacheItem# { Subscriber: { Subscriber: [4:7587124962379510036:3017] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:48.989948Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587124962379510056:3019], recipient# [4:7587124962379510033:2325], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:49.774554Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587124945199639565:2241];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:49.774850Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:49.781466Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7587124945199639602:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:49.781616Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587124945199639602:2127], cacheItem# { Subscriber: { Subscriber: [4:7587124949494607422:2456] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist 
Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:49.781745Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587124966674477360:3020], recipient# [4:7587124966674477359:2326], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:49.991310Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7587124945199639602:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:49.991496Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587124945199639602:2127], cacheItem# { Subscriber: { Subscriber: [4:7587124962379510036:3017] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:49.991606Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587124966674477367:3026], recipient# [4:7587124966674477366:2327], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:50.775780Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7587124945199639602:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:50.775952Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587124945199639602:2127], cacheItem# { Subscriber: { Subscriber: [4:7587124949494607422:2456] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 
0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:50.776052Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587124970969444673:3029], recipient# [4:7587124970969444672:2328], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:50.785730Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7587124945199639602:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:50.785901Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587124945199639602:2127], cacheItem# { Subscriber: { Subscriber: [4:7587124949494607422:2456] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:50.785998Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587124970969444675:3030], recipient# [4:7587124970969444674:2329], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:50.993469Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7587124945199639602:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:50.993638Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587124945199639602:2127], cacheItem# { Subscriber: { Subscriber: [4:7587124962379510036:3017] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:50.993730Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587124970969444680:3034], recipient# [4:7587124970969444679:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink >> KqpResultSetFormats::ArrowFormat_EmptyBatch [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap >> KqpSnapshotIsolation::TSimpleOlap [GOOD] >> KqpSnapshotIsolation::TReadOwnChangesOltp |93.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |93.8%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |93.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut >> TModifyUserTest::ModifyUserIsEnabled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotRO [GOOD] Test command err: Trying to start YDB, gRPC: 3311, MsgBus: 7823 2025-12-23T18:39:14.975453Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124814321992848:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:14.975863Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:15.041576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004112/r3tmp/tmpUFJ45m/pdisk_1.dat 2025-12-23T18:39:15.774021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:15.774212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:15.774304Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:15.775638Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:15.781271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3311, node 1 
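The "TServer::EnableGrpc on GrpcPort 3311" record above is where the in-process test server starts listening; the TClient connections that follow talk to that port. A hedged sketch of connecting an external client to such an endpoint with the ydb Python SDK (the port and database are taken from this log but should be treated as placeholders):

    # Hedged sketch: connecting a client to the gRPC endpoint advertised above.
    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:3311", database="/Root")
    driver.wait(timeout=5)                               # block until discovery completes
    print(driver.scheme_client.list_directory("/Root"))  # simple sanity check
    driver.stop()
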
2025-12-23T18:39:15.986978Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:16.064314Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:16.067533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:16.067559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:16.067570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:16.067646Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7823 TClient is connected to server localhost:7823 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515155825 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:17.334287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:17.341712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:17.352000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:17.541920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:17.777999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:17.914526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:19.974247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124814321992848:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:19.974379Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:20.828194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124840091798383:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.828313Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.829595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124840091798393:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.829673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:21.269505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.339932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.398996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.469395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.510590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.596980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.672301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.810106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.986660Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124844386766569:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:21.986754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:21.987167Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124844386766574:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:21.987210Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124844386766575:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:21.987239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_wor ... cPort 23195, node 3 2025-12-23T18:39:38.768350Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:38.768379Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:38.768387Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:38.768479Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:38.984780Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30558 2025-12-23T18:39:39.140118Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:30558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515178547 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:40.048952Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:40.055638Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:40.061696Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:40.313473Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:41.062950Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:41.266999Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:46.526348Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124951963426496:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:46.526439Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:46.526947Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124951963426505:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:46.527010Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:46.696351Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:46.752092Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:46.819404Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:46.898426Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:47.015197Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:47.111603Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:47.195610Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:47.294996Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:47.460598Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124956258394685:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:47.460702Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:47.461329Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124956258394690:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:47.461385Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124956258394691:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:47.461644Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:47.465586Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:47.485491Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-12-23T18:39:47.486713Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587124956258394694:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:39:47.581125Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124956258394747:3806] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:52.074035Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=ZWNiMmU0ODMtNWI4YmQ4ODMtZTEwYjA2NzYtOWQyN2U1YQ==, ActorId: [3:7587124969143296952:2541], ActorState: ExecuteState, TraceId: 01kd685c4g8gcypg5tx6rjexv6, Create QueryResponse for error on request, msg: , status: GENERIC_ERROR, issues: { position { row: 3 column: 25 } message: "Operation \'Upsert\' can\'t be performed in read only transaction" end_position { row: 3 column: 25 } issue_code: 2008 severity: 1 }
:3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltp [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] Test command err: Trying to start YDB, gRPC: 10878, MsgBus: 25717 ... waiting for SysViewsRoster update finished 2025-12-23T18:38:48.172893Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:38:48.297925Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:38:48.310378Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:38:48.310820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:38:48.310955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:48.623831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:48.623966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:48.714447Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515124448597 != 1766515124448601 2025-12-23T18:38:48.725172Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:48.770759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:48.939825Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004153/r3tmp/tmplDlCqf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10878, node 1 2025-12-23T18:38:49.349310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:38:49.349393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:38:49.349431Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:38:49.349748Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:38:49.361265Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25717 TClient is connected to server localhost:25717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:38:49.811424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:38:49.821720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:38:49.844665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:50.205069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:50.613200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:50.943714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:38:51.994215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1906:3513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:51.994790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:51.995768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1980:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:51.995863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:52.032243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:52.257130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:52.530363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:52.853164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:53.155300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:53.503736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:53.829109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:54.159477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:38:54.617720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4170], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:54.617857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:54.618349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:54.618461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:38:54.618533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [T ... 10662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.662815Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038030;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.686965Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.687029Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.687048Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.719677Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038036;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.726338Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038036;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.726374Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038036;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.746159Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038038;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.746236Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038038;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.746272Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038038;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.764697Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.764770Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.764814Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.775400Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.775473Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.775492Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.785647Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.785712Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.785730Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.813755Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.813826Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.813849Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.831735Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.831810Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.831831Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.849743Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.849835Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.849860Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.861633Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.861707Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.861730Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.870333Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038032;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.870391Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038032;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.870412Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038032;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.887584Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.887654Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.887698Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.888722Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.888791Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:46.888812Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:47.066003Z node 4 
:KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd684gb6azjt8zj72hvxv1c5", SessionId: ydb://session/3?node_id=4&id=MWQwODAxN2MtZWIyMmViNGYtYTA3Y2JhZTYtY2Q4NzQzYmQ=, Slow query, duration: 18.383152s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CheckAccessCopyTable [GOOD] Test command err: 2025-12-23T18:39:31.459011Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124887414176929:2167];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:31.459371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00361e/r3tmp/tmpYSO45w/pdisk_1.dat 2025-12-23T18:39:32.092260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:32.092376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:32.093583Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:32.104470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:32.281458Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2212} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.143927s 2025-12-23T18:39:32.281528Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.144015s 2025-12-23T18:39:32.307956Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:32.310932Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124887414176787:2081] 1766515171365554 != 1766515171365557 2025-12-23T18:39:32.398900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:39:32.486864Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24282 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:39:32.766939Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587124887414177056:2107] Handle TEvNavigate describe path dc-1 2025-12-23T18:39:32.766981Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587124891709144666:2272] HANDLE EvNavigateScheme dc-1 2025-12-23T18:39:32.767070Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124887414177079:2120], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:32.767150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587124891709144565:2220][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587124887414177079:2120], cookie# 1 2025-12-23T18:39:32.768762Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124891709144591:2220][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124891709144588:2220], cookie# 1 2025-12-23T18:39:32.768806Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124891709144592:2220][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124891709144589:2220], cookie# 1 2025-12-23T18:39:32.768828Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124891709144593:2220][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124891709144590:2220], cookie# 1 2025-12-23T18:39:32.768863Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124887414176761:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124891709144593:2220], cookie# 1 2025-12-23T18:39:32.768916Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124891709144593:2220][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124887414176761:2055], cookie# 1 2025-12-23T18:39:32.768968Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124891709144565:2220][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124891709144590:2220], cookie# 1 2025-12-23T18:39:32.769040Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587124891709144565:2220][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:39:32.769064Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124887414176755:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124891709144591:2220], cookie# 1 2025-12-23T18:39:32.769098Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124887414176758:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124891709144592:2220], cookie# 1 2025-12-23T18:39:32.769126Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124891709144591:2220][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# 
[1:7587124887414176755:2049], cookie# 1 2025-12-23T18:39:32.769158Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124891709144592:2220][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124887414176758:2052], cookie# 1 2025-12-23T18:39:32.769202Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124891709144565:2220][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124891709144588:2220], cookie# 1 2025-12-23T18:39:32.769235Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587124891709144565:2220][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:39:32.769282Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124891709144565:2220][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124891709144589:2220], cookie# 1 2025-12-23T18:39:32.769298Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587124891709144565:2220][/dc-1] Sync cookie mismatch: sender# [1:7587124891709144589:2220], cookie# 1, current cookie# 0 2025-12-23T18:39:32.769390Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587124887414177079:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:39:32.781348Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587124887414177079:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587124891709144565:2220] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:32.799571Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587124887414177079:2120], cacheItem# { Subscriber: { Subscriber: [1:7587124891709144565:2220] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:39:32.802158Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587124891709144667:2273], recipient# [1:7587124891709144666:2272], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:39:32.802220Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587124891709144666:2272] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:32.959403Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587124891709144666:2272] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:39:32.963152Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587124891709144666:2272] Handle TEvDescribeSchemeResult Forward to# [1:7587124891709144665:2271] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" Path ... 
4073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:52.526858Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587124956262993475:2109], cacheItem# { Subscriber: { Subscriber: [4:7587124973442862932:2236] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:52.526961Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587124977737830296:2241], recipient# [4:7587124973442862926:2312], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:52.531136Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7587124973442862926:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:39:52.539384Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587124956262993330:2152];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:52.539473Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:52.564606Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7587124956262993475:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:52.564752Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587124956262993475:2109], cacheItem# { Subscriber: { Subscriber: [4:7587124960557960987:2229] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:52.564828Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587124977737830298:2242], recipient# [4:7587124977737830297:2316], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:52.565247Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:52.707190Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587124973442862930:2234][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7587124973442862933:2234] 2025-12-23T18:39:52.707275Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587124973442862930:2234][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7587124956262993475:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:52.707304Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: 
[main][4:7587124973442862930:2234][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7587124973442862934:2234] 2025-12-23T18:39:52.707329Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587124973442862930:2234][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7587124956262993475:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:52.707351Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587124973442862930:2234][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7587124973442862935:2234] 2025-12-23T18:39:52.707374Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587124973442862930:2234][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7587124956262993475:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:52.707470Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587124973442862931:2235][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7587124973442862939:2235] 2025-12-23T18:39:52.707503Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587124973442862931:2235][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7587124956262993475:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:52.707521Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587124973442862931:2235][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7587124973442862940:2235] 2025-12-23T18:39:52.707570Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587124973442862931:2235][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7587124956262993475:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:52.707605Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587124973442862931:2235][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7587124973442862941:2235] 2025-12-23T18:39:52.707631Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: 
[main][4:7587124973442862931:2235][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7587124956262993475:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:52.707726Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587124973442862932:2236][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7587124973442862945:2236] 2025-12-23T18:39:52.707751Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587124973442862932:2236][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7587124956262993475:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:52.707768Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587124973442862932:2236][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7587124973442862946:2236] 2025-12-23T18:39:52.707788Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587124973442862932:2236][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7587124956262993475:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:39:52.707806Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587124973442862932:2236][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7587124973442862947:2236] 2025-12-23T18:39:52.707830Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587124973442862932:2236][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7587124956262993475:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::SingleKey >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryService::CloseSessionsWithLoad [GOOD] Test command err: Trying to start YDB, gRPC: 18248, MsgBus: 30279 2025-12-23T18:39:08.579586Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124791684358640:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:08.615603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006180/r3tmp/tmpaYtvXO/pdisk_1.dat 2025-12-23T18:39:08.995074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:08.995183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:09.027033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:09.066412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:09.092463Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:09.100284Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124791684358604:2081] 1766515148550242 != 1766515148550245 TServer::EnableGrpc on GrpcPort 18248, node 1 2025-12-23T18:39:09.366812Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:09.390995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:09.391016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:09.391023Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:09.391110Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:09.658199Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:30279 TClient is connected to server localhost:30279 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515149168 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:39:10.288495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:10.299310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:39:10.312700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:10.536276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:10.794332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:10.930864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:13.348558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124813159196953:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:13.348678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:13.349142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124813159196963:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:13.349196Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:13.566906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124791684358640:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:13.566968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:13.770820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:13.808897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:13.888643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:13.933249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:13.974684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:14.033844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:14.087700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:14.158706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:14.328671Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124817454165139:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:14.328744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:14.329128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124817454165144:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:14.329169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124817454165145:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:14.329405Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [W ... istType (TupleType $9 $9))))))))))) )))) ) 2025-12-23T18:39:49.547981Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2025-12-23T18:39:49.547992Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 2 from task: 2 with index: 0 2025-12-23T18:39:49.548003Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 3 from task: 3 with index: 0 2025-12-23T18:39:49.548011Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 4 from task: 4 with index: 0 2025-12-23T18:39:49.548021Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 5 from task: 5 with index: 0 2025-12-23T18:39:49.548032Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 6 from task: 6 with index: 0 2025-12-23T18:39:49.557622Z node 1 :KQP_EXECUTER DEBUG: kqp_literal_executer.cpp:280: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Execution is complete, results: 6 2025-12-23T18:39:49.610359Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7587124954893120359:3056]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGhAvUm9vdC9FaWdodFNoYXJkIgAqDAiAgpSEgICAgAEQJzABOAFKMwoERGF0YRADGgVJbnQzMiABMABCAEgAUgBYAGIWCP///////////wEQ////////////AUozCgNLZXkQARoGVWludDY0IAQwAEIASABSAFgAYhYI////////////ARD///////////8BSjUKBFRleHQQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1766515189","query_text":"\\n SELECT Key, Text, Data FROM `/Root/EightShard` WHERE Key=-1823490084 + 0;\\n SELECT Key, Data, Text FROM `/Root/EightShard` WHERE Key=-1823490084 + 1;\\n SELECT Text, Key, Data FROM `/Root/EightShard` WHERE Key=-1823490084 + 2;\\n SELECT Text, Data, Key FROM `/Root/EightShard` WHERE Key=-1823490084 + 3;\\n SELECT Data, Key, Text FROM `/Root/EightShard` WHERE Key=-1823490084 + 4;\\n SELECT Data, Text, Key FROM `/Root/EightShard` WHERE Key=-1823490084 + 5;\\n\\n UPSERT INTO `/Root/EightShard` (Key, Text) VALUES\\n (-561370730ul, \\\"New\\\");\\n ","query_type":"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":21,\"Plans\":[{\"PlanNodeId\":20,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{Key: 18446744073148180886,Text: \\\"New\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Operators\":[{\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Sink\"},{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":16,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node 
Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":14,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":13,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":11,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":10,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":9,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":7,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":6,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":4,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node 
Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/EightShard\",\"reads\":[{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"}],\"writes\":[{\"columns\":[\"Key\",\"Text\"],\"type\":\"MultiUpsert\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Upsert\"},{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":10,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":11,\"Plans\":[{\"PlanNodeId\":13,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":14,\"Plans\":[{\"PlanNodeId\":16,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":17,\"Plans\":[{\"PlanNodeId\":19,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"d6c72c9a-3fc84247-dff82c2e-7f943d54","version":"1.0"} 2025-12-23T18:39:49.613163Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7587124954893120359:3056], duration: 2.776442s 2025-12-23T18:39:49.613215Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7587124954893120359:3056], 
owner: [1:7587124813159196912:2383], status: SUCCESS, issues: , uid: d6c72c9a-3fc84247-dff82c2e-7f943d54 2025-12-23T18:39:49.614869Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7587124830339067563:2673], status: SUCCESS, compileActor: [1:7587124954893120359:3056] 2025-12-23T18:39:49.615079Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT Key, Text, Data FROM `/Root/EightShard` WHERE Key=-1823490084 + 0;\n SELECT Key, Data, Text FROM `/Root/EightShard` WHERE Key=-1823490084 + 1;\n SELECT Text, Key, Data FROM `/Root/EightShard` WHERE Key=-1823490084 + 2;\n SELECT Text, Data, Key FROM `/Root/EightShard` WHERE Key=-1823490084 + 3;\n SELECT Data, Key, Text FROM `/Root/EightShard` WHERE Key=-1823490084 + 4;\n SELECT Data, Text, Key FROM `/Root/EightShard` WHERE Key=-1823490084 + 5;\n\n UPSERT INTO `/Root/EightShard` (Key, Text) VALUES\n (-561370730ul, \"New\");\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:39:49.615383Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7587124830339067563:2673], queryUid: d6c72c9a-3fc84247-dff82c2e-7f943d54, status:SUCCESS still compiling... 0 still active sessions ... 0 |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] >> KqpSnapshotIsolation::TReadOwnChangesOlap >> DataShardVolatile::DistributedWriteThenScanQuery [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped |93.9%| [TA] $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLocks::Invalidate |93.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyUserIsEnabled [GOOD] Test command err: 2025-12-23T18:39:31.976378Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124889824947353:2191];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:31.976433Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:32.000551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00361c/r3tmp/tmpbSBiWv/pdisk_1.dat 2025-12-23T18:39:32.765030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:32.768659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:32.788112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:32.973963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:33.048390Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:33.058442Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:33.078910Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124889824947198:2081] 1766515171905238 != 1766515171905241 2025-12-23T18:39:33.245759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:13198 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:39:33.512959Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587124894119914746:2106] Handle TEvNavigate describe path dc-1 2025-12-23T18:39:33.513018Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587124898414882361:2271] HANDLE EvNavigateScheme dc-1 2025-12-23T18:39:33.513141Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124894119914776:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:33.513266Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587124894119914976:2227][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587124894119914776:2119], cookie# 1 2025-12-23T18:39:33.515273Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124894119914999:2227][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124894119914996:2227], cookie# 1 2025-12-23T18:39:33.515323Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124894119915000:2227][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124894119914997:2227], cookie# 1 2025-12-23T18:39:33.515366Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124894119915001:2227][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124894119914998:2227], cookie# 1 2025-12-23T18:39:33.515405Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124889824947166:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124894119914999:2227], cookie# 1 2025-12-23T18:39:33.515447Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124889824947169:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124894119915000:2227], cookie# 1 2025-12-23T18:39:33.515466Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124889824947172:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124894119915001:2227], cookie# 1 2025-12-23T18:39:33.515528Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124894119914999:2227][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124889824947166:2049], cookie# 1 2025-12-23T18:39:33.515903Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124894119915000:2227][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124889824947169:2052], cookie# 1 2025-12-23T18:39:33.515971Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124894119915001:2227][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124889824947172:2055], cookie# 1 2025-12-23T18:39:33.516021Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124894119914976:2227][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124894119914996:2227], cookie# 1 2025-12-23T18:39:33.516047Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587124894119914976:2227][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:39:33.516091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124894119914976:2227][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124894119914997:2227], cookie# 1 2025-12-23T18:39:33.516135Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587124894119914976:2227][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:39:33.516175Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124894119914976:2227][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124894119914998:2227], cookie# 1 2025-12-23T18:39:33.516188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587124894119914976:2227][/dc-1] Sync cookie mismatch: sender# [1:7587124894119914998:2227], cookie# 1, current cookie# 0 2025-12-23T18:39:33.516261Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587124894119914776:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:39:33.522108Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587124894119914776:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587124894119914976:2227] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:33.522230Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587124894119914776:2119], cacheItem# { Subscriber: { Subscriber: [1:7587124894119914976:2227] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:39:33.525566Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587124898414882362:2272], recipient# [1:7587124898414882361:2271], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:39:33.525678Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587124898414882361:2271] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:33.627428Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587124898414882361:2271] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:39:33.642958Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587124898414882361:2271] Handle TEvDescribeSchemeResult Forward to# [1:7587124898414882360:2270] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true C ... 
ype: 2 SyncCookie: 9 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515189649 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 9 IsSync: true Partial: 0 } 2025-12-23T18:39:49.909044Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587124964694949698:2343], recipient# [3:7587124964694949697:2342], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2025-12-23T18:39:49.909074Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [3:7587124964694949697:2342] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:49.909110Z node 3 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [3:7587124964694949697:2342] txid# 281474976710662, Access denied for user2 on path /dc-1, with access AlterSchema 2025-12-23T18:39:49.909183Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124964694949697:2342] txid# 281474976710662, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2025-12-23T18:39:49.909207Z node 3 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [3:7587124964694949697:2342] txid# 281474976710662 SEND to# [3:7587124964694949696:2341] Source {TEvProposeTransactionStatus Status# 5} 2025-12-23T18:39:49.910678Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:7587124960399981983:2099] Handle TEvProposeTransaction 2025-12-23T18:39:49.910705Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:7587124960399981983:2099] TxId# 281474976710663 ProcessProposeTransaction 2025-12-23T18:39:49.910755Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [3:7587124960399981983:2099] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [3:7587124964694949700:2345] 2025-12-23T18:39:49.913171Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [3:7587124964694949700:2345] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user2" Password: "password" CanLogin: false } } } } UserToken: 
"\n\005user2\022\030\022\026\n\024all-users@well-known\032\322\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2NjU1ODM4OSwiaWF0IjoxNzY2NTE1MTg5LCJzdWIiOiJ1c2VyMiJ9.VjjyUCcLhQ4L5MAlaSdCqv_Ouu1QpMievZ0psONrtq60uxEHJBjBLn6rF88i8psx9mo50gf2xmzcKhvr3KYdpbydQtTVDis8vxIrW155lPhPvzJK41JYhmqz1os35MZQa2B96kz6u-dAQInWxOP2_O0rCLELtt_tWfGOQ9toHE0gh8brQ_A1sy6_zJHivLOeJi-0TykF-B2sFZynmtYyGUZDeqVU9tj3yBGMFZSUzZXr4T3sL3LsvWEt4th_bMG4mhxCpdTMnv3RpMsAmbO6orK-Mi5YwXTw4hp44axJPA90MZHOl6rh2ODqMcGTQfB8YFyhrr1IATPU-EfjC-9raQ\"\005Login*~eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2NjU1ODM4OSwiaWF0IjoxNzY2NTE1MTg5LCJzdWIiOiJ1c2VyMiJ9.**0\000" PeerName: "" 2025-12-23T18:39:49.913218Z node 3 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [3:7587124964694949700:2345] txid# 281474976710663 Bootstrap, UserSID: user2 CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:39:49.913233Z node 3 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [3:7587124964694949700:2345] txid# 281474976710663 Bootstrap, UserSID: user2 IsClusterAdministrator: 1 2025-12-23T18:39:49.913276Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [3:7587124964694949700:2345] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:39:49.913355Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587124960399982018:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:49.913426Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:7587124960399982173:2198][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:7587124960399982018:2116], cookie# 10 2025-12-23T18:39:49.913481Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7587124960399982210:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7587124960399982207:2198], cookie# 10 2025-12-23T18:39:49.913499Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7587124960399982211:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7587124960399982208:2198], cookie# 10 2025-12-23T18:39:49.913514Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7587124960399982212:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7587124960399982209:2198], cookie# 10 2025-12-23T18:39:49.913536Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7587124960399981716:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7587124960399982210:2198], cookie# 10 2025-12-23T18:39:49.913560Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7587124960399981719:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7587124960399982211:2198], cookie# 10 2025-12-23T18:39:49.913580Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7587124960399981722:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7587124960399982212:2198], cookie# 10 2025-12-23T18:39:49.913616Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7587124960399982210:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { 
Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7587124960399981716:2049], cookie# 10 2025-12-23T18:39:49.913633Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7587124960399982211:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7587124960399981719:2052], cookie# 10 2025-12-23T18:39:49.913650Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7587124960399982212:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7587124960399981722:2055], cookie# 10 2025-12-23T18:39:49.913685Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7587124960399982173:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7587124960399982207:2198], cookie# 10 2025-12-23T18:39:49.913705Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:7587124960399982173:2198][/dc-1] Sync is in progress: cookie# 10, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:39:49.913728Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7587124960399982173:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7587124960399982208:2198], cookie# 10 2025-12-23T18:39:49.913748Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:7587124960399982173:2198][/dc-1] Sync is done in the ring group: cookie# 10, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:39:49.913770Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7587124960399982173:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7587124960399982209:2198], cookie# 10 2025-12-23T18:39:49.913784Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:7587124960399982173:2198][/dc-1] Sync cookie mismatch: sender# [3:7587124960399982209:2198], cookie# 10, current cookie# 0 2025-12-23T18:39:49.913817Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7587124960399982018:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:39:49.913879Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7587124960399982018:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7587124960399982173:2198] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515189649 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:49.913945Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587124960399982018:2116], cacheItem# { Subscriber: { Subscriber: [3:7587124960399982173:2198] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515189649 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2025-12-23T18:39:49.914105Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587124964694949701:2346], recipient# [3:7587124964694949700:2345], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2025-12-23T18:39:49.914140Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [3:7587124964694949700:2345] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:49.914177Z node 3 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [3:7587124964694949700:2345] txid# 281474976710663, Access denied for user2 on path /dc-1, with access AlterSchema 2025-12-23T18:39:49.914250Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124964694949700:2345] txid# 281474976710663, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2025-12-23T18:39:49.914273Z node 3 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [3:7587124964694949700:2345] txid# 281474976710663 SEND to# [3:7587124964694949699:2344] Source {TEvProposeTransactionStatus Status# 5} |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_String >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic >> KqpSinkTx::OlapLocksAbortOnCommit >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-false |93.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |93.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |93.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false >> KqpSnapshotIsolation::TPragmaSettingOltp+IsSnapshotIsolation [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOltp-IsSnapshotIsolation >> JsonChangeRecord::DataChange [GOOD] >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] Test command err: 2025-12-23T18:39:33.493584Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.006424s 2025-12-23T18:39:33.827153Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124896270421554:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:33.827234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:33.936508Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124899077016148:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:33.936549Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003615/r3tmp/tmpcZR7LC/pdisk_1.dat 2025-12-23T18:39:34.347062Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:34.347210Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:34.351723Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:34.498851Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:34.510538Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:34.510871Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:34.695532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:34.695653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:34.704935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:34.705040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:34.705349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:34.705405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:34.799051Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:39:34.799105Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:39:34.832558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:34.833672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:34.833870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:35.083361Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:35.086383Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:35.152210Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:35.162012Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.020806s 2025-12-23T18:39:35.169892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:39:35.172017Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:39:35.178673Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:35.220637Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:15908 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:39:36.106035Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587124896270421591:2147] Handle TEvNavigate describe path dc-1 2025-12-23T18:39:36.106107Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587124909155323994:2486] HANDLE EvNavigateScheme dc-1 2025-12-23T18:39:36.106257Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124896270421597:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:36.106400Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587124900565389126:2296][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587124896270421597:2149], cookie# 1 2025-12-23T18:39:36.108240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124900565389139:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124900565389136:2296], cookie# 1 2025-12-23T18:39:36.108282Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124900565389140:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124900565389137:2296], cookie# 1 2025-12-23T18:39:36.108299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124900565389141:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124900565389138:2296], cookie# 1 2025-12-23T18:39:36.108402Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124896270421234:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124900565389139:2296], cookie# 1 2025-12-23T18:39:36.108440Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124896270421237:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124900565389140:2296], cookie# 1 2025-12-23T18:39:36.108465Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124896270421240:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124900565389141:2296], cookie# 1 2025-12-23T18:39:36.108516Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124900565389139:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124896270421234:2053], cookie# 1 2025-12-23T18:39:36.108534Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124900565389140:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124896270421237:2056], cookie# 1 2025-12-23T18:39:36.108549Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124900565389141:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124896270421240:2059], cookie# 1 2025-12-23T18:39:36.108590Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124900565389126:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124900565389136:2296], cookie# 1 2025-12-23T18:39:36.108622Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587124900565389126:2296][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:39:36.108643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124900565389126:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124900565389137:2296], cookie# 1 2025-12-23T18:39:36.108672Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587124900565389126:2296][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:39:36.108779Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124900565389126:2296][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124900565389138:2296], cookie# 1 2025-12-23T18:39:36.108797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587124900565389126:2296][/dc-1] Sync cookie mismatch: sender# [1:7587124900565389138:2296], cookie# 1, current cookie# 0 2025-12-23T18:39:36.108855Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587124896270421597:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:39:36.141090Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587124896270421597:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587124900565389126:2296] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:36.141243Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587124896270421597:2149], cacheItem# { Subscriber: { Subscriber: [1:7587124900565389126:2296] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc- ... 
esNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:55.507211Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587124991375918012:2869], recipient# [7:7587124991375918011:2313], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:56.067257Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7587124969901080493:2119], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:56.067435Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7587124969901080493:2119], cacheItem# { Subscriber: { Subscriber: [7:7587124974196048513:2649] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:56.067561Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587124995670885313:2873], recipient# [7:7587124995670885312:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:56.095312Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7587124969901080493:2119], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:56.095488Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7587124969901080493:2119], cacheItem# { Subscriber: { Subscriber: [7:7587124974196048513:2649] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:56.095595Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587124995670885315:2874], recipient# [7:7587124995670885314:2315], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:56.515245Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7587124969901080493:2119], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:56.515401Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7587124969901080493:2119], cacheItem# { Subscriber: { Subscriber: [7:7587124987080950679:2858] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:56.515512Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587124995670885323:2875], recipient# [7:7587124995670885322:2316], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:57.069765Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7587124969901080493:2119], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:57.069959Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7587124969901080493:2119], cacheItem# { Subscriber: { Subscriber: [7:7587124974196048513:2649] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:57.070070Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587124999965852624:2879], recipient# [7:7587124999965852623:2317], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:57.098966Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7587124969901080493:2119], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:57.099147Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7587124969901080493:2119], cacheItem# { Subscriber: { Subscriber: [7:7587124974196048513:2649] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:57.099266Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587124999965852626:2880], recipient# [7:7587124999965852625:2318], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:57.524486Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7587124969901080493:2119], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:57.524668Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7587124969901080493:2119], cacheItem# { Subscriber: { Subscriber: [7:7587124987080950679:2858] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:39:57.524811Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587124999965852634:2881], recipient# [7:7587124999965852633:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists >> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD] >> KqpSinkMvcc::OltpMultiSinks >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue >> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload >> JsonChangeRecord::DataChangeVersion [GOOD] |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards >> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowNoShards >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards >> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems >> 
TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown >> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching |93.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |93.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |93.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> KqpSinkMvcc::UpdateColumns-IsOlap [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly >> KqpTx::CommitRequired [GOOD] >> KqpTx::CommitRoTx |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate [GOOD] >> KqpSinkLocks::OlapUpdateLocksTwoShards |93.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |93.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |93.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> AsyncIndexChangeCollector::UpsertToSameKey >> KqpSinkLocks::OlapInsertWithBulkUpsert+UseBulkUpsert [GOOD] >> KqpSinkLocks::OlapInsertWithBulkUpsert-UseBulkUpsert >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [FAIL] Test command err: Starting YDB, grpc: 2499, msgbus: 16522 2025-12-23T18:34:43.672295Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123654128144311:2225];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:43.672380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:34:43.739523Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004836/r3tmp/tmp9bEs2e/pdisk_1.dat 2025-12-23T18:34:44.222840Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:34:44.289137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:34:44.289235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:34:44.324200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:34:44.477859Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:34:44.554210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TServer::EnableGrpc on GrpcPort 2499, node 1 2025-12-23T18:34:44.695803Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.016150s 2025-12-23T18:34:44.707080Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:34:44.879302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:34:44.879332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:34:44.879340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:34:44.879408Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16522 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:34:45.313632Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587123654128144372:2118] Handle TEvNavigate describe path dc-1 2025-12-23T18:34:45.313695Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587123662718079832:2777] HANDLE EvNavigateScheme dc-1 2025-12-23T18:34:45.314117Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587123662718079832:2777] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.371264Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587123662718079832:2777] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-12-23T18:34:45.377854Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587123662718079832:2777] Handle TEvDescribeSchemeResult Forward to# [1:7587123662718079831:2776] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514884638 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:34:45.431018Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123654128144372:2118] Handle TEvProposeTransaction 2025-12-23T18:34:45.431076Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123654128144372:2118] TxId# 281474976710657 ProcessProposeTransaction 2025-12-23T18:34:45.431162Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123654128144372:2118] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7587123662718079837:2782] 2025-12-23T18:34:45.624629Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123662718079837:2782] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.624881Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123662718079837:2782] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.624905Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123662718079837:2782] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.624971Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123662718079837:2782] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.625279Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123662718079837:2782] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.625384Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123662718079837:2782] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-12-23T18:34:45.625468Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7587123662718079837:2782] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-12-23T18:34:45.625602Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7587123662718079837:2782] txid# 281474976710657 HANDLE EvClientConnected 2025-12-23T18:34:45.626215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:34:45.631031Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7587123662718079837:2782] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-12-23T18:34:45.631089Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7587123662718079837:2782] txid# 281474976710657 SEND to# [1:7587123662718079836:2781] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-12-23T18:34:45.683139Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7587123654128144372:2118] Handle TEvProposeTransaction 2025-12-23T18:34:45.683166Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7587123654128144372:2118] TxId# 281474976710658 ProcessProposeTransaction 2025-12-23T18:34:45.683202Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7587123654128144372:2118] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7587123662718079876:2818] 2025-12-23T18:34:45.685761Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7587123662718079876:2818] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2025-12-23T18:34:45.685814Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7587123662718079876:2818] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-12-23T18:34:45.685830Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7587123662718079876:2818] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-12-23T18:34:45.685895Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7587123662718079876:2818] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:34:45.686164Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7587123662718079876:2818] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:34:45.686255Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7587123662718079876:2818] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# ... 
d__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725677 2025-12-23T18:39:39.191003Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725678 2025-12-23T18:39:39.191033Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725679 2025-12-23T18:39:39.191063Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725672 2025-12-23T18:39:39.191096Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725673 2025-12-23T18:39:39.191127Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725674 2025-12-23T18:39:39.191159Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725675 2025-12-23T18:39:39.191195Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725677 2025-12-23T18:39:39.191230Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725676 2025-12-23T18:39:39.191263Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725679 2025-12-23T18:39:39.191298Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976725678 2025-12-23T18:39:39.191395Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_duration_one_hour' 2025-12-23T18:39:39.191440Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/compile_cache_queries' 2025-12-23T18:39:39.191460Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_request_units_one_minute' 2025-12-23T18:39:39.191479Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_read_bytes_one_minute' 2025-12-23T18:39:39.191503Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 
72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/resource_pool_classifiers' 2025-12-23T18:39:39.191522Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/nodes' 2025-12-23T18:39:39.191540Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_cpu_time_one_hour' 2025-12-23T18:39:39.191560Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_partitions_one_minute' 2025-12-23T18:39:39.191578Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/auth_permissions' 2025-12-23T18:39:39.191597Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/auth_groups' 2025-12-23T18:39:39.191615Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/auth_effective_permissions' 2025-12-23T18:39:39.191632Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/auth_owners' 2025-12-23T18:39:39.191650Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/tables' 2025-12-23T18:39:39.191668Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/query_sessions' 2025-12-23T18:39:39.191687Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_partitions_by_tli_one_minute' 2025-12-23T18:39:39.191707Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_cpu_time_one_minute' 2025-12-23T18:39:39.191725Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_partitions_by_tli_one_hour' 2025-12-23T18:39:39.191745Z node 60 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_request_units_one_hour' 2025-12-23T18:39:39.191768Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/pg_tables' 2025-12-23T18:39:39.191789Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_partitions_one_hour' 2025-12-23T18:39:39.191806Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/auth_group_members' 2025-12-23T18:39:39.191824Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/tenant-db/.sys/top_queries_by_duration_one_minute' 2025-12-23T18:39:39.191850Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [60:7587124920530889934:2297] at schemeshard: 72075186224037891 Send TEvRosterUpdateFinished 2025-12-23T18:39:40.754898Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7587124905072024027:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:40.755027Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:43.084658Z node 60 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[60:7587124920530889491:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:43.084785Z node 60 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/tenant-db/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:44.343092Z node 59 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2025-12-23T18:39:44.343663Z node 59 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:39:44.356399Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:39:45.363636Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01kd684zf2f6j2gmes9nvmka3n", Request deadline has expired for 1.396839s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:61773 TBackTrace::Capture()+28 (0x1B03950C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B5289DC) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1825 (0x1AC12391) void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant(NUnitTest::TTestContext&)+3284 (0x1AC98984) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AC6E5F8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B56166A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B52F6B8) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAdminAccessInTenant::TCurrentTest::Execute()+1300 (0x1AC6D934) NUnitTest::TTestFactory::Execute()+2176 (0x1B530E70) NUnitTest::RunMain(int, char**)+5805 (0x1B55B4CD) ??+0 (0x7F62E349BD90) __libc_start_main+128 (0x7F62E349BE40) _start+41 (0x18614029) |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp+UpdateAfterInsert [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp-UpdateAfterInsert >> KqpSinkLocks::InsertWithBulkUpsert+UseBulkUpsert [GOOD] >> KqpSinkLocks::InsertWithBulkUpsert-UseBulkUpsert >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] >> TSubDomainTest::ConsistentCopyTable >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink >> CdcStreamChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::UpsertSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::UpdateColumns-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 13414, MsgBus: 10699 2025-12-23T18:39:14.767423Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124815077794530:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:14.767479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004118/r3tmp/tmpuEYj1c/pdisk_1.dat 2025-12-23T18:39:15.162692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:15.170533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:15.170651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:15.268157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:15.278101Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:15.348156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
TServer::EnableGrpc on GrpcPort 13414, node 1 2025-12-23T18:39:15.531529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:15.531557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:15.531565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:15.531677Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10699 2025-12-23T18:39:15.805296Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10699 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515155363 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:16.358376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:16.367943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:39:19.059318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124836552631859:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.059521Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.060216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124836552631871:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.060288Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124836552631872:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.060613Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.065334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:19.078396Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124836552631875:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:39:19.154565Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124836552631926:2541] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:19.618773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:19.771695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124815077794530:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:19.771880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:19.796695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.760558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 15446, MsgBus: 25588 2025-12-23T18:39:26.375572Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124867463236269:2212];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:26.375903Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:26.390419Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004118/r3tmp/tmpE4hPdC/pdisk_1.dat 2025-12-23T18:39:26.487104Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:26.678491Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:26.689552Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:26.689645Z node 2 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:26.706947Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124867463236092:2081] 1766515166332977 != 1766515166332980 2025-12-23T18:39:26.726098Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:26.742069Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15446, node 2 2025-12-23T18:39:27.011339Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:27.011363Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:27.011370Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:27.011457Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:27.189910Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25588 2025-12-23T18:39:27.364516Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25588 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" E ... 
e_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:48.429059Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:48.429078Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:48.430562Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:48.430630Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:48.430675Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:48.438174Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:48.438247Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:48.438264Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:48.479030Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd684msw6kx3w1ymr13tt6v7", SessionId: ydb://session/3?node_id=2&id=MmIwNTQ3ZGMtODJjZDI4MzMtZDk2OWU0M2ItYzI5OWU4NTU=, Slow query, duration: 17.107578s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b Trying to start YDB, gRPC: 19227, MsgBus: 26476 2025-12-23T18:39:51.623344Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124977039686504:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:51.623399Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004118/r3tmp/tmpBUgre4/pdisk_1.dat 2025-12-23T18:39:51.738793Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:51.954893Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:52.002661Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:52.002882Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:52.017315Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19227, node 3 2025-12-23T18:39:52.045229Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:52.239383Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:52.239411Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:52.239423Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:52.239506Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26476 2025-12-23T18:39:52.663170Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:26476 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515192064 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:52.778073Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:39:56.141084Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124998514523801:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:56.141970Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:56.142525Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124998514523836:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:56.142570Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124998514523837:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:56.142639Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:56.156417Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:56.203232Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587124998514523840:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:56.274556Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124998514523891:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:56.421138Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:56.542262Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:56.626917Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587124977039686504:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:56.627016Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:58.655387Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-false [GOOD] >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Binary >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap >> CdcStreamChangeCollector::UpsertIntoTwoStreams >> KqpTx::ExplicitTcl [GOOD] >> KqpTx::InteractiveTx |93.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console |93.9%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console |93.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console >> KqpResultSetFormats::ArrowFormat_Types_String [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Time >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams >> KqpSinkMvcc::TxDeleteOwnUncommitted-IsOlap [GOOD] >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit-IsOlap >> AsyncIndexChangeCollector::DeleteNothing >> AsyncIndexChangeCollector::InsertSingleRow |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 26206, MsgBus: 18086 2025-12-23T18:39:15.395894Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124819780845819:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:15.395968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00410f/r3tmp/tmpDnfy2G/pdisk_1.dat 2025-12-23T18:39:15.755262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:15.775372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:15.775463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:15.974357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:16.002832Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124819780845584:2081] 1766515155369583 != 1766515155369586 2025-12-23T18:39:16.018641Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:16.034833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 26206, node 1 2025-12-23T18:39:16.307752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:16.307778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:16.307799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:16.307904Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:16.394941Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18086 TClient is connected to server localhost:18086 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515156098 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:17.124307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:17.135196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:20.047442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124841255682957:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.047665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.048127Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124841255682968:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.048172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124841255682970:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.048207Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.052544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:20.068090Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124841255682973:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:20.164875Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124841255683024:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:20.394876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124819780845819:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:20.394972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:20.469117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:20.595490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.797125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:30.691832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:39:30.691857Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:36.684004Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2061: SelfId: [1:7587124909975168648:2964], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01kd684x759bapy8db4fqfng9h. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjQ4ZjFkMjctZGFkZGU4ZWUtMWNjZmU3MmYtM2Q1NmYxMTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 43 has no snapshot at v1766515163602/18446744073709551615 shard 72075186224037889 with lowWatermark v1766515164001/18446744073709551615 (node# 1 state# Ready) } } 2025-12-23T18:39:36.684566Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [1:7587124909975168648:2964], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01kd684x759bapy8db4fqfng9h. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjQ4ZjFkMjctZGFkZGU4ZWUtMWNjZmU3MmYtM2Q1NmYxMTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 43 has no snapshot at v1766515163602/18446744073709551615 shard 72075186224037889 with lowWatermark v1766515164001/18446744073709551615 (node# 1 state# Ready) } }. 2025-12-23T18:39:36.685416Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=YjQ4ZjFkMjctZGFkZGU4ZWUtMWNjZmU3MmYtM2Q1NmYxMTE=, ActorId: [1:7587124854140592846:2964], ActorState: ExecuteState, TraceId: 01kd684x759bapy8db4fqfng9h, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Read request aborted" severity: 1 issues { message: "Table id 43 has no snapshot at v1766515163602/18446744073709551615 shard 72075186224037889 with lowWatermark v1766515164001/18446744073709551615 (node# 1 state# Ready)" severity: 1 } } Trying to start YDB, gRPC: 2529, MsgBus: 6704 2025-12-23T18:39:38.089045Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124919671564598:2143];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:38.089106Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:38.108472Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00410f/r3tmp/tmpaioeCe/pdisk_1.dat 2025-12-23T18:39:38.266378Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:38.268746Z node 2 :HIVE WARN: n ... 
ss_local=44;result=not_found; 2025-12-23T18:39:52.421447Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:52.421510Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:52.421523Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:52.464459Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd684zfcatpbdy8rgsx46b4h", SessionId: ydb://session/3?node_id=2&id=ZjYwOWE1MjEtZjFjOWNiZjItNjEyYmFjNjQtNzMxNDE2MjU=, Slow query, duration: 10.037663s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b Trying to start YDB, gRPC: 22854, MsgBus: 16951 2025-12-23T18:39:54.836682Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124986446656973:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:54.839523Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00410f/r3tmp/tmpnFYwLK/pdisk_1.dat 2025-12-23T18:39:55.015568Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:55.082115Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:55.082226Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:55.172932Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:55.182304Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22854, node 3 2025-12-23T18:39:55.222821Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:55.423380Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:55.423406Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:55.423415Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:55.423505Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16951 2025-12-23T18:39:55.843270Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:16951 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515195256 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:55.992220Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:56.000218Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:39:59.827467Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587124986446656973:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:59.827535Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:00.011870Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125012216461491:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:00.012069Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:00.012750Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125012216461518:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:00.017843Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:00.018249Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125012216461547:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:00.018357Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:00.018810Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125012216461549:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:00.018882Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:00.042372Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587125012216461520:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:40:00.124015Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587125012216461575:2540] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:00.188054Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:00.263532Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:01.755937Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:05.174061Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=NWU1MGEzNC1kZjUxOWFmOC04ZjAzMGNmYS1kNmUyZDlhNA==, ActorId: [3:7587125029396338778:2965], ActorState: ExecuteState, TraceId: 01kd685rz276ww244a2gv8ds0w, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken, status: ABORTED, issues: { message: "Transaction locks invalidated. 
Table: `/Root/KV`" issue_code: 2001 severity: 1 } |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false [FAIL] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap >> KqpSnapshotIsolation::TReadOwnChangesOltp [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement >> KqpLocks::Invalidate [GOOD] >> KqpLocks::DifferentKeyUpdate >> KqpSnapshotIsolation::TSnapshotTwoInsertOltp [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap+UpdateAfterInsert >> Sharding::XXUsage |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true [GOOD] Test command err: 2025-12-23T18:40:00.054034Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125011957564145:2211];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:00.054341Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003939/r3tmp/tmpoCUkfP/pdisk_1.dat 2025-12-23T18:40:00.662532Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:40:00.702221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:00.702319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:00.708561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:00.930024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:40:00.933955Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:00.939182Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125007662596658:2081] 1766515200015388 != 1766515200015391 2025-12-23T18:40:01.055602Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:30372 WaitRootIsUp 'dc-1'... 
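
For readability, the DDL that the KQP_SLOW_LOG entry in the KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 block above logs as a single escaped string is reproduced here; the statements are copied verbatim from that log entry (whitespace only, nothing added):

    CREATE TABLE `/Root/Test` (
        Group Uint32 not null,
        Name String not null,
        Amount Uint64,
        Comment String,
        PRIMARY KEY (Group, Name)
    ) WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10
    );

    CREATE TABLE `/Root/KV` (
        Key Uint32 not null,
        Value String,
        PRIMARY KEY (Key)
    ) WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_BY_SIZE = DISABLED,
        AUTO_PARTITIONING_BY_LOAD = DISABLED,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,
        UNIFORM_PARTITIONS = 100
    );

    CREATE TABLE `/Root/KV2` (
        Key Uint32 not null,
        Value String,
        PRIMARY KEY (Key)
    ) WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_BY_SIZE = DISABLED,
        AUTO_PARTITIONING_BY_LOAD = DISABLED,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,
        UNIFORM_PARTITIONS = 100
    );
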
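The ABORTED statuses in that same block are the failure modes these KqpSinkMvcc/KqpSinkLocks cases provoke deliberately, consistent with the [GOOD] verdict: a read at version v1766515163602 is rejected because the shard's lowWatermark (v1766515164001) has already passed it, and a transaction with deferred effects fails to commit once its optimistic read lock on `/Root/KV` is broken by a concurrent committed write. A minimal YQL sketch of the second interleaving against the `/Root/KV` table above — illustrative only, not the actual test body:

    -- Session A (serializable transaction): the read takes an optimistic lock on Key = 1
    SELECT Value FROM `/Root/KV` WHERE Key = 1u;

    -- Session B commits a conflicting write to the same key before A commits
    UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, "from B");

    -- Session A then tries to commit its own buffered write; the broken lock surfaces as
    -- "Transaction locks invalidated. Table: `/Root/KV`" with issue_code 2001, as in the log
    UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, "from A");
    COMMIT;
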
TClient::Ls request: dc-1 2025-12-23T18:40:01.324084Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587125011957564219:2106] Handle TEvNavigate describe path dc-1 2025-12-23T18:40:01.324143Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587125016252531832:2271] HANDLE EvNavigateScheme dc-1 2025-12-23T18:40:01.324256Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587125011957564242:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:01.324355Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587125011957564390:2198][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587125011957564242:2119], cookie# 1 2025-12-23T18:40:01.326000Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587125011957564423:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587125011957564420:2198], cookie# 1 2025-12-23T18:40:01.326052Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587125011957564424:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587125011957564421:2198], cookie# 1 2025-12-23T18:40:01.326070Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587125011957564425:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587125011957564422:2198], cookie# 1 2025-12-23T18:40:01.326128Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587125007662596626:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587125011957564423:2198], cookie# 1 2025-12-23T18:40:01.326160Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587125007662596629:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587125011957564424:2198], cookie# 1 2025-12-23T18:40:01.326176Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587125007662596632:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587125011957564425:2198], cookie# 1 2025-12-23T18:40:01.326219Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587125011957564423:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587125007662596626:2049], cookie# 1 2025-12-23T18:40:01.326236Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587125011957564424:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587125007662596629:2052], cookie# 1 2025-12-23T18:40:01.326252Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587125011957564425:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587125007662596632:2055], cookie# 1 2025-12-23T18:40:01.326304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587125011957564390:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587125011957564420:2198], cookie# 1 2025-12-23T18:40:01.326331Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587125011957564390:2198][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:40:01.326350Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587125011957564390:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587125011957564421:2198], cookie# 1 2025-12-23T18:40:01.326372Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587125011957564390:2198][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:40:01.326405Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587125011957564390:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587125011957564422:2198], cookie# 1 2025-12-23T18:40:01.326426Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587125011957564390:2198][/dc-1] Sync cookie mismatch: sender# [1:7587125011957564422:2198], cookie# 1, current cookie# 0 2025-12-23T18:40:01.326475Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587125011957564242:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:40:01.354559Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587125011957564242:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587125011957564390:2198] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:40:01.354686Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587125011957564242:2119], cacheItem# { Subscriber: { Subscriber: [1:7587125011957564390:2198] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:40:01.357759Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587125016252531833:2272], recipient# [1:7587125016252531832:2271], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:40:01.357828Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587125016252531832:2271] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:40:01.425565Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587125016252531832:2271] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:40:01.435374Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587125016252531832:2271] Handle TEvDescribeSchemeResult Forward to# [1:7587125016252531831:2270] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion ... 
-12-23T18:40:06.706962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710659:0 2025-12-23T18:40:06.707001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2025-12-23T18:40:06.707219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2025-12-23T18:40:06.707255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2025-12-23T18:40:06.707314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:40:06.707329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2025-12-23T18:40:06.707361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:40:06.707474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:40:06.707488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2025-12-23T18:40:06.707522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-12-23T18:40:06.707924Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:40:06.707946Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:40:06.708758Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7587125041500954065:2090] Handle TEvNavigate describe path /dc-1 2025-12-23T18:40:06.708789Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7587125041500954791:2520] HANDLE EvNavigateScheme /dc-1 2025-12-23T18:40:06.708868Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7587125041500954228:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:06.708956Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7587125041500954425:2215][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7587125041500954228:2114], cookie# 4 2025-12-23T18:40:06.709008Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: 
[replica][2:7587125041500954445:2215][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587125041500954442:2215], cookie# 4 2025-12-23T18:40:06.709023Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7587125041500954446:2215][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587125041500954443:2215], cookie# 4 2025-12-23T18:40:06.709038Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7587125041500954447:2215][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587125041500954444:2215], cookie# 4 2025-12-23T18:40:06.709060Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7587125041500953931:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587125041500954446:2215], cookie# 4 2025-12-23T18:40:06.709106Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7587125041500953934:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587125041500954447:2215], cookie# 4 2025-12-23T18:40:06.709150Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7587125041500954446:2215][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7587125041500953931:2052], cookie# 4 2025-12-23T18:40:06.709168Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7587125041500954447:2215][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7587125041500953934:2055], cookie# 4 2025-12-23T18:40:06.709200Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7587125041500954425:2215][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7587125041500954443:2215], cookie# 4 2025-12-23T18:40:06.709221Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7587125041500954425:2215][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:40:06.709239Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7587125041500954425:2215][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7587125041500954444:2215], cookie# 4 2025-12-23T18:40:06.709258Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7587125041500954425:2215][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:40:06.709297Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7587125041500954228:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:40:06.709364Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7587125041500954228:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7587125041500954425:2215] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515206687 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:40:06.709430Z node 2 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7587125041500954228:2114], cacheItem# { Subscriber: { Subscriber: [2:7587125041500954425:2215] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515206687 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-12-23T18:40:06.709480Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7587125041500953928:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587125041500954445:2215], cookie# 4 2025-12-23T18:40:06.709612Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7587125041500954792:2521], recipient# [2:7587125041500954791:2520], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:40:06.709652Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7587125041500954445:2215][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7587125041500953928:2049], cookie# 4 2025-12-23T18:40:06.709679Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7587125041500954425:2215][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7587125041500954442:2215], cookie# 4 2025-12-23T18:40:06.709693Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7587125041500954425:2215][/dc-1] Sync cookie mismatch: sender# [2:7587125041500954442:2215], cookie# 4, current cookie# 0 2025-12-23T18:40:06.709720Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7587125041500954791:2520] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:40:06.709786Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7587125041500954791:2520] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:40:06.710327Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7587125041500954791:2520] Handle TEvDescribeSchemeResult Forward to# [2:7587125041500954790:2519] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515206687 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 
10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] Test command err: 2025-12-23T18:39:32.927155Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.011831s 2025-12-23T18:39:32.976161Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124894320517189:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:32.976203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:33.165445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:33.171086Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008850s test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003619/r3tmp/tmpWkxbfB/pdisk_1.dat 2025-12-23T18:39:33.812827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:33.902612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:33.926870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:34.096761Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:34.115862Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:34.119831Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:34.119960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:34.134661Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2521 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:39:34.574932Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587124898615484701:2119] Handle TEvNavigate describe path dc-1 2025-12-23T18:39:34.575094Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124898615484710:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:34.575146Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7587124898615484710:2121], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480 2025-12-23T18:39:34.575190Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7587124898615484710:2121], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480 2025-12-23T18:39:34.575218Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7587124898615484710:2121], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480 2025-12-23T18:39:34.575251Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587124902910452489:2454] HANDLE EvNavigateScheme dc-1 2025-12-23T18:39:34.575631Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7587124902910452491:2456][/dc-1/.metadata/script_execution_leases] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:39:34.576135Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7587124902910452492:2457][/dc-1/.metadata/result_sets] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:39:34.578173Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587124894320517091:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_execution_leases 
DomainOwnerId: 72057594046644480 }: sender# [1:7587124902910452496:2456] 2025-12-23T18:39:34.578190Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587124894320517091:2051] Upsert description: path# /dc-1/.metadata/script_execution_leases 2025-12-23T18:39:34.578297Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587124894320517091:2051] Subscribe: subscriber# [1:7587124902910452496:2456], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:39:34.578401Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587124894320517091:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/result_sets DomainOwnerId: 72057594046644480 }: sender# [1:7587124902910452502:2457] 2025-12-23T18:39:34.578409Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587124894320517091:2051] Upsert description: path# /dc-1/.metadata/result_sets 2025-12-23T18:39:34.578430Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587124894320517091:2051] Subscribe: subscriber# [1:7587124902910452502:2457], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:39:34.578459Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587124894320517094:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_execution_leases DomainOwnerId: 72057594046644480 }: sender# [1:7587124902910452497:2456] 2025-12-23T18:39:34.578464Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587124894320517094:2054] Upsert description: path# /dc-1/.metadata/script_execution_leases 2025-12-23T18:39:34.578492Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587124894320517094:2054] Subscribe: subscriber# [1:7587124902910452497:2456], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:39:34.578537Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587124894320517094:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/result_sets DomainOwnerId: 72057594046644480 }: sender# [1:7587124902910452503:2457] 2025-12-23T18:39:34.578544Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587124894320517094:2054] Upsert description: path# /dc-1/.metadata/result_sets 2025-12-23T18:39:34.578565Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587124894320517094:2054] Subscribe: subscriber# [1:7587124902910452503:2457], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:39:34.578614Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587124894320517097:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_execution_leases DomainOwnerId: 72057594046644480 }: sender# [1:7587124902910452498:2456] 2025-12-23T18:39:34.578619Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587124894320517097:2057] Upsert description: path# /dc-1/.metadata/script_execution_leases 2025-12-23T18:39:34.578638Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587124894320517097:2057] Subscribe: subscriber# [1:7587124902910452498:2456], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:39:34.578673Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587124894320517097:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/result_sets 
DomainOwnerId: 72057594046644480 }: sender# [1:7587124902910452504:2457] 2025-12-23T18:39:34.578680Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587124894320517097:2057] Upsert description: path# /dc-1/.metadata/result_sets 2025-12-23T18:39:34.578713Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587124894320517097:2057] Subscribe: subscriber# [1:7587124902910452504:2457], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:39:34.578798Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124898615484710:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:34.578902Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587124898615484970:2292][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587124898615484710:2121], cookie# 1 2025-12-23T18:39:34.579021Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124898615485015:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124898615485012:2292], cookie# 1 2025-12-23T18:39:34.579042Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124898615485016:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124898615485013:2292], cookie# 1 2025-12-23T18:39:34.579053Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124898615485017:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124898615485014:2292], cookie# 1 2025-12-23T18:39:34.579076Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124894320517091:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124898615485015:2292], cookie# 1 2025-12-23T18:39:34.579098Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124894320517094:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124898615485016:2292], cookie# 1 2025-12-23T18:39:34.579112Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124894320517097:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124898615485017:2292], cookie# 1 2025-12-23T18:39:34.579158Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124898615485015:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124894320517091:2051], cookie# 1 2025-12-23T18:39:34.579194Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124898615485016:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { ... 
.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [6:7587125049047597539:2772] 2025-12-23T18:40:08.360699Z node 6 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][6:7587125049047597538:2774][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [6:7587125018982825525:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:08.360715Z node 6 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][6:7587125049047597536:2772][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [6:7587125018982825525:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:08.360744Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7587125049047597538:2774][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [6:7587125049047597553:2774] 2025-12-23T18:40:08.360767Z node 6 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][6:7587125049047597536:2772][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [6:7587125049047597541:2772] 2025-12-23T18:40:08.360776Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][6:7587125049047597538:2774][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [6:7587125018982825525:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:08.360794Z node 6 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][6:7587125049047597536:2772][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [6:7587125018982825525:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:08.360802Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7587125018982825232:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7587125049047597554:2774] 2025-12-23T18:40:08.360815Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7587125018982825235:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7587125049047597547:2772] 2025-12-23T18:40:08.360818Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7587125018982825232:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7587125049047597545:2772] 2025-12-23T18:40:08.360832Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7587125018982825235:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7587125049047597555:2774] 2025-12-23T18:40:08.360833Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7587125018982825238:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7587125049047597556:2774] 2025-12-23T18:40:08.360847Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7587125018982825238:2056] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7587125049047597549:2772] 2025-12-23T18:40:08.360894Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [6:7587125018982825525:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-12-23T18:40:08.360950Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [6:7587125018982825525:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [6:7587125049047597538:2774] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:40:08.361013Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7587125018982825525:2120], cacheItem# { Subscriber: { Subscriber: [6:7587125049047597538:2774] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:40:08.361060Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [6:7587125018982825525:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-12-23T18:40:08.361118Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [6:7587125018982825525:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [6:7587125049047597536:2772] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:40:08.361187Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7587125018982825525:2120], cacheItem# { Subscriber: { Subscriber: [6:7587125049047597536:2772] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:40:08.361263Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7587125049047597558:2776], recipient# [6:7587125049047597532:2309], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:08.361321Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7587125049047597557:2775], recipient# [6:7587125049047597534:2311], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:08.659084Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7587125018982825525:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:08.659232Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7587125018982825525:2120], cacheItem# { Subscriber: { Subscriber: [6:7587125023277793319:2444] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:40:08.659322Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7587125049047597563:2777], recipient# [6:7587125049047597562:2313], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:08.675143Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7587125018982825525:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:08.675288Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7587125018982825525:2120], cacheItem# { Subscriber: { Subscriber: 
[6:7587125023277793319:2444] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:40:08.675387Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7587125049047597565:2778], recipient# [6:7587125049047597564:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 16106155075887555457 6127676144589051204 16012566013751706386 6679022022881240776 606035231610085722 8963602138301274633 14100475264161067838 14855471370428100985 13272786188860556108 1315596217509556329 15986649685299838902 12929306431959233990 2748475005282757963 509691610684840956 13996290600144546193 680228931992379988 4416728940409127923 14666680786692079677 12242182711566457165 14374606141758771601 14498065372099691019 3974744034721596516 10998742146249207036 11101399337518660878 14034208661326747499 4636093892268601116 9183765484753385591 10197540410047839926 8774611597683310751 936393332846722314 11431264640132263096 12574822893673486777 6517328449023071409 14739198152349906358 4846788887626640741 11050660793187880264 6981406311075667924 18000175611443792405 4686053113785790962 17140100473587779973 16115393849303570128 8353224638930144530 13716682381284622689 13142677810926035475 4693605819253267599 5274660226766025787 9808271671521167686 10998131100543714977 2461038131597043891 2349669436273352702 13463860060886619766 5356436173839643497 9014347093930957773 7228937843096203642 4493429879309335693 6709016718214368789 10113292784034682361 2352281246748991147 6563801738001010661 3175758035567693651 17479902808771915750 5811494824046388260 6985146910577996611 16433639392785596189 15936237148983900496 8376534879038240135 12637122474214974488 9713271047688793988 10180007791381145852 10509345920729278703 572281582119721059 16016328652391491925 14252780860330579080 4800231882363302550 1473002036343377391 9048299720460990329 17280803142100819556 6492710286269227939 4171141547322599476 17368290317164974865 5764846059352036580 5222701830150274602 6336710406662538645 10924604507144290812 2425391003864024154 12765509956540754664 6372039857707282294 15106401140416401402 10514243063881936645 2503787216663822621 |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOwnChangesOltp [GOOD] Test command err: Trying to start YDB, gRPC: 1498, MsgBus: 5532 2025-12-23T18:39:15.550678Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124822631471797:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:15.550719Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004111/r3tmp/tmplvoDvN/pdisk_1.dat 2025-12-23T18:39:16.085992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:16.088424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:16.088528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:16.123305Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:16.267168Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124822631471731:2081] 1766515155535046 != 1766515155535049 2025-12-23T18:39:16.312793Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:16.314842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 1498, node 1 2025-12-23T18:39:16.572962Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:16.638765Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:16.638792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:16.638800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:16.638885Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:16.720941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5532 TClient is connected to server localhost:5532 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515156392 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:17.682722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:17.689733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:20.555023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124822631471797:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:20.555099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:20.763494Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124844106309091:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.763626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.764359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124844106309119:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.764399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124844106309120:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.764442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.764728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124844106309124:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.764789Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.768798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:20.785846Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124844106309123:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:20.859929Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124844106309176:2543] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:21.250113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.391552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:23.227874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 22398, MsgBus: 13412 2025-12-23T18:39:28.186459Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124877462268807:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:28.191544Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004111/r3tmp/tmp4xHxvO/pdisk_1.dat 2025-12-23T18:39:28.426889Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:28.437578Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:28.442886Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124877462268567:2081] 1766515168121309 != 1766515168121312 2025-12-23T18:39:28.551027Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:28.551114Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:28.582041Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22398, node 2 2025-12-23T18:39:28.635493Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:28.859376Z node 2 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:28.859400Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:28.859406Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:28.859482Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:29.127484Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=se ... tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:50.279449Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:50.279505Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:50.279520Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:50.285611Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:50.285676Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:50.285691Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:39:50.352980Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd684pg767tmkd17cdbqe1nc", SessionId: ydb://session/3?node_id=2&id=NDFjN2Y0MDAtZWZkOWEzY2UtODUwZWU3YjctZTdjY2U1MTQ=, Slow query, duration: 16.281519s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b Trying to start YDB, gRPC: 8859, MsgBus: 29595 2025-12-23T18:39:55.652026Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587124991454187296:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:55.652062Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:55.782279Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004111/r3tmp/tmp7MfBdF/pdisk_1.dat 2025-12-23T18:39:56.039169Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587124991454187273:2081] 1766515195649480 != 1766515195649483 2025-12-23T18:39:56.067094Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:56.071231Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:56.074570Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:56.074642Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:56.117253Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8859, node 3 2025-12-23T18:39:56.354849Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:56.394377Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:56.394399Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:56.394408Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:56.394481Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29595 2025-12-23T18:39:56.703286Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:29595 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515196180 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:57.325057Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:40:00.658872Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587124991454187296:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:00.658955Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:03.898260Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125025813926534:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:03.898539Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:03.899829Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125025813926554:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:03.899922Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125025813926555:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:03.899974Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:03.904873Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:03.933494Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587125025813926558:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:40:04.038365Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587125030108893905:2547] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:04.114277Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:04.261143Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:06.139168Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] >> KqpTx::BeginTransactionBadMode |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx [GOOD] >> KqpTx::CommitRoTx_TLI >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertToSameKey >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Binary [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified >> KqpResultSetFormats::ArrowFormat_Types_Time [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> HashShuffle::BackPressureInMemoryLoad [GOOD] >> HashShuffle::BackPressureWithSpilling [GOOD] >> HashShuffle::BackPressureWithSpillingLoad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 19563, MsgBus: 16621 2025-12-23T18:39:14.589265Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124816328357450:2261];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:14.589385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004119/r3tmp/tmpF1j0BS/pdisk_1.dat 2025-12-23T18:39:14.934859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:14.935783Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:14.935883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:15.090191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:15.094941Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124816328357211:2081] 1766515154534845 != 1766515154534848 2025-12-23T18:39:15.107975Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19563, node 1 2025-12-23T18:39:15.206942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:15.412638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:15.412661Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:15.412667Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:15.412765Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:15.590889Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:16621 TClient is connected to server localhost:16621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515155174 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:16.328591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:18.936207Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124833508227285:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:18.936388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:18.936718Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124833508227298:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:18.936723Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124833508227297:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:18.936760Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:18.940945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:18.958367Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124833508227301:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:19.018455Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124837803194648:2540] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:19.442224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:19.576813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:19.590891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124816328357450:2261];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:19.590985Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:21.130452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 14111, MsgBus: 12232 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004119/r3tmp/tmpK1T7Cd/pdisk_1.dat 2025-12-23T18:39:26.246914Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:26.247063Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:26.414924Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124868694294694:2081] 1766515166095646 != 1766515166095649 2025-12-23T18:39:26.423850Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:26.426342Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:26.426425Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:26.426928Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:26.431215Z node 2 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14111, node 2 2025-12-23T18:39:26.640039Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:26.640064Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:26.640071Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:26.640164Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12232 2025-12-23T18:39:26.861305Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:27.179246Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12232 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true C ... aseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:51.505979Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:51.506553Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124976711628491:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:51.506608Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124976711628492:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:51.506767Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:51.512697Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:51.554932Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587124976711628495:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:51.649726Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124976711628546:2546] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:51.833989Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:51.978873Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:54.983638Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 20945, MsgBus: 23505 2025-12-23T18:40:01.160437Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587125016193548607:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:01.161394Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004119/r3tmp/tmppHtcl1/pdisk_1.dat 2025-12-23T18:40:01.201989Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:01.354633Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:01.359093Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587125016193548558:2081] 1766515201149139 != 1766515201149142 2025-12-23T18:40:01.380919Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:01.381037Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:01.403978Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20945, node 4 2025-12-23T18:40:01.477157Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:01.623463Z node 4 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:40:01.623488Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:40:01.623498Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:40:01.623610Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23505 2025-12-23T18:40:02.190105Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:23505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515201451 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:02.564536Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:40:06.163046Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587125016193548607:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:06.163130Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:06.732237Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125037668385939:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:06.732310Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125037668385925:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:06.732656Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:06.736576Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125037668385943:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:06.736667Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:06.738517Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:06.759397Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587125037668385942:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:40:06.854489Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587125037668385995:2542] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:06.918455Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:07.010648Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:08.615654Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> CheckIntegrityMirror3of4::PlacementOk >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> CheckIntegrityBlock42::PlacementOk >> CheckIntegrityBlock42::DataErrorAdditionalUnequalParts >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows |93.9%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> TSubDomainTest::ConsistentCopyTable [GOOD] >> KqpTx::InteractiveTx [GOOD] >> KqpTx::InvalidateOnError >> CheckIntegrityMirror3of4::PlacementOk [GOOD] >> CheckIntegrityMirror3of4::PlacementMissingParts >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink >> CheckIntegrityBlock42::DataErrorAdditionalUnequalParts [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsOneBroken >> HashShuffle::BackPressureWithSpillingLoad [GOOD] >> CheckIntegrityBlock42::PlacementOk [GOOD] >> CheckIntegrityBlock42::PlacementOkHandoff >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [FAIL] >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable >> DataShardReadTableSnapshots::ReadTableDropColumn >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] >> KqpScanArrowFormat::AggregateCountStar >> DataShardReadTableSnapshots::ReadTableSplitBefore ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> HashShuffle::BackPressureWithSpillingLoad [GOOD] Test command err: Blocked 254975 time(s) emptyPops 0 Blocked 2895 time(s) emptyPops 0 |93.9%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> KqpSnapshotIsolation::TPragmaSettingOltp-IsSnapshotIsolation [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation >> CheckIntegrityMirror3of4::PlacementMissingParts [GOOD] >> CheckIntegrityMirror3of4::PlacementDisintegrated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2025-12-23T18:36:17.806469Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124055590417488:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:17.806590Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:36:17.966346Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124055097541356:2217];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:17.966415Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003967/r3tmp/tmptZdLnC/pdisk_1.dat 2025-12-23T18:36:18.426541Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:36:18.498839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-12-23T18:36:18.557592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:36:18.557712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:36:18.559329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:36:18.559428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:36:18.585134Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:36:18.585310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:36:18.600011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:36:18.749528Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:36:18.765280Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:18.774762Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:36:18.871165Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TServer::EnableGrpc on GrpcPort 65001, node 1 2025-12-23T18:36:19.030110Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:36:19.092032Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:36:19.142094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:36:19.142128Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:36:19.142137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:36:19.142234Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25027 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514978879 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:36:19.733807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:36:22.655119Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:36:22.657582Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587124077065255043:2322], Start check tables existence, number paths: 2 2025-12-23T18:36:22.662542Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587124077065255043:2322], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:36:22.662634Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587124077065255043:2322], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:36:22.662681Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587124077065255043:2322], Successfully finished 2025-12-23T18:36:22.662827Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:36:22.662849Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:36:22.662882Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-12-23T18:36:22.674252Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 2 2025-12-23T18:36:22.680683Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=MWEyODhkYmYtMmU1MDlkN2QtYzFjZTlhNzQtMTczNmM1OWE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MWEyODhkYmYtMmU1MDlkN2QtYzFjZTlhNzQtMTczNmM1OWE= (tmp dir name: 7d0cbed0-45f3-2340-0bcb-c8ace7079c03) 2025-12-23T18:36:22.682407Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=MWEyODhkYmYtMmU1MDlkN2QtYzFjZTlhNzQtMTczNmM1OWE=, ActorId: 
[1:7587124077065255073:2329], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:36:22.680680Z node 2 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:36:22.682970Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7587124076572377981:2295], Start check tables existence, number paths: 2 2025-12-23T18:36:22.683971Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:36:22.683993Z node 2 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:36:22.685943Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7587124076572377981:2295], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:36:22.685991Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7587124076572377981:2295], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:36:22.686013Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7587124076572377981:2295], Successfully finished 2025-12-23T18:36:22.686095Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-12-23T18:36:22.686115Z node 2 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 2 2025-12-23T18:36:22.738973Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124077065255075:2813], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:36:22.742389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:36:22.757117Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124077065255075:2813], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-12-23T18:36:22.758069Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124077065255075:2813], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-12-23T18:36:22.781943Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124077065255075:2813], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:36:22.806914Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124055590417488:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:22.806998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:36:22.855410Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124077065255075:2813], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:36:22.862770Z node 1 :TX_PROXY ERROR: ... YmI4YzIwODg= (tmp dir name: 3d97c520-4fd9-717b-05f0-74baa289cd51) 2025-12-23T18:40:11.927560Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:40:11.927606Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:40:11.927820Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7587125061656566173:2324], Start check tables existence, number paths: 2 2025-12-23T18:40:11.927908Z node 14 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=14&id=OTAzN2I5YTUtMTJhODg0NGQtMzRmYWRjNmYtYmI4YzIwODg=, ActorId: [14:7587125061656566178:2329], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:40:11.931645Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7587125061656566173:2324], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:40:11.931738Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7587125061656566173:2324], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:40:11.931798Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7587125061656566173:2324], Successfully finished 2025-12-23T18:40:11.931923Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-12-23T18:40:11.931967Z node 14 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-12-23T18:40:11.932315Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7587125061656566195:2508], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:40:11.939057Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:11.944332Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7587125061656566195:2508], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-12-23T18:40:11.952439Z node 14 
:KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7587125061656566195:2508], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-12-23T18:40:11.973041Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7587125061656566195:2508], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:40:12.040339Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7587125061656566195:2508], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:40:12.044013Z node 14 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [14:7587125065951533543:2541] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:12.044186Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7587125061656566195:2508], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-12-23T18:40:12.046676Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-12-23T18:40:12.046703Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id Root 2025-12-23T18:40:12.046814Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7587125065951533550:2330], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-12-23T18:40:12.049501Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7587125065951533550:2330], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-12-23T18:40:12.049623Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-12-23T18:40:12.049645Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-12-23T18:40:12.049988Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [14:7587125065951533559:2331], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2025-12-23T18:40:12.051432Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [14:7587125065951533559:2331], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-12-23T18:40:12.064047Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-12-23T18:40:12.064085Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2025-12-23T18:40:12.064140Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-12-23T18:40:12.064245Z node 14 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=14&id=OTAzN2I5YTUtMTJhODg0NGQtMzRmYWRjNmYtYmI4YzIwODg=, ActorId: [14:7587125061656566178:2329], ActorState: ReadyState, TraceId: 01kd685zry6tn9rr77dypa0ggc, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: 
QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT ALL ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `test@user`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-12-23T18:40:12.066845Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7587125065951533571:2333], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-12-23T18:40:12.068736Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7587125065951533571:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:12.068852Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:12.068944Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-12-23T18:40:12.068986Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7587125065951533581:2334], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-12-23T18:40:12.069390Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7587125065951533581:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:12.069444Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:12.093642Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:40:12.097217Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [14:7587125065951533559:2331], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-12-23T18:40:12.098646Z node 14 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=14&id=OTAzN2I5YTUtMTJhODg0NGQtMzRmYWRjNmYtYmI4YzIwODg=, ActorId: [14:7587125061656566178:2329], ActorState: ExecuteState, TraceId: 01kd685zry6tn9rr77dypa0ggc, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [14:7587125065951533572:2329] WorkloadServiceCleanup: 0 2025-12-23T18:40:12.102715Z node 14 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=14&id=OTAzN2I5YTUtMTJhODg0NGQtMzRmYWRjNmYtYmI4YzIwODg=, ActorId: [14:7587125061656566178:2329], ActorState: CleanupState, TraceId: 01kd685zry6tn9rr77dypa0ggc, EndCleanup, isFinal: 0 2025-12-23T18:40:12.102934Z node 14 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=14&id=OTAzN2I5YTUtMTJhODg0NGQtMzRmYWRjNmYtYmI4YzIwODg=, ActorId: [14:7587125061656566178:2329], ActorState: CleanupState, TraceId: 01kd685zry6tn9rr77dypa0ggc, Sent query response back to proxy, proxyRequestId: 3, proxyId: [14:7587125031591794420:2220] 2025-12-23T18:40:12.120542Z node 14 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=14&id=OTAzN2I5YTUtMTJhODg0NGQtMzRmYWRjNmYtYmI4YzIwODg=, ActorId: [14:7587125061656566178:2329], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:40:12.120593Z node 14 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=14&id=OTAzN2I5YTUtMTJhODg0NGQtMzRmYWRjNmYtYmI4YzIwODg=, ActorId: [14:7587125061656566178:2329], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:40:12.120626Z node 14 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=14&id=OTAzN2I5YTUtMTJhODg0NGQtMzRmYWRjNmYtYmI4YzIwODg=, ActorId: [14:7587125061656566178:2329], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:40:12.120653Z node 14 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=14&id=OTAzN2I5YTUtMTJhODg0NGQtMzRmYWRjNmYtYmI4YzIwODg=, ActorId: [14:7587125061656566178:2329], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:40:12.120741Z node 14 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=14&id=OTAzN2I5YTUtMTJhODg0NGQtMzRmYWRjNmYtYmI4YzIwODg=, ActorId: [14:7587125061656566178:2329], ActorState: unknown state, Session actor destroyed >> CheckIntegrityBlock42::DataErrorSixPartsOneBroken [GOOD] >> CheckIntegrityBlock42::DataErrorFivePartsOneBroken >> DataShardReadTableSnapshots::ReadTableSnapshot >> CheckIntegrityBlock42::PlacementOkHandoff [GOOD] >> CheckIntegrityBlock42::PlacementMissingParts |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> 
KqpSnapshotIsolation::TConflictReadWriteOlap [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOlap+IsSnapshotIsolation >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit-IsOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] Test command err: 2025-12-23T18:39:31.174042Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.026520s 2025-12-23T18:39:31.445951Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124890718572935:2208];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:31.446017Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003620/r3tmp/tmpN3xHnG/pdisk_1.dat 2025-12-23T18:39:32.323016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:32.408228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:32.408320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:32.430371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:32.643840Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:32.766449Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:32.834842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:27709 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:39:33.234995Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587124890718573010:2148] Handle TEvNavigate describe path dc-1 2025-12-23T18:39:33.235045Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587124899308508091:2464] HANDLE EvNavigateScheme dc-1 2025-12-23T18:39:33.235151Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124890718573016:2150], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:33.235242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587124895013540542:2297][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587124890718573016:2150], cookie# 1 2025-12-23T18:39:33.237490Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124895013540558:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124895013540555:2297], cookie# 1 2025-12-23T18:39:33.237552Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124895013540559:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124895013540556:2297], cookie# 1 2025-12-23T18:39:33.237588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124895013540560:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124895013540557:2297], cookie# 1 2025-12-23T18:39:33.237635Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124886423605361:2060] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124895013540560:2297], cookie# 1 2025-12-23T18:39:33.237710Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124895013540560:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124886423605361:2060], cookie# 1 2025-12-23T18:39:33.237744Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124895013540542:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124895013540557:2297], cookie# 1 2025-12-23T18:39:33.237766Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587124895013540542:2297][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:39:33.237799Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124886423605355:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124895013540558:2297], cookie# 1 2025-12-23T18:39:33.237825Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124886423605358:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124895013540559:2297], cookie# 1 2025-12-23T18:39:33.237854Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124895013540558:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124886423605355:2054], cookie# 1 2025-12-23T18:39:33.237870Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: 
[replica][1:7587124895013540559:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124886423605358:2057], cookie# 1 2025-12-23T18:39:33.237893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124895013540542:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124895013540555:2297], cookie# 1 2025-12-23T18:39:33.237919Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587124895013540542:2297][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:39:33.237945Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124895013540542:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124895013540556:2297], cookie# 1 2025-12-23T18:39:33.237967Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587124895013540542:2297][/dc-1] Sync cookie mismatch: sender# [1:7587124895013540556:2297], cookie# 1, current cookie# 0 2025-12-23T18:39:33.238026Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587124890718573016:2150], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:39:33.252397Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587124890718573016:2150], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587124895013540542:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:33.252541Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587124890718573016:2150], cacheItem# { Subscriber: { Subscriber: [1:7587124895013540542:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:39:33.255611Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587124899308508092:2465], recipient# [1:7587124899308508091:2464], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] 
Groups: [] } }] } 2025-12-23T18:39:33.255693Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587124899308508091:2464] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:33.384030Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587124899308508091:2464] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:39:33.391809Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587124899308508091:2464] Handle TEvDescribeSchemeResult Forward to# [1:7587124899308508090:2463] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVers ... 
615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:40:13.880588Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [14:7587125068171449415:2253], recipient# [14:7587125068171449414:2319], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:13.881710Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:14.167177Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7587125063876482034:2243][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [14:7587125063876482037:2243] 2025-12-23T18:40:14.167285Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7587125063876482034:2243][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [14:7587125042401645286:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:14.167322Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7587125063876482034:2243][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [14:7587125063876482038:2243] 2025-12-23T18:40:14.167329Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7587125063876482036:2245][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [14:7587125063876482049:2245] 2025-12-23T18:40:14.167354Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7587125063876482034:2243][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [14:7587125042401645286:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:14.167380Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7587125063876482034:2243][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [14:7587125063876482039:2243] 2025-12-23T18:40:14.167397Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7587125063876482036:2245][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# 
[14:7587125042401645286:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:14.167405Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7587125063876482034:2243][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [14:7587125042401645286:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:14.167424Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7587125063876482036:2245][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [14:7587125063876482050:2245] 2025-12-23T18:40:14.167450Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7587125063876482036:2245][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [14:7587125042401645286:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:14.167476Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7587125063876482036:2245][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [14:7587125063876482051:2245] 2025-12-23T18:40:14.167505Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7587125063876482036:2245][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [14:7587125042401645286:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:14.168182Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7587125063876482035:2244][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [14:7587125063876482043:2244] 2025-12-23T18:40:14.168243Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7587125063876482035:2244][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [14:7587125042401645286:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:14.168272Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7587125063876482035:2244][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [14:7587125063876482044:2244] 2025-12-23T18:40:14.168299Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: 
[main][14:7587125063876482035:2244][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [14:7587125042401645286:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:14.168321Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7587125063876482035:2244][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [14:7587125063876482045:2244] 2025-12-23T18:40:14.168345Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7587125063876482035:2244][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [14:7587125042401645286:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:14.184253Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [14:7587125042401645286:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:14.184400Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [14:7587125042401645286:2107], cacheItem# { Subscriber: { Subscriber: [14:7587125063876482034:2243] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:40:14.184449Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [14:7587125042401645286:2107], cacheItem# { Subscriber: { Subscriber: [14:7587125063876482035:2244] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:40:14.184576Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [14:7587125072466416721:2254], recipient# 
[14:7587125063876482029:2312], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:14.185113Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7587125063876482029:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> KqpSinkLocks::InsertWithBulkUpsert-UseBulkUpsert [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing |93.9%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> CheckIntegrityBlock42::DataErrorFivePartsOneBroken [GOOD] >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink >> CheckIntegrityMirror3of4::PlacementDisintegrated [GOOD] >> CheckIntegrityBlock42::PlacementMissingParts [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 32455, MsgBus: 2555 2025-12-23T18:39:53.261370Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124984125635497:2128];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:53.261422Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004108/r3tmp/tmptTECrE/pdisk_1.dat 2025-12-23T18:39:53.698970Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:53.781793Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:53.785662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:53.785787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:53.786989Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124984125635409:2081] 1766515193241897 != 1766515193241900 2025-12-23T18:39:53.805144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32455, node 1 2025-12-23T18:39:53.931338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:53.931373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:53.931381Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:53.931462Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:53.950019Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2555 2025-12-23T18:39:54.390012Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:2555 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515193849 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:54.807614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:58.270892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124984125635497:2128];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:58.270978Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:59.849576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125009895440064:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:59.849738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:59.851167Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125009895440099:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:59.856747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:59.865670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125009895440102:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:59.866067Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:59.875496Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125009895440101:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:39:59.955457Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125009895440165:2548] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:00.486179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:00.719694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:02.889867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 26856, MsgBus: 26082 2025-12-23T18:40:08.001156Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125042313577947:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:08.001910Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:40:08.002243Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004108/r3tmp/tmpqyCawc/pdisk_1.dat 2025-12-23T18:40:08.082831Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:08.213957Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:08.214028Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:08.229491Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587125042313577822:2081] 1766515207899397 != 1766515207899400 2025-12-23T18:40:08.248731Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:08.261262Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26856, node 2 2025-12-23T18:40:08.466835Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:08.483316Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:40:08.483340Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:40:08.483346Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:40:08.483479Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26082 2025-12-23T18:40:08.898481Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:26082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515208311 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:09.105979Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:40:09.116147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:40:12.093497Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125063788415184:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:12.093640Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:12.094302Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125063788415197:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:12.094368Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:12.094424Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125063788415196:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:12.100631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:12.118513Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587125063788415200:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:40:12.221507Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587125063788415251:2533] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:12.316990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:12.382534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:13.181942Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587125042313577947:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:13.197851Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:13.475373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:16.022884Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=ZGRjMjk4YjYtNWM5Zjg4ZTktYTM5NGUzNDAtYzM3MTA2NDg=, ActorId: [2:7587125076673325067:2963], ActorState: ExecuteState, TraceId: 01kd6863jt6v8cp1jf4jaydtfr, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken, status: ABORTED, issues: { message: "Transaction locks invalidated. 
Table: `/Root/KV2`" issue_code: 2001 severity: 1 } |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified [GOOD] >> KqpLocks::DifferentKeyUpdate [GOOD] >> KqpLocks::EmptyRange >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3of4::PlacementDisintegrated [GOOD] Test command err: RandomSeed# 5265437427550822579 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** Group is disintegrated or has network problems |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::PlacementMissingParts [GOOD] Test command err: RandomSeed# 9082534676834263301 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:4:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> KqpSinkMvcc::ChangeFromTheFuture+IsOlap [GOOD] >> KqpSinkMvcc::ChangeFromTheFuture-IsOlap >> 
TConsoleTests::TestSetDefaultStorageUnitsQuota >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp-UpdateAfterInsert [GOOD] >> KqpTx::BeginTransactionBadMode [GOOD] >> KqpTx::CommitPrepared ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::InsertWithBulkUpsert-UseBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 31973, MsgBus: 28657 2025-12-23T18:39:13.087921Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124812068994858:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:13.088133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:13.149754Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00411d/r3tmp/tmpbyBhnZ/pdisk_1.dat 2025-12-23T18:39:13.601017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:13.601154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:13.670235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:13.681252Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:13.682443Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:13.706898Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124812068994647:2081] 1766515153063552 != 1766515153063555 TServer::EnableGrpc on GrpcPort 31973, node 1 2025-12-23T18:39:13.833245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:13.833272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:13.833286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:13.833382Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:13.977325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28657 2025-12-23T18:39:14.086332Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28657 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515153725 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:14.524754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:14.534263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:17.384073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124829248864696:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.384268Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.386137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124829248864717:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.386208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124829248864718:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.386297Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.396068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:17.414675Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124829248864736:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:17.515460Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124829248864787:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:18.052446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:18.086849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124812068994858:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:18.086946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:18.201673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:19.948857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:22.552255Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587124850723709283:2964], SessionActorId: [1:7587124846428741943:2964], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 . sessionActorId=[1:7587124846428741943:2964]. 2025-12-23T18:39:22.552501Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=Y2MxOWFhMTQtYmIwY2RkM2MtZjg0ZDFjNzItZmFhYTU1Y2Y=, ActorId: [1:7587124846428741943:2964], ActorState: ExecuteState, TraceId: 01kd684fd4e3c7jm1tj1r6dzp8, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7587124850723709284:2964] from: [1:7587124850723709283:2964] 2025-12-23T18:39:22.552595Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587124850723709284:2964] TxId: 281474976710665. Ctx: { TraceId: 01kd684fd4e3c7jm1tj1r6dzp8, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Y2MxOWFhMTQtYmIwY2RkM2MtZjg0ZDFjNzItZmFhYTU1Y2Y=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 } 2025-12-23T18:39:22.552949Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=Y2MxOWFhMTQtYmIwY2RkM2MtZjg0ZDFjNzItZmFhYTU1Y2Y=, ActorId: [1:7587124846428741943:2964], ActorState: ExecuteState, TraceId: 01kd684fd4e3c7jm1tj1r6dzp8, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 }
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 24591, MsgBus: 6690 2025-12-23T18:39:24.285730Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124857405811110:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:24.285807Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00411d/r3tmp/tmpR8d82j/pdisk_1.dat 2025-12-23T18:39:24.349059Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:24.543289Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:24.545020Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:24.545092Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:24.572497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24591, node 2 2025-12-23T18:39:24.671859Z node 2 :KQP ... ble profiles were not loaded Trying to start YDB, gRPC: 13022, MsgBus: 11860 2025-12-23T18:40:05.525938Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587125035108765872:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:05.525981Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00411d/r3tmp/tmpS5G4mk/pdisk_1.dat 2025-12-23T18:40:05.862853Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:05.865285Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:05.872184Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587125035108765847:2081] 1766515205523706 != 1766515205523709 2025-12-23T18:40:05.904085Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:05.904380Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:05.931278Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13022, node 4 2025-12-23T18:40:06.110622Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2025-12-23T18:40:06.179647Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:40:06.179681Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:40:06.179694Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:40:06.179838Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11860 2025-12-23T18:40:06.656257Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:11860 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515205980 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:07.208363Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:40:10.526416Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587125035108765872:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:10.526514Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:11.307360Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125060878570505:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:11.307512Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:11.308098Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125060878570526:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:11.312986Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:11.315257Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125060878570551:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:11.315388Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:11.315804Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125060878570559:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:11.315860Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:11.333737Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587125060878570528:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:40:11.403185Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587125060878570583:2540] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:11.491475Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:11.569342Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:13.105838Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:16.031790Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710666; 2025-12-23T18:40:16.033176Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4378: SelfId: [4:7587125082353415085:2966], SessionActorId: [4:7587125073763480430:2966], Got LOCKS BROKEN for table. ShardID=72075186224037888, Sink=[4:7587125082353415085:2966].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-12-23T18:40:16.033259Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [4:7587125082353415085:2966], SessionActorId: [4:7587125073763480430:2966], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7587125073763480430:2966]. 2025-12-23T18:40:16.033407Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=4&id=NTI0M2ZhNTktYTI3NWEyNzUtNWYyMmJlNzItNGIzZjliNjQ=, ActorId: [4:7587125073763480430:2966], ActorState: ExecuteState, TraceId: 01kd6863hv0f05btpeymyqrwsh, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7587125082353415086:2966] from: [4:7587125082353415085:2966] 2025-12-23T18:40:16.033482Z node 4 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [4:7587125082353415086:2966] TxId: 281474976710666. Ctx: { TraceId: 01kd6863hv0f05btpeymyqrwsh, Database: /Root, SessionId: ydb://session/3?node_id=4&id=NTI0M2ZhNTktYTI3NWEyNzUtNWYyMmJlNzItNGIzZjliNjQ=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-12-23T18:40:16.033776Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=NTI0M2ZhNTktYTI3NWEyNzUtNWYyMmJlNzItNGIzZjliNjQ=, ActorId: [4:7587125073763480430:2966], ActorState: ExecuteState, TraceId: 01kd6863hv0f05btpeymyqrwsh, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } 2025-12-23T18:40:16.036407Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710666; 2025-12-23T18:40:16.036552Z node 4 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [1766515216081 : 281474976710666] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow >> TConsoleConfigTests::TestModifyConfigItem |93.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken [GOOD] Test command err: RandomSeed# 16585156895104535978 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:4:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 ], ver1 disks [ 7 ], ver2 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] ERROR: There are unequal parts Erasure info: { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 2 disks [ 1 ]; part 3 disks 
[ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout 
info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: Erasure info: ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:3:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 ], ver1 disks [ 0 ] part 2: ver0 disks [ 6 ], ver1 disks [ 1 ] part 3: ver0 disks [ 6 ], ver1 disks [ 2 ] part 4: ver0 disks [ 3 ], ver1 disks [ 6 ] part 5: ver0 disks [ 4 ], ver1 disks [ 6 ] part 6: ver0 disks [ 5 ], ver1 disks [ 6 ] ERROR: There are unequal parts Erasure info: { part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks 
[ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; 
part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ERROR: There are erasure restore fails |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 1681, MsgBus: 5430 2025-12-23T18:33:55.781544Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123448272415334:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:55.781871Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003c6c/r3tmp/tmpmcputX/pdisk_1.dat 2025-12-23T18:33:56.253639Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:56.281606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:56.281729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:56.317291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1681, node 1 2025-12-23T18:33:56.520386Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123448272415300:2081] 1766514835757869 != 1766514835757872 2025-12-23T18:33:56.537220Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:56.562421Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:56.659500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:33:56.659532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:33:56.659546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:33:56.659674Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:33:56.809318Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5430 TClient is connected to server localhost:5430 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514836478 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:57.389818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:33:57.404081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:33:57.422352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:57.754334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:58.076297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:33:58.237551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
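Aside on the wall of `CHECK part N disks [ ... ] -> OK` records a few lines above (from ydb/core/blobstorage/ut_blobstorage/ut_check_integrity): that test enumerates combinations of lost parts/disks and verifies that each missing part can be rebuilt from the survivors, and the run above still ends with `ERROR: There are erasure restore fails`. What follows is only a minimal standalone sketch of that verification style, assuming a toy scheme of K data parts plus one XOR parity part rather than the real multi-part erasure used by the VDisk layer:

// Minimal sketch (not YDB code): K data parts + 1 XOR parity part.
// For every single lost part we rebuild it from the survivors and
// compare against the original, mirroring the "CHECK part ... -> OK"
// style of exhaustive restore verification seen in the log above.
#include <cstdint>
#include <cstdio>
#include <vector>

using Part = std::vector<uint8_t>;

static Part XorParity(const std::vector<Part>& parts) {
    Part parity(parts[0].size(), 0);
    for (const Part& p : parts)
        for (size_t i = 0; i < p.size(); ++i)
            parity[i] ^= p[i];
    return parity;
}

int main() {
    const size_t k = 5, len = 16;
    std::vector<Part> parts(k, Part(len));
    for (size_t p = 0; p < k; ++p)
        for (size_t i = 0; i < len; ++i)
            parts[p][i] = uint8_t(17 * p + 3 * i + 1);   // arbitrary payload
    parts.push_back(XorParity(parts));                   // last part is parity

    bool fails = false;
    for (size_t lost = 0; lost < parts.size(); ++lost) {
        // Restore the lost part by XOR-ing all surviving parts.
        Part restored(len, 0);
        for (size_t p = 0; p < parts.size(); ++p)
            if (p != lost)
                for (size_t i = 0; i < len; ++i)
                    restored[i] ^= parts[p][i];
        const bool ok = (restored == parts[lost]);
        std::printf("CHECK part %zu -> %s\n", lost + 1, ok ? "OK" : "FAIL");
        fails |= !ok;
    }
    if (fails)
        std::printf("ERROR: There are erasure restore fails\n");
    return fails ? 1 : 0;
}

A production erasure code tolerates several simultaneous losses, so the real test iterates over sets of missing parts rather than single ones, but the loop has the same shape: drop parts, restore them, compare, and report any mismatch as a restore fail.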
2025-12-23T18:34:00.778912Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123448272415334:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:34:00.779024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:34:01.347475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123474042220952:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:01.347591Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:01.348215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123474042220962:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:01.348291Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:01.992546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.048214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.100925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.166119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.225393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.329534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.439055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.620637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:34:02.814663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123478337189140:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.814766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.815240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123478337189146:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.815296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123478337189145:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:34:02.815328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [Worklo ... de 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:36:40.883533Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:36:40.883544Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:36:40.883634Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:36:41.088109Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28577 TClient is connected to server localhost:28577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515000831 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:36:41.382402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:36:41.399464Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:41.474155Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
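The repeated `Resource pool default not found or you don't have access permissions` warnings with status NOT_FOUND above are emitted while the default resource pool is being looked up before its creation has committed; the service logs `Scheduled retry for error: ...` and the warnings stop once bootstrap finishes. As a generic illustration of that fetch-with-bounded-retry pattern — the `FetchPool`/`PoolInfo` names below are invented for the sketch and are not the YDB workload-service API:

// Generic sketch (names invented for illustration, not a YDB API):
// fetch a resource that may not exist yet during bootstrap, retrying
// NOT_FOUND with exponential backoff instead of failing immediately.
#include <chrono>
#include <cstdio>
#include <optional>
#include <string>
#include <thread>

enum class Status { Ok, NotFound, Error };

struct PoolInfo { std::string name; };

// Stand-in for the real lookup; returns NotFound for the first few calls
// to imitate "pool fetched before it has been created".
static Status FetchPool(const std::string& name, PoolInfo* out) {
    static int calls = 0;
    if (++calls < 3) return Status::NotFound;
    *out = PoolInfo{name};
    return Status::Ok;
}

static std::optional<PoolInfo> FetchWithRetry(const std::string& name,
                                              int maxAttempts) {
    auto delay = std::chrono::milliseconds(50);
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        PoolInfo info;
        switch (FetchPool(name, &info)) {
            case Status::Ok:
                return info;
            case Status::NotFound:
                std::printf("WARN: pool %s not found, retry %d\n",
                            name.c_str(), attempt);
                std::this_thread::sleep_for(delay);
                delay *= 2;              // exponential backoff between probes
                break;
            case Status::Error:
                return std::nullopt;     // non-retryable failure
        }
    }
    return std::nullopt;
}

int main() {
    if (auto pool = FetchWithRetry("default", 5)) {
        std::printf("fetched pool %s\n", pool->name.c_str());
        return 0;
    }
    std::printf("giving up\n");
    return 1;
}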
2025-12-23T18:36:41.624262Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:36:41.698176Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:41.816345Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:44.846625Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124170231000764:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:44.846790Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:44.847069Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124170231000773:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:44.847138Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:45.003495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:45.036312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:45.069712Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:45.102293Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:45.135079Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:45.171063Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:45.205713Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:45.258528Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:45.355353Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124174525968943:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:45.355483Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:45.355759Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124174525968948:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:45.355813Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587124174525968949:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:45.355860Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:45.359749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:36:45.375784Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587124174525968952:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:36:45.439044Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587124174525969003:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:36:45.510853Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587124153051129743:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:45.510930Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:36:47.488944Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:55.694167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:36:55.694192Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> KqpTx::CommitRoTx_TLI [GOOD] |94.0%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkTx::OlapLocksAbortOnCommit [GOOD] >> KqpSinkTx::OlapSnapshotRO >> TJaegerTracingConfiguratorTests::RequestTypeThrottler |94.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSnapshotIsolation::TReadOwnChangesOlap [GOOD] >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem >> TConsoleTests::TestRestartConsoleAndPools ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp-UpdateAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 32242, MsgBus: 14559 ... waiting for SysViewsRoster update finished 2025-12-23T18:39:16.196967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:16.318375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:39:16.331600Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
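The pairing above of `TPoolCreatorActor ... Scheduled retry for error: Transaction ... completed, doublechecking` with the subsequent `TX_PROXY ERROR ... error: path exist, request accepts it` shows several callers racing to create `.metadata/workload_manager/pools/default`, with the loser accepting the already-existing path as success. A compact way to express that contract is an idempotent ensure-exists helper that treats AlreadyExists as OK; the sketch below uses an invented in-memory Catalog, not YDB's scheme API:

// Sketch of an idempotent "ensure path exists" helper (invented types,
// not the YDB scheme API): concurrent callers may race to create the
// same path; a creation that loses the race sees AlreadyExists and
// still reports success, matching "path exist, request accepts it".
#include <cstdio>
#include <mutex>
#include <set>
#include <string>
#include <thread>
#include <vector>

enum class CreateResult { Created, AlreadyExists };

class Catalog {
public:
    CreateResult Create(const std::string& path) {
        std::lock_guard<std::mutex> lock(Mutex_);
        return Paths_.insert(path).second ? CreateResult::Created
                                          : CreateResult::AlreadyExists;
    }
private:
    std::mutex Mutex_;
    std::set<std::string> Paths_;
};

// Idempotent: both outcomes mean "the path exists now".
static bool EnsureExists(Catalog& catalog, const std::string& path) {
    switch (catalog.Create(path)) {
        case CreateResult::Created:
            std::printf("created %s\n", path.c_str());
            return true;
        case CreateResult::AlreadyExists:
            std::printf("path exist, request accepts it: %s\n", path.c_str());
            return true;
    }
    return false;
}

int main() {
    Catalog catalog;
    const std::string path = "/Root/.metadata/workload_manager/pools/default";
    std::vector<std::thread> workers;
    for (int i = 0; i < 4; ++i)          // four racing "pool creator" callers
        workers.emplace_back([&] { EnsureExists(catalog, path); });
    for (auto& t : workers) t.join();
    return 0;
}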
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:39:16.332038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:39:16.332136Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:16.652402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:16.652536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:16.787755Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515152490405 != 1766515152490409 2025-12-23T18:39:16.824153Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:16.873087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:17.099250Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004123/r3tmp/tmpr2owUz/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32242, node 1 2025-12-23T18:39:17.627324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:17.627379Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:17.627411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:17.627865Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:17.648565Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:14559 TClient is connected to server localhost:14559 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:39:18.533394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:18.667534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2793], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:18.667703Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:939:2798], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:18.668202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:18.668941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:944:2803], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:18.669130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:18.673490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:18.785787Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:942:2801], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:39:18.843801Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1000:2840] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:19.326553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:39:19.690224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:39:19.690514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:39:19.695069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:39:19.695308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:39:19.695449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:39:19.695586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:39:19.695729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:39:19.695845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:39:19.695951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:39:19.696108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:39:19.696257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:39:19.696378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:39:19.696505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1125:2925];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:39:19.766652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:39:19.768086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:39:19.768333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:39:19.768409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Tabl ... TPoolFetcherActor] ActorId: [2:909:2774], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:53.306417Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:920:2779], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:53.306537Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:53.316437Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:923:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:53.316728Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:53.336733Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:53.562182Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:924:2783], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:39:53.603399Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:980:2820] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:53.714024Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:54.041970Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:59.027028Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 31005, MsgBus: 14933 ... waiting for SysViewsRoster update finished 2025-12-23T18:40:09.520626Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:09.530070Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:09.533029Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:09.533356Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:09.533426Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:09.822464Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:09.822596Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:09.857310Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:34:2081] 1766515205252809 != 1766515205252813 2025-12-23T18:40:09.863498Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:09.915597Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:10.041389Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004123/r3tmp/tmpyE7SIz/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31005, node 3 2025-12-23T18:40:10.485746Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:40:10.485818Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:40:10.485872Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:40:10.486274Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:40:10.500444Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:14933 TClient is connected to server localhost:14933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
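`WaitRootIsUp 'Root'...` followed by `WaitRootIsUp 'Root' success.` is the test client polling the scheme root until the freshly started server answers, rather than assuming immediate readiness. A common shape for such a wait is a poll under an overall deadline; the probe below (`isUp`) is invented for illustration and is not YDB's TClient:

// Sketch of a "wait until ready" poll with an overall deadline
// (the probe function is invented for illustration, not YDB's TClient).
#include <chrono>
#include <cstdio>
#include <functional>
#include <thread>

using Clock = std::chrono::steady_clock;

static bool WaitUntilUp(const std::function<bool()>& isUp,
                        std::chrono::milliseconds deadline,
                        std::chrono::milliseconds interval) {
    const auto start = Clock::now();
    while (Clock::now() - start < deadline) {
        if (isUp())
            return true;                       // "WaitRootIsUp ... success."
        std::this_thread::sleep_for(interval); // not ready yet, poll again
    }
    return false;                              // give up after the deadline
}

int main() {
    int probes = 0;
    // Pretend the root becomes visible on the third probe.
    auto isUp = [&probes] { return ++probes >= 3; };

    std::printf("WaitRootIsUp 'Root'...\n");
    const bool ok = WaitUntilUp(isUp, std::chrono::milliseconds(2000),
                                std::chrono::milliseconds(100));
    if (ok) std::printf("WaitRootIsUp 'Root' success.\n");
    else    std::printf("WaitRootIsUp 'Root' timed out.\n");
    return ok ? 0 : 1;
}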
2025-12-23T18:40:11.070164Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:40:11.113500Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:914:2779], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:11.113642Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:925:2784], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:11.114275Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:11.115887Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:933:2792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:11.116002Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:11.122194Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:11.204035Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:928:2787], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:40:11.244793Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:985:2825] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:11.325037Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:11.577618Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:14.832526Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx_TLI [GOOD] Test command err: Trying to start YDB, gRPC: 28219, MsgBus: 18357 2025-12-23T18:39:51.230927Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124976486307809:2253];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:51.234488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:51.325261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004109/r3tmp/tmpc8cXpc/pdisk_1.dat 2025-12-23T18:39:52.020720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:52.020846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:52.123814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:52.305530Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:52.305624Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:52.322901Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie 
mismatch for subscription [1:7587124976486307584:2081] 1766515191177305 != 1766515191177308 2025-12-23T18:39:52.372832Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28219, node 1 2025-12-23T18:39:52.549921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:52.771199Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:52.771219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:52.771225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:52.771293Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18357 TClient is connected to server localhost:18357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515192337 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:53.807679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:53.835141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:53.853720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:54.162186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:54.445541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:54.561799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:56.230852Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124976486307809:2253];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:56.230934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:58.595785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125006551080546:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:58.595998Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:58.602896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125006551080556:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:58.602990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:59.080741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:59.187106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:59.316824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:59.432737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:59.490980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:59.567344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:59.629264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:59.695559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:59.859220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125010846048750:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:59.859364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:59.860195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125010846048755:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:59.860267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125010846048756:2494], DatabaseId: /Root, PoolId: defau ... Notification cookie mismatch for subscription [3:7587125064438089655:2081] 1766515212828305 != 1766515212828308 2025-12-23T18:40:13.025950Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:13.026032Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:13.058065Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4903, node 3 2025-12-23T18:40:13.135999Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:13.171386Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:40:13.171415Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:40:13.171424Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:40:13.171514Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25765 TClient is connected to server localhost:25765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515213113 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:13.658579Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:40:13.670616Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
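
[editor's note] The repeated "Resource pool default not found or you don't have access permissions" warnings above are emitted while the test cluster is still bootstrapping: the workload service tries to fetch the default pool before anything has created it, and further down in this test's output the pool does get created (ESchemeOpCreateResourcePool) and a concurrent creator is answered with "path exist, request accepts it", after which the warnings stop. The snippet below is a minimal, illustrative sketch of that fetch-then-create-and-retry flow, under the assumption that this is what the log reflects; the names fetchPool, createPool and PoolStatus are hypothetical stand-ins and are not YDB APIs.

// Illustrative only: fetch the pool, create it on NOT_FOUND, tolerate a
// concurrent creator. All identifiers here are hypothetical.
#include <chrono>
#include <iostream>
#include <string>
#include <thread>

enum class PoolStatus { Ok, NotFound, AlreadyExists };

// Stand-ins for the real catalog calls; they only model the transitions
// visible in the log (NOT_FOUND -> created -> "path exist").
static bool g_poolExists = false;
PoolStatus fetchPool(const std::string&)  { return g_poolExists ? PoolStatus::Ok : PoolStatus::NotFound; }
PoolStatus createPool(const std::string&) {
    if (g_poolExists) return PoolStatus::AlreadyExists;  // analogous to "path exist, request accepts it"
    g_poolExists = true;
    return PoolStatus::Ok;
}

// Retry until the pool becomes visible; mirrors the WARN/retry pattern above.
bool ensureDefaultPool(const std::string& name, int maxAttempts = 5) {
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        if (fetchPool(name) == PoolStatus::Ok) return true;
        std::cerr << "WARN: pool '" << name << "' not found, creating (attempt " << attempt << ")\n";
        createPool(name);                                 // AlreadyExists is acceptable here
        std::this_thread::sleep_for(std::chrono::milliseconds(50 * attempt));
    }
    return fetchPool(name) == PoolStatus::Ok;
}

int main() {
    std::cout << (ensureDefaultPool("default") ? "pool ready\n" : "pool missing\n");
}

In this reading the warnings are transient noise from the bootstrap race rather than a failure of the test itself.
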
2025-12-23T18:40:13.738263Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:40:13.854602Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:40:13.935118Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:14.115237Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:40:16.803782Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125081617960702:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:16.803877Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:16.804312Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125081617960711:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:16.804360Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:16.911948Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:16.995506Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:17.043247Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:17.121537Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:17.164050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:17.220683Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:17.271519Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:17.356444Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:17.470972Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125085912928887:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:17.471118Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:17.471925Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125085912928893:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:17.471997Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125085912928892:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:17.472059Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:17.476606Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:17.494108Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587125085912928896:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:40:17.594114Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587125085912928947:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:17.856817Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587125064438089870:2252];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:17.856905Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [FAIL] Test command err: 2025-12-23T18:40:00.880206Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125015531596694:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:00.880273Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00391a/r3tmp/tmpoCLacI/pdisk_1.dat 2025-12-23T18:40:01.845875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:40:01.956909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:01.957037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:02.128271Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:02.176754Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:40:02.177735Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:02.181660Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125015531596663:2082] 1766515200846111 != 1766515200846114 2025-12-23T18:40:02.213433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:02.214897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:02.581736Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:31406 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:40:02.975055Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587125019826564210:2102] Handle TEvNavigate describe path dc-1 2025-12-23T18:40:02.975131Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587125024121532347:2759] HANDLE EvNavigateScheme dc-1 2025-12-23T18:40:02.975283Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587125019826564239:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:02.975401Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587125019826564509:2298][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587125019826564239:2121], cookie# 1 2025-12-23T18:40:02.977042Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587125019826564550:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587125019826564547:2298], cookie# 1 2025-12-23T18:40:02.977100Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587125019826564551:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587125019826564548:2298], cookie# 1 2025-12-23T18:40:02.977118Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587125019826564552:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587125019826564549:2298], cookie# 1 2025-12-23T18:40:02.977153Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587125015531596631:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587125019826564550:2298], cookie# 1 2025-12-23T18:40:02.977184Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587125015531596634:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587125019826564551:2298], cookie# 1 2025-12-23T18:40:02.977204Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587125015531596637:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587125019826564552:2298], cookie# 1 2025-12-23T18:40:02.977257Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587125019826564550:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587125015531596631:2050], cookie# 1 2025-12-23T18:40:02.977276Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587125019826564551:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587125015531596634:2053], cookie# 1 2025-12-23T18:40:02.977294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587125019826564552:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587125015531596637:2056], cookie# 1 2025-12-23T18:40:02.977333Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587125019826564509:2298][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587125019826564547:2298], cookie# 1 2025-12-23T18:40:02.977359Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587125019826564509:2298][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:40:02.977378Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587125019826564509:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587125019826564548:2298], cookie# 1 2025-12-23T18:40:02.977421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587125019826564509:2298][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:40:02.977454Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587125019826564509:2298][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587125019826564549:2298], cookie# 1 2025-12-23T18:40:02.977469Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587125019826564509:2298][/dc-1] Sync cookie mismatch: sender# [1:7587125019826564549:2298], cookie# 1, current cookie# 0 2025-12-23T18:40:02.977531Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587125019826564239:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:40:02.977608Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587125019826564239:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587125019826564509:2298] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:40:02.977696Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587125019826564239:2121], cacheItem# { Subscriber: { Subscriber: [1:7587125019826564509:2298] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:40:02.981713Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587125024121532348:2760], recipient# [1:7587125024121532347:2759], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 
PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:40:02.981814Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587125024121532347:2759] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:40:03.013378Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587125024121532347:2759] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:40:03.016527Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587125024121532347:2759] Handle TEvDescribeSchemeResult Forward to# [1:7587125024121532346:2758] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } Pat ... 
dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587125053983302914:2056] 2025-12-23T18:40:13.756950Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587125053983302914:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587125071163173832:3212] 2025-12-23T18:40:13.756970Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587125071163173812:3212][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587125071163173827:3212] 2025-12-23T18:40:13.757006Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587125071163173812:3212][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587125071163173828:3212] 2025-12-23T18:40:13.757030Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7587125071163173812:3212][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7587125053983303228:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:13.757078Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587125071163173812:3212][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587125071163173829:3212] 2025-12-23T18:40:13.757101Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7587125071163173812:3212][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7587125053983303228:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:40:13.757134Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7587125053983303228:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-12-23T18:40:13.757180Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7587125053983303228:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7587125071163173812:3212] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:40:13.757265Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587125053983303228:2121], cacheItem# { Subscriber: { Subscriber: [3:7587125071163173812:3212] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false 
Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:40:13.757367Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587125071163173834:3215], recipient# [3:7587125071163173808:2310], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:13.785566Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587125053983303228:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:13.785736Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587125053983303228:2121], cacheItem# { Subscriber: { Subscriber: [3:7587125058278271713:3076] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:40:13.785875Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587125071163173836:3216], recipient# [3:7587125071163173835:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:14.767130Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587125053983303228:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:14.767306Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587125053983303228:2121], cacheItem# { Subscriber: { Subscriber: [3:7587125071163173820:3213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: 
DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:40:14.767400Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587125075458141152:3220], recipient# [3:7587125075458141151:2315], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:14.777287Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587125053983303068:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:14.777371Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:14.795119Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587125053983303228:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:40:14.795273Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587125053983303228:2121], cacheItem# { Subscriber: { Subscriber: [3:7587125058278271713:3076] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:40:14.795412Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587125075458141156:3223], recipient# [3:7587125075458141155:2316], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:254, void CreateTableInsideThenStopTenantAndForceDeleteSubDomain(TTestEnvWithPoolsSupport &): (ver.Version == (env.GetSettings().FeatureFlags.GetEnableAlterDatabaseCreateHiveFirst() ? 
5 : 4)) failed: (8 != 5) TBackTrace::Capture()+28 (0x1B0C63EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B5B77AC) CreateTableInsideThenStopTenantAndForceDeleteSubDomain(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&)+25645 (0x1AC5121D) void NTestSuiteTExtSubDomainTest::TestCreateTableInsideThenStopTenantAndForceDeleteSubDomain(NUnitTest::TTestContext&)+212 (0x1ACDF094) std::__y1::__function::__func, void ()>::operator()()+280 (0x1ACC6308) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B5F043A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B5BE488) NTestSuiteTExtSubDomainTest::TCurrentTest::Execute()+1229 (0x1ACC540D) NUnitTest::TTestFactory::Execute()+2176 (0x1B5BFC40) NUnitTest::RunMain(int, char**)+5805 (0x1B5EA29D) ??+0 (0x7F08E6D78D90) __libc_start_main+128 (0x7F08E6D78E40) _start+41 (0x18622029) |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD] >> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber >> DataShardReadTableSnapshots::ReadTableSplitFinished >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler >> TConsoleTests::TestCreateTenant >> KqpTx::InvalidateOnError [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD] >> 
TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions >> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOwnChangesOlap [GOOD] Test command err: Trying to start YDB, gRPC: 11292, MsgBus: 9446 2025-12-23T18:39:14.840385Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124815530811680:2158];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:14.840519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004116/r3tmp/tmpLkNFDk/pdisk_1.dat 2025-12-23T18:39:15.546972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:15.547064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:15.587643Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:15.596271Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:15.602871Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124815530811550:2081] 1766515154823133 != 1766515154823136 
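
[editor's note] The only hard failure reported so far in this section is TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true, and it comes down to a single assertion at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:254: the test expects the described path to report version 5 when EnableAlterDatabaseCreateHiveFirst is set (4 otherwise), but the observed value was 8, hence "(8 != 5)". The sketch below only reproduces the shape of that check for readability; DescribeResult, describePath and the flag variable are hypothetical stand-ins, not the actual test helpers.

// Illustrative only: the flag-dependent version assertion reported above.
#include <cstdint>
#include <iostream>

struct DescribeResult {
    uint64_t PathVersion;   // e.g. the Version/GeneralVersion carried by a describe result
};

// Pretend describe call; the real test reads this from TEvDescribeSchemeResult.
DescribeResult describePath() { return DescribeResult{8}; }   // the value observed in the log

int main() {
    const bool enableAlterDatabaseCreateHiveFirst = true;      // the test variant that failed
    const uint64_t expected = enableAlterDatabaseCreateHiveFirst ? 5 : 4;

    const DescribeResult ver = describePath();
    if (ver.PathVersion != expected) {
        // This is the condition the log prints as "(8 != 5)".
        std::cerr << "assertion failed: " << ver.PathVersion << " != " << expected << "\n";
        return 1;
    }
    return 0;
}

One plausible reading is that more schema operations bumped the path version before the check ran than the test accounts for, but the log alone does not show which operations those were.
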
2025-12-23T18:39:15.626204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11292, node 1 2025-12-23T18:39:15.896931Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:15.897007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:15.913334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:15.913359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:15.913366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:15.913464Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9446 TClient is connected to server localhost:9446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515155678 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:16.857344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:16.864254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:39:19.681075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124837005648939:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.681535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124837005648927:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.681641Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.686243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:19.687153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124837005648942:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.687223Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.702556Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124837005648941:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:39:19.779346Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124837005648994:2540] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:19.853837Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124815530811680:2158];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:19.853899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:20.502160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:20.729080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:22.478152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 32266, MsgBus: 31530 2025-12-23T18:39:26.468173Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124868325856813:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:26.468242Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004116/r3tmp/tmpDj2GXt/pdisk_1.dat 2025-12-23T18:39:26.814854Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:26.824128Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:26.906383Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:26.906490Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:26.918607Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32266, node 2 2025-12-23T18:39:27.175541Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:27.175564Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:27.175572Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:27.175650Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:27.202852Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:31530 2025-12-23T18:39:27.566224Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:31530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 17665151669 ... 15662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.749462Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.753862Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.753969Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.753995Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.759592Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.759687Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.759712Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.764494Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.764574Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.764598Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.772767Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.772848Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.772869Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.774405Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.774457Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.774504Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.786591Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038044;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.786591Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.786661Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.786662Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038044;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.786684Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.786684Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038044;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.796874Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.796874Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.796934Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.796941Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.796956Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.796957Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.807435Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.807448Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.807511Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.807516Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.807533Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.807533Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.817813Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.817901Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.817924Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.818909Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.818976Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.818996Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:16.885459Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd685kmk6m0c6f464e65656k", SessionId: ydb://session/3?node_id=3&id=ZGJlOTk4YjUtMTVjMmFhYTUtMmEyNzQ1ZWMtNWMwNzMwYzE=, Slow query, duration: 11.911387s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 15815, MsgBus: 1262 2025-12-23T18:39:54.557960Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124989482318866:2202];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:54.558020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:54.613674Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004104/r3tmp/tmphDeWAy/pdisk_1.dat 2025-12-23T18:39:55.196927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:55.245088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:55.245212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:55.360428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:55.498828Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15815, node 1 2025-12-23T18:39:55.519187Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124989482318688:2081] 1766515194451488 != 1766515194451491 2025-12-23T18:39:55.583185Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:55.610946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:55.711775Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:55.711805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:55.711815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:55.712019Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1262 TClient is connected to server localhost:1262 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515195487 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:39:56.740129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:56.778360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:57.032749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:57.439549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:39:57.603564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:59.557524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124989482318866:2202];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:59.566769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:00.968590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125015252124354:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:00.968687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:00.969002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125015252124364:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:00.969050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:01.370630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:01.425780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:01.466688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:01.504953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:01.543035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:01.636995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:01.703615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:01.788306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:01.914386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125019547092528:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:01.914472Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:01.914782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125019547092533:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:01.914874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125019547092534:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:01.914911Z node 1 :KQP_WORKLOAD_SERVI ... .674376Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:40:16.817645Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:16.941675Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:40:17.030189Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:40:19.988983Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125097741385224:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:19.989079Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:19.989349Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125097741385234:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:19.989384Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:20.112157Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:20.165998Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:20.212931Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:20.247367Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:20.279185Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:20.320563Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:20.382847Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:20.435649Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:20.547959Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125102036353400:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:20.548031Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:20.548116Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125102036353405:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:20.548460Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125102036353407:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:20.548512Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:20.552209Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:20.566709Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587125102036353408:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:40:20.633066Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587125102036353460:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:20.782687Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587125080561514288:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:20.782817Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:22.618267Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-12-23T18:40:22.618500Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037911 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:40:22.618647Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037911 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:40:22.618814Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [3:7587125110626288411:2532], Table: `/Root/KeyValue` ([72057594046644480:42:1]), SessionActorId: [3:7587125110626288385:2532]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[3:7587125110626288411:2532].{
: Error: Conflict with existing key., code: 2012 } 2025-12-23T18:40:22.619406Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [3:7587125110626288403:2532], SessionActorId: [3:7587125110626288385:2532], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7587125110626288385:2532]. 2025-12-23T18:40:22.619645Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=3&id=NmNmNDM0MDktNmU2NWFhMDQtYjU4MTQxZTAtNWE3MzMyM2E=, ActorId: [3:7587125110626288385:2532], ActorState: ExecuteState, TraceId: 01kd686a0h78sg4py1gnb5j8zg, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7587125110626288405:2532] from: [3:7587125110626288403:2532] 2025-12-23T18:40:22.619771Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [3:7587125110626288405:2532] TxId: 281474976710673. Ctx: { TraceId: 01kd686a0h78sg4py1gnb5j8zg, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NmNmNDM0MDktNmU2NWFhMDQtYjU4MTQxZTAtNWE3MzMyM2E=, PoolId: default, IsStreamingQuery: 0}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-12-23T18:40:22.620101Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=NmNmNDM0MDktNmU2NWFhMDQtYjU4MTQxZTAtNWE3MzMyM2E=, ActorId: [3:7587125110626288385:2532], ActorState: ExecuteState, TraceId: 01kd686a0h78sg4py1gnb5j8zg, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `/Root/KeyValue`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } 2025-12-23T18:40:22.700578Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=NmNmNDM0MDktNmU2NWFhMDQtYjU4MTQxZTAtNWE3MzMyM2E=, ActorId: [3:7587125110626288385:2532], ActorState: ExecuteState, TraceId: 01kd686a382yzhefwbz8r2y2ct, Create QueryResponse for error on request, msg: , status: NOT_FOUND, issues: { message: "Transaction not found: 01kd686a020r507xq60aczr4kc" issue_code: 2015 severity: 1 } >> TConsoleTests::TestCreateSharedTenant >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TConsoleConfigHelpersTests::TestConfigCourier >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort+UseSink >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> CheckIntegrityBlock42::PlacementWrongDisks >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> CheckIntegrityBlock42::DataOk >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse >> KqpScanArrowInChanels::AllTypesColumns >> KqpSinkLocks::OlapUpdateLocksTwoShards [GOOD] >> KqpSinkLocks::OlapUpdateLocksOneShardRowExists >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> CheckIntegrityMirror3dc::PlacementBlobIsLost >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AllTypesColumns |94.0%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... 
results_accumulator.log} >> CheckIntegrityBlock42::PlacementWrongDisks [GOOD] >> CheckIntegrityMirror3dc::DataErrorOneCopy >> CheckIntegrityBlock42::DataOk [GOOD] >> CheckIntegrityBlock42::DataOkAdditionalEqualParts |94.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange >> CdcStreamChangeCollector::PageFaults [GOOD] |94.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |94.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD] |94.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} >> CdcStreamChangeCollector::OldImage >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain >> KqpLocks::EmptyRange [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken >> KqpTx::CommitPrepared [GOOD] >> CheckIntegrityMirror3dc::PlacementBlobIsLost [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] >> CheckIntegrityMirror3dc::PlacementDisintegrated >> KqpSinkLocks::OlapInsertWithBulkUpsert-UseBulkUpsert [GOOD] >> CheckIntegrityBlock42::DataOkAdditionalEqualParts [GOOD] >> KqpSinkLocks::OlapUpdateLocksOneShard >> CheckIntegrityMirror3dc::DataErrorOneCopy [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> CheckIntegrityMirror3dc::DataErrorManyCopies >> CheckIntegrityBlock42::DataErrorSixPartsTwoBroken >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] |94.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates >> TConsoleTests::TestCreateTenantExtSubdomain |94.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |94.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |94.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] >> CheckIntegrityMirror3dc::DataErrorManyCopies [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] >> CheckIntegrityMirror3dc::PlacementDisintegrated [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsTwoBroken [GOOD] >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] 
>> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable [GOOD] >> KqpSinkMvcc::ChangeFromTheFuture-IsOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] >> CheckIntegrityMirror3dc::DataOk >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> CheckIntegrityBlock42::DataOkErasureFiveParts >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags >> TConsoleTests::TestTenantConfigConsistency >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit >> CheckIntegrityMirror3dc::DataOk [GOOD] >> TConsoleTests::TestCreateServerlessTenant >> CheckIntegrityBlock42::DataOkErasureFiveParts [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD] >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> CdcStreamChangeCollector::OldImage [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> KqpResultSetFormats::ArrowFormat_Stress >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits >> CdcStreamChangeCollector::SchemaChanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::DataErrorManyCopies [GOOD] Test command err: RandomSeed# 123772177277061112 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: 
[82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 ], ver1 disks [ 2 ] ERROR: There are unequal parts *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 2 ], ver1 disks [ 3 4 5 ] ERROR: There are unequal parts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:08.724862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:08.863343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:08.879026Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:08.879502Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:08.879618Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:09.199577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:09.199763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:09.289516Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515204746936 != 1766515204746940 2025-12-23T18:40:09.305514Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:09.359852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:09.451543Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c79/r3tmp/tmpVtH0JI/pdisk_1.dat 2025-12-23T18:40:09.830533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:09.852447Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:09.974780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:10.025191Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:895:2772] 2025-12-23T18:40:10.025470Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:10.076991Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:10.077238Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:10.079097Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:10.079204Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:10.079266Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:10.079693Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:10.080076Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2025-12-23T18:40:10.080345Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:10.088214Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:10.088324Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2772] in generation 1 2025-12-23T18:40:10.089722Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:10.089812Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:10.091122Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:40:10.091196Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:40:10.091251Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:40:10.091547Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:10.091651Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:10.091704Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:928:2775] in generation 1 2025-12-23T18:40:10.103495Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:10.147370Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:10.147578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:10.147695Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:931:2793] 2025-12-23T18:40:10.147731Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:10.147780Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:10.147854Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:10.148206Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:10.148249Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:40:10.148305Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:10.148412Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:932:2794] 2025-12-23T18:40:10.148440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:40:10.148462Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:40:10.148488Z 
node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:10.148895Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:10.148997Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:10.149142Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:10.149179Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:10.149224Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:10.149270Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:10.149321Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:40:10.149393Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:40:10.149488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:889:2768], serverId# [1:898:2773], sessionId# [0:0:0] 2025-12-23T18:40:10.149540Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:40:10.149643Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:10.149675Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:40:10.149705Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:40:10.150210Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:10.150484Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:40:10.150582Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:40:10.151098Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:890:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2025-12-23T18:40:10.151282Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:40:10.151463Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-12-23T18:40:10.151522Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-12-23T18:40:10.153500Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:10.153613Z node 
1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:10.167175Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:10.167292Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:40:10.167954Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:40:10.168011Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 n ... :98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:40:29.279886Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:29.281232Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:29.281282Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:29.281345Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:40:29.281423Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:29.281487Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:40:29.281593Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:29.283412Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:29.284824Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:40:29.285004Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:40:29.285065Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:40:29.298988Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:29.299178Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-12-23T18:40:29.299238Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-12-23T18:40:29.299284Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-12-23T18:40:29.299681Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, 
LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:29.315271Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:29.505780Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:40:29.505878Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:29.506281Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:29.506343Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:40:29.506403Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 2025-12-23T18:40:29.506635Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2025-12-23T18:40:29.515008Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:40:29.515530Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:29.516847Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:29.578095Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2025-12-23T18:40:29.578247Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:29.578305Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:29.578362Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:29.578461Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:29.578536Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-12-23T18:40:29.578665Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:29.583322Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-12-23T18:40:29.583454Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:40:29.603854Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1063:2892], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:29.603999Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1074:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:29.604089Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:29.605130Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:29.605298Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:29.610765Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:29.618646Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:29.801434Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:29.805699Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1077:2900], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:40:29.833672Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1135:2939] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:29.948579Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1166:2956], serverId# [4:1167:2957], sessionId# [0:0:0] 2025-12-23T18:40:29.949111Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2025-12-23T18:40:29.949478Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1766515229949334 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-12-23T18:40:29.949739Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-12-23T18:40:29.962541Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2025-12-23T18:40:29.962677Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:30.135710Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2025-12-23T18:40:30.136141Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1766515230135994 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-12-23T18:40:30.136339Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-12-23T18:40:30.151394Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2025-12-23T18:40:30.151488Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:30.153884Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1194:2975], serverId# [4:1195:2976], sessionId# [0:0:0] 2025-12-23T18:40:30.168031Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1196:2977], serverId# [4:1197:2978], sessionId# [0:0:0] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> 
AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:11.870306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:11.996146Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:12.005256Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:12.005591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:12.005684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:12.291998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:12.292111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:12.379212Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515208016649 != 1766515208016653 2025-12-23T18:40:12.396341Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:12.455878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:12.580254Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c73/r3tmp/tmpc8ZE3r/pdisk_1.dat 2025-12-23T18:40:13.002560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:13.017791Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:13.144149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:13.218493Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:895:2772] 2025-12-23T18:40:13.218808Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:13.276938Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:13.277446Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:13.279314Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:13.279417Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:13.279484Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:13.279898Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:13.280215Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2025-12-23T18:40:13.280471Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:13.288899Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:13.289231Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2772] in generation 1 2025-12-23T18:40:13.290726Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:13.290866Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:13.292303Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:40:13.292379Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:40:13.292423Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:40:13.292722Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:13.292898Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:13.292960Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:928:2775] in generation 1 2025-12-23T18:40:13.306878Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:13.354316Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:13.354548Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:13.354688Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:931:2793] 2025-12-23T18:40:13.354768Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:13.354813Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:13.354846Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:13.355187Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:13.355229Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:40:13.355286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:13.355388Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:932:2794] 2025-12-23T18:40:13.355427Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:40:13.355453Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:40:13.355497Z 
node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:13.355872Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:13.355960Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:13.356093Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:13.356138Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:13.356186Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:13.356227Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:13.356278Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:40:13.356374Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:40:13.356504Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:889:2768], serverId# [1:898:2773], sessionId# [0:0:0] 2025-12-23T18:40:13.356557Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:40:13.356581Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:13.356611Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:40:13.356642Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:40:13.357129Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:13.357386Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:40:13.357482Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:40:13.357931Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:890:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2025-12-23T18:40:13.358095Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:40:13.358296Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-12-23T18:40:13.358370Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-12-23T18:40:13.362664Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:13.362799Z node 
1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:13.375495Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:13.375602Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:40:13.376215Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:40:13.376355Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 n ... 6224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:40:32.072185Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:32.072271Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 40] schema version# 1 2025-12-23T18:40:32.072722Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:40:32.073116Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:32.074501Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2000} 2025-12-23T18:40:32.074623Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:40:32.077230Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:40:32.077319Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:40:32.077374Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2025-12-23T18:40:32.077456Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:32.077520Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:40:32.077610Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:32.078207Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:40:32.078272Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:32.078943Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:40:32.078991Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:32.080725Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2025-12-23T18:40:32.080775Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:32.080829Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:40:32.080894Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:32.080951Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:40:32.081021Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:32.082020Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:32.082126Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:32.086040Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:40:32.086247Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-12-23T18:40:32.086336Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-12-23T18:40:32.087056Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:40:32.087492Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:40:32.087546Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:40:32.100537Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1008:2850], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:32.100685Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:997:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:32.101466Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:32.102069Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1016:2858], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:32.102393Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:32.107378Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:32.116394Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:32.116606Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:32.246535Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:32.246711Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:32.250284Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1011:2853], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:40:32.291771Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1070:2893] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:32.413294Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1139:2924], serverId# [4:1140:2925], sessionId# [0:0:0] 2025-12-23T18:40:32.413899Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2025-12-23T18:40:32.414262Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1766515232414150 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-12-23T18:40:32.414503Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-12-23T18:40:32.431485Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2025-12-23T18:40:32.431614Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:32.577136Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037889 2025-12-23T18:40:32.577553Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1766515232577403 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-12-23T18:40:32.577814Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1766515232577403 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-12-23T18:40:32.577932Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-12-23T18:40:32.591563Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2025-12-23T18:40:32.591650Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:32.597412Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: 
Server connected at leader tablet# 72075186224037889, clientId# [4:1180:2956], serverId# [4:1181:2957], sessionId# [0:0:0] 2025-12-23T18:40:32.604984Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1182:2958], serverId# [4:1183:2959], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ChangeFromTheFuture-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 3785, MsgBus: 11051 2025-12-23T18:39:13.796313Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124810478105378:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:13.796377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00411b/r3tmp/tmpwRtKx8/pdisk_1.dat 2025-12-23T18:39:14.091473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:14.091610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:14.100968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:14.176479Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:14.257653Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3785, node 1 2025-12-23T18:39:14.342514Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:14.357958Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:14.357983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:14.357993Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:14.358105Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11051 2025-12-23T18:39:14.816375Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:11051 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515154243 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:15.082854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:15.095623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:17.588559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124827657975397:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.588676Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.589195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124827657975406:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.589259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.591042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124827657975411:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.597062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:17.612762Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124827657975413:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:17.679365Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124827657975464:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:18.016372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:39:18.206146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587124831952942931:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:39:18.206433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587124831952942931:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:39:18.206668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587124831952942931:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:39:18.207830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:39:18.207916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:39:18.208088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:39:18.208203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:39:18.208298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:39:18.208435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:39:18.208530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:39:18.208633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:39:18.208728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:39:18.208832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:39:18.208934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:39:18.209017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:39:18.209132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587124831952943049:2346];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:39:18.211373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587124831952942931:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:39:18.211502Z node 1 ... 
ntroller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.689067Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.689070Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.689272Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.689274Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.689485Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.689490Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.689695Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.689878Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.689912Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.690054Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.691037Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.691546Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.692912Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:17.698060Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; Trying to start YDB, gRPC: 23387, MsgBus: 12381 2025-12-23T18:40:20.219161Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587125101736427375:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:20.219229Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00411b/r3tmp/tmpXnknFF/pdisk_1.dat 2025-12-23T18:40:20.318541Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:20.389933Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:20.396507Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:20.396616Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:20.435391Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23387, node 4 2025-12-23T18:40:20.567502Z node 4 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:40:20.567528Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:40:20.567539Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:40:20.567641Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:40:20.609192Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12381 TClient is connected to server localhost:12381 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515220491 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:21.186170Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:40:21.211153Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:40:21.258923Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:25.223110Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587125101736427375:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:25.223198Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:25.523943Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125123211264674:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:25.524104Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:25.524661Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125123211264686:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:25.524704Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125123211264687:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:25.524822Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:25.529971Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:25.545578Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587125123211264690:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:40:25.633459Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587125123211264742:2541] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:25.709488Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:25.762410Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:27.265516Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:30.171260Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [4:7587125144686109245:2967], SessionActorId: [4:7587125140391141875:2967], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 . sessionActorId=[4:7587125140391141875:2967]. 2025-12-23T18:40:30.171462Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=4&id=ZDhhYzE1M2MtOTUxNzRmY2YtMjEyY2VlMzYtYzY0NTQ3YWU=, ActorId: [4:7587125140391141875:2967], ActorState: ExecuteState, TraceId: 01kd686hes9r9rzq2hzs987jv2, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7587125144686109265:2967] from: [4:7587125144686109245:2967] 2025-12-23T18:40:30.171545Z node 4 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [4:7587125144686109265:2967] TxId: 281474976710668. Ctx: { TraceId: 01kd686hes9r9rzq2hzs987jv2, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ZDhhYzE1M2MtOTUxNzRmY2YtMjEyY2VlMzYtYzY0NTQ3YWU=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 } 2025-12-23T18:40:30.171797Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=ZDhhYzE1M2MtOTUxNzRmY2YtMjEyY2VlMzYtYzY0NTQ3YWU=, ActorId: [4:7587125140391141875:2967], ActorState: ExecuteState, TraceId: 01kd686hes9r9rzq2hzs987jv2, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV2`" issue_code: 2001 severity: 1 } |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:11.743400Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:11.876267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:11.888311Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:11.888696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:11.888779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:12.235216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:12.235381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:12.325213Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515208266177 != 1766515208266181 2025-12-23T18:40:12.339183Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:12.390686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:12.507419Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c70/r3tmp/tmpOFqvzN/pdisk_1.dat 2025-12-23T18:40:12.862281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:12.882173Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:12.996710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:13.062688Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:895:2772] 2025-12-23T18:40:13.063060Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:13.118437Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:13.118732Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:13.120825Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:13.120956Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:13.121019Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:13.121404Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:13.121746Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2025-12-23T18:40:13.121956Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:13.131004Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:13.131140Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2772] in generation 1 2025-12-23T18:40:13.132728Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:13.132846Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:13.134504Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:40:13.134572Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:40:13.134633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:40:13.135064Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:13.135189Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:13.135259Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:928:2775] in generation 1 2025-12-23T18:40:13.147624Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:13.192354Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:13.192622Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:13.192788Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:931:2793] 2025-12-23T18:40:13.192836Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:13.192888Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:13.192942Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:13.193314Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:13.193362Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:40:13.193426Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:13.193528Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:932:2794] 2025-12-23T18:40:13.193571Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:40:13.193598Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:40:13.193624Z 
node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:13.194079Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:13.194230Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:13.194439Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:13.194486Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:13.194546Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:13.194594Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:13.194647Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:40:13.194727Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:40:13.194871Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:889:2768], serverId# [1:898:2773], sessionId# [0:0:0] 2025-12-23T18:40:13.194936Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:40:13.194970Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:13.195004Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:40:13.195037Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:40:13.195563Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:13.195899Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:40:13.196005Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:40:13.196480Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:890:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2025-12-23T18:40:13.196673Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:40:13.196880Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-12-23T18:40:13.196944Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-12-23T18:40:13.199018Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:13.199145Z node 
1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:13.210264Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:13.210407Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:40:13.211264Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:40:13.211356Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 n ... 2-23T18:40:31.492739Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:31.494726Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:40:31.494801Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:31.494926Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-12-23T18:40:31.494979Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:31.495275Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:31.495320Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:40:31.495362Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:40:31.495639Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:40:31.495787Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:40:31.497612Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:31.497693Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 40] schema version# 1 2025-12-23T18:40:31.498126Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:40:31.498504Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:31.499839Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2000} 2025-12-23T18:40:31.499944Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 
2025-12-23T18:40:31.502513Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:40:31.502591Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:40:31.502641Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2025-12-23T18:40:31.502719Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:31.502808Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:40:31.502903Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:31.503484Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:40:31.503542Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:31.504174Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:40:31.504218Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:31.505854Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:31.505903Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:31.505949Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:40:31.506015Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:31.506068Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:40:31.506143Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:31.507880Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:31.508020Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:31.511676Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:40:31.511900Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-12-23T18:40:31.511989Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-12-23T18:40:31.512746Z 
node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:40:31.513153Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:40:31.513202Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:40:31.524932Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:997:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:31.525119Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:31.525225Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1008:2850], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:31.526290Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1013:2855], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:31.526485Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:31.531759Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:31.540150Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:31.540317Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:31.660651Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:31.660824Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:31.666833Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1011:2853], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:40:31.701113Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1070:2893] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:31.765455Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1139:2924], serverId# [4:1140:2925], sessionId# [0:0:0] 2025-12-23T18:40:31.765843Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2025-12-23T18:40:31.766053Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1766515231765977 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-12-23T18:40:31.766199Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-12-23T18:40:31.777222Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2025-12-23T18:40:31.777314Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:31.781294Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1146:2930], serverId# [4:1147:2931], sessionId# [0:0:0] 2025-12-23T18:40:31.786643Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1148:2932], serverId# [4:1149:2933], sessionId# [0:0:0] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:09.353826Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:09.490582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:09.501695Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:09.502068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:09.502144Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:09.887940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:09.888069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:10.023908Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515205926172 != 1766515205926176 2025-12-23T18:40:10.051550Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:10.101344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:10.228131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c7a/r3tmp/tmpeURT5D/pdisk_1.dat 2025-12-23T18:40:10.575220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:10.592150Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:10.733938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:10.804351Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:40:10.804649Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:10.920140Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:10.920310Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:10.928002Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:10.928104Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:10.928185Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:10.928599Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:10.928753Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:10.928852Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:40:10.943244Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:11.006663Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:11.006896Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:11.007020Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:40:11.007062Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:11.007103Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:11.007135Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:11.007605Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:11.007693Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:11.008161Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:11.008210Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:11.008257Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:11.008300Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:11.008362Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:40:11.008506Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:11.008757Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:40:11.008847Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:40:11.010684Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:11.022522Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:11.022630Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:40:11.167302Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:40:11.173162Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:40:11.173251Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:11.174287Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:11.174353Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:40:11.174402Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:40:11.174689Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:40:11.175615Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:40:11.176056Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:11.176169Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:40:11.178346Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:40:11.179103Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:11.181176Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:40:11.181234Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:11.181772Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:40:11.181878Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:11.183494Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:11.183699Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:11.183757Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:40:11.183880Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:11.183946Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:40:11.184032Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:11.190671Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:11.192557Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:40:11.192659Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... Heartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:30.469297Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:30.471217Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:40:30.471465Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:40:30.471551Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:40:30.485703Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:30.485935Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-12-23T18:40:30.486008Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-12-23T18:40:30.486060Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-12-23T18:40:30.486533Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:30.502230Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:30.678223Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:40:30.678331Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:30.678800Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:30.678867Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:40:30.678928Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 
2025-12-23T18:40:30.679192Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2025-12-23T18:40:30.679360Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:40:30.679595Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:30.680684Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:30.731209Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2025-12-23T18:40:30.731358Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:30.731414Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:30.731468Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:30.731560Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:30.731631Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-12-23T18:40:30.731787Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:30.734484Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-12-23T18:40:30.734599Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:40:30.744059Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1063:2892], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:30.744187Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1074:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:30.744289Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:30.749642Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:30.749887Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:30.755348Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:30.767519Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:30.941035Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:30.944359Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1077:2900], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:40:30.970578Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1135:2939] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:31.112968Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1166:2956], serverId# [4:1167:2957], sessionId# [0:0:0] 2025-12-23T18:40:31.113473Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2025-12-23T18:40:31.113913Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1766515231113815 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-12-23T18:40:31.114119Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-12-23T18:40:31.126729Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2025-12-23T18:40:31.126853Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:31.208886Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2025-12-23T18:40:31.209249Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1766515231209132 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-12-23T18:40:31.209461Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-12-23T18:40:31.220596Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2025-12-23T18:40:31.220657Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:31.335628Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:5] at 72075186224037888 2025-12-23T18:40:31.335987Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1766515231335865 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-12-23T18:40:31.336185Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: 
Executed write operation for [0:5] at 72075186224037888, row count=1 2025-12-23T18:40:31.347690Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2025-12-23T18:40:31.347810Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:31.350225Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1213:2986], serverId# [4:1214:2987], sessionId# [0:0:0] 2025-12-23T18:40:31.371299Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1215:2988], serverId# [4:1216:2989], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataOkErasureFiveParts [GOOD] Test command err: RandomSeed# 15584815503457825503 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:4:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 0 ] part 2: ver0 disks [ 7 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 6 0 ]; part 2 disks [ 7 1 ]; part 3 disks [ 2 ]; part 
4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 0 ]; part 2 disks [ 7 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::DataOk [GOOD] Test command err: RandomSeed# 462921805205029330 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Group is disintegrated or has network problems *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 2 ] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:09.075125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:09.198142Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:09.211438Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:09.211813Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:09.211892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:09.547717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:09.547880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:09.627641Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515205864714 != 1766515205864718 2025-12-23T18:40:09.641151Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:09.688910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:09.791693Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c78/r3tmp/tmpgMUoIT/pdisk_1.dat 2025-12-23T18:40:10.114019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:10.130799Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:10.265188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:10.329602Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:895:2772] 2025-12-23T18:40:10.329877Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:10.386537Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:10.392098Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:10.394055Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:10.394165Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:10.394249Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:10.394652Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:10.395074Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2025-12-23T18:40:10.395304Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:10.420663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:10.420796Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2772] in generation 1 2025-12-23T18:40:10.422294Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:10.422402Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:10.423954Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:40:10.424022Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:40:10.424072Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:40:10.424433Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:10.424544Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:10.424607Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:928:2775] in generation 1 2025-12-23T18:40:10.439623Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:10.499470Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:10.499693Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:10.499841Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:931:2793] 2025-12-23T18:40:10.499891Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:10.499947Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:10.499994Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:10.500368Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:10.500413Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:40:10.500474Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:10.500547Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:932:2794] 2025-12-23T18:40:10.500591Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:40:10.500620Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:40:10.500646Z 
node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:10.501080Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:10.501205Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:10.501371Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:10.501419Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:10.501473Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:10.501524Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:10.501577Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:40:10.501660Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:40:10.501770Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:889:2768], serverId# [1:898:2773], sessionId# [0:0:0] 2025-12-23T18:40:10.501832Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:40:10.501869Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:10.501901Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:40:10.501933Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:40:10.502414Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:10.502704Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:40:10.510924Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:40:10.511603Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:890:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2025-12-23T18:40:10.511866Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:40:10.512123Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-12-23T18:40:10.512191Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-12-23T18:40:10.514222Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:10.514325Z node 
1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:10.526613Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:10.526769Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:40:10.527415Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:40:10.527490Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 n ... , outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:29.097474Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:29.098005Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-12-23T18:40:29.098286Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:40:29.098328Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2025-12-23T18:40:29.098372Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2025-12-23T18:40:29.098434Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:29.098482Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:40:29.098548Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:40:29.104442Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:40:29.105152Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:40:29.105242Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-12-23T18:40:29.105313Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-12-23T18:40:29.105993Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:40:29.106290Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:40:29.106338Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:40:29.106939Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle 
TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-12-23T18:40:29.107008Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-12-23T18:40:29.119580Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1045:2882], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:29.119695Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1056:2887], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:29.119798Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:29.121105Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2895], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:29.121192Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:29.126129Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:29.136211Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:29.136327Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:29.136409Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-12-23T18:40:29.264893Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:29.265028Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:29.265087Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-12-23T18:40:29.269010Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1059:2890], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:40:29.307703Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1119:2931] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:29.399260Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1225:2975], serverId# [4:1226:2976], sessionId# [0:0:0] 2025-12-23T18:40:29.399781Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2025-12-23T18:40:29.400058Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1766515229399959 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-12-23T18:40:29.400209Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1766515229399959 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-12-23T18:40:29.400295Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-12-23T18:40:29.411488Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2025-12-23T18:40:29.411598Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:29.541876Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037889 2025-12-23T18:40:29.542150Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1766515229542059 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-12-23T18:40:29.542327Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 4 Group: 1766515229542059 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-12-23T18:40:29.542422Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 5 Group: 1766515229542059 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified 
Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-12-23T18:40:29.542475Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 6 Group: 1766515229542059 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-12-23T18:40:29.542530Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-12-23T18:40:29.553634Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2025-12-23T18:40:29.553729Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:29.559080Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1274:3015], serverId# [4:1275:3016], sessionId# [0:0:0] 2025-12-23T18:40:29.566153Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1276:3017], serverId# [4:1277:3018], sessionId# [0:0:0] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:20.580186Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:20.716227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:20.728807Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:20.729255Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:20.729354Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:21.152884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:21.153038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:21.249588Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515217475230 != 1766515217475234 2025-12-23T18:40:21.263239Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:21.315516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:21.413814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d77/r3tmp/tmp9Y1Y6D/pdisk_1.dat 2025-12-23T18:40:21.771303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:21.786383Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:21.899537Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:40:21.899628Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:40:21.899836Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:40:22.094307Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:40:22.094438Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:40:22.095152Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:40:22.095285Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:40:22.095627Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:40:22.095824Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:40:22.096010Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:40:22.096302Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:40:22.098268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:22.099725Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:40:22.099839Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:40:22.136634Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:40:22.137889Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:40:22.138271Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:40:22.138629Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:22.196552Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:40:22.197443Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:22.197576Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:22.200503Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:22.200620Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:22.200733Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:22.201213Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:22.201405Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:22.201537Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:40:22.214511Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:22.288376Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:22.288622Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:22.288752Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:40:22.288808Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:22.288861Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:22.288909Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:22.289188Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:22.289246Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:22.289605Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:22.289731Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:22.290185Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:22.290234Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:22.290281Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:40:22.290321Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:40:22.290365Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:40:22.290413Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:22.290476Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:22.290635Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:22.290673Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:22.290714Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:40:22.290864Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:40:22.290903Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:40:22.291020Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:22.291260Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:40:22.291315Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2 ... sactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2025-12-23T18:40:29.921671Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2878] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-12-23T18:40:29.921701Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1061:2878] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1196:2984] ShardId# 72075186224037890 2025-12-23T18:40:29.921762Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-12-23T18:40:29.921834Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1196:2984], Recipient [2:1061:2878]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-12-23T18:40:29.921863Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2878] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-12-23T18:40:29.922224Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1060:2878], Recipient [2:1061:2878]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-12-23T18:40:29.922261Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2878] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-12-23T18:40:29.922288Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2878] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-12-23T18:40:29.922328Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-12-23T18:40:29.922414Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:40:29.922552Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1196:2984], Recipient [2:1061:2878]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: 
"\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-12-23T18:40:29.922583Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2878] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-12-23T18:40:29.922611Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1061:2878] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1196:2984] ShardId# 72075186224037890 2025-12-23T18:40:29.922687Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1196:2984], Recipient [2:1061:2878]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-12-23T18:40:29.922713Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2878] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-12-23T18:40:29.922771Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-12-23T18:40:29.923034Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1060:2878], Recipient [2:1061:2878]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-12-23T18:40:29.923065Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2878] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-12-23T18:40:29.923090Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2878] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-12-23T18:40:29.923128Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-12-23T18:40:29.923190Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-12-23T18:40:29.923357Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287429, Sender [2:1196:2984], Recipient [2:1061:2878]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2025-12-23T18:40:29.923389Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1061:2878] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2025-12-23T18:40:29.923418Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1061:2878] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2025-12-23T18:40:29.923500Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2025-12-23T18:40:29.923540Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715662, at: 72075186224037890 2025-12-23T18:40:29.923639Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1101:2909], Recipient [2:1101:2909]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:29.923671Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:29.923735Z node 2 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-12-23T18:40:29.923767Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:40:29.923801Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2025-12-23T18:40:29.923829Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit ReadTableScan 2025-12-23T18:40:29.923863Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2025-12-23T18:40:29.923901Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-12-23T18:40:29.923928Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2025-12-23T18:40:29.923957Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2025-12-23T18:40:29.923987Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-12-23T18:40:29.924027Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2025-12-23T18:40:29.924056Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2025-12-23T18:40:29.924085Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2025-12-23T18:40:29.924117Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2025-12-23T18:40:29.924160Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-12-23T18:40:29.924186Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2025-12-23T18:40:29.924213Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2025-12-23T18:40:29.924243Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:29.924271Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2025-12-23T18:40:29.924300Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-12-23T18:40:29.924331Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2025-12-23T18:40:29.924389Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:40:29.924426Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715662] at 72075186224037890 on unit 
FinishPropose 2025-12-23T18:40:29.924465Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-12-23T18:40:29.924529Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:40:29.924819Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1101:2909], Recipient [2:1061:2878]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 341 } } CommitVersion { Step: 0 TxId: 281474976715662 } 2025-12-23T18:40:29.924863Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1061:2878] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2025-12-23T18:40:29.924931Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1061:2878] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.016261s execute time: 0.384084s total time: 0.400345s 2025-12-23T18:40:29.925303Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1061:2878], Recipient [2:884:2764]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2025-12-23T18:40:29.925566Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1061:2878], Recipient [2:1096:2907]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2025-12-23T18:40:29.925769Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1061:2878], Recipient [2:1101:2909]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> TConsoleTests::TestSetConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] Test command err: Trying to start YDB, gRPC: 24196, MsgBus: 13892 2025-12-23T18:39:13.099577Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124811985597890:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:13.099851Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004121/r3tmp/tmpNG6rPT/pdisk_1.dat 2025-12-23T18:39:13.610005Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-12-23T18:39:13.612971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:13.613069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:13.675346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:13.747022Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24196, node 1 2025-12-23T18:39:13.830422Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:13.963101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:13.963555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:13.963565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:13.963657Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:14.118385Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13892 TClient is connected to server localhost:13892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515153767 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:14.830164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:14.841009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:17.616034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124829165467834:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.616206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.616788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124829165467846:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.616803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124829165467847:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.616868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.625512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:17.648950Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124829165467850:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:17.719542Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124829165467901:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:18.098867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124811985597890:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:18.098946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:18.228343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:18.339970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:19.959473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:22.997985Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-12-23T18:39:23.007084Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-12-23T18:39:23.007254Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-12-23T18:39:23.007507Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [1:7587124850640312410:2964], Table: `/Root/Test` ([72057594046644480:42:1]), SessionActorId: [1:7587124850640312336:2964]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7587124850640312410:2964].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-12-23T18:39:23.008008Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587124850640312403:2964], SessionActorId: [1:7587124850640312336:2964], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7587124850640312336:2964]. 2025-12-23T18:39:23.008284Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=YTY5ZTA0ZDEtNDhkYzc1OS03NmNiMWMzOS1hYTFiN2QzOA==, ActorId: [1:7587124850640312336:2964], ActorState: ExecuteState, TraceId: 01kd684frd3jxw9egjhwjkmz68, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7587124850640312404:2964] from: [1:7587124850640312403:2964] 2025-12-23T18:39:23.008389Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587124850640312404:2964] TxId: 281474976710667. Ctx: { TraceId: 01kd684frd3jxw9egjhwjkmz68, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTY5ZTA0ZDEtNDhkYzc1OS03NmNiMWMzOS1hYTFiN2QzOA==, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-12-23T18:39:23.008726Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=YTY5ZTA0ZDEtNDhkYzc1OS03NmNiMWMzOS1hYTFiN2QzOA==, ActorId: [1:7587124850640312336:2964], ActorState: ExecuteState, TraceId: 01kd684frd3jxw9egjhwjkmz68, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } }
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 Trying to start YDB, gRPC: 27801, MsgBus: 4763 2025-12-23T18:39:25.262058Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004121/r3tmp/tmpL5Kb9K/pdisk_1.dat 2025-12-23T18: ... log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.540660Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.540707Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.547416Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.547494Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.547516Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.550916Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.550972Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.550987Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.559438Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.559513Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.559535Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.560191Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.560243Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.560260Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.571113Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.571193Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.571215Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.573102Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038020;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.573171Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038020;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.573192Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038020;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:24.632826Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd685vwd6pvjzr1wbefnrw42", SessionId: ydb://session/3?node_id=4&id=NzAzZDA2OTUtZmM1MWRhOTctYzFkYTcxZmYtYTRlM2QwMjE=, Slow query, duration: 13.230390s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2025-12-23T18:40:27.153860Z node 4 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[4:7587125060872598198:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=281474976710671; 2025-12-23T18:40:27.154726Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4378: SelfId: [4:7587125125297117099:3650], SessionActorId: [4:7587125116707181984:3650], Got LOCKS BROKEN for table. ShardID=72075186224037891, Sink=[4:7587125125297117099:3650].{
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } 2025-12-23T18:40:27.154869Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [4:7587125125297117099:3650], SessionActorId: [4:7587125116707181984:3650], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 . sessionActorId=[4:7587125116707181984:3650]. 2025-12-23T18:40:27.155041Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=4&id=MWRiYTQ0NmQtZTYzOGM0NjEtOGIzN2VmYzUtNzZiYmJlZDU=, ActorId: [4:7587125116707181984:3650], ActorState: ExecuteState, TraceId: 01kd686dvt189mrkgq12xycp6b, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7587125129592084591:3650] from: [4:7587125125297117099:3650] 2025-12-23T18:40:27.155133Z node 4 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [4:7587125129592084591:3650] TxId: 281474976710671. Ctx: { TraceId: 01kd686dvt189mrkgq12xycp6b, Database: /Root, SessionId: ydb://session/3?node_id=4&id=MWRiYTQ0NmQtZTYzOGM0NjEtOGIzN2VmYzUtNzZiYmJlZDU=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } 2025-12-23T18:40:27.155481Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=MWRiYTQ0NmQtZTYzOGM0NjEtOGIzN2VmYzUtNzZiYmJlZDU=, ActorId: [4:7587125116707181984:3650], ActorState: ExecuteState, TraceId: 01kd686dvt189mrkgq12xycp6b, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "tablet lock have another internal generation counter: 18446744073709551615 != 0" issue_code: 2001 severity: 1 } } 2025-12-23T18:40:27.156804Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [4:7587125125297117099:3650], SessionActorId: [4:7587125116707181984:3650], StateRollback: unknown message 278003713 2025-12-23T18:40:27.156817Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [4:7587125125297117099:3650], SessionActorId: [4:7587125116707181984:3650], StateRollback: unknown message 278003713 2025-12-23T18:40:27.156826Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [4:7587125125297117099:3650], SessionActorId: [4:7587125116707181984:3650], StateRollback: unknown message 278003713 2025-12-23T18:40:27.156836Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [4:7587125125297117099:3650], SessionActorId: [4:7587125116707181984:3650], StateRollback: unknown message 278003713 2025-12-23T18:40:27.159195Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:27.159632Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:27.160449Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:27.160943Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:27.161293Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:27.161806Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:27.162424Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:27.162542Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2025-12-23T18:40:27.163402Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671;
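
For readability, the DDL from the 13.23 s slow-query entry above is reproduced here with the \n escapes expanded; the statements are quoted from the log, not re-run. `/Root/Test` is the column-store table whose locks are reported as invalidated in the ABORTED issues below (`/Root/KV2` is created with the same definition and settings as `/Root/KV`).

CREATE TABLE `/Root/Test` (
    Group Uint32 not null,
    Name String not null,
    Amount Uint64,
    Comment String,
    PRIMARY KEY (Group, Name)
) WITH (
    STORE = COLUMN,
    AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10
);

CREATE TABLE `/Root/KV` (
    Key Uint32 not null,
    Value String,
    PRIMARY KEY (Key)
) WITH (
    STORE = COLUMN,
    AUTO_PARTITIONING_BY_SIZE = DISABLED,
    AUTO_PARTITIONING_BY_LOAD = DISABLED,
    AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,
    UNIFORM_PARTITIONS = 100
);

-- /Root/KV2: same columns and WITH settings as /Root/KV (see the logged query text above).
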
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:23.065513Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:23.191165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:23.203968Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:23.204413Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:23.204499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:23.522709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:23.522874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:23.614698Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515219982384 != 1766515219982388 2025-12-23T18:40:23.629582Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:23.681481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:23.770529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d76/r3tmp/tmpCJHquT/pdisk_1.dat 2025-12-23T18:40:24.103755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:24.119546Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:24.241070Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:40:24.241156Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:40:24.241313Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:40:24.417294Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:40:24.417423Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:40:24.418175Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
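
The CreateTable request in the EvSchemeRequest record above, expressed as an approximate YQL statement for orientation only (column names, types and UniformPartitionsCount are taken from the proto record; this is an illustrative sketch, not a statement the test executed):

CREATE TABLE `/Root/table-1` (
    key Uint32,
    value Uint32,
    PRIMARY KEY (key)
) WITH (
    UNIFORM_PARTITIONS = 1  -- assumption: rough mapping of UniformPartitionsCount: 1 from the request
);
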
2025-12-23T18:40:24.418297Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:40:24.418695Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:40:24.418966Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:40:24.419128Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:40:24.419468Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:40:24.421622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:24.423122Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:40:24.423217Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:40:24.482086Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:40:24.483490Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:40:24.483877Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:40:24.484263Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:24.544166Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:40:24.545000Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:24.545139Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:24.547075Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:24.547192Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:24.547256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:24.547701Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:24.547886Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:24.548013Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:40:24.559560Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:24.613538Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:24.613821Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:24.613983Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:40:24.614047Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:24.614110Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:24.614154Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:24.614444Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:24.614508Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:24.614928Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:24.615069Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:24.615889Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:24.615942Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:24.616015Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:40:24.616076Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:40:24.616111Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:40:24.616144Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:24.616203Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:24.616320Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:24.616368Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:24.616422Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:40:24.616577Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:40:24.616622Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:40:24.616745Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:24.617021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:40:24.617109Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2 ... on unit ReadTableScan 2025-12-23T18:40:31.723571Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-12-23T18:40:31.723795Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Continue 2025-12-23T18:40:31.723829Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:40:31.723876Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-12-23T18:40:31.724264Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:40:31.724307Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:40:31.724384Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:31.724903Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2878] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-12-23T18:40:31.725020Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1073:2889], Recipient [2:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-12-23T18:40:31.725073Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-12-23T18:40:31.725390Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2878] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-12-23T18:40:31.725433Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2878] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-12-23T18:40:31.725503Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-12-23T18:40:31.725699Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:40:31.725808Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2878] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-12-23T18:40:31.725899Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 
2025-12-23T18:40:31.725963Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2878] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-12-23T18:40:31.726349Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2878] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-12-23T18:40:31.726384Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2878] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-12-23T18:40:31.726482Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-12-23T18:40:31.726562Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:40:31.726642Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2878] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-12-23T18:40:31.726712Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-12-23T18:40:31.726770Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2878] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-12-23T18:40:31.727059Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2878] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-12-23T18:40:31.727090Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2878] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-12-23T18:40:31.727164Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-12-23T18:40:31.727233Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:40:31.727285Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1061:2878] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-12-23T18:40:31.727330Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-12-23T18:40:31.727369Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1061:2878] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-12-23T18:40:31.727582Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1061:2878] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-12-23T18:40:31.727628Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1061:2878] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-12-23T18:40:31.727781Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-12-23T18:40:31.727855Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 
2025-12-23T18:40:31.728032Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:40:31.728076Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715662, at: 72075186224037888 2025-12-23T18:40:31.728196Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1061:2878] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-12-23T18:40:31.728239Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1061:2878] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-12-23T18:40:31.728346Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:884:2764], Recipient [2:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:31.728400Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:31.728466Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:31.728524Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:40:31.728584Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-12-23T18:40:31.728650Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-12-23T18:40:31.728690Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715662] at 72075186224037888 error: , IsFatalError: 0 2025-12-23T18:40:31.728733Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-12-23T18:40:31.728767Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ReadTableScan 2025-12-23T18:40:31.728797Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:40:31.728827Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-12-23T18:40:31.728860Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is DelayComplete 2025-12-23T18:40:31.728886Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:40:31.728914Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:40:31.728939Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:40:31.729002Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-12-23T18:40:31.729029Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 
2025-12-23T18:40:31.729056Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2025-12-23T18:40:31.729098Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:31.729145Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-12-23T18:40:31.729178Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:40:31.729208Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:40:31.729266Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:31.729297Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-12-23T18:40:31.729331Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-12-23T18:40:31.729416Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:31.729587Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1061:2878] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037888 2025-12-23T18:40:31.729651Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1061:2878] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.013325s execute time: 0.096728s total time: 0.110053s 2025-12-23T18:40:31.729964Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1061:2878], Recipient [2:884:2764]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:20.396979Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:20.523483Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:20.535525Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:20.535942Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:20.536071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:20.860878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:20.861039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:20.955709Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515217141889 != 1766515217141893 2025-12-23T18:40:20.971108Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:21.024104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:21.130101Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d79/r3tmp/tmp82w2rD/pdisk_1.dat 2025-12-23T18:40:21.492224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:21.514425Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:21.621031Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:40:21.621091Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:40:21.621198Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:40:21.758614Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:40:21.758706Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:40:21.759413Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:40:21.759547Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:40:21.759958Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:40:21.760117Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:40:21.760265Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:40:21.760591Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:40:21.762423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:21.763858Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:40:21.763935Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:40:21.808453Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:40:21.809497Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:40:21.809808Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:40:21.810170Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:21.861235Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:40:21.862047Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:21.862251Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:21.864142Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:21.864245Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:21.864309Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:21.864698Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:21.864853Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:21.864940Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:40:21.877002Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:21.947524Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:21.947789Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:21.947966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:40:21.948025Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:21.948086Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:21.948128Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:21.948446Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:21.948516Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:21.948846Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:21.948927Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:21.949341Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:21.949402Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:21.949467Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:40:21.949514Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:40:21.949556Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:40:21.949592Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:21.949664Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:21.949770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:21.949838Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:21.949889Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:40:21.950003Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:40:21.950033Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:40:21.950122Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:21.950337Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:40:21.950383Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2 ... ead_table_scan.cpp:662: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:40:29.685942Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1545:3254], Recipient [2:1277:3042]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2025-12-23T18:40:29.685977Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1277:3042] TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2025-12-23T18:40:29.686006Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1277:3042] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1545:3254] ShardId# 72075186224037896 2025-12-23T18:40:29.686105Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2025-12-23T18:40:29.686173Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1545:3254], Recipient [2:1277:3042]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-12-23T18:40:29.686199Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1277:3042] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-12-23T18:40:29.686601Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1276:3042], Recipient [2:1277:3042]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2025-12-23T18:40:29.686636Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1277:3042] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-12-23T18:40:29.686665Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1277:3042] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-12-23T18:40:29.686715Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-12-23T18:40:29.686933Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-12-23T18:40:29.687081Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, 
received event# 269287429, Sender [2:1545:3254], Recipient [2:1277:3042]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2025-12-23T18:40:29.687111Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1277:3042] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2025-12-23T18:40:29.687139Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1277:3042] TxId# 281474976715663] Released quota 1 reserved messages from ShardId# 72075186224037896 2025-12-23T18:40:29.687250Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037896 2025-12-23T18:40:29.687288Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715664, at: 72075186224037896 2025-12-23T18:40:29.687411Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1445:3175], Recipient [2:1445:3175]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:29.687442Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:29.687490Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037896 2025-12-23T18:40:29.687538Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:40:29.687574Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2025-12-23T18:40:29.687601Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2025-12-23T18:40:29.687634Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2025-12-23T18:40:29.687672Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2025-12-23T18:40:29.687701Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2025-12-23T18:40:29.687745Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2025-12-23T18:40:29.687790Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-12-23T18:40:29.687825Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2025-12-23T18:40:29.687853Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2025-12-23T18:40:29.687879Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2025-12-23T18:40:29.687906Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2025-12-23T18:40:29.687945Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2025-12-23T18:40:29.687988Z node 2 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2025-12-23T18:40:29.688013Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2025-12-23T18:40:29.688041Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:29.688065Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037896 2025-12-23T18:40:29.688106Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-12-23T18:40:29.688142Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037896 2025-12-23T18:40:29.688200Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037896 2025-12-23T18:40:29.688234Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-12-23T18:40:29.688272Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-12-23T18:40:29.688349Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-12-23T18:40:29.688620Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1445:3175], Recipient [2:1277:3042]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 351 } } CommitVersion { Step: 0 TxId: 281474976715664 } 2025-12-23T18:40:29.688661Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1277:3042] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2025-12-23T18:40:29.688778Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1277:3042] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.020476s execute time: 0.666523s total time: 0.686999s 2025-12-23T18:40:29.689208Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3042], Recipient [2:1082:2894]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2025-12-23T18:40:29.689464Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3042], Recipient [2:1189:2976]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2025-12-23T18:40:29.689808Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3042], Recipient [2:1194:2978]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2025-12-23T18:40:29.690115Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender 
[2:1277:3042], Recipient [2:1442:3173]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2025-12-23T18:40:29.690450Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1548:3257], Recipient [2:1335:3091]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:29.690488Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:29.690525Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037893, clientId# [2:1546:3255], serverId# [2:1548:3257], sessionId# [0:0:0] 2025-12-23T18:40:29.690629Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3042], Recipient [2:1445:3175]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2025-12-23T18:40:29.690861Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3042], Recipient [2:1335:3091]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2025-12-23T18:40:29.691026Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1549:3258], Recipient [2:1337:3093]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:29.691072Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:29.691109Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1547:3256], serverId# [2:1549:3258], sessionId# [0:0:0] 2025-12-23T18:40:29.691234Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1277:3042], Recipient [2:1337:3093]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 32177, MsgBus: 23108 2025-12-23T18:39:53.608443Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124983970225128:2175];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:53.608517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004107/r3tmp/tmpuaALKd/pdisk_1.dat 2025-12-23T18:39:54.199185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:54.199283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:54.338522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:54.370118Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:54.371136Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32177, node 1 2025-12-23T18:39:54.606950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:54.619047Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:54.639321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:54.639350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:54.639364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:54.639445Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23108 TClient is connected to server localhost:23108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515194395 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:55.908664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:55.923771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:55.936830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:56.378639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:56.826963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:56.991866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:58.616311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124983970225128:2175];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:58.616405Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:59.969210Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125009740030629:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:59.969332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:59.969800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125009740030639:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:59.969852Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:00.389207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:00.444772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:00.492498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:00.543240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:00.603557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:00.665738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:00.730885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:00.829354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:01.003173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125014034998807:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:01.003273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:01.003853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125014034998812:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:01.003896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125014034998813:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:01.004206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18 ... 594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:20.819021Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:20.828260Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12363, node 3 2025-12-23T18:40:20.990866Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:21.033860Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:40:21.033885Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:40:21.033893Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:40:21.034099Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3524 TClient is connected to server localhost:3524 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515220876 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:21.629617Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:40:21.637175Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:40:21.656676Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:21.658067Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:40:21.738180Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:40:21.976484Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:22.067490Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:40:24.782028Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125118643566156:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:24.782180Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:24.782606Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125118643566166:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:24.782667Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:24.845196Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:24.929205Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:24.975022Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:25.023035Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:25.066465Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:25.114004Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:25.155948Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:25.209934Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:25.304123Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125122938534330:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:25.304260Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:25.304820Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125122938534336:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:25.304879Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587125122938534335:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:25.304922Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:25.309854Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:25.330720Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587125122938534339:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:40:25.418120Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587125122938534390:3760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:25.651063Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587125101463695321:2246];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:25.651139Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:19.959653Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:20.093942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:20.106483Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:20.106924Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:20.107008Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:20.456089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:20.456244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:20.538052Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515216766949 != 1766515216766953 2025-12-23T18:40:20.553868Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:20.600228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:20.737639Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d98/r3tmp/tmp6Hclx0/pdisk_1.dat 2025-12-23T18:40:21.144706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:21.160015Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:21.265792Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:40:21.265884Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:40:21.266044Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:40:21.376915Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:40:21.377022Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:40:21.377712Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:40:21.377835Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:40:21.378216Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:40:21.378409Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:40:21.378583Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:40:21.379043Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:40:21.381098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:21.382382Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:40:21.382465Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:40:21.440721Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:40:21.441795Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:40:21.442076Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:40:21.442386Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:21.529737Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:40:21.530562Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:21.530696Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:21.533323Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:21.533484Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:21.533554Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:21.533974Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:21.534148Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:21.534243Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:40:21.547371Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:21.586319Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:21.586576Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:21.586709Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:40:21.586827Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:21.586903Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:21.586944Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:21.587245Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:21.587305Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:21.587680Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:21.587826Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:21.588358Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:21.588409Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:21.588461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:40:21.588535Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:40:21.588581Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:40:21.588622Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:21.588686Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:21.588781Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:21.588819Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:21.588862Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:40:21.588995Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:40:21.589035Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:40:21.589152Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:21.589422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:40:21.589494Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2 ... g event TEvTxProcessing::TEvStreamClearancePending 2025-12-23T18:40:28.418780Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [2:958:2815], Recipient [2:884:2764]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2025-12-23T18:40:28.418829Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-12-23T18:40:28.419031Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:884:2764], Recipient [2:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:28.419072Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:28.419125Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:28.419162Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:40:28.419203Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-12-23T18:40:28.419250Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-12-23T18:40:28.419305Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976715659] at 72075186224037888 2025-12-23T18:40:28.419348Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-12-23T18:40:28.419383Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2025-12-23T18:40:28.419415Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2025-12-23T18:40:28.419445Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-12-23T18:40:28.419663Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-12-23T18:40:28.419703Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:40:28.419749Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 
72075186224037888 2025-12-23T18:40:28.419782Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:40:28.419813Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:40:28.419874Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:28.420424Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:974:2830], Recipient [2:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-12-23T18:40:28.420468Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-12-23T18:40:28.420575Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:974:2830], Recipient [2:958:2815]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2025-12-23T18:40:28.420612Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:958:2815] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-12-23T18:40:28.420976Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:957:2815], Recipient [2:958:2815]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2025-12-23T18:40:28.421030Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:958:2815] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-12-23T18:40:28.421082Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:958:2815] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2025-12-23T18:40:28.421150Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-12-23T18:40:28.421281Z node 2 :TX_DATASHARD ERROR: read_table_scan.cpp:681: Got scan fatal error: Invalid DyNumber binary representation 2025-12-23T18:40:28.421353Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-12-23T18:40:28.421563Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287429, Sender [2:974:2830], Recipient [2:958:2815]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2025-12-23T18:40:28.421604Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:958:2815] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-12-23T18:40:28.421649Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:958:2815] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-12-23T18:40:28.421751Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:40:28.421805Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715659, at: 72075186224037888 2025-12-23T18:40:28.421930Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:884:2764], Recipient [2:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:28.421981Z node 2 :TX_DATASHARD TRACE: 
datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:28.422047Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:28.422095Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:40:28.422149Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-12-23T18:40:28.422190Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-12-23T18:40:28.422255Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2025-12-23T18:40:28.422330Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-12-23T18:40:28.422377Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2025-12-23T18:40:28.422419Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:40:28.422460Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-12-23T18:40:28.422503Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2025-12-23T18:40:28.429015Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:40:28.429148Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:40:28.429199Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:40:28.429283Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-12-23T18:40:28.429322Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:40:28.429358Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2025-12-23T18:40:28.429403Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:28.429454Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-12-23T18:40:28.429498Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:40:28.429544Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:40:28.429631Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2025-12-23T18:40:28.429679Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-12-23T18:40:28.429733Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2025-12-23T18:40:28.429791Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2025-12-23T18:40:28.429913Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:28.430350Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:884:2764], Recipient [2:958:2815]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 519 } } CommitVersion { Step: 0 TxId: 281474976715659 } 2025-12-23T18:40:28.430417Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1922: [ReadTable [2:958:2815] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2025-12-23T18:40:28.430485Z node 2 :TX_PROXY ERROR: read_table_impl.cpp:2920: [ReadTable [2:958:2815] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2025-12-23T18:40:28.430993Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:958:2815], Recipient [2:884:2764]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 2500 TxId: 281474976715658 |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort+UseSink [GOOD] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort-UseSink >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongName >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> 
TConsoleConfigSubscriptionTests::TestAddConfigSubscription >> KqpScanArrowInChanels::SingleKey >> CheckIntegrityMirror3dc::PlacementOkWithErrors >> CheckIntegrityBlock42::PlacementBlobIsLost |94.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |94.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table >> CheckIntegrityBlock42::PlacementOkWithErrors |94.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table >> CheckIntegrityMirror3dc::PlacementOk >> JsonChangeRecord::Heartbeat [GOOD] >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AggregateByColumn >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 [GOOD] >> CheckIntegrityMirror3dc::PlacementOkWithErrors [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> CheckIntegrityBlock42::PlacementBlobIsLost [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] >> Cdc::UuidExchange[PqRunner] >> CdcStreamChangeCollector::InsertSingleRow >> Cdc::KeysOnlyLog[PqRunner] >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] >> RetryPolicy::RetryWithBatching [GOOD] >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> CheckIntegrityMirror3dc::PlacementOkWithErrorsOnBlobDisks >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TConsoleTests::TestSetConfig [GOOD] >> KqpResultSetFormats::ArrowFormat_Stress [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> CheckIntegrityMirror3dc::PlacementOk [GOOD] >> CheckIntegrityMirror3dc::PlacementOkWithErrorsOnBlobDisks [GOOD] |94.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 |94.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TConsoleTests::TestTenantGeneration >> CheckIntegrityMirror3dc::PlacementOkHandoff >> KqpResultSetFormats::ArrowFormat_Types_List_1 >> CheckIntegrityBlock42::PlacementAllOnHandoff >> CheckIntegrityBlock42::PlacementOkWithErrors [GOOD] |94.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TConsoleConfigTests::TestAddConfigItem >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions >> CheckIntegrityMirror3of4::PlacementBlobIsLost >> CheckIntegrityMirror3dc::PlacementOkHandoff [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> CheckIntegrityMirror3dc::PlacementMissingParts >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> CheckIntegrityBlock42::PlacementWithErrorsOnBlobDisks >> 
CheckIntegrityBlock42::PlacementAllOnHandoff [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> CheckIntegrityBlock42::PlacementDisintegrated >> TConsoleConfigTests::TestConfigureOrderConflicts >> CheckIntegrityBlock42::PlacementDisintegrated [GOOD] >> CdcStreamChangeCollector::SchemaChanges [GOOD] >> CheckIntegrityBlock42::PlacementWithErrorsOnBlobDisks [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag >> CheckIntegrityBlock42::DataStatusUnknown >> CheckIntegrityBlock42::PlacementStatusUnknown >> CheckIntegrityBlock42::PlacementStatusUnknown [GOOD] |94.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] |94.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:29.605642Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:29.730881Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:29.742413Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:29.742858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:29.742944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:30.076268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:30.076416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:30.176992Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515226184472 != 1766515226184476 2025-12-23T18:40:30.190203Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:30.246759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:30.339189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d74/r3tmp/tmp3XTCSc/pdisk_1.dat 2025-12-23T18:40:30.690973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:30.706046Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:30.808169Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:40:30.808276Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:40:30.808425Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:40:30.963619Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:40:30.963757Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:40:30.964472Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:40:30.964594Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:40:30.965054Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:40:30.965236Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:40:30.965402Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:40:30.965707Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:40:30.967601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:30.968870Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:40:30.968944Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:40:31.001400Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:40:31.002481Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:40:31.004229Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:40:31.004616Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:31.054373Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:40:31.055171Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:31.055297Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:31.057237Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:31.057345Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:31.057411Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:31.057836Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:31.057997Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:31.058090Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:40:31.072524Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:31.115830Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:31.116060Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:31.116190Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:40:31.116240Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:31.116308Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:31.116347Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:31.116607Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:31.116664Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:31.116994Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:31.117113Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:31.117555Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:31.117602Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:31.117697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:40:31.117763Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:40:31.117801Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:40:31.117839Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:31.117902Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:31.118007Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:31.118049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:40:31.118098Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:40:31.118232Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:40:31.118275Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:40:31.118386Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:31.118643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:40:31.118706Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2 ... node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:40:37.966625Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1195:2984], Recipient [2:1086:2896]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-12-23T18:40:37.966664Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-12-23T18:40:37.966731Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1195:2984], Recipient [2:1169:2960]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-12-23T18:40:37.966768Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1169:2960] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-12-23T18:40:37.966794Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1169:2960] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 2 rows at [2:1195:2984] 2025-12-23T18:40:37.966887Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-12-23T18:40:37.967140Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:40:37.967240Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1195:2984], Recipient [2:1169:2960]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-12-23T18:40:37.967263Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1169:2960] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-12-23T18:40:37.967286Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1169:2960] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1195:2984] ShardId# 72075186224037890 2025-12-23T18:40:37.967325Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-12-23T18:40:37.967402Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1195:2984], Recipient [2:1169:2960]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-12-23T18:40:37.967421Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1169:2960] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-12-23T18:40:37.967633Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1168:2960], Recipient [2:1169:2960]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2025-12-23T18:40:37.967654Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1169:2960] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-12-23T18:40:37.967672Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1169:2960] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:1195:2984] 2025-12-23T18:40:37.967729Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-12-23T18:40:37.967777Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:40:37.967868Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1195:2984], Recipient [2:1169:2960]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2025-12-23T18:40:37.967891Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1169:2960] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-12-23T18:40:37.967913Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1169:2960] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1195:2984] ShardId# 72075186224037890 2025-12-23T18:40:37.967963Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1169:2960] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.015884s execute time: 0.155553s total time: 0.171437s 2025-12-23T18:40:37.968098Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-12-23T18:40:37.968129Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2025-12-23T18:40:37.968346Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2025-12-23T18:40:37.968369Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715663, at: 72075186224037890 2025-12-23T18:40:37.968534Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1169:2960], Recipient [2:1082:2894]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715662 2025-12-23T18:40:37.968703Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1086:2896], Recipient [2:1086:2896]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:37.968728Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:40:37.968757Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-12-23T18:40:37.968781Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:40:37.968805Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2025-12-23T18:40:37.968826Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-12-23T18:40:37.968867Z node 2 :TX_DATASHARD TRACE: 
read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2025-12-23T18:40:37.968894Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-12-23T18:40:37.968917Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2025-12-23T18:40:37.968938Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2025-12-23T18:40:37.968956Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-12-23T18:40:37.968981Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2025-12-23T18:40:37.969001Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2025-12-23T18:40:37.969020Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2025-12-23T18:40:37.969039Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2025-12-23T18:40:37.969067Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-12-23T18:40:37.969085Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2025-12-23T18:40:37.969103Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2025-12-23T18:40:37.969124Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:37.969143Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2025-12-23T18:40:37.969167Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-12-23T18:40:37.969188Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2025-12-23T18:40:37.969222Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:40:37.969243Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-12-23T18:40:37.969267Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-12-23T18:40:37.969316Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:40:37.969574Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549569, Sender [2:1169:2960], Recipient [2:1086:2896]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 
2025-12-23T18:40:37.969606Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-12-23T18:40:37.969640Z node 2 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2025-12-23T18:40:37.969728Z node 2 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2025-12-23T18:40:37.969861Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287431, Sender [2:1169:2960], Recipient [2:1086:2896]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2025-12-23T18:40:37.969887Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-12-23T18:40:37.969953Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1169:2960], Recipient [2:1086:2896]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715662 |94.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2025-12-23T18:40:22.505234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:22.505305Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:22.739912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:23.995855Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:23.995924Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:24.090652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:25.311177Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:25.311249Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:25.406133Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:26.781905Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:26.781976Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:26.872230Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:28.478017Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:28.478158Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:28.568552Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:29.956928Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:29.956999Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:30.091629Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:31.416787Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:31.416873Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:31.524042Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) E1223 18:40:32.965544732 1285298 trace.cc:67] Unknown trace var: 'sdk_authz' 2025-12-23T18:40:32.966042Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component CMS_CONFIGS has been changed from WARN to NOTICE 2025-12-23T18:40:32.966110Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component CMS_CONFIGS has been changed from WARN to DEBUG 2025-12-23T18:40:32.966147Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component CMS_CLUSTER has been changed from WARN to NOTICE 2025-12-23T18:40:32.966165Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component CMS_CLUSTER has been changed from WARN to DEBUG 2025-12-23T18:40:32.966183Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_PROXY has been changed from WARN to NOTICE 2025-12-23T18:40:32.966203Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_PROXY has been changed from WARN to DEBUG 2025-12-23T18:40:32.966220Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_WORKER has been changed from WARN to NOTICE 2025-12-23T18:40:32.966238Z node 8 
:CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_WORKER has been changed from WARN to DEBUG 2025-12-23T18:40:32.966257Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_GATEWAY has been changed from WARN to NOTICE 2025-12-23T18:40:32.966275Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_GATEWAY has been changed from WARN to DEBUG 2025-12-23T18:40:32.966290Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_YQL has been changed from WARN to NOTICE 2025-12-23T18:40:32.966308Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_YQL has been changed from WARN to DEBUG 2025-12-23T18:40:32.966324Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_EXECUTER has been changed from WARN to NOTICE 2025-12-23T18:40:32.966341Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_EXECUTER has been changed from WARN to DEBUG 2025-12-23T18:40:32.966359Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPUTE has been changed from WARN to NOTICE 2025-12-23T18:40:32.966376Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPUTE has been changed from WARN to DEBUG 2025-12-23T18:40:32.966393Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_SLOW_LOG has been changed from WARN to NOTICE 2025-12-23T18:40:32.966411Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_SLOW_LOG has been changed from WARN to DEBUG 2025-12-23T18:40:32.966436Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_SERVICE has been changed from WARN to NOTICE 2025-12-23T18:40:32.966483Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_SERVICE has been changed from WARN to DEBUG 2025-12-23T18:40:32.966503Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_ACTOR has been changed from WARN to NOTICE 2025-12-23T18:40:32.966524Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_ACTOR has been changed from WARN to DEBUG 2025-12-23T18:40:32.966542Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_RESOURCE_MANAGER has been changed from WARN to NOTICE 2025-12-23T18:40:32.966560Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_RESOURCE_MANAGER has been changed from WARN to DEBUG 2025-12-23T18:40:32.966580Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TABLET_RESOURCE_BROKER has been changed from WARN 
to NOTICE 2025-12-23T18:40:32.966604Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TABLET_RESOURCE_BROKER has been changed from WARN to DEBUG 2025-12-23T18:40:32.966629Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_BLOBS_STORAGE has been changed from WARN to NOTICE 2025-12-23T18:40:32.966654Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_BLOBS_STORAGE has been changed from WARN to DEBUG 2025-12-23T18:40:32.966681Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_REQUEST has been changed from WARN to NOTICE 2025-12-23T18:40:32.966706Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_REQUEST has been changed from WARN to DEBUG 2025-12-23T18:40:32.966731Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_NODE has been changed from WARN to NOTICE 2025-12-23T18:40:32.966768Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_NODE has been changed from WARN to DEBUG 2025-12-23T18:40:32.966793Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_LOAD_TEST has been changed from WARN to NOTICE 2025-12-23T18:40:32.966816Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_LOAD_TEST has been changed from WARN to DEBUG 2025-12-23T18:40:32.968900Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_SESSION has been changed from WARN to NOTICE 2025-12-23T18:40:32.968973Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_SESSION has been changed from WARN to DEBUG 2025-12-23T18:40:32.969003Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_COMPUTATION_PATTERN_SERVICE has been changed from WARN to NOTICE 2025-12-23T18:40:32.969026Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_COMPUTATION_PATTERN_SERVICE has been changed from WARN to DEBUG 2025-12-23T18:40:32.969044Z node 8 :CMS_CONFIGS NOTICE: log_settings_con ... 
025-12-23T18:40:35.815639Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from WARN to ALERT 2025-12-23T18:40:35.815667Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2025-12-23T18:40:35.815717Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_GETBLOCK has been changed from WARN to ALERT 2025-12-23T18:40:35.815750Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_GETBLOCK has been changed from WARN to ALERT 2025-12-23T18:40:35.815780Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_GETBLOCK has been changed from 0 to 10 2025-12-23T18:40:35.815812Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_SHRED has been changed from WARN to ALERT 2025-12-23T18:40:35.815844Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_SHRED has been changed from WARN to ALERT 2025-12-23T18:40:35.815874Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_SHRED has been changed from 0 to 10 2025-12-23T18:40:35.815907Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_CHECKINTEGRITY has been changed from WARN to ALERT 2025-12-23T18:40:35.815942Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_CHECKINTEGRITY has been changed from WARN to ALERT 2025-12-23T18:40:35.815970Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_CHECKINTEGRITY has been changed from 0 to 10 2025-12-23T18:40:35.815999Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_BRIDGE has been changed from WARN to ALERT 2025-12-23T18:40:35.816035Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_BRIDGE has been changed from WARN to ALERT 2025-12-23T18:40:35.816066Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_BRIDGE has been changed from 0 to 10 2025-12-23T18:40:35.816098Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_CLUSTER_BALANCING has been changed from WARN to ALERT 2025-12-23T18:40:35.816130Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_CLUSTER_BALANCING has been changed from WARN to ALERT 2025-12-23T18:40:35.816157Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_CLUSTER_BALANCING has been changed from 0 to 10 2025-12-23T18:40:35.816189Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority 
for the component BS_BRIDGE_SYNC has been changed from WARN to ALERT 2025-12-23T18:40:35.816220Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_BRIDGE_SYNC has been changed from WARN to ALERT 2025-12-23T18:40:35.816250Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_BRIDGE_SYNC has been changed from 0 to 10 2025-12-23T18:40:35.816284Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PHANTOM_FLAG_STORAGE has been changed from WARN to ALERT 2025-12-23T18:40:35.816319Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PHANTOM_FLAG_STORAGE has been changed from WARN to ALERT 2025-12-23T18:40:35.816352Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PHANTOM_FLAG_STORAGE has been changed from 0 to 10 2025-12-23T18:40:35.816385Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from WARN to ALERT 2025-12-23T18:40:35.816418Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from WARN to ALERT 2025-12-23T18:40:35.816446Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2025-12-23T18:40:35.816476Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component SASL_AUTH has been changed from WARN to ALERT 2025-12-23T18:40:35.816508Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component SASL_AUTH has been changed from WARN to ALERT 2025-12-23T18:40:35.816541Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component SASL_AUTH has been changed from 0 to 10 2025-12-23T18:40:35.816574Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from WARN to ALERT 2025-12-23T18:40:35.816605Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from WARN to ALERT 2025-12-23T18:40:35.816635Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2025-12-23T18:40:35.816671Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from WARN to ALERT 2025-12-23T18:40:35.816706Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component DATA_INTEGRITY has been changed from WARN to ALERT 2025-12-23T18:40:35.816734Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2025-12-23T18:40:35.816768Z node 11 :CMS_CONFIGS NOTICE: 
log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from WARN to ALERT 2025-12-23T18:40:35.816799Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from WARN to ALERT 2025-12-23T18:40:35.816828Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10 2025-12-23T18:40:35.816861Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from WARN to ALERT 2025-12-23T18:40:35.816892Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from WARN to ALERT 2025-12-23T18:40:35.816923Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2025-12-23T18:40:35.816956Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component NAMESERVICE has been changed from WARN to ALERT 2025-12-23T18:40:35.816985Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component NAMESERVICE has been changed from WARN to ALERT 2025-12-23T18:40:35.817013Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component NAMESERVICE has been changed from 0 to 10 2025-12-23T18:40:35.817047Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BRIDGE has been changed from WARN to ALERT 2025-12-23T18:40:35.817077Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BRIDGE has been changed from WARN to ALERT 2025-12-23T18:40:35.817106Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BRIDGE has been changed from 0 to 10 2025-12-23T18:40:35.817141Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TRANSFER has been changed from WARN to ALERT 2025-12-23T18:40:35.817173Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TRANSFER has been changed from WARN to ALERT 2025-12-23T18:40:35.817202Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TRANSFER has been changed from 0 to 10 2025-12-23T18:40:35.817236Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TOKEN_MANAGER has been changed from WARN to ALERT 2025-12-23T18:40:35.817267Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TOKEN_MANAGER has been changed from WARN to ALERT 2025-12-23T18:40:35.817298Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TOKEN_MANAGER has been changed from 0 to 10 2025-12-23T18:40:35.817333Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: 
TLogSettingsConfigurator: Priority for the component LOCAL_DB_BACKUP has been changed from WARN to ALERT 2025-12-23T18:40:35.817364Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component LOCAL_DB_BACKUP has been changed from WARN to ALERT 2025-12-23T18:40:35.817393Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component LOCAL_DB_BACKUP has been changed from 0 to 10 2025-12-23T18:40:35.817426Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component SCHEMA_SECRET_CACHE has been changed from WARN to ALERT 2025-12-23T18:40:35.817459Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component SCHEMA_SECRET_CACHE has been changed from WARN to ALERT 2025-12-23T18:40:35.817488Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component SCHEMA_SECRET_CACHE has been changed from 0 to 10 2025-12-23T18:40:35.817625Z node 11 :CMS_CONFIGS TRACE: log_settings_configurator.cpp:100: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } ... waiting for config update (done) |94.0%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> CheckIntegrityMirror3dc::PlacementMissingParts [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleConfigTests::TestGetItems >> CheckIntegrityMirror3of4::PlacementBlobIsLost [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 25620, MsgBus: 27148 2025-12-23T18:39:58.335689Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125007406288409:2207];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:58.335745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004102/r3tmp/tmphotFBi/pdisk_1.dat 2025-12-23T18:39:58.963480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:59.019859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:59.019942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:59.203132Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:59.222051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:59.222897Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125007406288237:2081] 1766515198301098 != 
1766515198301101 2025-12-23T18:39:59.283087Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 25620, node 1 2025-12-23T18:39:59.414878Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:59.588462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:59.588495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:59.588502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:59.588572Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27148 TClient is connected to server localhost:27148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515199316 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:00.880792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:40:00.917257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:40:01.152012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:40:01.388210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:40:01.537645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:40:03.337091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125007406288409:2207];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:03.337177Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:05.656595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125037471061194:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:05.656707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:05.657133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125037471061204:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:05.657195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:06.100102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:06.140383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:06.182810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:06.259211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:06.339915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:06.389015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:06.451421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:06.526642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:06.663175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125041766029390:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:06.663292Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:06.663676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125041766029395:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:06.663716Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125041766029396:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:06.664107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... ode 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:28.935323Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:40:28.935344Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:40:28.935352Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:40:28.935421Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12855 TClient is connected to server localhost:12855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515228800 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:29.469338Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:40:29.502129Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:40:29.578142Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:29.594341Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:40:29.835694Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:40:29.923192Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:40:33.071628Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125157509277465:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:33.071758Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:33.072052Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125157509277474:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:33.072096Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:33.203156Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:33.233023Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:33.263377Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:33.289013Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:33.320651Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:33.351275Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:33.388153Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:33.445681Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:33.523476Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125157509278344:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:33.523592Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:33.523613Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125157509278349:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:33.523794Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587125157509278351:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:33.523829Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:33.558734Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:33.569484Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587125157509278352:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:40:33.569874Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587125136034439166:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:33.569962Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:33.637033Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587125157509278407:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:36.340934Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=ZWNjOGYyNWMtYzg3YWQ0NTQtYTY4ZDczNzAtOTdmYmNiMTY=, ActorId: [4:7587125166099213333:2534], ActorState: ExecuteState, TraceId: 01kd686q8kdkr8vhqpe8wf4s6x, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken, status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 }
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::PlacementStatusUnknown [GOOD] Test command err: RandomSeed# 15590901389762322388 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-12-23T18:33:25.327065Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.327099Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.327122Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:33:25.328093Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-12-23T18:33:25.328145Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.328176Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.329438Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009910s 2025-12-23T18:33:25.330006Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-12-23T18:33:25.330039Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.330059Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.330108Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006347s 2025-12-23T18:33:25.330534Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-12-23T18:33:25.330559Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.330581Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:33:25.330636Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005719s 2025-12-23T18:33:25.374586Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1766514805374549 2025-12-23T18:33:26.009751Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587123321296895432:2254];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:26.009802Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:33:26.223445Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005995/r3tmp/tmpJsiXsE/pdisk_1.dat 2025-12-23T18:33:26.229862Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:33:26.614833Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:26.614991Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:33:26.655150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:33:26.731580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:26.731705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:26.731941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:33:26.762883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:33:26.924696Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:33:26.937596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:33:26.937905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-12-23T18:33:27.013370Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:33:27.074969Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587123317001927910:2082] 1766514805947122 != 1766514805947125 2025-12-23T18:33:27.080345Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:27.082926Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:33:27.096792Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:33:27.230932Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TServer::EnableGrpc on GrpcPort 24575, node 1 2025-12-23T18:33:27.538203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005995/r3tmp/yandexkbi7Ai.tmp 2025-12-23T18:33:27.538231Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005995/r3tmp/yandexkbi7Ai.tmp 2025-12-23T18:33:27.558433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005995/r3tmp/yandexkbi7Ai.tmp 2025-12-23T18:33:27.561945Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:33:27.683112Z INFO: TTestServer started on Port 62137 GrpcPort 24575 TClient is connected to server localhost:62137 PQClient connected to localhost:24575 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766514807162 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:33:28.214204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 
2025-12-23T18:33:31.010957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587123321296895432:2254];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:33:31.011050Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:33:33.061443Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123347066700366:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:33.061611Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:33.063157Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123351361667693:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:33.063224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123351361667694:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:33.063414Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:33.068190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:33:33.083413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587123351361667729:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:33.083645Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:33:33.123151Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587123351361667697:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-12-23T18:33:33.224919Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587123351361667788:2987] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:33:33.761652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0 ... e-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session: acknoledged message 3 2025-12-23T18:40:35.922160Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session: acknoledged message 4 2025-12-23T18:40:35.922191Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session: acknoledged message 5 2025-12-23T18:40:35.922222Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session: acknoledged message 6 2025-12-23T18:40:35.922253Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session: acknoledged message 7 2025-12-23T18:40:35.922288Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session: acknoledged message 8 2025-12-23T18:40:35.922319Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session: acknoledged message 9 2025-12-23T18:40:35.922352Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session: acknoledged message 10 2025-12-23T18:40:35.921511Z node 17 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 suffix '63' size 1208 2025-12-23T18:40:35.921622Z node 17 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 suffix '63' 2025-12-23T18:40:35.926887Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session: close. 
Timeout = 0 ms 2025-12-23T18:40:35.926974Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session will now close 2025-12-23T18:40:35.927043Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session: aborting 2025-12-23T18:40:35.928594Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session: gracefully shut down, all writes complete 2025-12-23T18:40:35.928653Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session is aborting and will not restart 2025-12-23T18:40:35.929091Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0] Write session: destroy 2025-12-23T18:40:35.928462Z node 17 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0 grpc read done: success: 0 data: 2025-12-23T18:40:35.928509Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0 grpc read failed 2025-12-23T18:40:35.929003Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 7 sessionId: test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0 2025-12-23T18:40:35.929037Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message-group-id|f922e682-c268aa05-75fa69ee-d261d8a3_0 is DEAD 2025-12-23T18:40:35.929527Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:40:35.929979Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [17:7587125163885850088:2620] destroyed 2025-12-23T18:40:35.930029Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2025-12-23T18:40:35.930069Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:40:35.930092Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:35.930114Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:40:35.930137Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:35.930159Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:40:36.013421Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:40:36.013469Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.013496Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:40:36.013525Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.013548Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:40:36.113508Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:40:36.113569Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.113596Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:40:36.113627Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.113651Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:40:36.213871Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:40:36.213922Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.213946Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:40:36.213976Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.213999Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:40:36.314181Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:40:36.314226Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.314251Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2025-12-23T18:40:36.314279Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.314302Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:40:36.414827Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:40:36.414884Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.414913Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:40:36.414948Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.414976Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:40:36.520519Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:40:36.520576Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.520603Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:40:36.520633Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.520658Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:40:36.620902Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:40:36.620959Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.620987Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:40:36.621023Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.621051Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:40:36.721284Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:40:36.721340Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.721368Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:40:36.721404Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.721432Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:40:36.821592Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:40:36.821639Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.821662Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:40:36.821694Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:40:36.821716Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |94.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending >> KqpSinkTx::OlapSnapshotRO [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive1 >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3of4::PlacementBlobIsLost [GOOD] Test command err: RandomSeed# 11788683126432388349 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:2:0] FINISHED WITH OK *** ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::PlacementMissingParts [GOOD] Test command err: RandomSeed# 4472704917872298511 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataStatusUnknown [GOOD] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap+UpdateAfterInsert [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart >> 
Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[PqRunner] >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool |94.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |94.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |94.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::SchemaChanges [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:10.267222Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:10.392697Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:10.405149Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:10.405578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:10.405665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:10.760647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:10.760815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:10.914554Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515206850800 != 1766515206850804 2025-12-23T18:40:10.934874Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:10.992214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:11.093808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c77/r3tmp/tmpP8xkeO/pdisk_1.dat 2025-12-23T18:40:11.620716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:11.637113Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:11.771833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:11.835957Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:40:11.836284Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:11.887988Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:11.888237Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:11.890214Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:11.890326Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:11.890427Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:11.890849Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:11.891002Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:11.891116Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:40:11.902945Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:11.945180Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:11.945422Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:11.945577Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:40:11.945635Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:11.945678Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:11.945717Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:11.946244Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:11.946355Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:11.946889Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:11.946944Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:11.947006Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:11.947067Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:11.947156Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:40:11.947338Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:11.947630Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:40:11.947734Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:40:11.949717Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:11.961506Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:11.961639Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:40:12.089845Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:40:12.099325Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:40:12.099437Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:12.100622Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:12.100699Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:40:12.100763Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:40:12.101048Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:40:12.101233Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:40:12.101613Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:12.101712Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:40:12.122608Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:40:12.127423Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:12.133939Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:40:12.134030Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:12.134602Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:40:12.134710Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:12.136455Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:12.136507Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:12.136586Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:40:12.136672Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:12.136734Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:40:12.136835Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:12.155498Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:12.157486Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:40:12.157575Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... 38], version# 2, step# 2500, txId# 281474976715658, at tablet# 72075186224037888 2025-12-23T18:40:39.382142Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:39.426251Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2025-12-23T18:40:39.426377Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:39.426423Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:39.426466Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:39.426548Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:39.426600Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-12-23T18:40:39.426695Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:39.428646Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-12-23T18:40:39.428730Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:40:39.437713Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1063:2892], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:39.437838Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1074:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:39.437914Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:39.438955Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:39.439117Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:39.444339Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:39.450598Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:39.615237Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:39.619432Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1077:2900], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:40:39.645226Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1135:2939] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:39.735239Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1166:2956], serverId# [4:1167:2957], sessionId# [0:0:0] 2025-12-23T18:40:39.735852Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2025-12-23T18:40:39.736109Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1766515239735990 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 32b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-12-23T18:40:39.736310Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-12-23T18:40:39.747627Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 32 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2025-12-23T18:40:39.747766Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:39.782137Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:40:39.784752Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:39.785024Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715662 ssId 72057594046644480 seqNo 2:3 2025-12-23T18:40:39.785113Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 3 current version# 2 expected version# 3 at tablet# 72075186224037888 txId# 281474976715662 2025-12-23T18:40:39.785172Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715662 at tablet 72075186224037888 2025-12-23T18:40:39.796513Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:39.904741Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715662 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 281474976715662 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:40:39.904816Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:39.904987Z node 4 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:39.905042Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:40:39.905101Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [3500:281474976715662] in PlanQueue unit at 72075186224037888 2025-12-23T18:40:39.905408Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 3500:281474976715662 keys extracted: 0 2025-12-23T18:40:39.905555Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:40:39.905787Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:39.905875Z node 4 :TX_DATASHARD INFO: alter_table_unit.cpp:145: Trying to ALTER TABLE at 72075186224037888 version 3 2025-12-23T18:40:39.906774Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1849: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 38], version# 3, step# 3500, txId# 281474976715662, at tablet# 72075186224037888 2025-12-23T18:40:39.906961Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 0 Step: 3500 TxId: 281474976715662 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcSchemaChange Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-12-23T18:40:39.907337Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:39.909976Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-12-23T18:40:39.910057Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:39.910955Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:39.911031Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 } 2025-12-23T18:40:39.911126Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:39.911182Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-12-23T18:40:39.911297Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 } 2025-12-23T18:40:39.911357Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:39.913683Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 
281474976715662 datashard 72075186224037888 state Ready 2025-12-23T18:40:39.913753Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:40:39.918971Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1209:2994], serverId# [4:1210:2995], sessionId# [0:0:0] 2025-12-23T18:40:39.936132Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1212:2997], serverId# [4:1213:2998], sessionId# [0:0:0] |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest |94.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |94.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigTests::TestGetNodeItems |94.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataStatusUnknown [GOOD] Test command err: RandomSeed# 14390050092702316814 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Group is disintegrated or has network problems *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: 
[82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: part 2: part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: |94.0%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> Cdc::UuidExchange[PqRunner] [GOOD] >> Cdc::UuidExchange[YdsRunner] >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning+isOlap |94.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex |94.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |94.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |94.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |94.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |94.0%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |94.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified |94.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |94.0%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |94.0%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |94.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_2 >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort-UseSink [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap+UpdateAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 63816, MsgBus: 4554 ... waiting for SysViewsRoster update finished 2025-12-23T18:39:18.567722Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:18.734067Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:39:18.753175Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:39:18.753553Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:39:18.753640Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:19.126461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:19.126629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:19.224849Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515154944994 != 1766515154944998 2025-12-23T18:39:19.272698Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:19.328122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:19.527094Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004114/r3tmp/tmp43qjZ7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 63816, node 1 2025-12-23T18:39:20.092676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:20.092737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:20.092770Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:20.093271Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:20.108285Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:4554 TClient is connected to server localhost:4554 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
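Note on the repeated "Resource pool default not found or you don't have access permissions" warnings in this and the surrounding test logs: on a freshly created database the KQP workload service looks up /Root/.metadata/workload_manager/pools/default, fails, and TPoolCreatorActor then creates the pool itself (the later "Transaction ... completed, doublechecking" and "path exist, request accepts it" messages are that creation finishing). The built-in default pool therefore needs no user action. For comparison only, a user-defined pool can be declared explicitly; the sketch below assumes the CREATE RESOURCE POOL syntax from the YDB documentation, and the pool name and limit values are illustrative, not taken from this run:

    -- Hypothetical pool definition (name and limits are illustrative assumptions).
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,       -- queries allowed to run concurrently
        QUEUE_SIZE = 100,                  -- requests that may wait beyond the limit
        DATABASE_LOAD_CPU_THRESHOLD = 80   -- start queueing when DB CPU load exceeds 80%
    );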
2025-12-23T18:39:20.675950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:20.760948Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:918:2783], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.761090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:929:2788], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.761220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.761962Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:932:2791], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.762228Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:20.777612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:20.864615Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:933:2792], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:39:20.910205Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:989:2829] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:21.316553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:39:21.479579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:39:21.479825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:39:21.480072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:39:21.480152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:39:21.480248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:39:21.480357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:39:21.480482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:39:21.480597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:39:21.480713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:39:21.480810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:39:21.480906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:39:21.481017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:39:21.481094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1126:2914];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:39:21.508406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:39:21.509951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:39:21.510126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:39:21.510176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCl ... 15662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.685892Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.694642Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.694765Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.694805Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.703038Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.703136Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.703177Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.711814Z node 3 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.711915Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.711958Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.721406Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.721505Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.721546Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.730615Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.730713Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.730774Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.739434Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.739527Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.739566Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.748961Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.749063Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.749103Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.758842Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.758944Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.758989Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.768253Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.768358Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.768406Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.776970Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.777073Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.777119Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.786317Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.786414Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.786450Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.794614Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
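The slow-query entry immediately below quotes the DDL this test executed as a single escaped string. Reconstructed from that quoted text (indentation approximate, content unchanged), the statements are:

    CREATE TABLE `/Root/Test` (
        Group Uint32 not null,
        Name String not null,
        Amount Uint64,
        Comment String,
        PRIMARY KEY (Group, Name)
    ) WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10
    );

    CREATE TABLE `/Root/KV` (
        Key Uint32 not null,
        Value String,
        PRIMARY KEY (Key)
    ) WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_BY_SIZE = DISABLED,
        AUTO_PARTITIONING_BY_LOAD = DISABLED,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,
        UNIFORM_PARTITIONS = 100
    );

    CREATE TABLE `/Root/KV2` (
        Key Uint32 not null,
        Value String,
        PRIMARY KEY (Key)
    ) WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_BY_SIZE = DISABLED,
        AUTO_PARTITIONING_BY_LOAD = DISABLED,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,
        UNIFORM_PARTITIONS = 100
    );

STORE = COLUMN makes these column-store tables served by TX_COLUMNSHARD tablets, which is consistent with the column shard log lines above; the resolve_internal_path_id warnings there report result=not_found for schemeshard-local path id 44 while tx 281474976715662 is in progress.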
2025-12-23T18:40:37.794707Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.794765Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.803928Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.804028Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.804070Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:37.909441Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd6864tnc6jt4agdm6s4gbf0", SessionId: ydb://session/3?node_id=3&id=NjNiNjVhNjAtYjgwNjAwODYtZWU5Y2JiMTItM2M3NGRmYjI=, Slow query, duration: 20.661593s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.1%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} >> TExternalTableTest::DropTableTwice |94.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists >> TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain >> TConsoleTests::TestModifyUsedZoneKind [GOOD] >> TConsoleTests::TestRemoveTenant >> TExternalTableTest::DropExternalTable >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TExternalTableTest::ReadOnlyMode >> TConsoleConfigTests::TestAutoOrder >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex [GOOD] >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges >> TExternalTableTest::ReplaceExternalTableIfNotExists >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::AggregateByColumn >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes [GOOD] >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem >> TExternalTableTest::CreateExternalTable >> TExternalTableTest::SchemeErrors >> TExternalTableTest::ParallelCreateSameExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:40:43.538991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:43.539084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:43.539130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:43.539169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:43.539212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:43.539246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:43.539303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:43.539414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:43.540318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:43.540607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:43.633060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:43.633124Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:43.645826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:43.646186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:43.646326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:43.651917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:43.652121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:43.652903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:43.653136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:43.655203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:43.655378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:43.656557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:43.656619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:43.656748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:43.656792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:43.656838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:43.656970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:43.829799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.836592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.836769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.836851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.836929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.837000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.837090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.837184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.837260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.837356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.837437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.837528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.837614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.837694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.837789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
s: true ShowPrivateTable: true } 2025-12-23T18:40:45.100698Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:40:45.100795Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndexCopy/ValueIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:40:45.101125Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndexCopy/ValueIndex" took 269us result status StatusSuccess 2025-12-23T18:40:45.102024Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndexCopy/ValueIndex" PathDescription { Self { Name: "ValueIndex" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 42 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 
InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Index impl table name: indexImplTable 2025-12-23T18:40:45.102633Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:1066:3000], Recipient [1:138:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-12-23T18:40:45.102678Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:40:45.102885Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:40:45.103175Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" took 391us result status StatusSuccess 2025-12-23T18:40:45.104034Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 
PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:40:43.492058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:43.492149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:43.492204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:43.492238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:43.492270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:43.492302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:43.492351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:43.492455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-12-23T18:40:43.493201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:43.493411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:43.575128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:43.575191Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:43.580822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:43.581038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:43.581156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:43.584105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:43.584278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:43.584941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:43.585154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:43.586999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:43.587151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:43.588350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:43.588404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:43.588525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:43.588571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:43.588610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:43.588735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:43.745552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.746587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.746720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.746863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.746953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.747019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.747120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.747230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.747308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.747374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.747432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.747521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.747625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.747726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:43.747817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
t TxId: 102 at schemeshard: 72057594046678944 2025-12-23T18:40:44.991832Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [1:876:2832] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-12-23T18:40:44.992364Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:138:2160], Recipient [1:138:2160]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:40:44.992412Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:40:44.992478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:40:44.992534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-12-23T18:40:44.992578Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:40:44.992622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-12-23T18:40:44.992663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-12-23T18:40:44.992766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-12-23T18:40:44.992812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2025-12-23T18:40:45.000069Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:40:45.000134Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2025-12-23T18:40:45.000307Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435072, Sender [1:138:2160], Recipient [1:138:2160]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-12-23T18:40:45.000349Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5264: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-12-23T18:40:45.000451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:40:45.000516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:40:45.000637Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:40:45.000671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:40:45.000709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:40:45.000776Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:40:45.000816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:40:45.000855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:40:45.000930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:777:2744] message: TxId: 102 2025-12-23T18:40:45.000982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:40:45.001027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:40:45.001073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:40:45.001278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2025-12-23T18:40:45.001325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:40:45.003641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:40:45.003801Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [1:777:2744] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-12-23T18:40:45.003986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:40:45.004063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:906:2854] 2025-12-23T18:40:45.004270Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877764, Sender [1:908:2856], Recipient [1:138:2160]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:40:45.004328Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-12-23T18:40:45.004364Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6214: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-12-23T18:40:45.004875Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:977:2923], Recipient [1:138:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2025-12-23T18:40:45.004944Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:40:45.005049Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:40:45.005423Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy" took 264us result status StatusSuccess 2025-12-23T18:40:45.006010Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy" PathDescription { Self { Name: "TableCopy" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableCopy" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:45.006589Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:978:2924], Recipient [1:138:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2025-12-23T18:40:45.006664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:40:45.006790Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:40:45.007263Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex" took 210us result status StatusPathDoesNotExist 2025-12-23T18:40:45.007453Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/TableCopy/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/TableCopy\' (id: [OwnerId: 72057594046678944, LocalPathId: 41])" Path: "/MyRoot/TableCopy/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/TableCopy" LastExistedPrefixPathId: 41 LastExistedPrefixDescription { Self { Name: "TableCopy" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExternalTableTest::DropTableTwice [GOOD] >> TExternalTableTest::ParallelCreateExternalTable |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> Cdc::NewAndOldImagesLog[PqRunner] [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] >> KqpResultSetFormats::ArrowFormat_Types_List_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyList >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:44.379881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:44.379999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:44.380041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:44.380080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:44.380127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:44.380161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:44.380267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:44.380344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:44.381273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:44.381583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:44.517866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:44.517959Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:44.518851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:44.534960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:44.535134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:44.535339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:44.544961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:44.545245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:44.546048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:44.546453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:44.549581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:44.549810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:44.551080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:44.551143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:44.551264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:44.551308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:44.551345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2025-12-23T18:40:44.551548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:44.673091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.673992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.674805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:40:45.447060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:40:45.447120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-12-23T18:40:45.447177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:40:45.447211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-12-23T18:40:45.447964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:40:45.448079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:40:45.448126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:40:45.448172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:40:45.448218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:40:45.449124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:40:45.449285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:40:45.449325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:40:45.449359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:40:45.449398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:40:45.449476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:40:45.453109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 
2025-12-23T18:40:45.453224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:40:45.453451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:40:45.453507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:40:45.453933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:40:45.454039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:40:45.454079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:706:2696] TestWaitNotification: OK eventTxId 101 2025-12-23T18:40:45.454565Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:45.454887Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 295us result status StatusSuccess 2025-12-23T18:40:45.455421Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: 
"https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-12-23T18:40:45.459427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:45.459887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-12-23T18:40:45.459974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-12-23T18:40:45.460031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-12-23T18:40:45.463457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:45.463738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:40:45.464086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:40:45.464134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:40:45.464579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:40:45.464678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:40:45.464726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:714:2704] TestWaitNotification: OK eventTxId 102 2025-12-23T18:40:45.465210Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:45.465410Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 202us result status StatusPathDoesNotExist 2025-12-23T18:40:45.465597Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> Cdc::UuidExchange[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> Cdc::KeysOnlyLogDebezium >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:40:43.746898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:43.746996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:43.747048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:43.747091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:43.747134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:43.747174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:43.747254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-12-23T18:40:43.747368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:43.748424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:43.748741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:43.849519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:43.849597Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:43.858918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:43.859206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:43.859371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:43.865132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:43.865331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:43.866181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:43.866415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:43.868704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:43.868900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:43.870212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:43.870277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:43.870430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:43.870485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:43.870557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:43.870709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:44.053942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.055233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.055383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.055502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.055597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.055710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.055819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.055949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.056043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.056149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.056223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.056317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.056436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.056540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:44.056631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
hildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 45 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:45.511190Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:1218:3129], Recipient [1:138:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-12-23T18:40:45.511252Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:40:45.511352Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:40:45.511636Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex2" took 248us result status StatusSuccess 2025-12-23T18:40:45.512526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy/ValueIndex2" PathDescription { Self { Name: "ValueIndex2" PathId: 46 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000039 
ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 47 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 46 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex2" LocalPathId: 46 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value2" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value2" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 
419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 46 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:45.513083Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:1219:3130], Recipient [1:138:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2025-12-23T18:40:45.513136Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:40:45.513232Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:40:45.513450Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex2/indexImplTable" took 215us result status StatusSuccess 2025-12-23T18:40:45.513879Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 47 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 46 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value2" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 47 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleConfigTests::TestAutoKind >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::Decimal |94.1%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend >> TExternalTableTest::ParallelCreateExternalTable [GOOD] |94.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |94.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:45.176657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:45.176730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:45.176756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:45.176782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:45.176810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:45.176830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:45.176898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:45.176953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:45.177560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:45.177863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: 
TxInitSchema.Execute 2025-12-23T18:40:45.314404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:45.314502Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:45.315458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:45.342619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:45.342830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:45.343053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:45.362772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:45.363108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:45.364013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:45.364457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:45.369357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:45.369575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:45.370777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:45.370834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:45.370958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:45.371010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:45.371059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:45.371243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:45.500240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.501284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.501406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.501469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.501622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.501726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.501801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.501867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.501929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.502011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.502072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.502156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.502244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.502371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.502449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
T_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.477951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.478098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2025-12-23T18:40:46.478195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.478222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.478277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2025-12-23T18:40:46.478413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.478439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.478547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2025-12-23T18:40:46.478656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2025-12-23T18:40:46.478729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.478778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.478842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-12-23T18:40:46.478986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.479014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.479073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-12-23T18:40:46.479162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.479188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.479338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-12-23T18:40:46.479434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.479459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.479600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 127, at schemeshard: 72057594046678944 2025-12-23T18:40:46.479681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.479708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.479805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.479845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.479906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2025-12-23T18:40:46.480060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.480085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.480133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-12-23T18:40:46.480175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.480198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.480344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.480373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.480476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2025-12-23T18:40:46.480568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.480595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.480753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2025-12-23T18:40:46.480829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.480853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.480970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.480999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.481107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.481132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.481256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.481285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.481443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.481469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:838:2828] 2025-12-23T18:40:46.481580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2025-12-23T18:40:46.481609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:838:2828] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 132 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 2025-12-23T18:40:46.485751Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:46.486043Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 328us result status StatusSuccess 2025-12-23T18:40:46.486456Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 2 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::SchemeErrors [GOOD] >> TConsoleConfigTests::TestAutoKind [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists >> TConsoleConfigTests::TestAutoSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:46.056388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:46.056481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:46.056523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:46.056554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:46.056586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-12-23T18:40:46.056612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:46.056679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:46.056761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:46.057559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:46.057818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:46.175419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:46.175501Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:46.176411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:46.191407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:46.191586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:46.191773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:46.207826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:46.208163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:46.209112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:46.209561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:46.213090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:46.213331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:46.214404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:46.214458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:46.214572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:46.214608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-12-23T18:40:46.214648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:46.214843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:46.373268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.374451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.374594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.374665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.374779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.374872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.374970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:40:46.375044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.375117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.375217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.375312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.375387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.375463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.375544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.375699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at 
schemeshard: 72057594046678944 Send TEvModif ... tate 2025-12-23T18:40:47.257407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:40:47.257446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:40:47.257490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:40:47.257528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:40:47.257573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:40:47.257640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:40:47.257685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:40:47.257720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:40:47.257792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:40:47.257839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:40:47.257880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-12-23T18:40:47.257913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2025-12-23T18:40:47.259108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:40:47.259237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:40:47.259284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:40:47.259327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-12-23T18:40:47.259373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:40:47.260286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:40:47.260367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:40:47.260425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:40:47.260463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-12-23T18:40:47.260509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:40:47.260592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:40:47.266359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:40:47.266544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:40:47.266804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:40:47.266853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:40:47.267341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:40:47.267438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:40:47.267479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:731:2721] TestWaitNotification: OK eventTxId 102 2025-12-23T18:40:47.268036Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:47.268301Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 279us result status StatusSuccess 2025-12-23T18:40:47.268724Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-12-23T18:40:47.272595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:47.272973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-12-23T18:40:47.273105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_table.cpp:304: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists: 1 2025-12-23T18:40:47.273281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, at schemeshard: 72057594046678944 2025-12-23T18:40:47.279887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable" TxId: 103 SchemeshardId: 72057594046678944 PathId: 38 
PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-12-23T18:40:47.280183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:40:47.280526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:40:47.280571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:40:47.280991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:40:47.281113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:40:47.281168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:739:2729] TestWaitNotification: OK eventTxId 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:45.875885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:45.875981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:45.876032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:45.876072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:45.876113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:45.876144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:45.876232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:45.876324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:45.877235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:45.877563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:46.044481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:46.044558Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:46.045443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:46.062072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:46.062234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:46.062466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:46.071993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:46.072273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:46.073156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:46.073526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:46.076496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:46.076709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:46.077932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:46.077996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:46.078126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:46.078183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:46.078239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:46.078460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:46.222732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true 
FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.223860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.223995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.224073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.224243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.224320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.224386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.224429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.224473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.224531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.224583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.224661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.224774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.224857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.225008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
etID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:47.178333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_table.cpp:58: [72057594046678944] TAlterExternalTable TPropose, operationId: 104:0 HandleReply TEvOperationPlan: step# 5000041 2025-12-23T18:40:47.178451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2025-12-23T18:40:47.178679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:40:47.178774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:40:47.179808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:40:47.181057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:40:47.182579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:47.182619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:47.182866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:40:47.183005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:40:47.183098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:47.183138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-12-23T18:40:47.183176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 2025-12-23T18:40:47.183206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 FAKE_COORDINATOR: Erasing txId 104 2025-12-23T18:40:47.183504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:40:47.183550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:40:47.183646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:40:47.183698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:40:47.183748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:40:47.183789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:40:47.183826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-12-23T18:40:47.183881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:40:47.183922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:40:47.183968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:40:47.184034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:40:47.184087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:40:47.184153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-12-23T18:40:47.184203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-12-23T18:40:47.184230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2025-12-23T18:40:47.185826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:40:47.185929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:40:47.186014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:40:47.186055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-12-23T18:40:47.186097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:40:47.190124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:40:47.190313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:40:47.190354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:40:47.190381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2025-12-23T18:40:47.190422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:40:47.190531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:40:47.196480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:40:47.201273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:40:47.201555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:40:47.201602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:40:47.202086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:40:47.202182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:40:47.202225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:794:2784] TestWaitNotification: OK eventTxId 104 2025-12-23T18:40:47.202785Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:47.203025Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 273us result status StatusSuccess 2025-12-23T18:40:47.203453Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 3 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 3 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/other_location" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.1%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:44.988081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:44.988174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:44.988221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:44.988260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:44.988294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:44.988327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:44.988400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:44.988473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:44.989169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:44.989404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:45.107207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:45.107288Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:45.108210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:45.130012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:45.130187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:45.130410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:45.141894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:45.142221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:45.143430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:45.143854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:45.147454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:45.147733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:45.149160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:45.149239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:45.149377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:45.149441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:45.149507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:45.149713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2025-12-23T18:40:45.281518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.282345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.282449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.282530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.282601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.282691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.282781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.282829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.282873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.282916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.282971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.283054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.283115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.283190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.283282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
hildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:47.473766Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:47.473985Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 238us result status StatusSuccess 2025-12-23T18:40:47.474335Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" 
TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:47.475224Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:47.475384Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 180us result status StatusSuccess 2025-12-23T18:40:47.475792Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000041 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:47.476349Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:47.476572Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 242us result status StatusSuccess 2025-12-23T18:40:47.476889Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000041 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:47.477411Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:47.477595Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 191us result status StatusSuccess 2025-12-23T18:40:47.478019Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 
1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:45.625138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:45.625238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:45.625279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:45.625311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:45.625366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:45.625392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:45.625475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:45.625552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-12-23T18:40:45.626438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:45.626753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:45.748189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:45.748274Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:45.749172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:45.764965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:45.765105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:45.765279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:45.798068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:45.798430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:45.799456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:45.799961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:45.807287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:45.807537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:45.808713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:45.808773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:45.808896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:45.808936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:45.808987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:45.809192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:46.005910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } 
TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.006953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.007077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.007147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.007300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.007409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.007503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.007575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.007649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.007763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.007836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.007926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.008010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.008094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.008230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
schemeshard: 72057594046678944 2025-12-23T18:40:47.419524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2025-12-23T18:40:47.419676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:40:47.420581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:40:47.420672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:40:47.420713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-12-23T18:40:47.420759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-12-23T18:40:47.420822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2025-12-23T18:40:47.421532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:40:47.421624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:40:47.421659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-12-23T18:40:47.421706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2025-12-23T18:40:47.421745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:40:47.421838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2025-12-23T18:40:47.425109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-12-23T18:40:47.425277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000041 
FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000041 2025-12-23T18:40:47.428472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-12-23T18:40:47.428577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-12-23T18:40:47.428842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:47.428958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:47.429023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000041, at schemeshard: 72057594046678944 2025-12-23T18:40:47.429179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 129:0 128 -> 240 2025-12-23T18:40:47.429357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-12-23T18:40:47.429432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 FAKE_COORDINATOR: Erasing txId 129 2025-12-23T18:40:47.431445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:47.431690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:47.431863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2025-12-23T18:40:47.431960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:47.431994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:892:2850], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-12-23T18:40:47.432034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:892:2850], at schemeshard: 72057594046678944, txId: 129, path id: 41 2025-12-23T18:40:47.432331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-12-23T18:40:47.432380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 129:0 ProgressState 2025-12-23T18:40:47.432486Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2025-12-23T18:40:47.432530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-12-23T18:40:47.432574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2025-12-23T18:40:47.432617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-12-23T18:40:47.432661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-12-23T18:40:47.432712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-12-23T18:40:47.432752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2025-12-23T18:40:47.432795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 129:0 2025-12-23T18:40:47.432897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:40:47.432941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-12-23T18:40:47.432976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-12-23T18:40:47.433000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 41], 3 2025-12-23T18:40:47.433629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:40:47.435880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:40:47.435936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-12-23T18:40:47.435981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-12-23T18:40:47.436028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2025-12-23T18:40:47.437128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:40:47.437230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:40:47.437267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-12-23T18:40:47.437306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 3 2025-12-23T18:40:47.437337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2025-12-23T18:40:47.437402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-12-23T18:40:47.441080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-12-23T18:40:47.441179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:46.603333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:46.603425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:46.603468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:46.603510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:46.603544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:46.603570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:46.603639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:46.603716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:46.604504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:46.604788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:46.735887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:46.735947Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:46.736593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:46.759047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:46.759238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:46.759456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:46.778511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:46.778850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:46.779742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:46.780146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:46.786422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:46.786653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:46.788009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:46.788082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:46.788215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:46.788280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:46.788330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:46.788557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:46.991312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.992342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.992484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.992563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.992719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.992817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.992941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.993016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.993091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.993167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.993233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.993330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.993432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.993524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.993679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
ingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:48.055671Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:48.055863Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 205us result status StatusSuccess 2025-12-23T18:40:48.056254Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 
MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2025-12-23T18:40:48.056570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2025-12-23T18:40:48.056628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2025-12-23T18:40:48.056804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2025-12-23T18:40:48.056826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2025-12-23T18:40:48.056873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2025-12-23T18:40:48.056916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2025-12-23T18:40:48.057391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-12-23T18:40:48.057647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-12-23T18:40:48.057688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:744:2734] 2025-12-23T18:40:48.057903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-12-23T18:40:48.058001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-12-23T18:40:48.058029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:744:2734] 2025-12-23T18:40:48.058096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-12-23T18:40:48.058169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-12-23T18:40:48.058195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:744:2734] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-12-23T18:40:48.058826Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:48.059010Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: 
Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 202us result status StatusSuccess 2025-12-23T18:40:48.059372Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2025-12-23T18:40:48.062850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:48.063252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-12-23T18:40:48.063347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2025-12-23T18:40:48.063517Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:40:48.066169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 125, at schemeshard: 72057594046678944 2025-12-23T18:40:48.066450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:46.467586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:46.467693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:46.467733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:46.467767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:46.467806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:46.467858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:46.467947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:46.468024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:46.468894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:46.469192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:46.631348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:46.631433Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:46.632481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:46.651534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:46.651747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:46.652015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:46.661533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:46.661881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:46.662862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:46.663274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:46.666725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:46.667003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:46.668495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:46.668570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:46.668750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:46.668805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:46.668866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:46.669128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:46.885380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.886412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.886538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.886609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.886775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.886904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.886996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.887064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.887142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.887215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.887279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.887387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.887475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.887555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.887719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
025-12-23T18:40:47.945379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2025-12-23T18:40:47.945671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2025-12-23T18:40:47.947348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:47.947545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-12-23T18:40:47.949969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:47.950270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2025-12-23T18:40:47.950342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2025-12-23T18:40:47.950443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot have an empty name, at schemeshard: 72057594046678944 2025-12-23T18:40:47.952618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:47.952805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-12-23T18:40:47.958332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" 
DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:47.958672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2025-12-23T18:40:47.958773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2025-12-23T18:40:47.958914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2025-12-23T18:40:47.961232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:47.961474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-12-23T18:40:47.964673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:47.965023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2025-12-23T18:40:47.965132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2025-12-23T18:40:47.965278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2025-12-23T18:40:47.968545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:47.968840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 
129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2025-12-23T18:40:47.972252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:47.972646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2025-12-23T18:40:47.972748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2025-12-23T18:40:47.972935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2025-12-23T18:40:47.975090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:47.975352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2025-12-23T18:40:47.978630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:47.979063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } 2025-12-23T18:40:47.979158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] 
TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2025-12-23T18:40:47.979316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-12-23T18:40:47.981249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:47.981430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply >> TSchemeShardSysNames::CreateOpsAreCovered [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:45.543599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:45.543726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:45.543774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:45.543813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:45.543854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:45.543902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:45.543987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-12-23T18:40:45.544069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:45.544999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:45.545316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:45.683659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:45.683756Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:45.684733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:45.703504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:45.703629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:45.703796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:45.710799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:45.711069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:45.711936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:45.712279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:45.715137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:45.715340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:45.716516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:45.716583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:45.716708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:45.716755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:45.716836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:45.717051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:45.895032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.896187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.896350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.896436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.896589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.896698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.896793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.896873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.896957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.897046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.897148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.897255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.897372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.897484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:45.897632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... 
8944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:40:48.177530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:40:48.177685Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:48.177741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:40:48.177792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 39 2025-12-23T18:40:48.177824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 39 2025-12-23T18:40:48.177852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2025-12-23T18:40:48.178078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:40:48.178128Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:40:48.178255Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:40:48.178302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:40:48.178353Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:40:48.178394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:40:48.178443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:40:48.178494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:40:48.178540Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:40:48.178619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:40:48.178726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:40:48.178805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:40:48.178853Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 3, subscribers: 0 
2025-12-23T18:40:48.178896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-12-23T18:40:48.178936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-12-23T18:40:48.178964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2025-12-23T18:40:48.180446Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:40:48.180565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:40:48.180619Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:40:48.180666Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-12-23T18:40:48.180724Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:40:48.182091Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:40:48.182186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:40:48.182244Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:40:48.182285Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-12-23T18:40:48.182321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:40:48.183267Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:40:48.183346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:40:48.183379Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:40:48.183432Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:40:48.183472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:40:48.183544Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:40:48.188092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:40:48.196901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:40:48.197056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:40:48.197309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:40:48.197359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:40:48.197781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:40:48.197899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:40:48.197946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:739:2729] TestWaitNotification: OK eventTxId 101 2025-12-23T18:40:48.198447Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:48.198687Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 290us result status StatusSuccess 2025-12-23T18:40:48.199156Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest |94.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |94.1%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |94.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists >> TConsoleConfigTests::TestAutoSplit [GOOD] >> TConsoleConfigTests::TestAllowedScopes >> KqpResultSetFormats::ArrowFormat_Types_List_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_3 >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-ordinaryuser >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> KqpSinkLocks::OlapUpdateLocksOneShardRowExists [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-anonymous |94.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |94.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |94.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] 
recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:40:47.003441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:47.003531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:47.003574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:47.003609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:47.003654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:47.003705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:47.003756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:47.003873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:47.005850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:47.006169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:47.110199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:47.110265Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:47.120841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:47.121946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:47.122114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:47.125565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:47.125758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:47.126548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:47.126801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:47.129019Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:47.129191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:47.130347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:47.130409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:47.130535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:47.130582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:47.130627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:47.130777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:47.298629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.299830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.299989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.300101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.300196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } 
TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.300266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.300366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.300475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.300558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.300663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.300745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.300844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.300951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.301052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:47.301147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... BE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndex/ValueIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:40:49.277301Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndex/ValueIndex" took 309us result status StatusSuccess 2025-12-23T18:40:49.278215Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndex/ValueIndex" PathDescription { Self { Name: "ValueIndex" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 53 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 
100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:49.278945Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:1482:3306], Recipient [1:138:2160]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false 
ReturnBoundaries: true ShowPrivateTable: true } 2025-12-23T18:40:49.279000Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:40:49.279102Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:40:49.279430Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" took 316us result status StatusSuccess 2025-12-23T18:40:49.280299Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } 
BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "19700101000000Z_continuousBackupImpl" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 49 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 53 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |94.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |94.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |94.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-anonymous >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope >> 
TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:46.491116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:46.491201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:46.491249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:46.491287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:46.491324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:46.491377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:46.491453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:46.491536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:46.492445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:46.492745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:46.638159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:46.638246Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:46.639177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:46.660106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:46.660314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:46.660527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:46.681411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-12-23T18:40:46.681732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:46.682693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:46.683111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:46.691519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:46.691827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:46.693138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:46.693213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:46.693374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:46.693428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:46.693493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:46.693713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:46.867267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.868463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.868640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.868799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.868912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.869002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.869100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.869193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.869265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.869337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.869434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:40:46.869591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.869689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.869780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:40:46.869874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2216] at schemeshard: 72057594046678944 Send TEvModif ... rd__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-12-23T18:40:49.671478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:40:49.672943Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:40:49.673003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:40:49.673022Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:40:49.673063Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:40:49.673090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:40:49.673151Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:40:49.675058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:40:49.675416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:40:49.676309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:40:49.676479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:40:49.676510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:40:49.676830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:40:49.676930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:40:49.676970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:741:2730] TestWaitNotification: OK eventTxId 102 2025-12-23T18:40:49.677422Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:49.677577Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 190us result status StatusSuccess 2025-12-23T18:40:49.677850Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 
1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-12-23T18:40:49.681378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:49.681781Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-12-23T18:40:49.681861Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2025-12-23T18:40:49.681965Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:40:49.684239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-12-23T18:40:49.684472Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:40:49.684895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:40:49.684942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:40:49.685373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:40:49.685473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:40:49.685535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:749:2738] TestWaitNotification: OK eventTxId 103 2025-12-23T18:40:49.686051Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:49.686249Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 220us result status StatusSuccess 2025-12-23T18:40:49.686606Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TConsoleConfigTests::TestAllowedScopes [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink >> BasicUsage::GetAllStartPartitionSessions >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] |94.1%| [TA] 
$(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicUsage::BasicWriteSession |94.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::KeysOnlyLogDebezium [GOOD] >> Cdc::DocApi[PqRunner] >> BasicUsage::FallbackToSingleDb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:40:49.344891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:49.345011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:49.345051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:49.345091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:49.345137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:49.345195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:49.345260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:49.345354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:49.346353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:49.346644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:49.430697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:49.430795Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:49.436233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:49.436464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:49.436593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:49.439506Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:49.439699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:49.440386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:49.440604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:49.442513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:49.442683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:49.443869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:49.443930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:49.444058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:49.444101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:49.444203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:49.444345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.451366Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:40:49.618718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:49.619016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.619258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:40:49.619307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:40:49.619575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:40:49.619648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:49.623038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:49.623278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:40:49.623492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.623574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:40:49.623622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:40:49.623673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:40:49.627759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.627835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:40:49.627882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:40:49.630318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.630386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.630447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:49.630500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:40:49.634466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:40:49.637048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:40:49.637243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:40:49.638425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:49.638573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:49.638630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:49.638921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:40:49.638987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:49.639184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:40:49.639283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:40:49.641828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:49.641878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
:HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409548 2025-12-23T18:40:50.552107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-12-23T18:40:50.552372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-12-23T18:40:50.553133Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409547 Forgetting tablet 72075186234409548 2025-12-23T18:40:50.553930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-12-23T18:40:50.554119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186234409547 2025-12-23T18:40:50.556666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-12-23T18:40:50.557211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-12-23T18:40:50.557634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:40:50.557696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-12-23T18:40:50.557808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-12-23T18:40:50.558381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:40:50.558440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-12-23T18:40:50.558503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:40:50.561914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-12-23T18:40:50.561975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 
tabletId 72075186234409546 2025-12-23T18:40:50.562142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2025-12-23T18:40:50.562167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2025-12-23T18:40:50.562240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-12-23T18:40:50.562277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2025-12-23T18:40:50.563680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:40:50.563778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-12-23T18:40:50.564104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-12-23T18:40:50.564146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-12-23T18:40:50.564678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-12-23T18:40:50.564767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:40:50.564802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:934:2793] TestWaitNotification: OK eventTxId 106 2025-12-23T18:40:50.565400Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:50.565604Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 229us result status StatusPathDoesNotExist 2025-12-23T18:40:50.565796Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2025-12-23T18:40:50.566301Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:50.566484Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 178us result status StatusPathDoesNotExist 2025-12-23T18:40:50.566633Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:40:50.567199Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:40:50.567440Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 203us result status StatusSuccess 2025-12-23T18:40:50.567903Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 
MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted 2025-12-23T18:40:50.568552Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409550 2025-12-23T18:40:50.568649Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409551 2025-12-23T18:40:50.568708Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409552 2025-12-23T18:40:50.568769Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409553 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig |94.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::UuidExchange[TopicRunner] [GOOD] >> Cdc::UpdatesLog[PqRunner] |94.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |94.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |94.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation [GOOD] >> BasicUsage::PropagateSessionClosed |94.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> KqpSnapshotIsolation::TPragmaSettingOlap+IsSnapshotIsolation [GOOD] |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] |94.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleConfigTests::TestManageValidators >> KqpSinkLocks::OlapUpdateLocksOneShard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUpdateLocksOneShardRowExists [GOOD] Test command err: Trying to start YDB, gRPC: 20414, MsgBus: 62809 2025-12-23T18:39:12.958290Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124807757884558:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:12.975531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00411f/r3tmp/tmphb19LR/pdisk_1.dat 2025-12-23T18:39:13.401607Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:13.541435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:13.541548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:13.546897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:13.702856Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124807757884507:2081] 1766515152938498 != 1766515152938501 2025-12-23T18:39:13.707705Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:13.722298Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 20414, node 1 2025-12-23T18:39:13.829276Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:13.829303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:13.829317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:13.829467Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:13.979632Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:62809 TClient is connected to server localhost:62809 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515153676 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:14.527392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:14.543815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:39:17.379061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124829232721882:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.379161Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.379330Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124829232721892:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.381155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124829232721896:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.381219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.385117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:17.398809Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124829232721897:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:39:17.454179Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124829232721949:2540] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:17.770253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:39:17.954857Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124807757884558:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:17.954951Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:18.083437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:39:18.083721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:39:18.083969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:39:18.084114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:39:18.084217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:39:18.084342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:39:18.084459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:39:18.084625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:39:18.084742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:39:18.084879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:39:18.084990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:39:18.085130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:39:18.085255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587124829232722103:2343];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:39:18.085869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587124829232722099:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:39:18.085932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587124829232722099:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_ ... 
sult=not_found; 2025-12-23T18:40:43.918669Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.918801Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.918825Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.929528Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.929643Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.929705Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.940455Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.940545Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.940568Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.951426Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.951512Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.951537Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.962290Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.962380Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.962403Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.974330Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.974417Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.974444Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.982514Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.982607Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.982634Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.995011Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.995095Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.995120Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.995326Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.995384Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.995408Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:44.008448Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:44.008515Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:44.008535Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:44.009474Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:44.009526Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:44.009547Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:44.074118Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd686fpf33v6s0d47bdpbgw0", SessionId: ydb://session/3?node_id=4&id=NDk2ZjY1MDQtMzQ0M2Y3MGItM2RmYjk5MGQtOWEzYTgzYWE=, Slow query, duration: 11.893453s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2025-12-23T18:40:44.770254Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037993;tx_state=TTxProgressTx::Execute;tx_current=281474976710668;tx_id=281474976710668;commit_tx_id=281474976710668;commit_lock_id=281474976710666;fline=manager.cpp:80;broken_lock_id=281474976710665; 2025-12-23T18:40:44.890184Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2081: SelfId: [4:7587125201801596436:3679], TxId: 281474976710669, task: 1. Ctx: { TraceId : 01kd686zqab632sejhepd216bq. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTlkNTBkN2UtYzRiZTIxZTYtOGVkMjhmMy01NGUwMmRhNA==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } 2025-12-23T18:40:44.890324Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [4:7587125201801596436:3679], TxId: 281474976710669, task: 1. Ctx: { TraceId : 01kd686zqab632sejhepd216bq. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NTlkNTBkN2UtYzRiZTIxZTYtOGVkMjhmMy01NGUwMmRhNA==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 }. 2025-12-23T18:40:44.890865Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=NTlkNTBkN2UtYzRiZTIxZTYtOGVkMjhmMy01NGUwMmRhNA==, ActorId: [4:7587125201801596335:3649], ActorState: ExecuteState, TraceId: 01kd686zqab632sejhepd216bq, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-system >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable >> BasicUsage::WriteSessionWriteInHandlers >> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-anonymous >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD] >> BasicUsage::WriteSessionNoAvailableDatabase >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyList [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 >> BasicUsage::WriteSessionCloseWaitsForWrites >> TConsoleConfigTests::TestManageValidators [GOOD] >> TConsoleConfigTests::TestDryRun >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-clusteradmin >> TConsoleTests::TestRemoveTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] Test command err: 2025-12-23T18:40:21.117060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:21.117132Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:21.388445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:22.819995Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:22.820073Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:22.949675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, 
at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:24.137177Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:24.137253Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:24.220406Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:25.484180Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:25.484264Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:25.583049Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:26.874526Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:26.874595Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:27.010476Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:28.539790Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:28.539867Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:28.652175Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:29.989648Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:29.989726Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:30.102387Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:34.096457Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:34.096531Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:34.207061Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:37.847283Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:37.847360Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:37.949694Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:38.990554Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:38.990630Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:39.120143Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:39.746595Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936131] NodeDisconnected NodeId# 11 2025-12-23T18:40:39.746735Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 11 2025-12-23T18:40:39.746802Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 11 2025-12-23T18:40:39.747947Z node 11 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [11:460:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-12-23T18:40:40.619071Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:40.619153Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:40.761212Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:42.417417Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:42.417511Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:42.541429Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:44.240112Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:44.240195Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:44.379157Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:45.993403Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:45.993490Z node 18 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:46.121754Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:48.154947Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:48.155031Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:48.307898Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:49.854847Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:49.854942Z node 21 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:49.975729Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:52.544928Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:52.545020Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:52.637647Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:53.331175Z node 23 :BS_CONTROLLER ERROR: {BSC26@console_interaction.cpp:112} failed to parse config obtained from Console ErrorReason# ydb/library/yaml_config/yaml_config_parser.cpp:1372: Condition violated: `config.HasDomainsConfig()' Yaml# --- metadata: kind: MainConfig cluster: "" version: 1 config: log_config: cluster_name: cluster1 allowed_labels: test: type: enum values: ? 
true selector_config: [] |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TPragmaSettingOlap+IsSnapshotIsolation [GOOD] Test command err: Trying to start YDB, gRPC: 22226, MsgBus: 21210 2025-12-23T18:39:46.429252Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124955162212832:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:46.429419Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00410e/r3tmp/tmpcLbki4/pdisk_1.dat 2025-12-23T18:39:46.750915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:46.757461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:46.757564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:46.875657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:46.877013Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:46.882526Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124955162212604:2081] 1766515186352753 != 1766515186352756 TServer::EnableGrpc on GrpcPort 22226, node 1 2025-12-23T18:39:46.994526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:47.067474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:47.067507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:47.067516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:47.067623Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21210 2025-12-23T18:39:47.426089Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21210 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515186947 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:47.855042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:47.863466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:50.222061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124972342082662:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.222242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.223852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124972342082680:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.229058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:50.230378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124972342082715:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.230458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.230814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124972342082719:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.230882Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.245845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:39:50.246117Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124972342082688:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:50.310725Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124972342082744:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:50.642333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.816438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:51.888647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124955162212832:2257];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:51.895248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:52.322222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 30242, MsgBus: 30310 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00410e/r3tmp/tmp234l8i/pdisk_1.dat 2025-12-23T18:39:56.499518Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:56.500267Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:56.750918Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:56.772184Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:56.772270Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:56.781741Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587124997669767236:2081] 1766515196381301 != 1766515196381304 2025-12-23T18:39:56.802674Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:56.869071Z node 2 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30242, node 2 2025-12-23T18:39:57.279332Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:57.279361Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:57.279368Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:57.279444Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:57.296271Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:57.450903Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unex ... 841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.732823Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.743933Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.744036Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.744068Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.751316Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.751410Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.751446Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.758272Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.758349Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
2025-12-23T18:40:43.758374Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.766153Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.766229Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.766263Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.779433Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.779555Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.779615Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.795689Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.795794Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.795835Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.809261Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.809381Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.809426Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.818340Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.818440Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.818477Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.826813Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.826908Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.826943Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.835453Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.835579Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:43.835620Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:44.028021Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd686b3fbm24bd9m9cabx91h", SessionId: ydb://session/3?node_id=3&id=MTNiYmVhMjItNGRjZDY5ZmMtODUxNTYwNzgtYjNkNTc1Mjg=, Slow query, duration: 20.357251s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=FO::FETCHING;event=free;usage=0;delta=445; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=445; FALLBACK_ACTOR_LOGGING;priority=INFO;component=334;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=Key;);;ff=(column_ids=1,2;column_names=Key,Value;);;program_input=(column_ids=1,2;column_names=Key,Value;);;; 2025-12-23T18:40:49.993501Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2081: SelfId: [3:10360:9605], TxId: 281474976715666, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68708388737611np23fqyk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=MzQ5OGZhNDItZmEwYjQxY2ItOTNiNTI2YjAtNzNkMzZiNzQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 } 2025-12-23T18:40:49.994637Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [3:10360:9605], TxId: 281474976715666, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68708388737611np23fqyk. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=MzQ5OGZhNDItZmEwYjQxY2ItOTNiNTI2YjAtNzNkMzZiNzQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 }. 2025-12-23T18:40:49.996624Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=MzQ5OGZhNDItZmEwYjQxY2ItOTNiNTI2YjAtNzNkMzZiNzQ=, ActorId: [3:9590:8908], ActorState: ExecuteState, TraceId: 01kd68708388737611np23fqyk, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV2`" issue_code: 2001 severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=20;stage=FO::FETCHING; >> KqpResultSetFormats::ArrowFormat_Returning+isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning-isOlap |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpRm::NodesMembershipByExchanger >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-anonymous >> KqpRm::NotEnoughMemory >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-anonymous >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem >> TConsoleConfigTests::TestDryRun [GOOD] >> TConsoleConfigTests::TestAffectedConfigs >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUpdateLocksOneShard [GOOD] Test command err: Trying to start YDB, gRPC: 31171, MsgBus: 25727 2025-12-23T18:39:13.159529Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124810382158332:2168];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:13.159572Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:13.180300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00411c/r3tmp/tmpx5o9qc/pdisk_1.dat 2025-12-23T18:39:13.535837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:13.567554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:13.567698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:13.574409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:13.621276Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124810382158197:2081] 1766515153115169 != 1766515153115172 2025-12-23T18:39:13.640725Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 31171, node 1 2025-12-23T18:39:13.779031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:13.779071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:13.779085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:13.779205Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:13.781336Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25727 2025-12-23T18:39:14.167520Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515153697 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:14.380098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:14.390072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:17.324725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124827562028250:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.324820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124827562028274:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.324871Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.329686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:17.330905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124827562028280:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.331007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:17.344622Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124827562028279:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:39:17.429524Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124827562028332:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:17.773557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:39:17.916933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:39:17.917255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:39:17.917462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:39:17.917583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:39:17.917668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:39:17.917744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:39:17.917802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:39:17.917923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:39:17.918016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:39:17.918109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:39:17.918209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:39:17.918327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:39:17.918448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587124827562028496:2337];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:39:17.919439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587124827562028498:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:39:17.919497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587124827562028498:2338];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:39:17.919638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587124827562028498:2338];tablet_id=72075 ... sult=not_found; 2025-12-23T18:40:46.713304Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038044;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.713381Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038044;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.713405Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038044;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.715731Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.715810Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.715832Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.724403Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.724478Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.724502Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.727599Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038028;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.727686Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038028;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.727708Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038028;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.735204Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038030;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.735276Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038030;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.735299Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038030;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.737917Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.737957Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.737973Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.745818Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.745903Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.745926Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.746421Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038048;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.746472Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038048;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.746490Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038048;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.756739Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.756837Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.756881Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.759784Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038016;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.759860Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038016;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.759885Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038016;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.769989Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.770073Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.770101Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:46.938340Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd686hv260es08fyfc5zj3en", SessionId: ydb://session/3?node_id=4&id=NDk2YzM1NmItOTBjZDhkNjktZGRlMTMwNWYtOGQyYzY3ODE=, Slow query, duration: 12.644494s, status: STATUS_CODE_UNSPECIFIED, user: 
UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2025-12-23T18:40:47.591422Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=281474976710668;tx_id=281474976710668;commit_tx_id=281474976710668;commit_lock_id=281474976710666;fline=manager.cpp:80;broken_lock_id=281474976710665; 2025-12-23T18:40:47.741547Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2081: SelfId: [4:7587125214503884041:3679], TxId: 281474976710669, task: 1. Ctx: { CheckpointId : . TraceId : 01kd6872ft30sptcq7p8dbnqby. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTMzYzUzODQtMjExNzVkNWQtZDc1NmUwYTgtZmNkMjRlZWE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } 2025-12-23T18:40:47.741730Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [4:7587125214503884041:3679], TxId: 281474976710669, task: 1. Ctx: { CheckpointId : . TraceId : 01kd6872ft30sptcq7p8dbnqby. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTMzYzUzODQtMjExNzVkNWQtZDc1NmUwYTgtZmNkMjRlZWE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 }. 2025-12-23T18:40:47.743088Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=MTMzYzUzODQtMjExNzVkNWQtZDc1NmUwYTgtZmNkMjRlZWE=, ActorId: [4:7587125214503883940:3651], ActorState: ExecuteState, TraceId: 01kd6872ft30sptcq7p8dbnqby, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-ordinaryuser >> KqpRm::NotEnoughMemory [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-anonymous >> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD] >> Cdc::NewAndOldImagesLogDebezium >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorTemporary >> Cdc::UpdatesLog[PqRunner] [GOOD] >> Cdc::UpdatesLog[YdsRunner] >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2025-12-23T18:40:56.057248Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:56.057933Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c5b/r3tmp/tmpxepk9P/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:56.058686Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c5b/r3tmp/tmpxepk9P/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c5b/r3tmp/tmpxepk9P/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7005186651447473026 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:56.156834Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:40:56.157226Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:40:56.206324Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:471:2102] with ResourceBroker at [2:441:2101] 2025-12-23T18:40:56.206505Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:472:2103] 2025-12-23T18:40:56.206588Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:469:2349] with ResourceBroker at [1:440:2330] 2025-12-23T18:40:56.206675Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:470:2350] 2025-12-23T18:40:56.206801Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:40:56.206838Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-12-23T18:40:56.206887Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:40:56.206909Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-12-23T18:40:56.207215Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:56.227238Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515256 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:56.227480Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:56.227564Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515256 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:56.227935Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:40:56.228131Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:40:56.228168Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:56.228293Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515256 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:56.228512Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:40:56.228540Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:56.228614Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515256 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:56.228701Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:40:56.229353Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-12-23T18:40:56.229469Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:56.229969Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:56.230428Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-12-23T18:40:56.230631Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:56.230734Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:40:56.235196Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:56.235414Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:40:56.235677Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:40:56.235771Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 >> TConsoleConfigTests::TestAffectedConfigs [GOOD] >> KqpRm::DisonnectNodes >> KqpRm::Reduce |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-anonymous >> KqpRm::NodesMembershipByExchanger [GOOD] |94.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/pgwire/pgwire |94.1%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire |94.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-anonymous >> KqpRm::NotEnoughExecutionUnits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] Test command err: 2025-12-23T18:40:22.782473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:22.782545Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:23.014372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:24.409996Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:24.410061Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:24.535955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:25.748270Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:25.748344Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:25.846513Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:27.072645Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:27.072718Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:27.228170Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:28.374472Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:40:28.374554Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:28.466691Z node 4 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=8651011 serviceid=[0:0:0] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2025-12-23T18:40:29.246772Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:29.246847Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:29.350628Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:30.402278Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:40:30.402353Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:30.493325Z node 5 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[5:8246204620103118691:7960687] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2025-12-23T18:40:30.960087Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:30.960166Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:31.044670Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:34.687100Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:34.687172Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:34.795087Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:38.469332Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot 
subscribe to console configs 2025-12-23T18:40:38.469406Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:38.593672Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:39.776779Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:39.776854Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:39.886325Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:41.302909Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:41.302992Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:41.416336Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:42.982907Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:42.982984Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:43.091333Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:44.347492Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:44.347565Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:44.452321Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:45.779558Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:45.779658Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:45.900605Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:47.364225Z node 14 
:FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:47.364316Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:47.518065Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:48.782095Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:48.782178Z node 15 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:48.896649Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:50.212522Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:50.212606Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:50.317145Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:51.685699Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:51.685791Z node 17 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:51.793250Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:53.125789Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:53.125892Z node 18 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:53.282972Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:54.949092Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:54.949190Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:55.068464Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:56.504120Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:56.504221Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:56.679100Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-anonymous >> KqpRm::Reduce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2025-12-23T18:40:55.891027Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:55.891629Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c5e/r3tmp/tmp2ldH72/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:55.892392Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c5e/r3tmp/tmp2ldH72/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c5e/r3tmp/tmp2ldH72/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15432405419660825312 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:55.965644Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:40:55.965990Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:40:55.995440Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:471:2102] with ResourceBroker at [2:441:2101] 2025-12-23T18:40:55.995597Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:472:2103] 2025-12-23T18:40:55.995671Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:469:2349] with ResourceBroker at [1:440:2330] 2025-12-23T18:40:55.995757Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:470:2350] 2025-12-23T18:40:55.995832Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:40:55.995861Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-12-23T18:40:55.995910Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:40:55.995931Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-12-23T18:40:55.996248Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:56.013976Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515255 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:56.014218Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:56.014315Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515255 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:56.014649Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:40:56.015006Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:40:56.015049Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:56.015177Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515255 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:56.015400Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:40:56.015428Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:56.015488Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515255 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:56.015610Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:40:56.016236Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-12-23T18:40:56.016336Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:56.016842Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:56.017342Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-12-23T18:40:56.017575Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:56.017667Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:40:56.017900Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:56.018009Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:40:56.018212Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:40:56.018340Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:40:57.227641Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2025-12-23T18:40:57.227765Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2025-12-23T18:40:57.228839Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:57.543816Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2025-12-23T18:40:58.314701Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:58.315406Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c5a/r3tmp/tmp9fjODV/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:58.316115Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c5a/r3tmp/tmp9fjODV/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c5a/r3tmp/tmp9fjODV/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6598699378800527664 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:58.380333Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:40:58.380704Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:40:58.401185Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:471:2102] with ResourceBroker at [2:441:2101] 2025-12-23T18:40:58.401336Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:472:2103] 2025-12-23T18:40:58.401398Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:469:2349] with ResourceBroker at [1:440:2330] 2025-12-23T18:40:58.401492Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:470:2350] 2025-12-23T18:40:58.401602Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:40:58.401649Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-12-23T18:40:58.401702Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:40:58.401726Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-12-23T18:40:58.402384Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:58.422522Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515258 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:58.422808Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:58.422913Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515258 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:58.423250Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:40:58.423454Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:40:58.423493Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:58.423682Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515258 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:58.423904Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:40:58.423935Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:58.424003Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515258 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:58.424104Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:40:58.424701Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-12-23T18:40:58.424813Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:58.425341Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:58.425814Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-12-23T18:40:58.426009Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:58.426121Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:40:58.426365Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:58.426469Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:40:58.426653Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:40:58.426720Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:40:58.429851Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:40:58.429926Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:40:58.429984Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:40:58.430038Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:40:58.430102Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:469:2349])) 2025-12-23T18:40:58.430296Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:40:58.430499Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task kqp-1-1-1 (1 by [1:469:2349]) (priority=0 type=kqp_query resources={0, 30} resubmit=0) 2025-12-23T18:40:58.430543Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:40:58.430585Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.075000 (insert task kqp-1-1-1 (1 by [1:469:2349])) 2025-12-23T18:40:58.430634Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 70, Free Tier: 0, ExecutionUnits: 0. 
|94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-ordinaryuser >> KqpRm::NotEnoughExecutionUnits [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice |94.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |94.1%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |94.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> TConsoleTests::TestGetUnknownTenantStatus >> KqpRm::ResourceBrokerNotEnoughResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2025-12-23T18:40:59.240886Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:59.241547Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c57/r3tmp/tmpcJIxro/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:59.242388Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c57/r3tmp/tmpcJIxro/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c57/r3tmp/tmpcJIxro/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15214318141180177131 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:59.304819Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:40:59.305220Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 
2025-12-23T18:40:59.331315Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:471:2102] with ResourceBroker at [2:441:2101] 2025-12-23T18:40:59.331486Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:472:2103] 2025-12-23T18:40:59.331555Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:469:2349] with ResourceBroker at [1:440:2330] 2025-12-23T18:40:59.331689Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:470:2350] 2025-12-23T18:40:59.331792Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:40:59.331833Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-12-23T18:40:59.331889Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:40:59.331914Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-12-23T18:40:59.332373Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:59.353691Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515259 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:59.353976Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:59.354081Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515259 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:59.354433Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:40:59.354645Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:40:59.354686Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:59.355804Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515259 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:59.356141Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:40:59.356180Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:59.356258Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant 
updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515259 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:59.356378Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:40:59.357176Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-12-23T18:40:59.357302Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:59.357913Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:59.358411Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-12-23T18:40:59.358633Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:59.358863Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:40:59.359164Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:59.359284Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:40:59.359486Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:40:59.359573Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 >> KqpRm::DisonnectNodes [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> KqpRm::SnapshotSharingByExchanger >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient >> KqpResultSetFormats::ArrowFormat_Types_List_3 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Tuple >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2025-12-23T18:40:58.420909Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:58.421410Z node 2 :BS_PDISK CRIT: 
{BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c58/r3tmp/tmpLyRJDI/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:58.421979Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c58/r3tmp/tmpLyRJDI/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c58/r3tmp/tmpLyRJDI/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4578013681209672446 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:58.468213Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:40:58.468541Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:40:58.485444Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:471:2102] with ResourceBroker at [2:441:2101] 2025-12-23T18:40:58.485579Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:472:2103] 2025-12-23T18:40:58.485633Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:469:2349] with ResourceBroker at [1:440:2330] 2025-12-23T18:40:58.485709Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:470:2350] 2025-12-23T18:40:58.485778Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:40:58.485809Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-12-23T18:40:58.485850Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:40:58.485870Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-12-23T18:40:58.486130Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:58.501803Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515258 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:58.502012Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:58.502095Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515258 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:58.502555Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:40:58.502766Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:40:58.502800Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:58.502929Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515258 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:58.503153Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:40:58.503185Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:40:58.503252Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515258 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:40:58.503400Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:40:58.504033Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-12-23T18:40:58.504151Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:58.504624Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, 
serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:58.505114Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-12-23T18:40:58.505383Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:58.505475Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:40:58.505711Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:58.505824Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:40:58.506037Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:40:58.506130Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:40:59.644495Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2025-12-23T18:40:59.644594Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2025-12-23T18:40:59.645100Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2025-12-23T18:40:59.645192Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2025-12-23T18:40:59.645265Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 2 2025-12-23T18:40:59.645706Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:151:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-12-23T18:40:59.646296Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:75:2076] ServerId# [1:364:2281] TabletId# 72057594037932033 PipeClientId# [2:75:2076] 2025-12-23T18:40:59.646502Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-12-23T18:40:59.646648Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:492: Subcriber is not available for info exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:40:59.646686Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:167: Kill previous info exchanger subscriber for 'kqpexch+/dc-1' at [2:474:2105], reason: tenant updated 2025-12-23T18:40:59.646896Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:59.649578Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:40:59.649735Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:00.181208Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request >> KqpRm::SingleTask >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> 
TConsoleTxProcessorTests::TestTxProcessorRandom |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> CdcStreamChangeCollector::NewImage [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2025-12-23T18:41:00.490466Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:41:00.490952Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c54/r3tmp/tmpHKXPS6/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:41:00.491534Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c54/r3tmp/tmpHKXPS6/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c54/r3tmp/tmpHKXPS6/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10196183906219902372 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:41:00.539047Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:41:00.539362Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:41:00.554275Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:473:2102] with ResourceBroker at [2:443:2101] 2025-12-23T18:41:00.554409Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:474:2103] 2025-12-23T18:41:00.554466Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:471:2351] with ResourceBroker at [1:442:2332] 
2025-12-23T18:41:00.554525Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:472:2352] 2025-12-23T18:41:00.554610Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:41:00.554640Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-12-23T18:41:00.554674Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:41:00.554720Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-12-23T18:41:00.554970Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:00.571242Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515260 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2025-12-23T18:41:00.571459Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:00.571540Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515260 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:41:00.571779Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:41:00.571924Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:41:00.571950Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:00.572029Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515260 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2025-12-23T18:41:00.572106Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:41:00.572177Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:41:00.572193Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:00.572239Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515260 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:41:00.573167Z node 1 
:KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:00.573267Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:00.573355Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:00.573798Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-12-23T18:41:00.573922Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:00.574090Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:41:00.574288Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:00.574358Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:41:00.574467Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:41:00.574533Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:41:00.576931Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-1 (1 by [1:471:2351]) priority=0 resources={0, 1000} 2025-12-23T18:41:00.577001Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-1 (1 by [1:471:2351]) to queue queue_kqp_resource_manager 2025-12-23T18:41:00.577047Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 1000} for task kqp-1-2-1 (1 by [1:471:2351]) from queue queue_kqp_resource_manager 2025-12-23T18:41:00.577081Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-1 (1 by [1:471:2351]) to queue queue_kqp_resource_manager 2025-12-23T18:41:00.577119Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 2.500000 (insert task kqp-1-2-1 (1 by [1:471:2351])) 2025-12-23T18:41:00.577305Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 1000ExternalMemory: 0 } 2025-12-23T18:41:00.577384Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-2 (2 by [1:471:2351]) priority=0 resources={0, 100000} 2025-12-23T18:41:00.577417Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-2 (2 by [1:471:2351]) to queue queue_kqp_resource_manager 2025-12-23T18:41:00.577451Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task kqp-1-2-2 (2 by [1:471:2351]) 2025-12-23T18:41:00.577500Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:499: Removing task kqp-1-2-2 (2 by [1:471:2351]) 2025-12-23T18:41:00.577574Z node 1 :KQP_RESOURCE_MANAGER NOTICE: kqp_rm_service.cpp:338: TxId: 1, taskId: 2. 
Not enough memory for query, requested: 100000. TxResourcesInfo { TxId: 1, Database: , tx initially granted memory: 0B, tx total memory allocations: 1000B, tx largest successful memory allocation: 1000B, tx last failed memory allocation: 0B, tx total execution units: 0, started at: 2025-12-23T18:41:00.576855Z } |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-dbadmin >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> Cdc::OldImageLogDebezium >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-ordinaryuser >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-anonymous |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> KqpRm::SingleTask [GOOD] >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] >> Cdc::UpdatesLog[YdsRunner] [GOOD] >> Cdc::UpdatesLog[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2025-12-23T18:41:01.928393Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:41:01.929102Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c50/r3tmp/tmpa0oo52/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:41:01.929950Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c50/r3tmp/tmpa0oo52/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c50/r3tmp/tmpa0oo52/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10199636206447325662 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:41:01.985999Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:41:01.986394Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:41:02.006066Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:471:2102] with ResourceBroker at [2:441:2101] 2025-12-23T18:41:02.006242Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:472:2103] 2025-12-23T18:41:02.006312Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:469:2349] with ResourceBroker at [1:440:2330] 2025-12-23T18:41:02.006402Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:470:2350] 2025-12-23T18:41:02.006506Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:41:02.006544Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-12-23T18:41:02.006597Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:41:02.006617Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-12-23T18:41:02.007095Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:02.028250Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515261 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:41:02.028502Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:02.028583Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515261 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:41:02.028949Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:41:02.029141Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:41:02.029178Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:02.029295Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515261 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:41:02.029527Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:41:02.029554Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:02.029613Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515261 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:41:02.029713Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:41:02.030412Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-12-23T18:41:02.030532Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:02.031185Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:02.031663Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-12-23T18:41:02.031884Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:02.032010Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:41:02.032251Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:02.032418Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:41:02.032631Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:41:02.032733Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:41:02.035803Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-1 (1 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:41:02.035880Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-1 (1 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:41:02.035935Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:41:02.035985Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-1 (1 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:41:02.036044Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:469:2349])) 2025-12-23T18:41:02.036267Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:41:02.036486Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-2-1 (1 by [1:469:2349]) (release resources {0, 100}) 2025-12-23T18:41:02.036534Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.000000 (remove task kqp-1-2-1 (1 by [1:469:2349])) 2025-12-23T18:41:02.036580Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:40.166588Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:40.292252Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:40.306612Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:40.307060Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:40.307171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:40.622446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:40.622556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:40.712931Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515237283143 != 1766515237283147 2025-12-23T18:40:40.728853Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:40.776003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:40.890041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c6f/r3tmp/tmpESSbuq/pdisk_1.dat 2025-12-23T18:40:41.232045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:41.248374Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:41.365606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:41.407581Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:40:41.407910Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:41.461864Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:41.462025Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:41.463952Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:41.464057Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:41.464140Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:41.464599Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:41.464771Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:41.464901Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:40:41.475908Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:41.514545Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:41.514834Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:41.515006Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:40:41.515037Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:41.515072Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:41.515111Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:41.515503Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:41.515579Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:41.515963Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:41.515999Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:41.516052Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:41.516092Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:41.516149Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:40:41.516256Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:41.516500Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:40:41.516576Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:40:41.517968Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:41.528697Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:41.528796Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:40:41.659609Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:40:41.664234Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:40:41.664322Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:41.665273Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:41.665324Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:40:41.665368Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:40:41.665705Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:40:41.665837Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:40:41.666229Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:41.666308Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:40:41.668298Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:40:41.668812Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:41.670559Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:40:41.670610Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:41.673777Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:40:41.673892Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:41.675521Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:41.675574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:41.675628Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:40:41.675736Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:41.675814Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:40:41.675920Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:41.679783Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:41.681566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:40:41.681656Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... :98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:41:00.250211Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:41:00.251478Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:41:00.251537Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:41:00.251599Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:41:00.251692Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:41:00.251755Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:41:00.251862Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:41:00.253884Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:41:00.255633Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:41:00.255857Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:41:00.255942Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:41:00.269107Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:41:00.269279Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-12-23T18:41:00.269324Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-12-23T18:41:00.269356Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-12-23T18:41:00.269734Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, 
LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:41:00.284336Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:41:00.450347Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:41:00.450420Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:41:00.450796Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:41:00.450856Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:41:00.450908Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 2025-12-23T18:41:00.451107Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2025-12-23T18:41:00.451248Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:41:00.451441Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:41:00.452330Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:41:00.498475Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2025-12-23T18:41:00.498607Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:41:00.498658Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:41:00.498706Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:41:00.498814Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:41:00.498891Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-12-23T18:41:00.498993Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:41:00.501333Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-12-23T18:41:00.501438Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:41:00.509655Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1063:2892], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:00.509764Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1074:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:00.509834Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:00.510554Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:00.510680Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:00.515089Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:41:00.521451Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:41:00.677576Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:41:00.680730Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1077:2900], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:41:00.706240Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1135:2939] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:41:00.822126Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1166:2956], serverId# [4:1167:2957], sessionId# [0:0:0] 2025-12-23T18:41:00.822639Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2025-12-23T18:41:00.822956Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1766515260822850 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-12-23T18:41:00.823142Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-12-23T18:41:00.834239Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2025-12-23T18:41:00.834336Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:41:00.921267Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2025-12-23T18:41:00.921557Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1766515260921458 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-12-23T18:41:00.921712Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-12-23T18:41:00.932749Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2025-12-23T18:41:00.932831Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:41:00.935051Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1194:2975], serverId# [4:1195:2976], sessionId# [0:0:0] 2025-12-23T18:41:00.942243Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1196:2977], serverId# [4:1197:2978], sessionId# [0:0:0] |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> KqpBatchUpdate::UnknownColumn >> DataShardVolatile::DistributedWriteThenCopyTable [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert >> 
TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-ordinaryuser >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-anonymous >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-ordinaryuser >> KqpResultSetFormats::ArrowFormat_Returning-isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_ColumnOrder >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-anonymous >> KqpRm::SnapshotSharingByExchanger [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase |94.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |94.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |94.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2025-12-23T18:41:01.556252Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:41:01.556847Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c53/r3tmp/tmp0jvLjb/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:41:01.561585Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c53/r3tmp/tmp0jvLjb/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c53/r3tmp/tmp0jvLjb/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6658839460629270837 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:41:01.654923Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:41:01.655239Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:41:01.676551Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:473:2102] with ResourceBroker at [2:443:2101] 2025-12-23T18:41:01.676686Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:474:2103] 2025-12-23T18:41:01.676742Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:471:2351] with ResourceBroker at [1:442:2332] 2025-12-23T18:41:01.676794Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:472:2352] 2025-12-23T18:41:01.676875Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:41:01.676910Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-12-23T18:41:01.676949Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:41:01.676969Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-12-23T18:41:01.677166Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:01.706831Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515261 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:41:01.707109Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:01.707201Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515261 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:41:01.707473Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:41:01.707684Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:41:01.707737Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:01.707829Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515261 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:41:01.707954Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:41:01.708051Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:41:01.708074Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:01.708132Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515261 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:41:01.709019Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:01.709136Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:01.709260Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:01.709724Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-12-23T18:41:01.709865Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:01.710090Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:41:01.710306Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:41:01.711552Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:41:01.711794Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:41:01.711925Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:41:02.864429Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2025-12-23T18:41:02.864519Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2025-12-23T18:41:02.864647Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:471:2351]) priority=0 resources={0, 100} 2025-12-23T18:41:02.864684Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:471:2351]) to queue queue_kqp_resource_manager 2025-12-23T18:41:02.864720Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:471:2351]) from queue queue_kqp_resource_manager 2025-12-23T18:41:02.864754Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:471:2351]) to queue queue_kqp_resource_manager 2025-12-23T18:41:02.864803Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:471:2351])) 2025-12-23T18:41:02.864945Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:41:02.865010Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-2-1-2 (2 by [1:471:2351]) priority=0 resources={0, 100} 2025-12-23T18:41:02.865038Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-2-1-2 (2 by [1:471:2351]) to queue queue_kqp_resource_manager 2025-12-23T18:41:02.865065Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:471:2351]) from queue queue_kqp_resource_manager 2025-12-23T18:41:02.865097Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-2-1-2 (2 by [1:471:2351]) to queue queue_kqp_resource_manager 2025-12-23T18:41:02.865130Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:471:2351])) 2025-12-23T18:41:02.865180Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 2, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:41:02.865224Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:02.865361Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515262 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-12-23T18:41:02.865609Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:41:03.171087Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2025-12-23T18:41:03.171248Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [2:473:2102]) priority=0 resources={0, 100} 2025-12-23T18:41:03.171302Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [2:473:2102]) to queue queue_kqp_resource_manager 2025-12-23T18:41:03.171348Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [2:473:2102]) from queue queue_kqp_resource_manager 2025-12-23T18:41:03.171437Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [2:473:2102]) to queue queue_kqp_resource_manager 2025-12-23T18:41:03.171484Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [2:473:2102])) 2025-12-23T18:41:03.171589Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:41:03.171667Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-2-2-2 (2 by [2:473:2102]) priority=0 resources={0, 100} 2025-12-23T18:41:03.171700Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-2-2-2 (2 by [2:473:2102]) to queue queue_kqp_resource_manager 2025-12-23T18:41:03.171743Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-2-2-2 (2 by [2:473:2102]) from queue queue_kqp_resource_manager 2025-12-23T18:41:03.171792Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-2-2-2 (2 by [2:473:2102]) to queue queue_kqp_resource_manager 2025-12-23T18:41:03.171842Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-2-2 (2 by [2:473:2102])) 2025-12-23T18:41:03.171924Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 2, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:41:03.171989Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:03.172126Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515263 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-12-23T18:41:03.172401Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:41:03.468661Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2025-12-23T18:41:03.468812Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [1:471:2351]) (release resources {0, 100}) 2025-12-23T18:41:03.468885Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [1:471:2351])) 2025-12-23T18:41:03.468933Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2025-12-23T18:41:03.469024Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-12-23T18:41:03.469097Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-2-1-2 (2 by [1:471:2351]) (release resources {0, 100}) 2025-12-23T18:41:03.469153Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-1-2 (2 by [1:471:2351])) 2025-12-23T18:41:03.469201Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-12-23T18:41:03.469276Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:03.469451Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515264 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:41:03.469856Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:41:05.232226Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2025-12-23T18:41:05.232370Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [2:473:2102]) (release resources {0, 100}) 2025-12-23T18:41:05.232437Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [2:473:2102])) 2025-12-23T18:41:05.232475Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2025-12-23T18:41:05.232523Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. 
Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-12-23T18:41:05.232591Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-2-2-2 (2 by [2:473:2102]) (release resources {0, 100}) 2025-12-23T18:41:05.232632Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-2-2 (2 by [2:473:2102])) 2025-12-23T18:41:05.232686Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 2, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-12-23T18:41:05.232755Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:41:05.232914Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515265 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:41:05.233242Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:41:05.558297Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> BasicUsage::BasicWriteSession [GOOD] >> BasicUsage::CloseWriteSessionImmediately >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-clusteradmin >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 20875, MsgBus: 24874 2025-12-23T18:39:59.469135Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125011116237328:2238];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:59.469395Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:59.558185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004101/r3tmp/tmpy8XArT/pdisk_1.dat 2025-12-23T18:40:00.254862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:00.293394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:00.293480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-12-23T18:40:00.425044Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:00.466964Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125011116237105:2081] 1766515199366873 != 1766515199366876 2025-12-23T18:40:00.491806Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:00.505738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:00.538828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 20875, node 1 2025-12-23T18:40:00.807356Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:40:00.807378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:40:00.807384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:40:00.807467Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24874 TClient is connected to server localhost:24874 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515200576 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:02.216865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:40:04.469212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125011116237328:2238];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:04.469296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:05.878164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125036886041781:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:05.878333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:05.878704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125036886041793:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:05.878763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125036886041794:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:05.878951Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:05.883602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:05.901984Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125036886041797:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:40:05.973310Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125036886041848:2544] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:06.326810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:40:06.551031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:40:06.551294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:40:06.551510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:40:06.551626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:40:06.551717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:40:06.551818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:40:06.551946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:40:06.552057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:40:06.552183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:40:06.552283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:40:06.552373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:40:06.552468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:40:06.552579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587125041181009283:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:40:06.557893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7587125041181009291:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:40:06.557953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7587125041181009291:2345];tablet_id=72075186224037892;process=TTxInitSchema::Exe ... 10662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.757375Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.759922Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.760029Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.760053Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.768595Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.768668Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.768691Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.769291Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.769351Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.769369Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.778929Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.778998Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.779019Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038043;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.779141Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.779198Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.779223Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.789516Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038030;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.789592Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038030;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.789605Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.789614Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038030;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.789656Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.789676Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.799886Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.799887Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.799941Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.799956Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.799965Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.799975Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.809895Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.809913Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.809966Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.809969Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.809988Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.809989Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.820501Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038032;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.820570Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038032;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.820592Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038032;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.823577Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.823702Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.823724Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:40:58.883652Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd686xdt3avvp38p5234he47", SessionId: ydb://session/3?node_id=3&id=YjYzZjQ0M2MtZDJiM2E0YmItMWFhYjM2NS0yZTA1YjhjMg==, Slow query, duration: 12.624340s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-ordinaryuser >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestListTenants >> Cdc::OldImageLogDebezium [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> Cdc::NewImageLogDebezium >> TConsoleTests::TestAlterUnknownTenant >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] |94.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |94.2%| [LD] {RESULT} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |94.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-ordinaryuser >> Cdc::UpdatesLog[TopicRunner] [GOOD] >> Cdc::VirtualTimestamps[PqRunner] >> 
TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-ordinaryuser >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries >> BasicUsage::WriteSessionWriteInHandlers [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict |94.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |94.2%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |94.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-ordinaryuser >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> KqpResultSetFormats::ArrowFormat_Types_Tuple [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Struct >> BasicUsage::WriteSessionNoAvailableDatabase [GOOD] >> BasicUsage::WriteSessionSwitchDatabases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:47.060326Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:47.175363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:47.185577Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:47.185957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:47.186038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:47.503888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:47.504077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:47.603433Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515244134049 != 1766515244134053 2025-12-23T18:40:47.618051Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:47.671234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:47.769615Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c6e/r3tmp/tmpMLyPMk/pdisk_1.dat 2025-12-23T18:40:48.106478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:48.121829Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:48.242991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:48.295983Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:895:2772] 2025-12-23T18:40:48.296277Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:48.349343Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:48.349667Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:48.351552Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:48.351690Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:48.351755Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:48.352226Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:48.352657Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:900:2775] 2025-12-23T18:40:48.352875Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:48.361340Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:48.361475Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2772] in generation 1 2025-12-23T18:40:48.363066Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:48.363194Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:48.364737Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:40:48.364809Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:40:48.364857Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:40:48.365218Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:48.365372Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:48.365455Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:928:2775] in generation 1 2025-12-23T18:40:48.376616Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:48.408705Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:48.408908Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:48.409025Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:931:2793] 2025-12-23T18:40:48.409057Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:48.409100Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:48.409128Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:48.409430Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:48.409461Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:40:48.409496Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:48.409531Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:932:2794] 2025-12-23T18:40:48.409558Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:40:48.409573Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:40:48.409587Z 
node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:40:48.409889Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:48.409978Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:48.410090Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:48.410118Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:48.410159Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:48.410188Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:48.410219Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:40:48.410273Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:40:48.410339Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:889:2768], serverId# [1:898:2773], sessionId# [0:0:0] 2025-12-23T18:40:48.410369Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:40:48.410383Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:48.410407Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T18:40:48.410432Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:40:48.410854Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:48.411107Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:40:48.411189Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:40:48.411517Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:890:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2025-12-23T18:40:48.411672Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:40:48.411802Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-12-23T18:40:48.411854Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-12-23T18:40:48.413343Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:48.413430Z node 
1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:40:48.427919Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:48.428045Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:40:48.428693Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:40:48.428758Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 n ... eartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:41:07.355165Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:41:07.355226Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:41:07.355269Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2025-12-23T18:41:07.355346Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:41:07.355411Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:41:07.355489Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:41:07.357514Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:41:07.357556Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:41:07.357598Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:41:07.357653Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:41:07.357691Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:41:07.357754Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:41:07.357854Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-12-23T18:41:07.357884Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:41:07.358475Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 2000} 2025-12-23T18:41:07.358526Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:41:07.360864Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, 
outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:41:07.360955Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:41:07.361403Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-12-23T18:41:07.361606Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:41:07.361634Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2025-12-23T18:41:07.361664Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2025-12-23T18:41:07.361715Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:41:07.361753Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:41:07.361807Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:41:07.368504Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:41:07.369093Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:41:07.369156Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-12-23T18:41:07.369371Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-12-23T18:41:07.370022Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:41:07.370440Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:41:07.370483Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:41:07.371031Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-12-23T18:41:07.371074Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-12-23T18:41:07.389525Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1045:2882], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:07.389638Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1056:2887], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:07.389717Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:07.390958Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2895], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:07.391030Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:07.396555Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:41:07.405973Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:41:07.406086Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:41:07.406136Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-12-23T18:41:07.578151Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:41:07.578307Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-12-23T18:41:07.578374Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-12-23T18:41:07.602087Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1059:2890], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:41:07.647900Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1119:2931] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:41:07.964494Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1225:2975], serverId# [4:1226:2976], sessionId# [0:0:0] 2025-12-23T18:41:07.965064Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2025-12-23T18:41:07.965418Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1766515267965299 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-12-23T18:41:07.965651Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1766515267965299 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-12-23T18:41:07.965827Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-12-23T18:41:07.979911Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2025-12-23T18:41:07.980023Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:41:07.987420Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1232:2981], serverId# [4:1233:2982], sessionId# [0:0:0] 2025-12-23T18:41:08.003536Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1234:2983], serverId# [4:1235:2984], sessionId# [0:0:0] |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-dbadmin |94.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |94.2%| [LD] {RESULT} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |94.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2025-12-23T18:40:53.272987Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1766515253272951 2025-12-23T18:40:53.702255Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125240874934267:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:53.704510Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:40:53.739364Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:53.759550Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:40:53.762396Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125243368295034:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:53.762446Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:40:53.846327Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005700/r3tmp/tmpxaSJwx/pdisk_1.dat 2025-12-23T18:40:54.158851Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:54.256472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:54.317071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:54.317181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:54.383394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:54.383465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:54.412020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:54.427753Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:54.440798Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:40:54.444351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:54.467969Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:54.489062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 17294, node 1 2025-12-23T18:40:54.683508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005700/r3tmp/yandexPgMP0M.tmp 2025-12-23T18:40:54.683531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005700/r3tmp/yandexPgMP0M.tmp 2025-12-23T18:40:54.712807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005700/r3tmp/yandexPgMP0M.tmp 2025-12-23T18:40:54.713363Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:54.713415Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:40:54.848007Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:54.903265Z INFO: TTestServer started on Port 12666 GrpcPort 17294 TClient is connected to server localhost:12666 PQClient connected to localhost:17294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515254518 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:55.430366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 
2025-12-23T18:40:58.702836Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125240874934267:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:58.702898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:58.762851Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587125243368295034:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:58.762956Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:58.840924Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125264843131851:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:58.840939Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125264843131843:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:58.841038Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:58.841350Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125264843131858:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:58.841407Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:58.846748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:58.891868Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587125264843131857:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-12-23T18:40:59.126932Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587125264843131887:2142] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:59.160610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:59.162585Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125262349772009:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:40:59.166929Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OGRlMjhjMzMtNGExODZiZDgtYjdmMjBjZjAtOTI5YmRiNDM=, ActorId: [1:7587125262349771951:2330], ActorState: ExecuteState, TraceId: 01kd687dfb0xyt0p8s2vvcmajv, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:40:59.169465Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_po ... ition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:41:08.454542Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:08.454562Z node 2 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:41:08.454686Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2025-12-23T18:41:08.455717Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2025-12-23T18:41:08.455761Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:41:08.455776Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:41:08.455792Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:08.456188Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1704: [72075186224037892][Partition][0][StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 1 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000? size 169 WTime 1766515268453 2025-12-23T18:41:08.456370Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:41:08.456462Z node 2 :PERSQUEUE DEBUG: read.h:313: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 169 2025-12-23T18:41:08.460058Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. 
Partition 0 offset 1 count 1 size 169 actorID [2:7587125299202870708:2390] 2025-12-23T18:41:08.460112Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037892][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:41:08.460175Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037892][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:41:08.460205Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2025-12-23T18:41:08.460236Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-12-23T18:41:08.460378Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:08.460393Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:08.460403Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:08.460417Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:08.460427Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:08.460455Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:41:08.460491Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2025-12-23T18:41:08.461205Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-12-23T18:41:08.460965Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 1 partno 0 count 1 parts 0 suffix '63' size 169 2025-12-23T18:41:08.462393Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-12-23T18:41:08.462580Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 3000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-12-23T18:41:08.462616Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 
2025-12-23T18:41:08.462644Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2025-12-23T18:41:08.463319Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2025-12-23T18:41:08.463430Z :INFO: [/Root] TraceId [] SessionId [src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2025-12-23T18:41:08.463461Z :INFO: [/Root] TraceId [] SessionId [src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0] MessageGroupId [src_id] Write session will now close 2025-12-23T18:41:08.463512Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0] MessageGroupId [src_id] Write session: aborting 2025-12-23T18:41:08.463919Z :WARNING: [/Root] TraceId [] SessionId [src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-12-23T18:41:08.463974Z :DEBUG: [/Root] TraceId [] SessionId [src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0] MessageGroupId [src_id] Write session: destroy 2025-12-23T18:41:08.465260Z node 1 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0 grpc read done: success: 0 data: 2025-12-23T18:41:08.465286Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0 grpc read failed 2025-12-23T18:41:08.465316Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0 grpc closed 2025-12-23T18:41:08.465339Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: src_id|3a7d60c9-e7c5f354-9f772fc-d26dd908_0 is DEAD 2025-12-23T18:41:08.466110Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:41:08.468031Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [1:7587125305299446106:2479] destroyed 2025-12-23T18:41:08.468094Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2025-12-23T18:41:08.468143Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:08.468167Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:08.468185Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:08.468205Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:08.468217Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:08.529206Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:08.529239Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:08.529252Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:08.529268Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:08.529279Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:08.630929Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:08.630962Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:08.630974Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:08.630991Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:08.631004Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:08.738888Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:08.738924Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:08.738936Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:08.738956Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:08.738968Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:09.191414Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1177: ActorId: [1:7587125309594413445:2484] TxId: 281474976720677. Ctx: { TraceId: 01kd687q6x96waw4qb314gyant, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ3ZGMwYS0zYmVkYmI5Yi1mODNhNzYxMS05ZDkxYzcxNg==, PoolId: default, IsStreamingQuery: 0}. 
UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-12-23T18:41:09.192031Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [1:7587125309594413449:2484], TxId: 281474976720677, task: 3. Ctx: { CheckpointId : . TraceId : 01kd687q6x96waw4qb314gyant. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWQ3ZGMwYS0zYmVkYmI5Yi1mODNhNzYxMS05ZDkxYzcxNg==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7587125309594413445:2484], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-12-23T18:41:09.221405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:41:09.221439Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |94.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-dbadmin |94.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |94.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |94.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink [GOOD] >> DataShardVolatile::VolatileTxAbortedOnSplit |94.2%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> TConsoleTests::TestAlterUnknownTenant [GOOD] >> TConsoleTests::TestAlterUnknownTenantExtSubdomain >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-dbadmin |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-anonymous >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites >> KqpBatchUpdate::UnknownColumn [GOOD] |94.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |94.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |94.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-system >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc >> TConsoleTests::TestListTenants [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UnknownColumn [GOOD] Test command err: Trying to start YDB, gRPC: 18864, MsgBus: 61181 2025-12-23T18:41:03.511563Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125283080764808:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:03.511646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:03.544535Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005c05/r3tmp/tmpiie4TD/pdisk_1.dat 2025-12-23T18:41:03.881289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:03.940707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:03.940800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:03.946485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:04.050025Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18864, node 1 2025-12-23T18:41:04.180318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:04.227377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:41:04.227405Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:41:04.227414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:41:04.227562Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:61181 2025-12-23T18:41:04.534893Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:61181 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515264038 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:04.890658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:41:04.896295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:41:04.905971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:41:05.117605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:41:05.362447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:41:05.451845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:41:08.514978Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125283080764808:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:08.515048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:41:09.053503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125308850570417:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:09.053634Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:09.060910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125308850570427:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:09.061042Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:09.446656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:09.518929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:09.597311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:09.644210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:09.714491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:09.782154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:09.841959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:09.936871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:10.059109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125313145538602:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:10.059204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:10.059622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125313145538607:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:10.059667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125313145538608:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:10.059697Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:10.063822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:41:10.092874Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125313145538611:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:41:10.187803Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125313145538662:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:41:12.469455Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125321735473594:2543], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:34: Error: At lambda, At function: Coalesce
:4:41: Error: At function: ==
:4:27: Error: At function: Member
:4:27: Error: Member not found: UnknownColumn 2025-12-23T18:41:12.471254Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OWFhZjQyZWQtYTJjOWU4NWMtMTY1OTA0ZmItMjRiNDYzNDg=, ActorId: [1:7587125321735473585:2537], ActorState: ExecuteState, TraceId: 01kd687tnw4qaw0h21z5pwtct9, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 34 } message: "At lambda, At function: Coalesce" end_position { row: 3 column: 34 } severity: 1 issues { position { row: 4 column: 41 } message: "At function: ==" end_position { row: 4 column: 41 } severity: 1 issues { position { row: 4 column: 27 } message: "At function: Member" end_position { row: 4 column: 27 } severity: 1 issues { position { row: 4 column: 27 } message: "Member not found: UnknownColumn" end_position { row: 4 column: 27 } severity: 1 } } } } }, remove tx with tx_id: 2025-12-23T18:41:12.549093Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125321735473603:2547], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:4:43: Error: At function: KiUpdateTable!
:4:43: Error: Column 'UnknownColumn' does not exist in table '/Root/Test'., code: 2017 2025-12-23T18:41:12.551371Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OWFhZjQyZWQtYTJjOWU4NWMtMTY1OTA0ZmItMjRiNDYzNDg=, ActorId: [1:7587125321735473585:2537], ActorState: ExecuteState, TraceId: 01kd687ts0f82te9hrv5jwf6gg, ReplyQueryCompileError, status: BAD_REQUEST, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 43 } message: "At function: KiUpdateTable!" end_position { row: 4 column: 43 } severity: 1 issues { position { row: 4 column: 43 } message: "Column \'UnknownColumn\' does not exist in table \'/Root/Test\'." end_position { row: 4 column: 43 } issue_code: 2017 severity: 1 } } }, remove tx with tx_id: |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> Cdc::VirtualTimestamps[PqRunner] [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] >> Cdc::DocApi[YdsRunner] [GOOD] >> Cdc::DocApi[TopicRunner] >> KqpSnapshotIsolation::ConflictWrite-IsOlap-FillTables [GOOD] >> KqpSnapshotIsolation::ConflictWrite+IsOlap-FillTables >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule >> Cdc::NewImageLogDebezium [GOOD] >> Cdc::NaN[PqRunner] |94.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |94.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |94.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TConsoleTests::TestAlterBorrowedStorage >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-ordinaryuser |94.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |94.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |94.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-anonymous >> TSentinelTests::Smoke >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-dbadmin >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-anonymous >> KqpResultSetFormats::ArrowFormat_Types_Struct [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Variant |94.3%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |94.3%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |94.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-ordinaryuser >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleTests::TestMergeConfig >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TSentinelTests::Smoke [GOOD] >> TSentinelTests::PDiskUnknownState >> Cdc::VirtualTimestamps[YdsRunner] [GOOD] >> Cdc::VirtualTimestamps[TopicRunner] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-dbadmin |94.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |94.3%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |94.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] Test command err: Trying to start YDB, gRPC: 13163, MsgBus: 31855 2025-12-23T18:39:37.460262Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124915295068275:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:37.460381Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0061da/r3tmp/tmp4pLJQY/pdisk_1.dat 2025-12-23T18:39:38.085938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:38.086043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:38.090129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:38.091060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:38.416540Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:38.490806Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124915295068032:2081] 1766515177399345 != 1766515177399348 2025-12-23T18:39:38.531937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:38.541901Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13163, node 1 2025-12-23T18:39:38.827132Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:38.827154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:38.827160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:38.827238Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31855 TClient is connected to server localhost:31855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515178645 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:40.246147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:40.260208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:40.270675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:40.779437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:41.149361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:41.286199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:42.462989Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124915295068275:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:42.463082Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:44.804736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124945359840996:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.804851Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.805381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124945359841006:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.805426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:45.572135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:45.654289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:45.702643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:45.758759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:45.821918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:45.886076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:45.948976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:46.027918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:46.188207Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124953949776480:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:46.188292Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:46.188519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124953949776485:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:46.188570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124953949776486:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:46.188884Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [W ... athId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515262505 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:03.600754Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:41:03.616174Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:41:07.188135Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7587125279731704681:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:07.188259Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:41:08.825994Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125305501509311:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:08.826007Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125305501509322:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:08.826144Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:08.830535Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125305501509326:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:08.830725Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:08.833543Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:41:08.863543Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7587125305501509325:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:41:08.940503Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7587125305501509378:2549] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=344;columns=1; Trying to start YDB, gRPC: 9020, MsgBus: 12088 2025-12-23T18:41:10.709551Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7587125316184218268:2128];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:10.709629Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:10.747272Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0061da/r3tmp/tmp4wXldS/pdisk_1.dat 2025-12-23T18:41:11.132863Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:11.150669Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:11.158979Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:11.159887Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:11.163053Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:7587125316184218180:2081] 1766515270704923 != 1766515270704926 2025-12-23T18:41:11.259966Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9020, node 13 2025-12-23T18:41:11.359255Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:11.603433Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:41:11.603463Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:41:11.603478Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:41:11.603617Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:11.742955Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12088 TClient is connected to server localhost:12088 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515271360 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:12.765072Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:41:12.775076Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:41:15.711220Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7587125316184218268:2128];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:15.711323Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:41:19.262914Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7587125354838924739:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:19.263043Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:19.264001Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7587125354838924759:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:19.264055Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7587125354838924761:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:19.264117Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:19.270426Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:41:19.296746Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7587125354838924764:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:41:19.353800Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7587125354838924815:2549] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=120;columns=1; |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest |94.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |94.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |94.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2025-12-23T18:41:05.320328Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125291111573170:2266];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:05.320409Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:05.406806Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:41:05.481563Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034d6/r3tmp/tmpVE9VIr/pdisk_1.dat 2025-12-23T18:41:05.604246Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:05.987495Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:05.987904Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:41:06.053365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:06.150240Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:06.219222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:06.219327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:06.255213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:06.255353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:06.256696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:06.342764Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:06.377805Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:41:06.384875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:06.413295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:06.416260Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:06.560904Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TServer::EnableGrpc on GrpcPort 1889, node 1 2025-12-23T18:41:06.714518Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:07.219158Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0034d6/r3tmp/yandexQJOqyE.tmp 2025-12-23T18:41:07.219183Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0034d6/r3tmp/yandexQJOqyE.tmp 2025-12-23T18:41:07.219330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0034d6/r3tmp/yandexQJOqyE.tmp 2025-12-23T18:41:07.219434Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:07.487188Z INFO: TTestServer started on Port 29250 GrpcPort 1889 TClient is connected to server localhost:29250 PQClient connected to localhost:1889 === TenantModeEnabled() = 1 === Init PQ - start server on port 1889 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515266628 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:08.285364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:41:08.285530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:08.285755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:41:08.285777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:41:08.286061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:41:08.286135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:08.290071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:41:08.290197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:41:08.290379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:08.290425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:41:08.290438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 
ProgressState no shards to create, do next state 2025-12-23T18:41:08.290452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-12-23T18:41:08.295007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:08.295064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:41:08.295080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2025-12-23T18:41:08.297524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:08.297549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:08.297572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-12-23T18:41:08.297589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-12-23T18:41:08.301913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:41:08.302248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:41:08.302275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-12-23T18:41:08.302300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:41:08.304680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-12-23T18:41:08.304802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:41:08.307873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766515268350, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:41:08.308056Z node 1 :FL ... 
5-12-23T18:41:20.381889Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715665:0 progress is 1/1 2025-12-23T18:41:20.381905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-12-23T18:41:20.381924Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715665:0 progress is 1/1 2025-12-23T18:41:20.381933Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-12-23T18:41:20.381971Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2025-12-23T18:41:20.382007Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2025-12-23T18:41:20.382023Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2025-12-23T18:41:20.382032Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-12-23T18:41:20.382043Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715665:0 2025-12-23T18:41:20.382052Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2025-12-23T18:41:20.382062Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 46], 3 2025-12-23T18:41:20.384783Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-12-23T18:41:20.385099Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, remove access: -():test_user@builtin:- 2025-12-23T18:41:20.385282Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:41:20.385296Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2025-12-23T18:41:20.385495Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:41:20.385510Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7587125337495551013:2396], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 46 2025-12-23T18:41:20.386758Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at 
schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-12-23T18:41:20.386834Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-12-23T18:41:20.386846Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2025-12-23T18:41:20.386861Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 3 2025-12-23T18:41:20.386876Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2025-12-23T18:41:20.386970Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2025-12-23T18:41:20.392029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2025-12-23T18:41:20.393561Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2025-12-23T18:41:20.393574Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2025-12-23T18:41:20.393876Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2025-12-23T18:41:20.393946Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:447: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:35138 2025-12-23T18:41:20.393960Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1538: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:35138 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-12-23T18:41:20.393967Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2025-12-23T18:41:20.395044Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: describe result for acl check 2025-12-23T18:41:20.395265Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-12-23T18:41:20.395274Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, 
$Partition, $SeqNo); 2025-12-23T18:41:20.395281Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-12-23T18:41:20.395316Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7587125358970388783:2374] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-12-23T18:41:20.395335Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-12-23T18:41:20.396093Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-12-23T18:41:20.396318Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie test-message-group|e8b6317f-bef68c9-cf083d3b-bced0261_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2025-12-23T18:41:20.396797Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|e8b6317f-bef68c9-cf083d3b-bced0261_0 2025-12-23T18:41:20.399332Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group|e8b6317f-bef68c9-cf083d3b-bced0261_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-12-23T18:41:20.399688Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1352: updating token 2025-12-23T18:41:20.399728Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2025-12-23T18:41:20.400530Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: test-message-group|e8b6317f-bef68c9-cf083d3b-bced0261_0 describe result for acl check 2025-12-23T18:41:20.400639Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:815: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|e8b6317f-bef68c9-cf083d3b-bced0261_0 2025-12-23T18:41:20.400928Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group|e8b6317f-bef68c9-cf083d3b-bced0261_0 is DEAD 2025-12-23T18:41:20.401235Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:41:20.637750Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7587125358970388792:2379], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:41:20.638468Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=3&id=Njc2YTc1MmEtODFhNWY1NjgtYjg0YTM1NDEtZDE1NDcyOTc=, ActorId: [3:7587125358970388790:2378], ActorState: ExecuteState, TraceId: 01kd6882pxasmrvzmskc13xk5x, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:41:20.638833Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |94.3%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> KqpResultSetFormats::ArrowFormat_ColumnOrder [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_None >> TConsoleTests::TestAlterBorrowedStorage [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2025-12-23T18:41:04.927180Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125288325883716:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:04.927251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034d9/r3tmp/tmpMzsQOV/pdisk_1.dat 2025-12-23T18:41:05.118407Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:05.162015Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:05.487352Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:05.487507Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect 
path status: LookupError; 2025-12-23T18:41:05.667542Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:05.674550Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:05.877746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:05.877871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:05.891413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:05.891489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:06.016102Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:06.016855Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:06.018853Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:41:06.023562Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:06.058113Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:41:06.058327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:06.062760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:06.106947Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TServer::EnableGrpc on GrpcPort 30263, node 1 2025-12-23T18:41:06.374803Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:06.375717Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:06.455462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0034d9/r3tmp/yandex1tY8Tu.tmp 2025-12-23T18:41:06.455489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0034d9/r3tmp/yandex1tY8Tu.tmp 2025-12-23T18:41:06.512259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully 
initialized from file: /home/runner/.ya/build/build_root/8ptz/0034d9/r3tmp/yandex1tY8Tu.tmp 2025-12-23T18:41:06.512938Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:06.618712Z INFO: TTestServer started on Port 11537 GrpcPort 30263 TClient is connected to server localhost:11537 PQClient connected to localhost:30263 === TenantModeEnabled() = 1 === Init PQ - start server on port 30263 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515266180 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:08.191325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:41:08.191531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:08.191753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:41:08.191771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:41:08.191981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:41:08.192028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:08.198917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:41:08.199060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:41:08.199246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:08.199286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:41:08.199305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-12-23T18:41:08.199319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-12-23T18:41:08.203556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:08.203626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:41:08.203645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2025-12-23T18:41:08.205912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:41:08.205940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-12-23T18:41:08.205959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:41:08.206151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:08.206178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:08.206208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-12-23T18:41:08.206250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-12-23T18:41:08.210753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:41:08.214416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-12-23T18:41:08.214541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 720575940 ... 
ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:41:20.247063Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710665:0 progress is 1/1 2025-12-23T18:41:20.247080Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-12-23T18:41:20.247113Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710665:0 progress is 1/1 2025-12-23T18:41:20.247122Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-12-23T18:41:20.247163Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2025-12-23T18:41:20.247214Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: false 2025-12-23T18:41:20.247242Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2025-12-23T18:41:20.247257Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2025-12-23T18:41:20.247271Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710665:0 2025-12-23T18:41:20.247286Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710665, publications: 1, subscribers: 0 2025-12-23T18:41:20.247297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710665, [OwnerId: 72057594046644480, LocalPathId: 46], 3 2025-12-23T18:41:20.255492Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusSuccess TxId: 281474976710665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-12-23T18:41:20.255965Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2025-12-23T18:41:20.256151Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:41:20.256177Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710665, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2025-12-23T18:41:20.256404Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:41:20.256422Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to 
populator: [3:7587125336771655411:2387], at schemeshard: 72057594046644480, txId: 281474976710665, path id: 46 2025-12-23T18:41:20.257611Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710665 2025-12-23T18:41:20.257687Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710665 2025-12-23T18:41:20.257698Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710665 2025-12-23T18:41:20.257713Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710665, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 3 2025-12-23T18:41:20.257729Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2025-12-23T18:41:20.257824Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710665, subscribers: 0 2025-12-23T18:41:20.264690Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2025-12-23T18:41:20.264724Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2025-12-23T18:41:20.267619Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710665 2025-12-23T18:41:20.268057Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-12-23T18:41:20.268164Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:447: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:60580 2025-12-23T18:41:20.268177Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1538: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:60580 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-12-23T18:41:20.268188Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2025-12-23T18:41:20.269279Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: describe result for acl check 2025-12-23T18:41:20.269440Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-12-23T18:41:20.269448Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; 
DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-12-23T18:41:20.269455Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-12-23T18:41:20.269482Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7587125358246493160:2375] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-12-23T18:41:20.269498Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-12-23T18:41:20.270594Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 4, Generation: 1 2025-12-23T18:41:20.270920Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie test-group-id|544b4ec9-3154f253-39aa2e7f-aa9744fa_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-12-23T18:41:20.279056Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|544b4ec9-3154f253-39aa2e7f-aa9744fa_0 2025-12-23T18:41:20.281816Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|544b4ec9-3154f253-39aa2e7f-aa9744fa_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-12-23T18:41:20.282086Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|544b4ec9-3154f253-39aa2e7f-aa9744fa_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-12-23T18:41:20.282139Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:815: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|544b4ec9-3154f253-39aa2e7f-aa9744fa_0 2025-12-23T18:41:20.282380Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-group-id|544b4ec9-3154f253-39aa2e7f-aa9744fa_0 is DEAD 2025-12-23T18:41:20.282695Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:41:20.564874Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7587125358246493174:2382], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:41:20.566529Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=3&id=MTE0NjAzZmEtMTUxZmQ5ZjYtMWI2NTUyODYtMWY1Y2M5OWM=, ActorId: [3:7587125358246493167:2378], ActorState: ExecuteState, TraceId: 01kd6882mte98d8q5qnhy2zwzn, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:41:20.566905Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |94.3%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-anonymous >> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop >> BasicUsage::CloseWriteSessionImmediately [GOOD] >> TSentinelTests::PDiskUnknownState [GOOD] >> TSentinelBaseTests::PDiskInitialStatus [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeNotExpectedRestart [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] |94.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |94.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |94.3%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-ordinaryuser >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskUnknownState [GOOD] Test command err: 2025-12-23T18:41:20.764581Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2025-12-23T18:41:20.764649Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2025-12-23T18:41:20.764715Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:41:20.764746Z 
node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2025-12-23T18:41:20.764801Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-12-23T18:41:20.764885Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-12-23T18:41:20.765789Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-12-23T18:41:20.776669Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true 
BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } 
GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 
GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... disk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2025-12-23T18:41:24.324968Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 15 2025-12-23T18:41:24.325108Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved16 } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2025-12-23T18:41:24.325137Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 16 2025-12-23T18:41:24.325280Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2025-12-23T18:41:24.325374Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:41:24.325874Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:60, status# ACTIVE, required status# INACTIVE, reason# PrevState# Normal State# Unknown StateCounter# 1 current# ACTIVE, dry run# 0 2025-12-23T18:41:24.325965Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-12-23T18:41:24.326283Z node 9 :CMS DEBUG: 
sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2025-12-23T18:41:24.326352Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:60 2025-12-23T18:41:24.326456Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:41:24.326488Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2025-12-23T18:41:24.326616Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2025-12-23T18:41:24.326683Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-12-23T18:41:24.326719Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-12-23T18:41:24.326796Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-12-23T18:41:24.326843Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-12-23T18:41:24.326872Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-12-23T18:41:24.326919Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-12-23T18:41:24.326963Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-12-23T18:41:24.327230Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved17 } ResponseTime: 120110 2025-12-23T18:41:24.327276Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 17 2025-12-23T18:41:24.328199Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 
2025-12-23T18:41:24.328357Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2025-12-23T18:41:24.427165Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2025-12-23T18:41:24.427397Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2025-12-23T18:41:24.427562Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved15 } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2025-12-23T18:41:24.427603Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 15 2025-12-23T18:41:24.427744Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { 
PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved16 } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2025-12-23T18:41:24.427769Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 16 2025-12-23T18:41:24.427912Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2025-12-23T18:41:24.428015Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:41:24.428539Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 9:39, status# ACTIVE, required status# INACTIVE, reason# PrevState# Normal State# Unknown StateCounter# 1 current# ACTIVE, dry run# 0 2025-12-23T18:41:24.428637Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-12-23T18:41:24.428957Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2025-12-23T18:41:24.429023Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 9:39 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-clusteradmin >> TConsoleTests::TestMergeConfig [GOOD] >> TConsoleTests::TestCreateSubSubDomain >> TSentinelBaseTests::PDiskStateChangeNormalFlow [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodePermanentlyBad [GOOD] >> TSentinelTests::BSControllerUnresponsive >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-dbadmin |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-clusteradmin >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] >> TSentinelTests::PDiskPileGuardFullPile ------- [TM] {asan, 
default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] Test command err: 2025-12-23T18:40:51.558832Z :BasicWriteSession INFO: Random seed for debugging is 1766515251558793 2025-12-23T18:40:52.180368Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125237865017417:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:52.180436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:40:52.298149Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125235567842998:2277];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:52.314820Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:40:52.315066Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:40:52.315427Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00571b/r3tmp/tmpqXqJPO/pdisk_1.dat 2025-12-23T18:40:52.362893Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:40:52.651810Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:52.670856Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:52.713260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:52.713646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:52.714286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:52.714352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:52.751389Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:40:52.751579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:52.777712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:52.880022Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:52.908869Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:52.920697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 28996, node 1 2025-12-23T18:40:53.181889Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/00571b/r3tmp/yandexPPD23D.tmp 2025-12-23T18:40:53.181924Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/00571b/r3tmp/yandexPPD23D.tmp 2025-12-23T18:40:53.182140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/00571b/r3tmp/yandexPPD23D.tmp 2025-12-23T18:40:53.182253Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:40:53.207183Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:53.240738Z INFO: TTestServer started on Port 1077 GrpcPort 28996 2025-12-23T18:40:53.262315Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1077 PQClient connected to localhost:28996 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515252964 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:53.468101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-12-23T18:40:53.590901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-12-23T18:40:56.429161Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125252747712304:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:56.429285Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125252747712292:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:56.429369Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:56.430326Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125252747712308:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:56.430401Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:56.450217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:56.530060Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587125252747712307:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-12-23T18:40:56.634174Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587125252747712337:2139] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:57.099780Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125255044888013:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:40:57.101366Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587125252747712344:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:40:57.104549Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=MzViYmIyMGUtYTY3NjQwOWMtMTNkMmM1OTItNTk3ZDJhNg==, ActorId: [2:7587125252747712290:2301], ActorState: ExecuteState, TraceId: 01kd687b2k317ee80702c536zt, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:40:57.103286Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MjM4ZTNhYmItNjVjZGNiMTMtN2MwYmU2NGItNjgwNTdjN2Q=, ActorId: [1:7587125255044887971:2328], ActorState: ExecuteState, TraceId: 01kd687bj90bpj39r4ac11p4q7, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18 ... 
0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:20.925023Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:21.030850Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:21.030883Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:21.030897Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:21.030916Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:21.030928Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:21.032728Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [3:7587125359391522597:2455] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-12-23T18:41:21.032773Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7587125359391522597:2455] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-12-23T18:41:21.032793Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [3:7587125359391522597:2455] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-12-23T18:41:21.032826Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-12-23T18:41:21.033890Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037892] server connected, pipe [3:7587125363686489962:2455], now have 1 active actors on pipe 2025-12-23T18:41:21.034248Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-12-23T18:41:21.034698Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-12-23T18:41:21.034734Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-12-23T18:41:21.034847Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|d5687845-b46357c2-4cd18ccd-7ca2486f_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-12-23T18:41:21.034891Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:21.034907Z node 4 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:41:21.034934Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:41:21.034948Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:21.034967Z node 4 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:41:21.034999Z node 
4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:41:21.035010Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:41:21.035026Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:21.035069Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:37: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-12-23T18:41:21.035120Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-12-23T18:41:21.036268Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-12-23T18:41:21.036295Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-12-23T18:41:21.036366Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-12-23T18:41:21.036851Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|d5687845-b46357c2-4cd18ccd-7ca2486f_0 2025-12-23T18:41:21.041738Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1766515281041 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:41:21.041873Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|d5687845-b46357c2-4cd18ccd-7ca2486f_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-12-23T18:41:21.042843Z :INFO: [] MessageGroupId [src] SessionId [src|d5687845-b46357c2-4cd18ccd-7ca2486f_0] Write session: close. 
Timeout = 0 ms 2025-12-23T18:41:21.042883Z :INFO: [] MessageGroupId [src] SessionId [src|d5687845-b46357c2-4cd18ccd-7ca2486f_0] Write session will now close 2025-12-23T18:41:21.042922Z :DEBUG: [] MessageGroupId [src] SessionId [src|d5687845-b46357c2-4cd18ccd-7ca2486f_0] Write session: aborting 2025-12-23T18:41:21.043329Z :INFO: [] MessageGroupId [src] SessionId [src|d5687845-b46357c2-4cd18ccd-7ca2486f_0] Write session: gracefully shut down, all writes complete 2025-12-23T18:41:21.043366Z :DEBUG: [] MessageGroupId [src] SessionId [src|d5687845-b46357c2-4cd18ccd-7ca2486f_0] Write session: destroy 2025-12-23T18:41:21.051403Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|d5687845-b46357c2-4cd18ccd-7ca2486f_0 grpc read done: success: 0 data: 2025-12-23T18:41:21.051439Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|d5687845-b46357c2-4cd18ccd-7ca2486f_0 grpc read failed 2025-12-23T18:41:21.051500Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|d5687845-b46357c2-4cd18ccd-7ca2486f_0 grpc closed 2025-12-23T18:41:21.051517Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|d5687845-b46357c2-4cd18ccd-7ca2486f_0 is DEAD 2025-12-23T18:41:21.052349Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:41:21.058793Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [3:7587125363686489962:2455] destroyed 2025-12-23T18:41:21.058846Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2025-12-23T18:41:21.058872Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:21.058886Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:21.058897Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:21.058914Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:21.058926Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist Session was created 2025-12-23T18:41:21.130655Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:21.130683Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:21.130695Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:21.130714Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:21.130726Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:21.230936Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:21.230975Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:21.230985Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:21.231006Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:21.231021Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:21.564641Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1177: ActorId: [3:7587125363686489999:2466] TxId: 281474976710676. Ctx: { TraceId: 01kd6883626yad107s0wn8djen, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NWE2YTI5MDMtNmJlM2JlNTQtYWM5NzdkMzAtNTY0MjNkMQ==, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-12-23T18:41:21.564795Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [3:7587125363686490003:2466], TxId: 281474976710676, task: 3. Ctx: { CheckpointId : . TraceId : 01kd6883626yad107s0wn8djen. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=NWE2YTI5MDMtNmJlM2JlNTQtYWM5NzdkMzAtNTY0MjNkMQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7587125363686489999:2466], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-12-23T18:41:22.567735Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=NWE2YTI5MDMtNmJlM2JlNTQtYWM5NzdkMzAtNTY0MjNkMQ==, ActorId: [3:7587125363686489964:2466], ActorState: ExecuteState, TraceId: 01kd6883626yad107s0wn8djen, Create QueryResponse for error on request, msg: , status: UNAVAILABLE, issues: { message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "Failed to send EvStartKqpTasksRequest because node is unavailable: 4" severity: 1 } } 2025-12-23T18:41:22.569488Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "Failed to send EvStartKqpTasksRequest because node is unavailable: 4" severity: 1 } } TxMeta { id: "01kd6883mjafzmbcdqf5qsjx5w" } } YdbStatus: UNAVAILABLE ConsumedRu: 298 } 2025-12-23T18:41:23.443011Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:41:23.443057Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |94.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-clusteradmin >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2025-12-23T18:40:51.850309Z :FallbackToSingleDb INFO: Random seed for debugging is 1766515251850265 2025-12-23T18:40:52.390795Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125237684309969:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:52.390987Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:40:52.444796Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:52.453736Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00571a/r3tmp/tmp6SKhTJ/pdisk_1.dat 2025-12-23T18:40:52.540298Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:40:52.812170Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:52.812384Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:52.830577Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:52.906716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:52.906914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:52.915358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:52.915449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:52.975355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:52.975750Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:40:52.980889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:53.052583Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29388, node 1 2025-12-23T18:40:53.079359Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:53.082863Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:53.327012Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/00571a/r3tmp/yandexJ3lDnS.tmp 2025-12-23T18:40:53.327039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/00571a/r3tmp/yandexJ3lDnS.tmp 2025-12-23T18:40:53.330954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/00571a/r3tmp/yandexJ3lDnS.tmp 2025-12-23T18:40:53.331092Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:40:53.398021Z INFO: TTestServer started on Port 28660 GrpcPort 29388 2025-12-23T18:40:53.399444Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:53.530885Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28660 PQClient connected to localhost:29388 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515253069 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:53.901763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-12-23T18:40:56.839623Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125252799573674:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:56.839743Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125252799573650:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:56.839883Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:56.857293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:56.863478Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125252799573707:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:56.863570Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:56.866190Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125252799573710:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:56.866272Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:56.919081Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587125252799573678:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-12-23T18:40:56.981427Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587125252799573712:2143] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:57.359609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:57.369069Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125254864180399:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:40:57.371266Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YzkxODVhMWQtMmY5NTNhZGEtNDg1MjU4OS1kZDIwZDdlYQ==, ActorId: [1:7587125254864180365:2329], ActorState: ExecuteState, TraceId: 01kd687bhjaadz4nmhm4x4z5bf, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:40:57.373676Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:40:57.371127Z node 2 :KQP_COMPILE_ACTOR ERROR: kq ... 
node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:23.730873Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:23.730913Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:23.730928Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:23.730948Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:23.730964Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:23.831185Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:23.831219Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:23.831235Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:23.831255Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:23.831280Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:23.934977Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:23.935021Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:23.935037Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:23.935062Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:23.935080Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:24.038937Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:24.038985Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:24.039011Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:24.039034Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:24.039051Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:24.178535Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:24.178586Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:24.178601Z node 4 
:PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:24.178622Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:24.178640Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:24.202932Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:41:24.202983Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:24.247223Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:24.247261Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:24.247275Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:24.247298Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:24.247313Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >>> Ready to answer: ok 2025-12-23T18:41:24.298555Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2025-12-23T18:41:24.298692Z :INFO: [/Root] [] [3a8356d7-2ba918cf-9fc6312-18fae84f] Open read subsessions to databases: { name: , endpoint: localhost:3343, path: /Root } 2025-12-23T18:41:24.298977Z :INFO: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] Starting read session 2025-12-23T18:41:24.299022Z :DEBUG: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] Starting single session 2025-12-23T18:41:24.302443Z :DEBUG: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-12-23T18:41:24.302800Z :DEBUG: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-12-23T18:41:24.302855Z :DEBUG: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] [] Reconnecting session to cluster in 0.000000s 2025-12-23T18:41:24.303087Z :ERROR: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:3343
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:3343. 2025-12-23T18:41:24.303175Z :DEBUG: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-12-23T18:41:24.303213Z :DEBUG: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-12-23T18:41:24.303361Z :INFO: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:3343" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:3343
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:3343. " } 2025-12-23T18:41:24.307005Z :NOTICE: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:41:24.307051Z :DEBUG: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:3343" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:3343
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:3343. " } 2025-12-23T18:41:24.307146Z :INFO: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] Closing read session. Close timeout: 0.010000s 2025-12-23T18:41:24.307198Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-12-23T18:41:24.307234Z :INFO: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] Counters: { Errors: 1 CurrentSessionLifetimeMs: 8 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:41:24.307276Z :INFO: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] Closing read session. Close timeout: 0.000000s 2025-12-23T18:41:24.307304Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-12-23T18:41:24.307346Z :INFO: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] Counters: { Errors: 1 CurrentSessionLifetimeMs: 8 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:41:24.307385Z :INFO: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] Closing read session. Close timeout: 0.000000s 2025-12-23T18:41:24.307413Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-12-23T18:41:24.307445Z :INFO: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] Counters: { Errors: 1 CurrentSessionLifetimeMs: 8 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:41:24.307510Z :NOTICE: [/Root] [/Root] [b890e660-452f952c-283a334a-81e9a189] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:41:24.351113Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:24.351153Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:24.351170Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:24.351194Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:24.351214Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:24.451156Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:24.451193Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:24.451210Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:24.451233Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:24.451252Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:24.552623Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:24.552666Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:24.552682Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:24.552704Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:24.552725Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |94.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> HttpRequest::Probe [GOOD] |94.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |94.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |94.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-ordinaryuser >> TSentinelTests::PDiskErrorState ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2025-12-23T18:41:07.132878Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125303829524445:2144];send_to=[0:7307199536658146131:7762515]; 
2025-12-23T18:41:07.132938Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:07.671385Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:07.671173Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125303277038144:2143];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:07.671238Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034d1/r3tmp/tmpx5oLFx/pdisk_1.dat 2025-12-23T18:41:07.887065Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:08.491407Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:08.487356Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:08.490962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:41:08.527084Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:08.734900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:08.734993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:08.737371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:08.737434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:08.918824Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:08.931095Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:41:08.932031Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:08.970845Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:08.967459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:08.972411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:08.982139Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 7567, node 1 2025-12-23T18:41:09.331519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0034d1/r3tmp/yandexh0wfGu.tmp 2025-12-23T18:41:09.331546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0034d1/r3tmp/yandexh0wfGu.tmp 2025-12-23T18:41:09.362015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0034d1/r3tmp/yandexh0wfGu.tmp 2025-12-23T18:41:09.370390Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:09.440247Z INFO: TTestServer started on Port 28240 GrpcPort 7567 TClient is connected to server localhost:28240 PQClient connected to localhost:7567 === TenantModeEnabled() = 1 === Init PQ - start server on port 7567 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515269043 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:41:10.236850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:41:10.237128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:10.237461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:41:10.237485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:41:10.237730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:41:10.237819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:10.251694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:41:10.251909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:41:10.252102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:10.252150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:41:10.252167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-12-23T18:41:10.252183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-12-23T18:41:10.259615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:10.259691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:41:10.259718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2025-12-23T18:41:10.261393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:41:10.261415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-12-23T18:41:10.261433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:41:10.264026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:10.264063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:10.264106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-12-23T18:41:10.264137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-12-23T18:41:10.268780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:41:10.276070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-12-23T18:41:10.276174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:41:10.279820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766515270324, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:41:10.287098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemesha ... 
5-12-23T18:41:25.420130Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710664, subscribers: 1 2025-12-23T18:41:25.420141Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7587125377047425502:2363] Create topic result: 1 === EnablePQLogs 2025-12-23T18:41:25.432195Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2025-12-23T18:41:25.432299Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 === CreateChannel === NewStub === InitializeWritePQService === InitializeWritePQService start iteration === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2025-12-23T18:41:25.553140Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2025-12-23T18:41:25.553195Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 1 2025-12-23T18:41:25.553892Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2025-12-23T18:41:25.553999Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:447: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:42570 2025-12-23T18:41:25.554014Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1538: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:42570 proto=v1 topic=Root/acc/topic1 durationSec=0 2025-12-23T18:41:25.554024Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2025-12-23T18:41:25.559669Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 1 sessionId: describe result for acl check 2025-12-23T18:41:25.559830Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-12-23T18:41:25.559842Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-12-23T18:41:25.559851Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic 
= $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-12-23T18:41:25.559886Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7587125377047425694:2367] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-12-23T18:41:25.559910Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-12-23T18:41:25.560832Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-12-23T18:41:25.561062Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 12345678|724d51e-fb94e9e9-dc6fe56d-6471f71f_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-12-23T18:41:25.561635Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|724d51e-fb94e9e9-dc6fe56d-6471f71f_0 2025-12-23T18:41:25.563317Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: 12345678|724d51e-fb94e9e9-dc6fe56d-6471f71f_0 grpc read done: success: 0 data: 2025-12-23T18:41:25.563334Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: 12345678|724d51e-fb94e9e9-dc6fe56d-6471f71f_0 grpc read failed 2025-12-23T18:41:25.563485Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 1 sessionId: 12345678|724d51e-fb94e9e9-dc6fe56d-6471f71f_0 2025-12-23T18:41:25.563498Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: 12345678|724d51e-fb94e9e9-dc6fe56d-6471f71f_0 is DEAD 2025-12-23T18:41:25.563770Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison Finish: 0 === InitializeWritePQService done === PersQueueClient === InitializePQ completed 2025-12-23T18:41:25.590590Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2025-12-23T18:41:25.590615Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2025-12-23T18:41:25.591093Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2025-12-23T18:41:25.591201Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:447: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:42570 2025-12-23T18:41:25.591220Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1538: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:42570 proto=v1 topic=topic1 durationSec=0 2025-12-23T18:41:25.591229Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2025-12-23T18:41:25.593366Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: describe result for acl check 2025-12-23T18:41:25.593500Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-12-23T18:41:25.593508Z node 3 :PQ_PARTITION_CHOOSER DEBUG: 
partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-12-23T18:41:25.593516Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-12-23T18:41:25.593551Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7587125377047425714:2376] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-12-23T18:41:25.593571Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-12-23T18:41:25.594251Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-12-23T18:41:25.594472Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 12345678|15c0a76c-e0f2bde0-d18d2507-40126bca_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-12-23T18:41:25.594886Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|15c0a76c-e0f2bde0-d18d2507-40126bca_0 2025-12-23T18:41:25.597509Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: 12345678|15c0a76c-e0f2bde0-d18d2507-40126bca_0 grpc read done: success: 0 data: 2025-12-23T18:41:25.597530Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: 12345678|15c0a76c-e0f2bde0-d18d2507-40126bca_0 grpc read failed 2025-12-23T18:41:25.597558Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: 12345678|15c0a76c-e0f2bde0-d18d2507-40126bca_0 grpc closed 2025-12-23T18:41:25.597579Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: 12345678|15c0a76c-e0f2bde0-d18d2507-40126bca_0 is DEAD 2025-12-23T18:41:25.598348Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:41:26.230035Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7587125381342393030:2386], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:41:26.231095Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=3&id=YTU5Yzk3Zi01NDI5MjI1MC02MWIwYjNkZS0yNjJjMzFhMw==, ActorId: [3:7587125381342393023:2382], ActorState: ExecuteState, TraceId: 01kd68884xdrgdtarx4069r0dk, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:41:26.231514Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |94.3%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TSentinelBaseTests::GuardianDataCenterRatio >> TSentinelBaseTests::GuardianDataCenterRatio [GOOD] >> TSentinelBaseTests::GuardianFaultyPDisks >> Cdc::VirtualTimestamps[TopicRunner] [GOOD] >> Cdc::Write[PqRunner] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> TSentinelBaseTests::GuardianFaultyPDisks [GOOD] >> TSentinelBaseTests::GuardianRackRatio >> TSentinelBaseTests::GuardianRackRatio [GOOD] >> TSentinelBaseTests::PDiskErrorState [GOOD] >> TSentinelBaseTests::PDiskInactiveAfterStateChange [GOOD] >> TSentinelBaseTests::PDiskFaultyState [GOOD] >> BasicUsage::PropagateSessionClosed [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> BasicUsage::ReadMirrored >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:37:45.023417Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:45.248272Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:37:45.256620Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:37:45.267263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:37:45.267412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:37:45.862769Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:46.044729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:46.044879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:46.120916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:46.304344Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003269/r3tmp/tmpUBuVt2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27868, node 1 2025-12-23T18:37:47.353725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:37:47.363138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:37:47.363218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:37:47.363252Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:37:47.363590Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:37:47.464370Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1454 2025-12-23T18:37:48.392997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:37:53.430293Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:37:53.444314Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:37:53.448970Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:37:53.498922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:53.499058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:53.550789Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:37:53.553395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:53.849496Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:37:53.849618Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:37:53.851783Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:53.852632Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:53.853345Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:53.854281Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:53.854576Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:53.858916Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:53.859280Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:53.859605Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:53.859875Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:37:53.890061Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:37:54.525507Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:54.649406Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:37:54.649552Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:37:54.813972Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:37:54.815008Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:37:54.815267Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:37:54.815341Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:37:54.815398Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:37:54.815471Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: 
table count# 0 2025-12-23T18:37:54.815542Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:37:54.815599Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:37:54.816159Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:37:54.827188Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:37:54.827307Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2118:2584], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:37:54.916769Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2156:2618] 2025-12-23T18:37:54.918408Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2156:2618], schemeshard id = 72075186224037897 2025-12-23T18:37:55.047687Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2246:2663] 2025-12-23T18:37:55.056250Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:37:55.094897Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Describe result: PathErrorUnknown 2025-12-23T18:37:55.094978Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Creating table 2025-12-23T18:37:55.095072Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:37:55.101394Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2305:2692], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:37:55.113462Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:37:55.133393Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:37:55.133572Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Subscribe on create table tx: 281474976720657 2025-12-23T18:37:55.169842Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:37:55.497811Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:37:55.516119Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:37:56.187756Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:37:56.314524Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:2668]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:37:56.314622Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2254:2669] Owner: [2:2253:26 ... GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:38:42.487613Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:5408:4742], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-12-23T18:38:42.646646Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:5485:4788], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:38:42.741869Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:5484:4787] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:38:43.433124Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:5506:4801]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:38:43.433581Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:38:43.433689Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:5508:4803] 2025-12-23T18:38:43.433769Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:5508:4803] 2025-12-23T18:38:43.434319Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5509:4804] 2025-12-23T18:38:43.434458Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5508:4803], server id = [2:5509:4804], tablet id = 72075186224037894, status = OK 2025-12-23T18:38:43.434569Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5509:4804], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:38:43.434652Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:38:43.434833Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:38:43.434926Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5506:4801], StatRequests.size() = 1 2025-12-23T18:38:43.435045Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:40:18.753343Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5370:2460], ActorId: [2:5380:4727], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:40:18.753633Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5370:2460], ActorId: [2:5380:4727], Start read next stream part 2025-12-23T18:40:18.756278Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd68384f4p21vxtatrr5fnvf", SessionId: ydb://session/3?node_id=2&id=MTY4OTVmODItMjhjYmYyOTAtYTcwMmI0MzQtOWUyZDY0MTI=, Slow query, duration: 96.420110s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:40:18.757096Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5370:2460], ActorId: [2:5765:4975], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:40:18.762391Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5370:2460], ActorId: [2:5765:4975], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table` 2025-12-23T18:40:18.762532Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5370:2460], ActorId: [2:5765:4975], Start read next stream part 2025-12-23T18:40:18.762685Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 37090, txId: 18446744073709551615] shutting down 2025-12-23T18:40:18.766617Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5370:2460], ActorId: [2:5380:4727], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:40:18.768043Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5370:2460], ActorId: [2:5380:4727], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDczYTZiZGEtYTIxZTJlYS1mYzE4NmQyZS01YzA4MDkyYw==, TxId: 2025-12-23T18:40:18.898946Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5792:4998]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:40:18.899235Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:40:18.899289Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5792:4998], StatRequests.size() = 1 2025-12-23T18:41:26.280877Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5370:2460], ActorId: [2:5765:4975], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:41:26.281067Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5370:2460], ActorId: [2:5765:4975], Start read next stream part 2025-12-23T18:41:26.281863Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd6866aa7rj2hj7sgpzjvr28", SessionId: ydb://session/3?node_id=2&id=ZjBlODQ0N2QtODA1NmE5ZjgtNWNiYTcwZTItNjc1ZGZjY2U=, Slow query, duration: 67.515374s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:41:26.285229Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 37160, txId: 18446744073709551615] shutting down 2025-12-23T18:41:26.285529Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:6011:5136], ActorId: [2:6012:5137], Starting query actor #1 [2:6013:5138] 
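(Editor's note, for readability only: the 67.515374s KQP_SLOW_LOG entry just above reports the column-statistics scan that the statistics service runs via RunStreamQuery. The log prints the statement on one line with escaped newlines; reflowed below with every identifier, UDF name and parameter exactly as logged, nothing added. Judging by the CMS* UDF names it presumably builds a count-min-sketch aggregate over the Value column.)

$f0 = ($p0,$p1) -> { return AggregationFactory(
    "UDAF",
    ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },
    ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },
    StatisticsInternal::CMSMerge,
    StatisticsInternal::CMSFinalize,
    StatisticsInternal::CMSSerialize,
    StatisticsInternal::CMSDeserialize,
)
};
SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table`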
2025-12-23T18:41:26.285594Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6012:5137], ActorId: [2:6013:5138], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:41:26.289638Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:6012:5137], ActorId: [2:6013:5138], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZDgyY2M1OTEtNDY2YWVkN2MtYjFiZDM5ZDEtYTM1N2E3YTY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:41:26.290565Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5370:2460], ActorId: [2:5765:4975], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:41:26.290625Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5370:2460], ActorId: [2:5765:4975], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWU1YTY2YTgtNjVlZWNlMjktNDA3YWQyNDgtZDVhNGMxOTE=, TxId: 2025-12-23T18:41:26.499628Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:6030:5152]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:41:26.499983Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:41:26.500035Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:6030:5152], StatRequests.size() = 1 2025-12-23T18:41:26.763183Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6012:5137], ActorId: [2:6013:5138], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDgyY2M1OTEtNDY2YWVkN2MtYjFiZDM5ZDEtYTM1N2E3YTY=, TxId: 2025-12-23T18:41:26.763281Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6012:5137], ActorId: [2:6013:5138], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDgyY2M1OTEtNDY2YWVkN2MtYjFiZDM5ZDEtYTM1N2E3YTY=, TxId: 2025-12-23T18:41:26.763725Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6011:5136], ActorId: [2:6012:5137], Got response [2:6013:5138] SUCCESS 2025-12-23T18:41:26.764111Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:41:26.800370Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:41:26.800459Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=LIG$T`nw, ActorId=[1:3807:3503] 2025-12-23T18:41:26.801969Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:6048:4155]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:41:26.802338Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:41:26.802404Z node 1 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:41:26.802708Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:41:26.802801Z node 1 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:41:26.802878Z node 1 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-12-23T18:41:26.879009Z node 1 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table[Value]=100' |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |94.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-ordinaryuser |94.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |94.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::PDiskFaultyState [GOOD] |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-clusteradmin |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::GuardianRackRatio [GOOD] |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskPileGuardFullPile [GOOD] >> TSentinelTests::PDiskPileGuardConfig >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] >> TCdcStreamTests::MeteringDedicated [GOOD] >> TCdcStreamTests::ChangeOwner >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRemoveServerlessTenant |94.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |94.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |94.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD] >> TConsoleTests::TestAlterServerlessTenant >> Cdc::DocApi[TopicRunner] [GOOD] >> Cdc::HugeKey[PqRunner] >> Cdc::NaN[PqRunner] [GOOD] >> Cdc::NaN[YdsRunner] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-clusteradmin |94.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/etcd_proxy/etcd_proxy |94.3%| [LD] {RESULT} $(B)/ydb/apps/etcd_proxy/etcd_proxy |94.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/etcd_proxy/etcd_proxy >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> TKeyValueTracingTest::WriteSmall ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2025-12-23T18:40:54.421009Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1766515254420979 2025-12-23T18:40:54.832236Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125245901567115:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:54.832408Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056e9/r3tmp/tmpe8jxai/pdisk_1.dat 2025-12-23T18:40:54.941168Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:40:54.951460Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:40:55.247573Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:55.247756Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:55.260202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:55.415648Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:55.526119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:55.526269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:55.528805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:55.528882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:55.564729Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:55.574293Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:55.576714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:55.583863Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:40:55.585817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29364, node 1 2025-12-23T18:40:55.726921Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.011427s 2025-12-23T18:40:55.756229Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.016053s 2025-12-23T18:40:55.827552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0056e9/r3tmp/yandexVwCz6w.tmp 2025-12-23T18:40:55.828009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0056e9/r3tmp/yandexVwCz6w.tmp 2025-12-23T18:40:55.828890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0056e9/r3tmp/yandexVwCz6w.tmp 2025-12-23T18:40:55.828998Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:40:55.844453Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:55.908236Z INFO: TTestServer started on Port 20424 GrpcPort 29364 2025-12-23T18:40:55.902904Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:55.943726Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20424 PQClient connected to localhost:29364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515255631 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:40:56.578375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-12-23T18:40:59.834899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125245901567115:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:59.835203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:59.867792Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125265871095273:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:59.867916Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125265871095286:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:59.867979Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:59.870283Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125265871095290:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:59.870362Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:59.886022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:59.914517Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587125265871095289:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-12-23T18:40:59.983084Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587125265871095319:2145] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:41:00.220581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:00.224160Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125267376404862:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:41:00.224673Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MmE2NzljZDktZTgyNWRlNjctM2U4NTdhZDgtODFhNWY0ZWQ=, ActorId: [1:7587125267376404802:2329], ActorState: ExecuteState, TraceId: 01kd687efw56yt6j60nh59yqde, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:41:00.225575Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587125265871095334:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:41:00.225979Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=NjMyNmFhZDQtMTQ4OGZjNjYtMTI3MmMwY ... -fe6428a1-cb95fd08_0 is DEAD 2025-12-23T18:41:27.023841Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:41:27.003135Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.003148Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:27.025014Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [3:7587125381510000231:2452] destroyed 2025-12-23T18:41:27.025057Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2025-12-23T18:41:27.025082Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:27.025096Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.025111Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:27.025128Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.025138Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist Session was created 2025-12-23T18:41:27.108380Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:27.108421Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.108435Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:27.108452Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.108462Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >>> Ready to answer: ok 2025-12-23T18:41:27.156772Z :ERROR: [/Root] OnFederationDiscovery: Got error. Status: UNAVAILABLE. 
Description: 2025-12-23T18:41:27.209951Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:27.209993Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.210005Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:27.210024Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.210036Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:27.309931Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:27.309979Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.309995Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:27.310017Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.310032Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:27.410343Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:27.410395Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.410407Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:27.410445Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.410457Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:27.511106Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:27.511141Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.511154Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:27.511172Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.511185Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:27.614985Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:27.615026Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.615041Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user 
action and tx pending commits 2025-12-23T18:41:27.615073Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:27.615088Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:29.574851Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:41:29.574888Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:29.813336Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:29.813371Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:29.813386Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:29.813402Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:29.813416Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:29.914865Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:29.914912Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:29.914927Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:29.914948Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:29.914962Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:30.014697Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:30.014794Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:30.014809Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:30.014831Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:30.014846Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:30.114974Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:30.115011Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:30.115024Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:30.115046Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:30.115059Z node 4 
:PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:30.217295Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:30.217341Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:30.217357Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:30.217382Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:30.217399Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:30.318890Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:30.318946Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:30.318962Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:30.318984Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:30.318998Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:30.419285Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:30.419325Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:30.419351Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:30.419374Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:30.419388Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] Test command err: Trying to start YDB, gRPC: 64804, MsgBus: 6642 2025-12-23T18:39:34.741094Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124903476892039:2210];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:34.741241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0061dd/r3tmp/tmpXKMJSa/pdisk_1.dat 2025-12-23T18:39:35.501523Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:35.538841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-12-23T18:39:35.538966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:35.627869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:35.773530Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:35.793790Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124903476891867:2081] 1766515174716546 != 1766515174716549 2025-12-23T18:39:35.822721Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:35.842578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 64804, node 1 2025-12-23T18:39:36.695386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:36.695407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:36.695414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:36.695505Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6642 TClient is connected to server localhost:6642 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515176181 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:38.151001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:39:38.168735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:38.190665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:38.630173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:38.834370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:38.995538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:39.767298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124903476892039:2210];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:39.767457Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:43.107796Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124942131599450:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:43.107956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:43.108364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124942131599459:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:43.108413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.100947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.157300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.203959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.252679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.309122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.360987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.418311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.553825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.722321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124946426567653:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.722394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.722684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124946426567658:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.722717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124946426567659:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.722807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [Work ... : Root/.metadata/script_executions 2025-12-23T18:41:11.890912Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1756 TClient is connected to server localhost:1756 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515271304 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:12.544849Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:41:12.556352Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:41:19.255828Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125354897017927:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:19.255982Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:19.256860Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125354897017962:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:19.271969Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:41:19.334644Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7587125354897017964:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:41:19.403214Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7587125354897018026:2558] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; Trying to start YDB, gRPC: 65025, MsgBus: 21649 2025-12-23T18:41:21.353388Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7587125361023253338:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:21.353488Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0061dd/r3tmp/tmpjnOzsQ/pdisk_1.dat 2025-12-23T18:41:21.430190Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:21.662902Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:21.679108Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:21.686858Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:7587125361023253316:2081] 1766515281352723 != 1766515281352726 2025-12-23T18:41:21.696836Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:21.696987Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:21.743415Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65025, node 13 2025-12-23T18:41:21.986914Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:41:21.986954Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:41:21.986978Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:41:21.987124Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:22.118892Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:22.391599Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21649 TClient is connected to server localhost:21649 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515281804 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:23.290160Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:41:23.304904Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:41:26.353655Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7587125361023253338:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:26.353760Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:41:30.360316Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7587125399677959892:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:30.360400Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7587125399677959884:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:30.360632Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:30.361478Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7587125399677959899:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:30.361600Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:30.367127Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:41:30.392034Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7587125399677959898:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:41:30.468576Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7587125399677959955:2550] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] |94.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] Test command err: === Server->StartServer(false); 2025-12-23T18:41:11.110133Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125319798859686:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:11.110178Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:11.382102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:41:11.382203Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034cc/r3tmp/tmpFkefyd/pdisk_1.dat 2025-12-23T18:41:11.478789Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125319886084510:2163];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:11.487030Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:11.487417Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:41:11.487616Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:11.805882Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:11.831828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:11.874755Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:11.874875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:11.907909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:11.907988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:11.990067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:12.067284Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:12.072105Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:12.083507Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:41:12.083756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:12.085489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:12.198957Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TServer::EnableGrpc on GrpcPort 27497, node 1 2025-12-23T18:41:12.226836Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007102s 2025-12-23T18:41:12.443382Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0034cc/r3tmp/yandexaIWNRQ.tmp 2025-12-23T18:41:12.443413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0034cc/r3tmp/yandexaIWNRQ.tmp 2025-12-23T18:41:12.443577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0034cc/r3tmp/yandexaIWNRQ.tmp 2025-12-23T18:41:12.443670Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:12.471002Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:12.549317Z INFO: TTestServer started on Port 30192 GrpcPort 27497 TClient is connected to server localhost:30192 PQClient connected to localhost:27497 === TenantModeEnabled() = 1 === Init PQ - start server on port 27497 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515272151 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:13.251851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:41:13.252244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:13.252508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:41:13.252543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:41:13.252813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:41:13.253204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:13.260806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:41:13.260976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:41:13.261175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:13.261215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:41:13.261234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 
ProgressState no shards to create, do next state 2025-12-23T18:41:13.261248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2025-12-23T18:41:13.268062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:13.268118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:41:13.268138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 waiting... 2025-12-23T18:41:13.274446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:13.274508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:13.274531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-12-23T18:41:13.274586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-12-23T18:41:13.279876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:41:13.283002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:41:13.283041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-12-23T18:41:13.283074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:41:13.285856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-12-23T18:41:13.285993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationBy ... 
3T18:41:30.312878Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976715666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:45652" , at schemeshard: 72057594046644480 2025-12-23T18:41:30.313183Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_modify_acl.cpp:33: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715666:0, at schemeshard: 72057594046644480 2025-12-23T18:41:30.313329Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 46] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 45] 2025-12-23T18:41:30.313339Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2025-12-23T18:41:30.313470Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-12-23T18:41:30.313499Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:41:30.313587Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715666:0 progress is 1/1 2025-12-23T18:41:30.313597Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-12-23T18:41:30.313618Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715666:0 progress is 1/1 2025-12-23T18:41:30.313627Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-12-23T18:41:30.313670Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2025-12-23T18:41:30.313710Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715666, ready parts: 1/1, is published: false 2025-12-23T18:41:30.313734Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2025-12-23T18:41:30.313745Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-12-23T18:41:30.313756Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715666:0 2025-12-23T18:41:30.313766Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715666, publications: 1, subscribers: 0 2025-12-23T18:41:30.313777Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976715666, [OwnerId: 72057594046644480, LocalPathId: 46], 4 2025-12-23T18:41:30.317374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715666, response: Status: StatusSuccess TxId: 281474976715666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-12-23T18:41:30.317662Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, remove access: -():test_user_0@builtin:- 2025-12-23T18:41:30.317839Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:41:30.317853Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715666, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2025-12-23T18:41:30.318022Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:41:30.318038Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7587125368644365080:2390], at schemeshard: 72057594046644480, txId: 281474976715666, path id: 46 2025-12-23T18:41:30.319364Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2025-12-23T18:41:30.319445Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2025-12-23T18:41:30.319457Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715666 2025-12-23T18:41:30.319473Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715666, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 4 2025-12-23T18:41:30.319491Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2025-12-23T18:41:30.319623Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 0 2025-12-23T18:41:30.322281Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715666 ===Wait for session created with token with removed ACE to die2025-12-23T18:41:30.847070Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7587125398709137491:2390], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:41:30.849875Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=3&id=MzI4MTQ4NDMtNzk2ZmEwOTItYTViOGQ3YTUtOGY4YjJjYjM=, ActorId: [3:7587125398709137489:2389], ActorState: ExecuteState, TraceId: 01kd688cn585z65g629h0125dx, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:41:30.856178Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:41:31.282647Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2025-12-23T18:41:31.286905Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: test-group-id|adbcbc80-3bde142e-b6ae2d46-eb54a63_0 describe result for acl check 2025-12-23T18:41:31.287088Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:815: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|adbcbc80-3bde142e-b6ae2d46-eb54a63_0 2025-12-23T18:41:31.287468Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-group-id|adbcbc80-3bde142e-b6ae2d46-eb54a63_0 is DEAD 2025-12-23T18:41:31.287867Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } 2025-12-23T18:41:31.892047Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7587125403004104812:2398], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:41:31.895006Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=3&id=MzQ0ODdjYWItMTRmNWY3YmMtYWY1MWYzZS1lMGMxZDc5Mw==, ActorId: [3:7587125403004104810:2397], ActorState: ExecuteState, TraceId: 01kd688dpr1x7w6qxn3ctb4bsz, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:41:31.895398Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |94.3%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream >> KqpStats::StatsProfile |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-clusteradmin >> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink >> TSentinelTests::PDiskPileGuardConfig [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-12-23T18:41:13.183129Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125328351484162:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:13.183313Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:13.344854Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:41:13.410959Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:13.424653Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125328248374449:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:13.424705Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034cb/r3tmp/tmpx5xLIH/pdisk_1.dat 2025-12-23T18:41:13.533516Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:13.938844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:13.947090Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:14.486338Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:14.486662Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:14.518698Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:41:14.534908Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:14.553035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:41:14.571724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:14.698110Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:14.760235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:14.760325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:14.791356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:14.791426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:14.849549Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:14.956731Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:14.989852Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:41:15.012349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:15.019112Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 8461, node 1 2025-12-23T18:41:15.449876Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:15.483369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0034cb/r3tmp/yandexr2xAev.tmp 2025-12-23T18:41:15.483398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0034cb/r3tmp/yandexr2xAev.tmp 2025-12-23T18:41:15.483552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0034cb/r3tmp/yandexr2xAev.tmp 2025-12-23T18:41:15.483648Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:15.674403Z INFO: TTestServer started on Port 24782 GrpcPort 8461 TClient is connected to server localhost:24782 PQClient connected to localhost:8461 === TenantModeEnabled() = 1 === Init PQ - start server on port 8461 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515275133 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:41:16.501037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:41:16.501195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:16.501385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:41:16.501402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:41:16.501566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:41:16.501617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:16.532396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:41:16.535450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:41:16.535649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:16.536279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:41:16.547266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-12-23T18:41:16.547295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2025-12-23T18:41:16.580858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:16.580914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:41:16.580941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 waiting... 
2025-12-23T18:41:16.586574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:16.586618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:16.586635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-12-23T18:41:16.586657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-12-23T18:41:16.591078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:41:16.595598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompl ... 493150Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.493165Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.493175Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:41:33.493406Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OGNiZTFlMDktZWZhMzE5MDQtNzE4MDFkNDUtYTQ1MjlkZDI=, ActorId: [1:7587125414250833621:2613], ActorState: ExecuteState, TraceId: 01kd688f7x6ybt24pjvjc2z4gp, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:41:33.493716Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:41:33.500450Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.500472Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.500482Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.500507Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.500516Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 2025-12-23T18:41:33.582950Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.582976Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.582986Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.583001Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.583011Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:41:33.583067Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.583075Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.583082Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.583092Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.583099Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:41:33.594961Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.594987Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.594998Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.595013Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.595021Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:41:33.602887Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.602911Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2025-12-23T18:41:33.602922Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.602937Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.602948Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 2025-12-23T18:41:33.683092Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.683096Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.683115Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.683117Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.683126Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.683126Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.683141Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.683142Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.683151Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:41:33.683151Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:41:33.699120Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.699148Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.699159Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.699175Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.699184Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:41:33.703718Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.703741Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.703751Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.703767Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.703777Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 
2025-12-23T18:41:33.789065Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.789096Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.789106Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.789122Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.789131Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:41:33.789186Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.789197Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.789204Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.789214Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.789220Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:41:33.802814Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.802844Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.802854Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.802871Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.802881Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:41:33.806874Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:33.806900Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.806910Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:33.806924Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:33.806934Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist |94.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-false >> TKeyValueTracingTest::WriteSmall [GOOD] |94.3%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |94.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |94.4%| [TM] {BAZEL_UPLOAD} 
ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-dbadmin >> Cdc::Write[PqRunner] [GOOD] >> Cdc::Write[YdsRunner] >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> KqpScanArrowFormat::AggregateEmptySum >> ClusterBalancing::ClusterBalancingEvenDistribution >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-dbadmin |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-ordinaryuser |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskPileGuardConfig [GOOD] Test command err: 2025-12-23T18:41:28.734489Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.009529s 2025-12-23T18:41:29.233008Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2025-12-23T18:41:29.233073Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2025-12-23T18:41:29.233137Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:41:29.233168Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2025-12-23T18:41:29.233214Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-12-23T18:41:29.233327Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-12-23T18:41:29.234231Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: 
DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-12-23T18:41:29.242597Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: 
"/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 
2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: ... 
{ PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3600110 2025-12-23T18:41:35.057721Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3600110 2025-12-23T18:41:35.057867Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3600110 2025-12-23T18:41:35.057934Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:41:35.070977Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:41:35.071059Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2025-12-23T18:41:35.071167Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2025-12-23T18:41:35.071229Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-12-23T18:41:35.071271Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-12-23T18:41:35.071308Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-12-23T18:41:35.071334Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-12-23T18:41:35.071362Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-12-23T18:41:35.071393Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-12-23T18:41:35.071430Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-12-23T18:41:35.071807Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle 
TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2025-12-23T18:41:35.072661Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2025-12-23T18:41:35.072901Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2025-12-23T18:41:35.073220Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2025-12-23T18:41:35.073387Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: 
"/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2025-12-23T18:41:35.073597Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2025-12-23T18:41:35.073732Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2025-12-23T18:41:35.073901Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2025-12-23T18:41:35.073980Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:41:35.074458Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:61, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:41:35.074524Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:63, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:41:35.074556Z node 9 :CMS NOTICE: 
sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:60, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:41:35.074584Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 15:62, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:41:35.074620Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 4 2025-12-23T18:41:35.077067Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 4 2025-12-23T18:41:35.077116Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:60 2025-12-23T18:41:35.077162Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:61 2025-12-23T18:41:35.077201Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:62 2025-12-23T18:41:35.077235Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 15:63 |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [GOOD] |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-clusteradmin |94.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |94.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |94.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-clusteradmin >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-ordinaryuser >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2025-12-23T18:41:16.834449Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125338307418407:2212];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:16.834577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:16.911847Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:16.894999Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125338394218840:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:16.895052Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:17.050046Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034c1/r3tmp/tmp49LtJJ/pdisk_1.dat 2025-12-23T18:41:17.528214Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:17.538048Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:17.783051Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:17.814872Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:17.869246Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:17.895575Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:41:17.994168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:41:18.035030Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:18.182732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:18.182851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:18.196871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:18.196987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:18.378226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:18.451352Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:41:18.468387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:18.477834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:18.503322Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:18.510521Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 2864, node 1 2025-12-23T18:41:19.015057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0034c1/r3tmp/yandexWqMH2D.tmp 2025-12-23T18:41:19.015088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0034c1/r3tmp/yandexWqMH2D.tmp 2025-12-23T18:41:19.015252Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0034c1/r3tmp/yandexWqMH2D.tmp 2025-12-23T18:41:19.015342Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:19.115734Z INFO: TTestServer started on Port 26597 GrpcPort 2864 TClient is connected to server localhost:26597 PQClient connected to localhost:2864 === TenantModeEnabled() = 1 === Init PQ - start server on port 2864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515278577 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:41:19.889804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:41:19.890022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:19.890247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:41:19.890272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:41:19.890466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:41:19.890531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:19.898845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:41:19.899064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:41:19.899810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:19.899853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:41:19.899873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-12-23T18:41:19.899887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 waiting... 
2025-12-23T18:41:19.916067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:19.916117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:41:19.916140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2025-12-23T18:41:19.966401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:19.966446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:41:19.966466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-12-23T18:41:19.966499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-12-23T18:41:20.007260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:41:20.013572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:41:20.013602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-12-23T18:41:20.013626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:41:20.01901 ... 
te_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-12-23T18:41:35.251167Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 auth for : consumer_aba 2025-12-23T18:41:35.252002Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:131: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 Handle describe topics response 2025-12-23T18:41:35.252117Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 auth is DEAD 2025-12-23T18:41:35.252202Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1058: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 auth ok: topics# 1, initDone# 0 2025-12-23T18:41:35.253489Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1229: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 register session: topic# /Root/account1/write_topic 2025-12-23T18:41:35.254451Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037894][write_topic] pipe [3:7587125420795817913:2388] connected; active server actors: 1 2025-12-23T18:41:35.255731Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1708: [72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7587125420795817913:2388] session consumer_aba_3_2_7795386351271775142_v1 2025-12-23T18:41:35.255815Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:639: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2025-12-23T18:41:35.255919Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:669: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2025-12-23T18:41:35.255969Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:871: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_7795386351271775142_v1" (Sender=[3:7587125420795817910:2388], Pipe=[3:7587125420795817913:2388], Partitions=[], ActiveFamilyCount=0) 2025-12-23T18:41:35.256004Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2025-12-23T18:41:35.256093Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037894][write_topic] consumer consumer_aba balancing. 
Sessions=1, Families=1, UnreadableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-12-23T18:41:35.256157Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1306: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_7795386351271775142_v1" (Sender=[3:7587125420795817910:2388], Pipe=[3:7587125420795817913:2388], Partitions=[], ActiveFamilyCount=0) 2025-12-23T18:41:35.256237Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:549: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_7795386351271775142_v1" sender [3:7587125420795817910:2388] lock partition 0 for ReadingSession "consumer_aba_3_2_7795386351271775142_v1" (Sender=[3:7587125420795817910:2388], Pipe=[3:7587125420795817913:2388], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-12-23T18:41:35.256308Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-12-23T18:41:35.256346Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000226s 2025-12-23T18:41:35.258356Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1347: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_7795386351271775142_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7587125420795817913 RawX2: 4503612512274772 } Path: "/Root/account1/write_topic" } 2025-12-23T18:41:35.258511Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:1144: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2025-12-23T18:41:35.259254Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:984: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1, pipe: [3:7587125420795817915:2391] 2025-12-23T18:41:35.260566Z :INFO: [/Root] [/Root] [9fbdaccf-c54cddfc-9fe6d251-66b52811] [null] Server session id: consumer_aba_3_2_7795386351271775142_v1 2025-12-23T18:41:35.260517Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: consumer_aba_3_2_7795386351271775142_v1:1 with generation 1 2025-12-23T18:41:35.261127Z :DEBUG: [/Root] [/Root] [9fbdaccf-c54cddfc-9fe6d251-66b52811] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:41:35.262242Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 grpc read done: success# 1, data# { read { } } 2025-12-23T18:41:35.262398Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 got read request: guid# 225cfc94-1a62b1f2-123b60d6-7c2ca236 2025-12-23T18:41:35.270668Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 2 consumer 
consumer_aba session consumer_aba_3_2_7795386351271775142_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1766515295117 CreateTimestampMS: 1766515295114 SizeLag: 165 WriteTimestampEstimateMS: 1766515295117 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-12-23T18:41:35.271032Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2025-12-23T18:41:35.271095Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2025-12-23T18:41:35.272325Z :INFO: [/Root] [/Root] [9fbdaccf-c54cddfc-9fe6d251-66b52811] Closing read session. Close timeout: 0.000000s 2025-12-23T18:41:35.272398Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2025-12-23T18:41:35.272441Z :INFO: [/Root] [/Root] [9fbdaccf-c54cddfc-9fe6d251-66b52811] Counters: { Errors: 0 CurrentSessionLifetimeMs: 29 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:41:35.272538Z :NOTICE: [/Root] [/Root] [9fbdaccf-c54cddfc-9fe6d251-66b52811] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T18:41:35.272580Z :DEBUG: [/Root] [/Root] [9fbdaccf-c54cddfc-9fe6d251-66b52811] [null] Abort session to cluster 2025-12-23T18:41:35.273074Z :NOTICE: [/Root] [/Root] [9fbdaccf-c54cddfc-9fe6d251-66b52811] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:41:35.275149Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 grpc read done: success# 0, data# { } 2025-12-23T18:41:35.275984Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 grpc read failed 2025-12-23T18:41:35.276042Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 grpc closed 2025-12-23T18:41:35.277083Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: consumer_aba_3_2_7795386351271775142_v1 2025-12-23T18:41:35.277181Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer consumer_aba session consumer_aba_3_2_7795386351271775142_v1 is DEAD 2025-12-23T18:41:35.280075Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037894][write_topic] pipe [3:7587125420795817913:2388] disconnected. 2025-12-23T18:41:35.280125Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037894][write_topic] pipe [3:7587125420795817913:2388] disconnected; active server actors: 1 2025-12-23T18:41:35.280153Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037894][write_topic] pipe [3:7587125420795817913:2388] client consumer_aba disconnected session consumer_aba_3_2_7795386351271775142_v1 2025-12-23T18:41:35.378663Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7587125420795817923:2393], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:41:35.381894Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=3&id=N2I0NTUyNGUtYzlkMGRiZGMtMWYwMWJiNTUtYzJjMGI0M2M=, ActorId: [3:7587125420795817921:2392], ActorState: ExecuteState, TraceId: 01kd688h357emy48dsx7e1vxmt, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:41:35.387299Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |94.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |94.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |94.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |94.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> KqpResultSetFormats::ArrowFormat_Compression_None [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2025-12-23T18:41:37.538540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:37.538616Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) ... blocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE cookie 0 ... unblocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE 2025-12-23T18:41:38.628807Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:38.628888Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |94.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |94.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |94.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-ordinaryuser >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestAuthorization >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] |94.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |94.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |94.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-ordinaryuser [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-dbadmin >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-clusteradmin |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-system |94.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |94.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |94.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-system >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas >> KqpStats::StatsProfile [GOOD] >> KqpStats::StreamLookupStats+StreamLookupJoin >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:38:21.051845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:38:21.051945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:38:21.051998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:38:21.052045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:38:21.052088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:38:21.052128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:38:21.052193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:38:21.052319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:38:21.053195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:38:21.053509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:38:21.147189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:38:21.147257Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:21.153408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:38:21.153654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:38:21.153800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:38:21.157082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:38:21.157280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:38:21.158119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:38:21.158375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:38:21.160614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:38:21.160812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:38:21.162069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:38:21.162137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:38:21.162256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:38:21.162303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:38:21.162951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2025-12-23T18:38:21.163156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:38:21.361908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.372595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.372843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.372945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.373017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.373081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.373192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.373287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.373355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.373424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.373490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.373551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.373691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.373786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:38:21.373890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 51615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:41:41.635695Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:41:41.635733Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:41:41.637210Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:41:41.637309Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:41:41.637346Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:41:41.637386Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2025-12-23T18:41:41.637435Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2025-12-23T18:41:41.637550Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2025-12-23T18:41:41.639223Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:41:41.639296Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:41:41.639699Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:41:41.639863Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 5/5 2025-12-23T18:41:41.639915Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-12-23T18:41:41.639985Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 5/5 2025-12-23T18:41:41.640024Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-12-23T18:41:41.640085Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2025-12-23T18:41:41.640210Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation 
DoNotify send TEvNotifyTxCompletionResult to actorId: [20:790:2756] message: TxId: 103 2025-12-23T18:41:41.640307Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-12-23T18:41:41.640398Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:41:41.640477Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:41:41.640669Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:41:41.640758Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:1 2025-12-23T18:41:41.640791Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:1 2025-12-23T18:41:41.640836Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:41:41.640870Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:2 2025-12-23T18:41:41.640900Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:2 2025-12-23T18:41:41.640958Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2025-12-23T18:41:41.640995Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:3 2025-12-23T18:41:41.641026Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:3 2025-12-23T18:41:41.641071Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:41:41.641106Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:4 2025-12-23T18:41:41.641138Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:4 2025-12-23T18:41:41.641251Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 2025-12-23T18:41:41.642720Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:41:41.642950Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2025-12-23T18:41:41.643136Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2025-12-23T18:41:41.643237Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 
2025-12-23T18:41:41.643290Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:41:41.643800Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:41:41.645305Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:41:41.646185Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:41:41.646251Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:41:41.646394Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:41:41.646538Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:41:41.649176Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:41:41.649297Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [20:1142:3041] 2025-12-23T18:41:41.649644Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-12-23T18:41:41.650426Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:41:41.650929Z node 20 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 580us result status StatusPathDoesNotExist 2025-12-23T18:41:41.651286Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:41:41.652289Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:41:41.652790Z node 20 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 542us result status StatusPathDoesNotExist 2025-12-23T18:41:41.653103Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:41:43.692787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:41:43.692909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:41:43.692956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:41:43.693000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:41:43.693041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:41:43.693078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:41:43.693136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:41:43.693240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:41:43.694261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:41:43.694598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:41:43.923188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:43.923269Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:43.939018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:41:43.939291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:41:43.939435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:41:43.951310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:41:43.951581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:41:43.952475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:41:43.952812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:41:43.956390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:41:43.956612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:41:43.958099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:41:43.958179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:41:43.958331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:41:43.958387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:41:43.958441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:41:43.958601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:41:43.974406Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:41:44.132557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:41:44.132808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:41:44.133054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:41:44.133114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:41:44.133383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:41:44.133460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:44.136025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:41:44.136251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:41:44.136471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:41:44.136526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:41:44.136565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:41:44.136606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:41:44.139023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:41:44.139102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:41:44.139149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:41:44.140772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:41:44.140808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:41:44.140866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:41:44.140907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:41:44.149039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:41:44.150527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:41:44.150687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:41:44.151827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:41:44.151952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:41:44.152059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:41:44.152379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:41:44.152442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:41:44.152610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:41:44.152670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:41:44.155291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:41:44.155337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:41:44.435440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-12-23T18:41:44.436190Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-12-23T18:41:44.438898Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2025-12-23T18:41:44.439127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-12-23T18:41:44.439364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 2025-12-23T18:41:44.440220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:41:44.440446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-12-23T18:41:44.442490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:41:44.442711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:41:44.444133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:41:44.444203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:41:44.444374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:41:44.445728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:41:44.445781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, 
LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:41:44.445855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:41:44.446466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-12-23T18:41:44.446539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-12-23T18:41:44.447250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:41:44.447295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:41:44.450933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:41:44.450988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-12-23T18:41:44.451166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-12-23T18:41:44.451196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-12-23T18:41:44.451308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:41:44.451330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:41:44.451413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6152: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-12-23T18:41:44.454064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:41:44.454145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-12-23T18:41:44.454284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:41:44.454389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-12-23T18:41:44.454696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-12-23T18:41:44.454759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-12-23T18:41:44.454873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:41:44.454917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, 
SendToSchemeshard, txId 101 2025-12-23T18:41:44.455403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-12-23T18:41:44.455536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-12-23T18:41:44.455575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:625:2536] 2025-12-23T18:41:44.455753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:41:44.455857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:41:44.455938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:625:2536] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-12-23T18:41:44.456408Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:41:44.456618Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 236us result status StatusPathDoesNotExist 2025-12-23T18:41:44.456834Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:41:44.457283Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:41:44.457521Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 202us result status StatusSuccess 2025-12-23T18:41:44.458027Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:41:43.920942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:41:43.921064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:41:43.921109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:41:43.921152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:41:43.921195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:41:43.921228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:41:43.921297Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:41:43.921395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:41:43.922317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:41:43.922602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:41:44.013775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:44.013861Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:44.020478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:41:44.020746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:41:44.020913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:41:44.024182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:41:44.024398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:41:44.025226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:41:44.025463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:41:44.027808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:41:44.028010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:41:44.029237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:41:44.029314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:41:44.029473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:41:44.029525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:41:44.029624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:41:44.029783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:41:44.183696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.184848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.185011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.185103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.185171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.185257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.185331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.185423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.185497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.185568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.185639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.185778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.185868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.185960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:44.186077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:41:45.804188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-12-23T18:41:45.804321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000038 2025-12-23T18:41:45.804638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:41:45.805215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:41:45.805269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_rtmr.cpp:130: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2025-12-23T18:41:45.805423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 100:0 128 -> 240 2025-12-23T18:41:45.805580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:41:45.805668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 FAKE_COORDINATOR: Erasing txId 100 2025-12-23T18:41:45.808981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:41:45.809064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:41:45.809267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:41:45.809405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:41:45.809464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-12-23T18:41:45.809511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 38 2025-12-23T18:41:45.809711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-12-23T18:41:45.809765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-12-23T18:41:45.809883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:41:45.809942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:41:45.809993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:41:45.810033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:41:45.810080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-12-23T18:41:45.810128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:41:45.810222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2025-12-23T18:41:45.810265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 100:0 2025-12-23T18:41:45.810358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:41:45.810402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-12-23T18:41:45.810443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:41:45.810481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-12-23T18:41:45.811471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:41:45.811623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:41:45.811673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:41:45.811764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:41:45.811821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:41:45.813061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:41:45.813179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:41:45.813231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:41:45.813285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:41:45.813319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:41:45.813402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-12-23T18:41:45.821200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-12-23T18:41:45.821785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-12-23T18:41:45.822038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-12-23T18:41:45.822093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-12-23T18:41:45.822492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-12-23T18:41:45.822637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-12-23T18:41:45.822677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:713:2703] TestWaitNotification: OK eventTxId 100 2025-12-23T18:41:45.823154Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:41:45.823434Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 261us result status StatusSuccess 2025-12-23T18:41:45.823825Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } RtmrVolumeDescription { Name: "rtmr1" PathId: 38 PartitionsCount: 0 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:41:43.572464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:41:43.572559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:41:43.572599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:41:43.572637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:41:43.572682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:41:43.572713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:41:43.572790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:41:43.572877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-12-23T18:41:43.573830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:41:43.574150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:41:43.767281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:43.767354Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:43.791676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:41:43.791976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:41:43.792155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:41:43.796286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:41:43.796504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:41:43.797379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:41:43.797656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:41:43.804226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:41:43.804494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:41:43.808608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:41:43.808709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:41:43.813507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:41:43.813614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:41:43.813701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:41:43.813936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:41:43.827144Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:41:44.017282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:41:44.017564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:41:44.017820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:41:44.017874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:41:44.018138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:41:44.018213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:44.024081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:41:44.024359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:41:44.024608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:41:44.024690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:41:44.024736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:41:44.024778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:41:44.030043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:41:44.030172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:41:44.030224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:41:44.036354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:41:44.036440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:41:44.036529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:41:44.036608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:41:44.047244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:41:44.059951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:41:44.060264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:41:44.061695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:41:44.061928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:41:44.062024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:41:44.062381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:41:44.062452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:41:44.062672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:41:44.062946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:41:44.069309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:41:44.069377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-12-23T18:41:45.751508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:41:45.751613Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:41:45.754702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:41:45.755272Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-12-23T18:41:45.755893Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:274:2263] Bootstrap 2025-12-23T18:41:45.757105Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:274:2263] Become StateWork (SchemeCache [2:279:2268]) 2025-12-23T18:41:45.768584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:41:45.768918Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2025-12-23T18:41:45.769056Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2025-12-23T18:41:45.770548Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:274:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:41:45.780996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:41:45.781275Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2025-12-23T18:41:45.782117Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2025-12-23T18:41:45.785212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:41:45.785509Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:41:45.785653Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2025-12-23T18:41:45.792216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:41:45.792517Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-12-23T18:41:45.792875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-12-23T18:41:45.792929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-12-23T18:41:45.793018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:41:45.793043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:41:45.793534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-12-23T18:41:45.793652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:41:45.793755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-12-23T18:41:45.793812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [2:293:2282] 2025-12-23T18:41:45.793974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:41:45.793999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:293:2282] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-12-23T18:41:45.794425Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:41:45.794619Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 231us result status StatusPathDoesNotExist 2025-12-23T18:41:45.794840Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:41:45.795388Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:41:45.795576Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 206us result status StatusPathDoesNotExist 2025-12-23T18:41:45.795728Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:41:45.796211Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:41:45.796382Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 187us result status StatusSuccess 2025-12-23T18:41:45.796779Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-dbadmin |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |94.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-dbadmin >> DataShardVolatile::UpsertNoLocksArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-clusteradmin |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags >> TBlobStorageProxyTest::TestCollectGarbagePersistence >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain >> TSchemeShardSecretTest::DropSecret >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge >> TBlobStorageProxyTest::TestVPutVGet >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible [GOOD] Test command err: RandomSeed# 8593418811122272919 2025-12-23T18:41:42.181603Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:42.183797Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8246301497255950926] 2025-12-23T18:41:42.204293Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:42.325606Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:42.327971Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17943504831732303757] 2025-12-23T18:41:42.352928Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:42.449325Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:42.451630Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 9433872444112219530] 2025-12-23T18:41:42.465268Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:43.085173Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) 
TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:43.087463Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17365472519555028292] 2025-12-23T18:41:43.099258Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:43.234334Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:43.237138Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 691446781809194642] 2025-12-23T18:41:43.271283Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:43.390515Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:43.393067Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2851710236167608359] 2025-12-23T18:41:43.410906Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:44.208958Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:44.211895Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 874796199853453714] 2025-12-23T18:41:44.227666Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:44.373467Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:44.376460Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1281444520403917686] 2025-12-23T18:41:44.408025Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:44.605518Z 1 00h01m33.006656s :BS_CONTROLLER ERROR: {BSC07@impl.h:2212} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.119797s 2025-12-23T18:41:44.605606Z 1 00h01m33.006656s :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.119907s |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |94.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login >> TBlobStorageProxyTest::TestProxySimpleDiscover |94.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login >> TBlobStorageProxyTest::TestInFlightPuts >> 
TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-system >> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD] >> TConsoleTests::TestNotifyOperationCompletion |94.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-system >> KqpScan::RemoteShardScan >> TSentinelTests::BSControllerUnresponsive [GOOD] >> KqpScan::ScanRetryRead >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] >> TSchemeShardSecretTest::DropSecret [GOOD] >> TSchemeShardSecretTest::DropNotASecret >> ClusterBalancing::ClusterBalancingEvenDistribution [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-clusteradmin >> TSentinelTests::PDiskErrorState [GOOD] >> TSentinelTests::NodeStatusComputer [GOOD] >> TSentinelTests::InitialDeploymentGracePeriod >> TConsoleTests::TestDatabaseQuotas [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::BSControllerUnresponsive [GOOD] Test command err: 2025-12-23T18:41:28.048446Z node 7 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.005630s 2025-12-23T18:41:28.107088Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2025-12-23T18:41:28.107172Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2025-12-23T18:41:28.107249Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:41:28.107290Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2025-12-23T18:41:28.107345Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-12-23T18:41:28.107467Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-12-23T18:41:28.108976Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } 
StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-12-23T18:41:28.128009Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { 
NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 
PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: ... 
Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2025-12-23T18:41:49.281122Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2025-12-23T18:41:49.281305Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2025-12-23T18:41:49.281412Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:41:49.281881Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 1:5, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:41:49.281945Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 7:29, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:41:49.281998Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 4:18, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:41:49.282045Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-12-23T18:41:49.294753Z node 1 :CMS DEBUG: sentinel.cpp:1403: [Sentinel] [Main] Retrying: attempt# 1 2025-12-23T18:41:49.294819Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-12-23T18:41:49.310122Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:41:49.310184Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 
2025-12-23T18:41:49.310277Z node 1 :CMS DEBUG: sentinel.cpp:1403: [Sentinel] [Main] Retrying: attempt# 2 2025-12-23T18:41:49.310311Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-12-23T18:41:49.310471Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2025-12-23T18:41:49.310510Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2025-12-23T18:41:49.310541Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2025-12-23T18:41:49.310567Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2025-12-23T18:41:49.310596Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2025-12-23T18:41:49.310652Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2025-12-23T18:41:49.310684Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2025-12-23T18:41:49.310717Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2025-12-23T18:41:49.311171Z node 1 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { }, cookie# 123 2025-12-23T18:41:49.311211Z node 1 :CMS ERROR: sentinel.cpp:1385: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-12-23T18:41:49.311523Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2025-12-23T18:41:49.312051Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2025-12-23T18:41:49.312247Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] 
[StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2025-12-23T18:41:49.312438Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2025-12-23T18:41:49.312615Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2025-12-23T18:41:49.312778Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2025-12-23T18:41:49.312939Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: 
"/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2025-12-23T18:41:49.313071Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2025-12-23T18:41:49.313153Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> KqpScan::ScanDuringSplit10 >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistribution [GOOD] Test command err: RandomSeed# 18072487580273030412 2025-12-23T18:41:46.063601Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:46.065905Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3454144952195503717] 2025-12-23T18:41:46.091726Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:46.284751Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:46.287370Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 16424375328418583859] 2025-12-23T18:41:46.313473Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:46.411812Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:46.413474Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5943224283387363880] 2025-12-23T18:41:46.425800Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:46.601563Z 
2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:46.605133Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8792034717402428811] 2025-12-23T18:41:46.621260Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:47.400672Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:47.403490Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 14613621920254356936] 2025-12-23T18:41:47.415216Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:47.555328Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:47.557876Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11570751233659549321] 2025-12-23T18:41:47.578036Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:47.692508Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:47.695063Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12158755303548354126] 2025-12-23T18:41:47.711273Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:47.880930Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:47.884778Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12754061658498793322] 2025-12-23T18:41:47.909116Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:48.932739Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:48.935898Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4217215075999640340] 2025-12-23T18:41:48.953072Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:49.150436Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) 
TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:49.153453Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15520472177587959402] 2025-12-23T18:41:49.188225Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:49.368839Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:49.371708Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5881539143926213450] 2025-12-23T18:41:49.387675Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:49.611336Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:49.613553Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 8641906200038114626] 2025-12-23T18:41:49.629171Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:50.262288Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:50.264579Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 16176039695823356659] 2025-12-23T18:41:50.277650Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:50.404690Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:50.407047Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2014756903574545708] 2025-12-23T18:41:50.432971Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:50.545796Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:50.548251Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5880966627437523477] 2025-12-23T18:41:50.563444Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-12-23T18:41:50.722208Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T18:41:50.724333Z 7 
00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18417244754292628979] 2025-12-23T18:41:50.739836Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> BasicUsage::ReadMirrored [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-clusteradmin >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin >> KqpScanArrowFormat::AggregateEmptySum [GOOD] |94.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |94.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |94.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> TSchemeShardSecretTest::DropNotASecret [GOOD] >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2025-12-23T18:41:15.570988Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125334450669411:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:15.571068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:15.633646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:41:15.717205Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:41:15.717249Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:15.733962Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:15.734020Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:41:15.744631Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125336913999485:2277];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:15.744709Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034c9/r3tmp/tmpu2pHFy/pdisk_1.dat 
2025-12-23T18:41:16.306864Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:16.517808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:16.587684Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:16.632409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:16.632508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:16.635370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:16.635481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:16.667379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:16.673095Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:41:16.680937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:16.845462Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:16.900175Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:16.912916Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125334450669370:2082] 1766515275553951 != 1766515275553954 2025-12-23T18:41:16.916471Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:16.994324Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 29307, node 1 2025-12-23T18:41:17.138345Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:17.271489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0034c9/r3tmp/yandexfwJsoP.tmp 2025-12-23T18:41:17.271521Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0034c9/r3tmp/yandexfwJsoP.tmp 2025-12-23T18:41:17.271717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/8ptz/0034c9/r3tmp/yandexfwJsoP.tmp 2025-12-23T18:41:17.271798Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:17.398850Z INFO: TTestServer started on Port 8837 GrpcPort 29307 TClient is connected to server localhost:8837 PQClient connected to localhost:29307 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515277156 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:18.371328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:41:18.487792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:41:20.577561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125334450669411:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:20.582883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:41:20.734893Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587125336913999485:2277];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:20.734944Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:41:23.332950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125368810409270:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:23.333135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:23.333645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125368810409282:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:23.333687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125368810409283:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:23.333737Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:23.350441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:41:23.391426Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125368810409286:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-12-23T18:41:23.494427Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125368810409368:3102] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:41:24.282872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:24.421666Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125368810409378:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissi ... : partition.cpp:2337: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.705215Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.705223Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037900][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.705246Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.705256Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.705264Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.705274Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.705281Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.728030Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:41:50.728062Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.728075Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.728089Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.728098Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2025-12-23T18:41:50.728136Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:41:50.728144Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.728153Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.728162Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.728170Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist 2025-12-23T18:41:50.738622Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:41:50.738664Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.738677Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.738693Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.738701Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2025-12-23T18:41:50.758859Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.758894Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.758905Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.758919Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.758930Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.758990Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:41:50.758998Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.759005Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.759014Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.759021Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2025-12-23T18:41:50.802039Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:41:50.802066Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.802078Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.802093Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.802104Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][3][StateIdle] Try persist 2025-12-23T18:41:50.802148Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:41:50.802156Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.802163Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.802172Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.802181Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][4][StateIdle] Try persist 2025-12-23T18:41:50.802206Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037900][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.802214Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.802221Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2337: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.802230Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.802237Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037900][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.802257Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.802265Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.802272Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.802281Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.802286Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.829807Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:41:50.829839Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.829851Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.829867Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.829878Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2025-12-23T18:41:50.829916Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:41:50.829924Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.829931Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.829941Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.829948Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist 2025-12-23T18:41:50.839165Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:41:50.839195Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.839207Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.839222Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.839233Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2025-12-23T18:41:50.862918Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.862951Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.862964Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.862978Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.862988Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.863037Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:41:50.863048Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.863056Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.863065Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.863072Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist |94.5%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-system |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-dbadmin >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DropNotASecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:41:49.704861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:41:49.704983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:41:49.705031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:41:49.705071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:41:49.705118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:41:49.705155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:41:49.705209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:41:49.705338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:41:49.706278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:41:49.706599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:41:49.847268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:49.847367Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:49.883406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:41:49.883670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:41:49.883792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:41:49.887949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:41:49.888193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:41:49.889025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:41:49.889321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:41:49.892362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:41:49.892588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:41:49.894035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:41:49.894119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:41:49.894257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:41:49.894311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-12-23T18:41:49.894426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:41:49.894620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:41:50.071968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.073276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.073423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.073500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.073570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.073647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.073727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:41:50.073830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.073911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.073994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.074069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.074183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.074294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.074382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:41:50.074461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... hard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:41:53.800063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:41:53.800159Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:41:53.800198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:41:53.800242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2025-12-23T18:41:53.800307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:41:53.800353Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:41:53.800474Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:41:53.800517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:41:53.800562Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:41:53.800608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:41:53.800656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:41:53.800700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:41:53.800741Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:41:53.800778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:41:53.800862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:41:53.800923Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-12-23T18:41:53.800966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:41:53.800999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2025-12-23T18:41:53.802070Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:41:53.802163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:41:53.802200Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:41:53.802249Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:41:53.802301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:41:53.808240Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:41:53.808360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:41:53.808400Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:41:53.808438Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:41:53.808478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:41:53.808590Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:41:53.813130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:41:53.814705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:41:53.814989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:41:53.815053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:41:53.815431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:41:53.815553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:41:53.815594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:710:2699] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-12-23T18:41:53.818841Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropSecret Drop { Name: "dir" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:41:53.819007Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_secret.cpp:120: [72057594046678944] TDropSecret Propose, opId: 102:0, path: /MyRoot/dir 2025-12-23T18:41:53.819137Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:41:53.828754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/dir\', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:41:53.829045Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), operation: DROP SECRET, path: /MyRoot/dir TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:41:53.829399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:41:53.829467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:41:53.830090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:41:53.830316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:41:53.830404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:717:2706] TestWaitNotification: OK eventTxId 102 2025-12-23T18:41:53.831150Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2025-12-23T18:41:53.831362Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 236us result status StatusSuccess 2025-12-23T18:41:53.831804Z node 2 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStateStorageRingGroupState::TestBoardConfigMismatch |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2025-12-23T18:40:52.238993Z :PropagateSessionClosed INFO: Random seed for debugging is 1766515252238946 2025-12-23T18:40:52.555619Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125236519968195:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:52.555776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:40:52.636141Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125239051573032:2087];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:52.636194Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005706/r3tmp/tmpku3Fpx/pdisk_1.dat 2025-12-23T18:40:52.670316Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:40:52.683524Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:40:52.930009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2025-12-23T18:40:52.931730Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:53.085679Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:53.124451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:53.124647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:53.127586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:53.127682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:53.170733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:53.172055Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:40:53.182168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:53.226365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:53.226569Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6267, node 1 2025-12-23T18:40:53.383079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005706/r3tmp/yandexzuEvSM.tmp 2025-12-23T18:40:53.383119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005706/r3tmp/yandexzuEvSM.tmp 2025-12-23T18:40:53.383300Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005706/r3tmp/yandexzuEvSM.tmp 2025-12-23T18:40:53.383427Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:40:53.435816Z INFO: TTestServer started on Port 3475 GrpcPort 6267 2025-12-23T18:40:53.442816Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:53.596294Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:53.662939Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3475 PQClient connected to localhost:6267 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515253328 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:53.949069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-12-23T18:40:57.027461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125253699838715:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:57.027649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:57.031392Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125257994806024:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:57.031468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125257994806025:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:57.031654Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:57.039128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:57.043110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125257994806060:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:57.043242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:57.043566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125257994806062:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:57.043615Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:57.082917Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125257994806028:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-12-23T18:40:57.393287Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125257994806104:2987] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:57.431305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:57.468158Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587125260526409831:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:40:57.467458Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125257994806126:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:40:57.468770Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=NDEwY2NkZDItODA0MDFmNzUtOWQ5NTU0ZTYtNjgxYTFiOGQ=, ActorId: [1:7587125253699838713:2329], ActorState: ExecuteState, T ... 29: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.028939Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.028955Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.028982Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.029940Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7587125480060717232:2530] disconnected. 2025-12-23T18:41:50.030018Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7587125480060717232:2530] disconnected; active server actors: 1 2025-12-23T18:41:50.030094Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7587125480060717232:2530] client user disconnected session shared/user_3_1_15525833314598359215_v1 2025-12-23T18:41:50.030248Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587125480060717233:2530] disconnected. 2025-12-23T18:41:50.030282Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587125480060717233:2530] disconnected; active server actors: 1 2025-12-23T18:41:50.029003Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.030309Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587125480060717233:2530] client user disconnected session shared/user_3_1_15525833314598359215_v1 2025-12-23T18:41:50.029111Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_15525833314598359215_v1 2025-12-23T18:41:50.030376Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7587125480060717234:2530] disconnected. 
2025-12-23T18:41:50.029143Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [3:7587125480060717242:2534] destroyed 2025-12-23T18:41:50.030396Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7587125480060717234:2530] disconnected; active server actors: 1 2025-12-23T18:41:50.030423Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7587125480060717234:2530] client user disconnected session shared/user_3_1_15525833314598359215_v1 2025-12-23T18:41:50.029218Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_15525833314598359215_v1 2025-12-23T18:41:50.046997Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.047039Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.047066Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.047088Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.047104Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.055236Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.055267Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.055282Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.055303Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.055317Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.079200Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.079230Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.079245Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.079266Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.079279Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.151852Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.151899Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.151916Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.151937Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.151949Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.158964Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.158996Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.159011Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.159034Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.159048Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.182961Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.183014Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.183029Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.183055Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.183069Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.253814Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.253855Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.253869Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.253889Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.253909Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.267212Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.267250Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.267263Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.267285Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.267297Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.285897Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.285938Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.285957Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.285979Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.285993Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:50.353316Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:50.353353Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.353368Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:50.353387Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:50.353399Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:41:51.123664Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1177: ActorId: [3:7587125488650652032:2551] TxId: 281474976720687. Ctx: { TraceId: 01kd688znz81sbk2m1f8eapgqr, Database: /Root, SessionId: ydb://session/3?node_id=3&id=N2NmY2E2Y2QtMWE2MWZjNmItYWNlZDFiNjgtMmYzY2M4Nzg=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-12-23T18:41:51.124452Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [3:7587125488650652036:2551], TxId: 281474976720687, task: 3. Ctx: { CheckpointId : . TraceId : 01kd688znz81sbk2m1f8eapgqr. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=N2NmY2E2Y2QtMWE2MWZjNmItYWNlZDFiNjgtMmYzY2M4Nzg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7587125488650652032:2551], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> BasicUsage::PreferredDatabaseNoFallback [GOOD] >> TStateStorageRingGroupState::TestBoardConfigMismatch [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> Cdc::HugeKey[YdsRunner] [GOOD] >> Cdc::HugeKey[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 18694, MsgBus: 9312 2025-12-23T18:39:34.572043Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124903600851016:2180];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:34.572214Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0061e5/r3tmp/tmpIKUwEB/pdisk_1.dat 2025-12-23T18:39:35.285913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:35.304085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:35.304207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:35.582624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:35.583063Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124903600850873:2081] 1766515174509703 != 1766515174509706 2025-12-23T18:39:35.585066Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:35.602929Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:35.613177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 18694, node 1 2025-12-23T18:39:36.051440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:36.051461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:36.051467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:36.051534Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9312 TClient is connected to server localhost:9312 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515175663 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:37.621246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:37.629522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:39:37.642567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:37.966085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:38.270566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:38.425599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:39.571237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124903600851016:2180];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:39.571293Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:43.014840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124942255558430:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:43.014976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:43.020146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124942255558440:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:43.020229Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:43.910341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:43.943829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:43.992517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.081414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.152225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.232305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.312280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.400339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:44.542387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124946550526629:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.542463Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.542905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124946550526634:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.542951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124946550526635:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:44.543070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [Work ... ted 2025-12-23T18:41:37.981382Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:38.008683Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32520, node 12 2025-12-23T18:41:38.171733Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:41:38.171772Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:41:38.171788Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:41:38.171961Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:38.173724Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30328 2025-12-23T18:41:38.749791Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:30328 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515298072 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:39.370838Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:41:39.395722Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:41:39.511866Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:41:39.861234Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:40.095462Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:41:42.704131Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7587125429092302293:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:42.704248Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:41:45.318958Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125463452042463:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:45.319116Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:45.323195Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125463452042473:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:45.323322Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:45.721538Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:45.795855Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:45.870528Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:45.934873Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:46.008969Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:46.109464Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:46.197452Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:46.308226Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:46.565947Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125467747010656:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:46.566081Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:46.571304Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125467747010662:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:46.571448Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:46.571566Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125467747010661:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:46.579460Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:41:46.611473Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7587125467747010665:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:41:46.704343Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7587125467747010716:3799] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:41:51.479382Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515310707, txId: 281474976710673] shutting down |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-clusteradmin >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributes >> TBlobStorageProxyTest::TestNormal >> TSentinelTests::InitialDeploymentGracePeriod [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestBoardConfigMismatch [GOOD] Test command err: RandomSeed# 2035132542554768433 2025-12-23T18:41:56.368903Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [7:146:1] Cookie# 2110238033915585692 2025-12-23T18:41:56.368983Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-12-23T18:41:56.369032Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 1 Cookie# 2110238033915585692 SessionId# [7:146:1] Binding# 2025-12-23T18:41:56.369093Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 1 Reason# explicit unbind request 2025-12-23T18:41:56.369136Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.6:19001/6 2025-12-23T18:41:56.369181Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2025-12-23T18:41:56.369207Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2025-12-23T18:41:56.369234Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2025-12-23T18:41:56.369279Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2025-12-23T18:41:56.369309Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 2025-12-23T18:41:56.369354Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2025-12-23T18:41:56.369403Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2025-12-23T18:41:56.369432Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound 
RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2025-12-23T18:41:56.369501Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} 2025-12-23T18:41:56.369594Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-12-23T18:41:56.369656Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 5 Binding# {5.0/16449375674730582747@[0:0:0]} SessionId# [0:0:0] 2025-12-23T18:41:56.369710Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 7763306606246112870 2025-12-23T18:41:56.369748Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:56.377825Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 7763306606246112870 SessionId# [2:131:1] Binding# {1.7/7763306606246112870@[2:131:1]} Record# {RootNodeId: 1 } 2025-12-23T18:41:56.377956Z 2 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008242s 2025-12-23T18:41:56.378033Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [3:134:1] Cookie# 4193233102857025744 2025-12-23T18:41:56.378092Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [3:134:1] Inserted# false Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-12-23T18:41:56.378177Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 4193233102857025744 SessionId# [3:134:1] Binding# {1.7/4193233102857025744@[3:134:1]} Record# {RootNodeId: 1 } 2025-12-23T18:41:56.378304Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [7:262:20] SessionId# [2:120:6] Cookie# 16449375674730582746 2025-12-23T18:41:56.378334Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 7 SessionId# [2:120:6] Inserted# false Subscription# {SessionId# [2:120:6] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:56.378369Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 7 Cookie# 16449375674730582746 SessionId# [2:120:6] Binding# {1.1/7763306606246112870@[2:131:1]} 2025-12-23T18:41:56.378402Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 7 Reason# explicit unbind request 2025-12-23T18:41:56.378437Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.6:19001/6 2025-12-23T18:41:56.378471Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.7:19001/7 2025-12-23T18:41:56.378508Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.9:19001/9 2025-12-23T18:41:56.378537Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 
127.0.0.4:19001/4 2025-12-23T18:41:56.378595Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.1:19001/1 2025-12-23T18:41:56.378641Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.8:19001/8 2025-12-23T18:41:56.378666Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.5:19001/5 2025-12-23T18:41:56.378692Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.2:19001/2 2025-12-23T18:41:56.378730Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 7 NodeId# 127.0.0.3:19001/3 2025-12-23T18:41:56.379041Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 7 Subscription# {SessionId# [2:120:6] SubscriptionCookie# 0} 2025-12-23T18:41:56.379128Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [7:262:20] SessionId# [1:145:6] Cookie# 2110238033915585692 2025-12-23T18:41:56.379185Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 7 Cookie# 2110238033915585692 SessionId# [1:145:6] Binding# Record# {RootNodeId: 7 } 2025-12-23T18:41:56.379228Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [7:262:20] SessionId# [9:32:7] Cookie# 5929884144743551567 2025-12-23T18:41:56.379263Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 7 SessionId# [9:32:7] Inserted# false Subscription# {SessionId# [9:32:7] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:56.379335Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 7 Cookie# 5929884144743551567 SessionId# [9:32:7] Binding# {7.1/5929884144743551567@[9:32:7]} Record# {RootNodeId: 7 } 2025-12-23T18:41:56.379385Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:248:20] SessionId# [8:61:5] Cookie# 13037756743364388188 2025-12-23T18:41:56.379416Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [8:61:5] Inserted# false Subscription# {SessionId# [8:61:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:56.379478Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 13037756743364388188 SessionId# [8:61:5] Binding# {5.1/13037756743364388188@[8:61:5]} Record# {RootNodeId: 7 } 2025-12-23T18:41:56.379787Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [7:58:5] SessionId# [0:0:0] Cookie# 7 2025-12-23T18:41:56.379856Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 5 SessionId# [7:58:5] Cookie# 7 CookieInFlight# true SubscriptionExists# true 2025-12-23T18:41:56.379913Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {5.0/16449375674730582747@[0:0:0]} 2025-12-23T18:41:56.380153Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [7:121:2] Cookie# 16449375674730582746 2025-12-23T18:41:56.380217Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 
Cookie# 16449375674730582746 SessionId# [7:121:2] Binding# {5.0/16449375674730582747@[7:58:5]} Record# {RootNodeId: 1 } 2025-12-23T18:41:56.380260Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [5:115:2] Cookie# 16146238848461893436 2025-12-23T18:41:56.380301Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [5:115:2] Inserted# false Subscription# {SessionId# [5:115:2] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-12-23T18:41:56.380351Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 16146238848461893436 SessionId# [5:115:2] Binding# {2.7/16146238848461893436@[5:115:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:56.380398Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [6:118:2] Cookie# 4396043040774057691 2025-12-23T18:41:56.380428Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [6:118:2] Inserted# false Subscription# {SessionId# [6:118:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:56.380467Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 4396043040774057691 SessionId# [6:118:2] Binding# {2.7/4396043040774057691@[6:118:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:56.380500Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [4:89:3] Cookie# 11508639747001395550 2025-12-23T18:41:56.380530Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [4:89:3] Inserted# false Subscription# {SessionId# [4:89:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:56.380587Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 11508639747001395550 SessionId# [4:89:3] Binding# {3.7/11508639747001395550@[4:89:3]} Record# {RootNodeId: 1 } 2025-12-23T18:41:56.381527Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [2:227:20] SessionId# [1:130:1] Cookie# 7763306606246112870 2025-12-23T18:41:56.381577Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-12-23T18:41:56.381759Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 2 Cookie# 7763306606246112870 SessionId# [1:130: ... 
6.465747Z 1 00h00m00.816550s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:56.465798Z 1 00h00m00.816550s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:56.465826Z 1 00h00m00.816550s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:56.465855Z 1 00h00m00.816550s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.465889Z 1 00h00m00.816550s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.465915Z 1 00h00m00.816550s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.465965Z 1 00h00m00.816550s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.466007Z 1 00h00m00.816550s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.467175Z 1 00h00m01.809225s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:56.467246Z 1 00h00m01.809225s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:56.467286Z 1 00h00m01.809225s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:56.467314Z 1 00h00m01.809225s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:56.467346Z 1 00h00m01.809225s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:56.467400Z 1 00h00m01.809225s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:56.467444Z 1 00h00m01.809225s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.467497Z 1 00h00m01.809225s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.467540Z 1 00h00m01.809225s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.467577Z 1 00h00m01.809225s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.467607Z 1 00h00m01.809225s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.469573Z 1 00h00m03.854135s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 
72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:56.469657Z 1 00h00m03.854135s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:56.469714Z 1 00h00m03.854135s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:56.469759Z 1 00h00m03.854135s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:56.469796Z 1 00h00m03.854135s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:56.469830Z 1 00h00m03.854135s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:56.469865Z 1 00h00m03.854135s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.469906Z 1 00h00m03.854135s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.469958Z 1 00h00m03.854135s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.470001Z 1 00h00m03.854135s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.470032Z 1 00h00m03.854135s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.473891Z 1 00h00m07.943955s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:56.473970Z 1 00h00m07.943955s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:56.474007Z 1 00h00m07.943955s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:56.474081Z 1 00h00m07.943955s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:56.474123Z 1 00h00m07.943955s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:56.474159Z 1 00h00m07.943955s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:56.474196Z 1 00h00m07.943955s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.474243Z 1 00h00m07.943955s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.474272Z 1 00h00m07.943955s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.474317Z 1 00h00m07.943955s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 
ClusterStateGuid: 0} 2025-12-23T18:41:56.474359Z 1 00h00m07.943955s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.477582Z 1 00h00m10.002048s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 0 2025-12-23T18:41:56.477699Z 1 00h00m10.002048s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 1 2025-12-23T18:41:56.483344Z 1 00h00m16.123595s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:56.483492Z 1 00h00m16.123595s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:56.483549Z 1 00h00m16.123595s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:56.483579Z 1 00h00m16.123595s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:56.483606Z 1 00h00m16.123595s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:56.483633Z 1 00h00m16.123595s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:56.483678Z 1 00h00m16.123595s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.483728Z 1 00h00m16.123595s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.483800Z 1 00h00m16.123595s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.483839Z 1 00h00m16.123595s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.483866Z 1 00h00m16.123595s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.487965Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 5 ClusterStateGuid: 6 2025-12-23T18:41:56.497221Z 1 00h00m30.173840s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 5 ClusterStateGuid: 6 2025-12-23T18:41:56.500910Z 1 00h00m34.609581s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:56.501019Z 1 00h00m34.609581s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:56.501071Z 1 00h00m34.609581s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:56.501101Z 1 00h00m34.609581s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 
2025-12-23T18:41:56.501130Z 1 00h00m34.609581s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:56.501178Z 1 00h00m34.609581s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:56.501238Z 1 00h00m34.609581s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.501289Z 1 00h00m34.609581s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.501323Z 1 00h00m34.609581s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.501358Z 1 00h00m34.609581s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:56.501391Z 1 00h00m34.609581s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain >> TStateStorage2RingGroups::TestStateStorageReplyOnce >> TStateStorageRingGroupState::TestProxyConfigMismatch >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota >> TStateStorage2RingGroups::TestStateStorageReplyOnce [GOOD] >> TStateStorageRingGroupState::TestProxyConfigMismatch [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2025-12-23T18:41:07.003004Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125299063383994:2264];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:07.003120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:07.155029Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:07.219556Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125299871971937:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:07.219736Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:07.453860Z node 2 
:PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034d5/r3tmp/tmp3WR6bf/pdisk_1.dat 2025-12-23T18:41:07.974713Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:07.972255Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:08.008841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:41:08.011228Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:08.274100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:08.274367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:08.275856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:08.275932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:08.316218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:08.514048Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:08.536623Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:41:08.564987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:08.568834Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:08.606908Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:08.648726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 22986, node 1 2025-12-23T18:41:09.084060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0034d5/r3tmp/yandexrOcEW2.tmp 2025-12-23T18:41:09.084092Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0034d5/r3tmp/yandexrOcEW2.tmp 2025-12-23T18:41:09.084313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0034d5/r3tmp/yandexrOcEW2.tmp 
2025-12-23T18:41:09.084469Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:09.196734Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:09.239971Z INFO: TTestServer started on Port 31948 GrpcPort 22986 TClient is connected to server localhost:31948 PQClient connected to localhost:22986 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515268728 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:09.865409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:41:09.930037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:41:11.987466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125299063383994:2264];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:11.987611Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:41:12.190911Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587125299871971937:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:12.190993Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:41:13.802298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125329128156280:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:13.802511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:13.802768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125329128156306:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:13.808079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:41:13.810008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125329128156308:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:13.810095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:13.848870Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125329128156309:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-12-23T18:41:14.162957Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125329128156414:3071] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:41:14.196950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:14.640241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:14.809174Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125333423123730:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:41:14.812362Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=NTE5NDQ5NDUtM2M1MzA0OTktY2M3MmU1ZTQtZjZlZDgzNmY=, ActorId: [1:7587125329128156276:2333], ... : (Utf8 : Cluster) IncTo: 1 }] } 2025-12-23T18:41:54.766453Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [3:7587125389369413648:2120], cacheItem# { Subscriber: { Subscriber: [3:7587125410844251508:3109] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 32 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1766515293039 PathId: [OwnerId: 72057594046644480, LocalPathId: 48] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:41:54.767097Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587125505333535633:5410], recipient# [3:7587125505333535630:2870], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : Cluster) IncFrom: 1 To: (Utf8 : Cluster) IncTo: 1 }] } 2025-12-23T18:41:54.770281Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2885: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [3:7587125389369413648:2120], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:41:54.770394Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [3:7587125389369413648:2120], cacheItem# { Subscriber: { Subscriber: [3:7587125410844251395:3041] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 32 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1766515292507 PathId: [OwnerId: 72057594046644480, LocalPathId: 46] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:41:54.770590Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587125505333535636:5412], recipient# [3:7587125505333535635:2871], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 
72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:41:54.776739Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:54.776775Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.776792Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:54.776812Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.776827Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:41:54.776899Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:41:54.776912Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.776923Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:54.776938Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.776949Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][3][StateIdle] Try persist 2025-12-23T18:41:54.787511Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:41:54.787551Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.787570Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:54.787595Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.787613Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][4][StateIdle] Try persist 2025-12-23T18:41:54.787695Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:54.787711Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.787722Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:54.787738Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.787749Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2025-12-23T18:41:54.832661Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037900][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:54.832698Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.832715Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:54.832735Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.832749Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037900][Partition][0][StateIdle] Try persist 2025-12-23T18:41:54.848510Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:41:54.848540Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.848555Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:54.848575Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.848591Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2025-12-23T18:41:54.848639Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:41:54.848651Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.848661Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:54.848672Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.848681Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2025-12-23T18:41:54.848711Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:41:54.848721Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.848731Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:54.848744Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.848754Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2025-12-23T18:41:54.848778Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:41:54.848788Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.848796Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:54.848821Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.848831Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist 2025-12-23T18:41:54.877393Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:54.877425Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.877443Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:54.877464Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.877481Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:41:54.877549Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:41:54.877560Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.877571Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:54.877585Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:54.877596Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][3][StateIdle] Try persist >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder |94.5%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::InitialDeploymentGracePeriod [GOOD] Test command err: 2025-12-23T18:41:30.648133Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2025-12-23T18:41:30.648211Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2025-12-23T18:41:30.648290Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:41:30.648328Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2025-12-23T18:41:30.648382Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-12-23T18:41:30.648484Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-12-23T18:41:30.649378Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN 
Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-12-23T18:41:30.655336Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } 
PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 
VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2025-12-23T18:41:55.132766Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2025-12-23T18:41:55.132858Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2025-12-23T18:41:55.132976Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2025-12-23T18:41:55.133021Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:41:55.145951Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:41:55.146050Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2025-12-23T18:41:55.146178Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2025-12-23T18:41:55.146231Z node 9 :CMS DEBUG: 
sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-12-23T18:41:55.146267Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-12-23T18:41:55.146299Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-12-23T18:41:55.146333Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-12-23T18:41:55.146375Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-12-23T18:41:55.146408Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-12-23T18:41:55.146443Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-12-23T18:41:55.146786Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2025-12-23T18:41:55.147625Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2025-12-23T18:41:55.147808Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } 
ResponseTime: 7800110 2025-12-23T18:41:55.147940Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2025-12-23T18:41:55.148063Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2025-12-23T18:41:55.148188Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2025-12-23T18:41:55.148301Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2025-12-23T18:41:55.148416Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2025-12-23T18:41:55.148474Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:41:55.148945Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 13:54, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:41:55.149031Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-12-23T18:41:55.149302Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 6 2025-12-23T18:41:55.149355Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 13:54 >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2025-12-23T18:40:51.194299Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1766515251194263 2025-12-23T18:40:51.568146Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125234296930044:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:51.568203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:40:51.816695Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00579f/r3tmp/tmpYr42zf/pdisk_1.dat 2025-12-23T18:40:51.827781Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125235163767316:2201];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:51.827829Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:40:51.961823Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:40:52.280541Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:52.314822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:52.374852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:52.374990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:52.375255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:52.375294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:52.419664Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:40:52.419881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:52.424281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:52.491752Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:52.497304Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 28962, node 1 2025-12-23T18:40:52.615315Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:52.650836Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:52.792080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/00579f/r3tmp/yandexi9M4o7.tmp 2025-12-23T18:40:52.792102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/00579f/r3tmp/yandexi9M4o7.tmp 2025-12-23T18:40:52.792265Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/00579f/r3tmp/yandexi9M4o7.tmp 2025-12-23T18:40:52.792343Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:40:52.831584Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:52.871133Z INFO: TTestServer started on Port 13247 GrpcPort 28962 TClient is connected to server localhost:13247 PQClient connected to localhost:28962 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515252558 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:53.253581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-12-23T18:40:55.921981Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125252343636700:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:55.922074Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125252343636692:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:55.922227Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:55.923878Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125252343636707:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:55.923988Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:55.930248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:55.970905Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587125252343636706:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-12-23T18:40:56.086088Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587125256638604032:2139] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:56.642239Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125234296930044:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:56.642519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:56.669364Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587125256638604039:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:40:56.672269Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=NWM0Y2IxMzYtMzZkNDY3NzgtOWNlODNiNzctYjdhMmIzOWQ=, ActorId: [2:7587125252343636690:2301], ActorState: ExecuteState, TraceId: 01kd687akeb1c4n6a0jx7kxbc3, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:40:56.675167Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:40:56.674831Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125255771767910:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:40:56.676304Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YzI3ZDFjODUtYjE5M2NjOWYtMTE2MW ... cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:52.179585Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.179600Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:52.283003Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:52.283042Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.283057Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:52.283076Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.283091Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:52.388896Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:52.388932Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.388946Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:52.388966Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.388979Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:52.490859Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:52.490894Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.490907Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:52.490926Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.490939Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:52.590237Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:52.590272Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.590285Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 
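The repeating PERSQUEUE entries around this point trace the partition actor's idle cycle: drain queued user actions and tx events, drain pending commits, then attempt persistence (a no-op here since every counter is zero). The following is a minimal, self-contained C++ sketch of that cycle for illustration only; the type and method names are assumptions made for this sketch and do not correspond to the real partition.cpp implementation.

#include <cstdio>
#include <deque>
#include <string>

// Schematic model of the StateIdle cycle traced by the log lines above.
// All names here are illustrative assumptions, not YDB's actual types.
struct PartitionIdleCycle {
    std::deque<std::string> UserActions;    // queued user actions / tx events
    std::deque<std::string> PendingCommits; // commits waiting to be applied
    std::deque<std::string> PendingWrites;  // writes waiting to be persisted

    void RunOnce() {
        std::printf("Process user action and tx events\n");
        Report();
        while (!UserActions.empty()) UserActions.pop_front();     // apply events

        std::printf("Process user action and tx pending commits\n");
        Report();
        while (!PendingCommits.empty()) PendingCommits.pop_front(); // apply commits

        std::printf("Try persist\n");
        if (!PendingWrites.empty()) {
            // Only a non-empty write set would trigger an actual storage write;
            // in the trace above all counters are zero, so persist does nothing.
            PendingWrites.clear();
        }
    }

    void Report() const {
        std::printf("Events: %zu, PendingCommits: %zu, PendingWrites: %zu\n",
                    UserActions.size(), PendingCommits.size(), PendingWrites.size());
    }
};

int main() {
    PartitionIdleCycle cycle;
    cycle.RunOnce(); // mirrors one idle iteration with empty queues
}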
2025-12-23T18:41:52.590304Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.590317Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:52.690916Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:52.690956Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.690974Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:52.690994Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.691008Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:52.795386Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:52.795419Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.795433Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:52.795451Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.795481Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:52.899037Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:52.899070Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.899084Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:52.899105Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:52.899118Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:53.002419Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:53.002453Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:53.002467Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:53.002485Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:53.002498Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:53.103794Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 
2025-12-23T18:41:53.103830Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:53.103844Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:53.103863Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:53.103877Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:53.125028Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:6550" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:6550" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:6550" location: "dc3" status: AVAILABLE weight: 500 } ] ControlPlaneEndpoint: cp.logbroker-federation:2135 } === Closing the session 2025-12-23T18:41:53.138868Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: try to update token 2025-12-23T18:41:53.139285Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2025-12-23T18:41:53.147441Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2025-12-23T18:41:53.147510Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session will now close 2025-12-23T18:41:53.147565Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: aborting 2025-12-23T18:41:53.147698Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2025-12-23T18:41:53.147746Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: destroy 2025-12-23T18:41:53.204766Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:53.204799Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:53.204812Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:53.204832Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:53.204844Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:53.311396Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:53.311429Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:53.311443Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:53.311475Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2025-12-23T18:41:53.311489Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:53.415206Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:53.415243Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:53.415256Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:53.415275Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:53.415287Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:41:53.516681Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:41:53.516720Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:53.516734Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:41:53.516759Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:41:53.516772Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-system |94.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |94.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |94.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnce [GOOD] Test command err: RandomSeed# 13784400847643542504 2025-12-23T18:41:58.236428Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.236517Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.236549Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.236597Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.236638Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.236665Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.236697Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup 
TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.236749Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.236801Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.236842Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.242313Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639257 Sender# [1:340:56] SessionId# [0:0:0] Cookie# 0 2025-12-23T18:41:58.242627Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.242711Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:235:51] SessionId# [5:140:1] Cookie# 4744479711491798999 2025-12-23T18:41:58.242880Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [5:140:1] Inserted# false Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:58.251104Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 4744479711491798999 SessionId# [5:140:1] Binding# Record# {CacheUpdate { } } RootNodeId# 5 StorageConfigGeneration# 0 KnownNode# true 2025-12-23T18:41:58.251206Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [5:140:1] Inserted# false Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:58.251280Z 5 00h00m00.002048s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008568s 2025-12-23T18:41:58.251335Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [2:131:1] Cookie# 4651924170918225032 2025-12-23T18:41:58.251375Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-12-23T18:41:58.251484Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 4651924170918225032 SessionId# [2:131:1] Binding# {1.5/4651924170918225032@[2:131:1]} Record# {RootNodeId: 5 CacheUpdate { } } 2025-12-23T18:41:58.251544Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [3:134:1] Cookie# 6173767015788985902 2025-12-23T18:41:58.251575Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [3:134:1] Inserted# false Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-12-23T18:41:58.251617Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 6173767015788985902 SessionId# [3:134:1] Binding# {1.5/6173767015788985902@[3:134:1]} Record# {RootNodeId: 5 CacheUpdate { } } 2025-12-23T18:41:58.251671Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.251781Z 1 00h00m00.002048s :STATESTORAGE DEBUG: 
ProxyRequest::HandleInit ringGroup:1 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.251986Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:58.252026Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:58.252069Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:58.252106Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:58.252140Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:58.252170Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:58.252225Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:58.252258Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:58.252287Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:58.252325Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:58.252401Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.252447Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.252475Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.252500Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.252531Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.252599Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.252628Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.252668Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.252706Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.252733Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup 
ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.252809Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:263:20] SessionId# [1:139:4] Cookie# 4744479711491798999 2025-12-23T18:41:58.252847Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [1:139:4] Inserted# false Subscription# {SessionId# [1:139:4] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-12-23T18:41:58.252928Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 4744479711491798999 SessionId# [1:139:4] Binding# {5.5/4744479711491798999@[1:139:4]} Record# {RootNodeId: 5 CacheUpdate { } } 2025-12-23T18:41:58.252995Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:263:20] SessionId# [9:64:5] Cookie# 9214370694769923906 2025-12-23T18:41:58.253025Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [9:64:5] Inserted# false Subscription# {SessionId# [9:64:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:58.253077Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 9214370694769923906 SessionId# [9:64:5] Binding# {5.5/9214370694769923906@[9:64:5]} Record# {RootNodeId: 5 CacheUpdate { } } 2025-12-23T18:41:58.253110Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:242:20] SessionId# [8:124:2] Cookie# 939336766952527905 2025-12-23T18:41:58.253141Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [8:124:2] Inserted# false Subscription# {SessionId# [8:124:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:58.253180Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 939336766952527905 SessionId# [8:124:2] Binding# {2.5/939336766952527905@[8:124:2]} Record# {RootNodeId: 5 CacheUpdate { } } 2025-12-23T18:41:58.253229Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:249:20] SessionId# [7:98:3] Cookie# 12878294281943864409 2025-12-23T18:41:58.253269Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [7:98:3] Inserted# false Subscription# {SessionId# [7:98:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:58.253321Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 12878294281943864409 SessionId# [7:98:3] Binding# {3.5/12878294281943864409@[7:98:3]} Record# {RootNodeId: 5 CacheUpdate { } } 2025-12-23T18:41:58.253355Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:249:20] SessionId# [4:89:3] Cookie# 12582800367476432521 2025-12-23T18:41:58.253383Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [4:89:3] Inserted# false Subscription# {SessionId# [4:89:3] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-12-23T18:41:58.253420Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 12582800367476432521 SessionId# [4:89:3] Binding# {3.5/12582800367476432521@[4:89:3]} Record# 
{RootNodeId: 5 CacheUpdate { } } 2025-12-23T18:41:58.253522Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667 ... 94037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.318873Z 1 00h00m08.005674s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:58.318909Z 1 00h00m08.005674s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:58.318936Z 1 00h00m08.005674s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:58.318963Z 1 00h00m08.005674s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:58.319012Z 1 00h00m08.005674s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:58.319040Z 1 00h00m08.005674s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:58.319066Z 1 00h00m08.005674s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:58.319097Z 1 00h00m08.005674s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:58.319124Z 1 00h00m08.005674s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:58.319149Z 1 00h00m08.005674s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:58.319205Z 1 00h00m08.005674s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.319241Z 1 00h00m08.005674s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.319268Z 1 00h00m08.005674s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.319295Z 1 00h00m08.005674s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.319327Z 1 00h00m08.005674s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.319391Z 1 00h00m08.005674s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.319418Z 1 00h00m08.005674s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.319472Z 1 00h00m08.005674s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 
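The STATESTORAGE trace above shows the ring-group proxy fanning a single EvLookup out to the replicas of each ring group, collecting EvReplicaInfo answers per group, and replying exactly once with a merged signature. Below is a compact C++ sketch of that reply-once quorum logic; it is a simplified model under assumed names and thresholds, not the actual StateStorage proxy code.

#include <cstdio>
#include <vector>

// Simplified model of a reply-once quorum across ring groups.
// Names and the majority threshold are assumptions for illustration only.
struct RingGroupState {
    int Replicas = 5;  // replicas per ring group (5 in the trace above)
    int Replies  = 0;  // EvReplicaInfo answers received so far
    bool HasQuorum() const { return Replies > Replicas / 2; }
};

class RingGroupProxy {
public:
    explicit RingGroupProxy(int groups) : Groups_(groups) {}

    // Called for every EvReplicaInfo; replies to the requester exactly once,
    // when every ring group has reached a majority of answers.
    void HandleReplicaInfo(int group) {
        if (Replied_) return;            // reply-once guarantee
        Groups_[group].Replies++;
        for (const auto& g : Groups_) {
            if (!g.HasQuorum()) return;  // still waiting for some group
        }
        Replied_ = true;
        std::printf("RingGroupProxyRequest::Reply TEvInfo (merged signature)\n");
    }

private:
    std::vector<RingGroupState> Groups_;
    bool Replied_ = false;
};

int main() {
    RingGroupProxy proxy(2);                  // two ring groups, as in the test
    for (int reply = 0; reply < 5; ++reply) { // replies trickle in per group
        proxy.HandleReplicaInfo(0);
        proxy.HandleReplicaInfo(1);
    }                                         // prints the merged reply once
}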
2025-12-23T18:41:58.319500Z 1 00h00m08.005674s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.319526Z 1 00h00m08.005674s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.319616Z 1 00h00m08.005674s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:3298559226995:0] : 157}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:41:58.319679Z 1 00h00m08.005674s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:9895628993651:0] : 163}, {[1:7696605738099:0] : 161}, {[1:8796117365875:0] : 162}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2025-12-23T18:41:58.319768Z 1 00h00m08.005674s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 10 Signature: {{[1:8796117365875:0] : 162}, {[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:9895628993651:0] : 163}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:41:58.322298Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-12-23T18:41:58.322457Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-12-23T18:41:58.322540Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:1 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-12-23T18:41:58.322604Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:41:58.322649Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.322682Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:41:58.322718Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-12-23T18:41:58.322758Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-12-23T18:41:58.322785Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:41:58.322813Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.322840Z 1 00h00m10.002048s :STATESTORAGE DEBUG: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:41:58.322868Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-12-23T18:41:58.322921Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-12-23T18:41:58.322973Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.323038Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.323090Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.323152Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.323197Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.323280Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.323325Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.323356Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.323399Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.323437Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.323563Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 0 
TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 154}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:41:58.323647Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:7696605738099:0] : 161}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2025-12-23T18:41:58.323745Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 8 Signature: {{[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:41:58.323844Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037932033 Signature: { Size: 10 Signature: {{[1:8796117365875:0] : 162}, {[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:9895628993651:0] : 163}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyConfigMismatch [GOOD] Test command err: RandomSeed# 7203104090944818104 2025-12-23T18:41:58.265039Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [8:149:1] Cookie# 757741610108833850 2025-12-23T18:41:58.265113Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [8:149:1] Inserted# false Subscription# {SessionId# [8:149:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:58.265159Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 1 Cookie# 757741610108833850 SessionId# [8:149:1] Binding# 2025-12-23T18:41:58.265217Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 1 Reason# explicit unbind request 2025-12-23T18:41:58.265255Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.6:19001/6 2025-12-23T18:41:58.265300Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2025-12-23T18:41:58.265327Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2025-12-23T18:41:58.265353Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2025-12-23T18:41:58.265400Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 2025-12-23T18:41:58.265436Z 8 00h00m00.000000s :BS_NODE DEBUG: 
{NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2025-12-23T18:41:58.265468Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2025-12-23T18:41:58.265493Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2025-12-23T18:41:58.265518Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2025-12-23T18:41:58.265591Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [8:149:1] SubscriptionCookie# 0} 2025-12-23T18:41:58.265675Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:58.265740Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 5 Binding# {5.0/3886160515250176495@[0:0:0]} SessionId# [0:0:0] 2025-12-23T18:41:58.265830Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 13207912024740109946 2025-12-23T18:41:58.265870Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2025-12-23T18:41:58.276313Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 13207912024740109946 SessionId# [2:131:1] Binding# {1.8/13207912024740109946@[2:131:1]} Record# {RootNodeId: 1 } 2025-12-23T18:41:58.276436Z 2 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.010618s 2025-12-23T18:41:58.276530Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [3:134:1] Cookie# 4364883768174985484 2025-12-23T18:41:58.276602Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [3:134:1] Inserted# false Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:58.276680Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 4364883768174985484 SessionId# [3:134:1] Binding# {1.8/4364883768174985484@[3:134:1]} Record# {RootNodeId: 1 } 2025-12-23T18:41:58.279179Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [8:269:20] SessionId# [2:123:7] Cookie# 3886160515250176494 2025-12-23T18:41:58.279245Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 8 SessionId# [2:123:7] Inserted# false Subscription# {SessionId# [2:123:7] SubscriptionCookie# 0} NextSubscribeCookie# 8 2025-12-23T18:41:58.279291Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 8 Cookie# 3886160515250176494 SessionId# [2:123:7] Binding# {1.1/13207912024740109946@[2:131:1]} 2025-12-23T18:41:58.279342Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 8 Reason# explicit unbind request 2025-12-23T18:41:58.279391Z 2 00h00m00.000000s :BS_NODE DEBUG: 
{NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.6:19001/6 2025-12-23T18:41:58.279466Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.7:19001/7 2025-12-23T18:41:58.279495Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.9:19001/9 2025-12-23T18:41:58.279521Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.4:19001/4 2025-12-23T18:41:58.279564Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.1:19001/1 2025-12-23T18:41:58.279608Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.8:19001/8 2025-12-23T18:41:58.279638Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.5:19001/5 2025-12-23T18:41:58.279665Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.2:19001/2 2025-12-23T18:41:58.279708Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 8 NodeId# 127.0.0.3:19001/3 2025-12-23T18:41:58.279775Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 8 Subscription# {SessionId# [2:123:7] SubscriptionCookie# 0} 2025-12-23T18:41:58.279874Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [8:269:20] SessionId# [1:148:7] Cookie# 757741610108833850 2025-12-23T18:41:58.279962Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 8 Cookie# 757741610108833850 SessionId# [1:148:7] Binding# Record# {RootNodeId: 8 } 2025-12-23T18:41:58.280010Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:248:20] SessionId# [7:58:5] Cookie# 696993739821005444 2025-12-23T18:41:58.280046Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [7:58:5] Inserted# false Subscription# {SessionId# [7:58:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:58.280137Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 696993739821005444 SessionId# [7:58:5] Binding# {5.1/696993739821005444@[7:58:5]} Record# {RootNodeId: 8 } 2025-12-23T18:41:58.280489Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [8:61:5] SessionId# [0:0:0] Cookie# 6 2025-12-23T18:41:58.280538Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 5 SessionId# [8:61:5] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2025-12-23T18:41:58.280591Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {5.0/3886160515250176495@[0:0:0]} 2025-12-23T18:41:58.280910Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [8:124:2] Cookie# 3886160515250176494 2025-12-23T18:41:58.280994Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 3886160515250176494 SessionId# [8:124:2] Binding# {5.0/3886160515250176495@[8:61:5]} Record# {RootNodeId: 1 } 2025-12-23T18:41:58.281040Z 9 
00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [9:127:2] Cookie# 9582248937473337878 2025-12-23T18:41:58.281076Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [9:127:2] Inserted# false Subscription# {SessionId# [9:127:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:58.281121Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 9582248937473337878 SessionId# [9:127:2] Binding# {2.8/9582248937473337878@[9:127:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:58.281165Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [4:112:2] Cookie# 17003835850887789994 2025-12-23T18:41:58.281206Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [4:112:2] Inserted# false Subscription# {SessionId# [4:112:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:58.281250Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 17003835850887789994 SessionId# [4:112:2] Binding# {2.8/17003835850887789994@[4:112:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:58.281310Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [5:115:2] Cookie# 6943166289396186495 2025-12-23T18:41:58.281342Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [5:115:2] Inserted# false Subscription# {SessionId# [5:115:2] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-12-23T18:41:58.281388Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 6943166289396186495 SessionId# [5:115:2] Binding# {2.8/6943166289396186495@[5:115:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:58.281432Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [6:118:2] Cookie# 6541192469724224222 2025-12-23T18:41:58.281461Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [6:118:2] Inserted# false Subscription# {SessionId# [6:118:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:58.281544Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 6541192469724224222 SessionId# [6:118:2] Binding# {2.8/6541192469724224222@[6:118:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:58.282572Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [2:227:20] SessionId# [1:130:1] Cookie# 13207912024740109946 2025-12-23T18:41:58.282618Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-12-23T18:41:58.282813Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 2 Cookie# 13207912024740109946 SessionId# [1:130:1] B ... 
leLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.366369Z 1 00h00m01.740819s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.366398Z 1 00h00m01.740819s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.366425Z 1 00h00m01.740819s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.366467Z 1 00h00m01.740819s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.368255Z 1 00h00m03.788727s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.368329Z 1 00h00m03.788727s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:58.368372Z 1 00h00m03.788727s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:58.368410Z 1 00h00m03.788727s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:58.368465Z 1 00h00m03.788727s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:58.368493Z 1 00h00m03.788727s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:58.368525Z 1 00h00m03.788727s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.368560Z 1 00h00m03.788727s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.368599Z 1 00h00m03.788727s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.368665Z 1 00h00m03.788727s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.368723Z 1 00h00m03.788727s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.372736Z 1 00h00m08.335082s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.372810Z 1 00h00m08.335082s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:58.372865Z 1 00h00m08.335082s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:58.372893Z 1 00h00m08.335082s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 
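The TestProxyConfigMismatch trace ends, further down, with TEvNodeWardenNotifyConfigMismatch notifications: they fire when a replica answers with a ClusterStateGeneration/ClusterStateGuid pair that differs from the values the proxy or replica currently knows. The sketch below models only that comparison; the struct and function names are assumptions for this sketch, not taken from the YDB sources.

#include <cstdint>
#include <cstdio>

// Illustrative sketch of the generation/guid comparison behind the
// TEvNodeWardenNotifyConfigMismatch entries later in this trace.
struct ClusterStateId {
    uint64_t Generation = 0;
    uint64_t Guid = 0;
};

// Returns true (and logs) when the reported cluster state differs from the
// locally known one, i.e. when a config mismatch notification should be sent.
bool CheckConfigMismatch(const ClusterStateId& known, const ClusterStateId& reported) {
    if (reported.Generation != known.Generation || reported.Guid != known.Guid) {
        std::printf(
            "NotifyConfigMismatch: known gen=%llu guid=%llu, reported gen=%llu guid=%llu\n",
            (unsigned long long)known.Generation, (unsigned long long)known.Guid,
            (unsigned long long)reported.Generation, (unsigned long long)reported.Guid);
        return true;
    }
    return false;
}

int main() {
    ClusterStateId known{0, 0};           // what the proxy was configured with
    ClusterStateId reported{1, 2};        // what a replica answers, as in the trace
    CheckConfigMismatch(known, reported); // triggers the mismatch notification
}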
2025-12-23T18:41:58.372929Z 1 00h00m08.335082s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:58.372955Z 1 00h00m08.335082s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:58.372989Z 1 00h00m08.335082s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.373038Z 1 00h00m08.335082s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.373084Z 1 00h00m08.335082s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.373123Z 1 00h00m08.335082s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.373151Z 1 00h00m08.335082s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.375554Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.375651Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:41:58.375702Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.375731Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:41:58.375760Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-12-23T18:41:58.375792Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-12-23T18:41:58.375844Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.375898Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.375952Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.383334Z 1 00h00m18.700771s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.383461Z 1 00h00m18.700771s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:58.383512Z 1 00h00m18.700771s :STATESTORAGE DEBUG: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:58.383542Z 1 00h00m18.700771s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:58.383577Z 1 00h00m18.700771s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:58.383619Z 1 00h00m18.700771s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:58.383660Z 1 00h00m18.700771s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.383729Z 1 00h00m18.700771s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 1 ClusterStateGuid: 2} 2025-12-23T18:41:58.383759Z 1 00h00m18.700771s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=2 2025-12-23T18:41:58.383863Z 1 00h00m18.700771s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 2 2025-12-23T18:41:58.386069Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.386177Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:41:58.386226Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.386258Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:41:58.386284Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-12-23T18:41:58.386315Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-12-23T18:41:58.386380Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.386444Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 1 ClusterStateGuid: 2 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.386489Z 1 00h00m20.100000s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=2 2025-12-23T18:41:58.386574Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 2 2025-12-23T18:41:58.386674Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:41:58.386716Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 
msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2025-12-23T18:41:58.386790Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 2025-12-23T18:41:58.386913Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaUpdate TabletID: 72057594037932033} 2025-12-23T18:41:58.386949Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2025-12-23T18:41:58.387017Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 2025-12-23T18:41:58.390089Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: NKikimrStateStorage.TEvCleanup TabletID: 72057594037932033 ProposedLeader { RawX1: 0 RawX2: 0 } ClusterStateGeneration: 3 ClusterStateGuid: 4 2025-12-23T18:41:58.390161Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2025-12-23T18:41:58.390227Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 [GOOD] >> TStateStorageRingGroupState::TestStateStorageUpdateSig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder [GOOD] Test command err: RandomSeed# 8253797247410876422 2025-12-23T18:41:58.749981Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.750073Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.750106Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.750137Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.750169Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.750198Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.750228Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.750267Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.750321Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.750353Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.754131Z 1 
00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639257 Sender# [1:340:56] SessionId# [0:0:0] Cookie# 0 2025-12-23T18:41:58.754446Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.754565Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [1:235:51] SessionId# [2:131:1] Cookie# 14631605510420165438 2025-12-23T18:41:58.754638Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-12-23T18:41:58.763075Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 14631605510420165438 SessionId# [2:131:1] Binding# Record# {CacheUpdate { } } RootNodeId# 2 StorageConfigGeneration# 0 KnownNode# true 2025-12-23T18:41:58.763176Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-12-23T18:41:58.763267Z 2 00h00m00.002048s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008701s 2025-12-23T18:41:58.763328Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [8:149:1] Cookie# 3198655597481144840 2025-12-23T18:41:58.763367Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [8:149:1] Inserted# false Subscription# {SessionId# [8:149:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:58.763519Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 3198655597481144840 SessionId# [8:149:1] Binding# {1.2/3198655597481144840@[8:149:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-12-23T18:41:58.763590Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [3:134:1] Cookie# 17009546807917103920 2025-12-23T18:41:58.763637Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [3:134:1] Inserted# false Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-12-23T18:41:58.763686Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 17009546807917103920 SessionId# [3:134:1] Binding# {1.2/17009546807917103920@[3:134:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-12-23T18:41:58.763747Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.763848Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:1 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.764025Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:58.764075Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:58.764129Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 
72057594037936131 Cookie: 2} 2025-12-23T18:41:58.764156Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:58.764184Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:58.764222Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:58.764283Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:58.764343Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:58.764376Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:58.764460Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:58.764567Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.764628Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.764654Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.764681Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.764722Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.764800Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.764834Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.764880Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.764914Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.764943Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.764993Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:242:20] SessionId# [7:121:2] Cookie# 7114987495990996464 2025-12-23T18:41:58.765047Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [7:121:2] Inserted# false Subscription# {SessionId# [7:121:2] SubscriptionCookie# 0} 
NextSubscribeCookie# 2 2025-12-23T18:41:58.765131Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 7114987495990996464 SessionId# [7:121:2] Binding# {2.2/7114987495990996464@[7:121:2]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-12-23T18:41:58.765174Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:242:20] SessionId# [1:130:1] Cookie# 14631605510420165438 2025-12-23T18:41:58.765202Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-12-23T18:41:58.765243Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 14631605510420165438 SessionId# [1:130:1] Binding# {2.2/14631605510420165438@[1:130:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-12-23T18:41:58.765284Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:249:20] SessionId# [4:89:3] Cookie# 901653722102881043 2025-12-23T18:41:58.765313Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [4:89:3] Inserted# false Subscription# {SessionId# [4:89:3] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-12-23T18:41:58.765355Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 901653722102881043 SessionId# [4:89:3] Binding# {3.2/901653722102881043@[4:89:3]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-12-23T18:41:58.765393Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:249:20] SessionId# [5:92:3] Cookie# 6053658877590883132 2025-12-23T18:41:58.765453Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [5:92:3] Inserted# false Subscription# {SessionId# [5:92:3] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-12-23T18:41:58.765503Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 6053658877590883132 SessionId# [5:92:3] Binding# {3.2/6053658877590883132@[5:92:3]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-12-23T18:41:58.765604Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:3298559226995:0] : 157}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:41:58.765692Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:9895628993651:0] : 163}, {[1:7696605738099:0] : 161}, {[1:8796117365875:0] : 162}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2025-12-23T18:41:58.765779Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEv ... 
94037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:58.834734Z 1 00h00m07.573027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:58.834793Z 1 00h00m07.573027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:58.834822Z 1 00h00m07.573027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:58.834851Z 1 00h00m07.573027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:58.834941Z 1 00h00m07.573027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:58.834973Z 1 00h00m07.573027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:58.835009Z 1 00h00m07.573027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:58.835048Z 1 00h00m07.573027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:58.835106Z 1 00h00m07.573027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:58.835133Z 1 00h00m07.573027s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:58.835191Z 1 00h00m07.573027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.835233Z 1 00h00m07.573027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.835261Z 1 00h00m07.573027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.835291Z 1 00h00m07.573027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.835330Z 1 00h00m07.573027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.835396Z 1 00h00m07.573027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.835424Z 1 00h00m07.573027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.835479Z 1 00h00m07.573027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.835517Z 1 00h00m07.573027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.835545Z 1 00h00m07.573027s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 
ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:58.835679Z 1 00h00m07.573027s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:3298559226995:0] : 157}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:41:58.835755Z 1 00h00m07.573027s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:9895628993651:0] : 163}, {[1:7696605738099:0] : 161}, {[1:8796117365875:0] : 162}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2025-12-23T18:41:58.835846Z 1 00h00m07.573027s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 10 Signature: {{[1:8796117365875:0] : 162}, {[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:9895628993651:0] : 163}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:41:58.839463Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-12-23T18:41:58.839567Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-12-23T18:41:58.839666Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:1 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-12-23T18:41:58.839737Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:41:58.839784Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.839813Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:41:58.839848Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-12-23T18:41:58.839875Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-12-23T18:41:58.839923Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:41:58.839956Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:58.839982Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:41:58.840008Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-12-23T18:41:58.840038Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 
2025-12-23T18:41:58.840108Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.840160Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.840205Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.840281Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.840326Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.840390Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.840439Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.840469Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.840511Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.840564Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:58.840663Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 154}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:41:58.840757Z 1 00h00m10.002048s :STATESTORAGE DEBUG: 
RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:7696605738099:0] : 161}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2025-12-23T18:41:58.840838Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 8 Signature: {{[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:41:58.840943Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037932033 Signature: { Size: 10 Signature: {{[1:8796117365875:0] : 162}, {[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:9895628993651:0] : 163}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TStateStorageRingGroupState::TestStateStorageUpdateSig [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged [GOOD] Test command err: RandomSeed# 16503419133464534035 2025-12-23T18:41:58.976066Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [3:109:2] Cookie# 2120082032387937339 2025-12-23T18:41:58.976145Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [3:109:2] Inserted# false Subscription# {SessionId# [3:109:2] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-12-23T18:41:58.985102Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 2120082032387937339 SessionId# [3:109:2] Binding# {2.0/2120082032387937339@[3:109:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:58.985234Z 3 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.009211s 2025-12-23T18:41:58.985316Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [2:227:20] SessionId# [1:130:1] Cookie# 11650120398590932046 2025-12-23T18:41:58.985382Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2025-12-23T18:41:58.985764Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 2 Cookie# 11650120398590932046 SessionId# [1:130:1] 
Binding# {3.3/15656952209279424437@[1:133:2]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032> Record# {BoundNodes { NodeId { Host: "127.0.0.7" Port: 19001 NodeId: 7 } Meta { Fingerprint: "\3403\207\365\032>ClusterStateGeneration=0 clusterStateGeneration=10 Info->ClusterStateGuid=0 clusterStateGuid=0 2025-12-23T18:41:59.133551Z 1 00h00m10.002048s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 10 ClusterStateGuid: 0 2025-12-23T18:41:59.141100Z 1 00h00m18.444976s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:59.141208Z 1 00h00m18.444976s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:59.141257Z 1 00h00m18.444976s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:59.141306Z 1 00h00m18.444976s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:59.141334Z 1 00h00m18.444976s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:59.141360Z 1 00h00m18.444976s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:59.141403Z 1 00h00m18.444976s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.141456Z 1 00h00m18.444976s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.141522Z 1 00h00m18.444976s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.141566Z 1 00h00m18.444976s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.141592Z 1 00h00m18.444976s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.161425Z 1 00h00m38.682777s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:59.161531Z 1 00h00m38.682777s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:59.161584Z 1 00h00m38.682777s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:59.161625Z 1 00h00m38.682777s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:59.161654Z 1 00h00m38.682777s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:59.161688Z 1 00h00m38.682777s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:59.161731Z 1 00h00m38.682777s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 
TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.161777Z 1 00h00m38.682777s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.161804Z 1 00h00m38.682777s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.161832Z 1 00h00m38.682777s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.161865Z 1 00h00m38.682777s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-system [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 [GOOD] Test command err: RandomSeed# 5580327719994143806 2025-12-23T18:41:59.398337Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [5:140:1] Cookie# 10512024018850180299 2025-12-23T18:41:59.398407Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [5:140:1] Inserted# false Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:59.398445Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 1 Cookie# 10512024018850180299 SessionId# [5:140:1] Binding# 2025-12-23T18:41:59.398492Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 1 Reason# explicit unbind request 2025-12-23T18:41:59.398520Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.6:19001/6 2025-12-23T18:41:59.398558Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2025-12-23T18:41:59.398584Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2025-12-23T18:41:59.398601Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2025-12-23T18:41:59.398617Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2025-12-23T18:41:59.398645Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 2025-12-23T18:41:59.398677Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2025-12-23T18:41:59.398706Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2025-12-23T18:41:59.398758Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2025-12-23T18:41:59.398835Z 5 00h00m00.000000s :BS_NODE DEBUG: 
{NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} 2025-12-23T18:41:59.398910Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:59.398969Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 3 Binding# {3.0/3874103875690815867@[0:0:0]} SessionId# [0:0:0] 2025-12-23T18:41:59.399040Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 11317361839857422787 2025-12-23T18:41:59.399083Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2025-12-23T18:41:59.408698Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 11317361839857422787 SessionId# [2:131:1] Binding# {1.5/11317361839857422787@[2:131:1]} Record# {RootNodeId: 1 } 2025-12-23T18:41:59.408852Z 2 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.009823s 2025-12-23T18:41:59.408927Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [9:152:1] Cookie# 16332406167419846849 2025-12-23T18:41:59.408987Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [9:152:1] Inserted# false Subscription# {SessionId# [9:152:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:59.409064Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 16332406167419846849 SessionId# [9:152:1] Binding# {1.5/16332406167419846849@[9:152:1]} Record# {RootNodeId: 1 } 2025-12-23T18:41:59.409323Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [2:227:20] SessionId# [1:130:1] Cookie# 11317361839857422787 2025-12-23T18:41:59.409374Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:59.409537Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 2 Cookie# 11317361839857422787 SessionId# [1:130:1] Binding# Record# {DeletedBoundNodeIds { Host: "127.0.0.9" Port: 19001 NodeId: 9 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.5" Port: 19001 NodeId: 5 } } RootNodeId# 1 StorageConfigGeneration# 0 KnownNode# true 2025-12-23T18:41:59.409592Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:59.409640Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.9:19001/9 2025-12-23T18:41:59.409683Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.1:19001/1 2025-12-23T18:41:59.409720Z 1 00h00m00.000000s :BS_NODE DEBUG: 
{NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.5:19001/5 2025-12-23T18:41:59.409835Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:59.409896Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 5 Binding# {5.0/10512024018850180300@[0:0:0]} SessionId# [0:0:0] 2025-12-23T18:41:59.410032Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [5:92:3] SessionId# [0:0:0] Cookie# 6 2025-12-23T18:41:59.410093Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 3 SessionId# [5:92:3] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2025-12-23T18:41:59.410161Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {3.0/3874103875690815867@[0:0:0]} 2025-12-23T18:41:59.410496Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [8:124:2] Cookie# 13986020086393588770 2025-12-23T18:41:59.410555Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [8:124:2] Inserted# false Subscription# {SessionId# [8:124:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:59.410615Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 13986020086393588770 SessionId# [8:124:2] Binding# {2.5/13986020086393588770@[8:124:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:59.410702Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [3:109:2] Cookie# 7520778170622503978 2025-12-23T18:41:59.410871Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [3:109:2] Inserted# false Subscription# {SessionId# [3:109:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:59.410942Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 7520778170622503978 SessionId# [3:109:2] Binding# {2.5/7520778170622503978@[3:109:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:59.410986Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [4:112:2] Cookie# 12485454044800358261 2025-12-23T18:41:59.411016Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [4:112:2] Inserted# false Subscription# {SessionId# [4:112:2] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-12-23T18:41:59.411073Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 12485454044800358261 SessionId# [4:112:2] Binding# {2.5/12485454044800358261@[4:112:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:59.411155Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [1:139:4] SessionId# [0:0:0] Cookie# 6 2025-12-23T18:41:59.411187Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 5 SessionId# [1:139:4] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2025-12-23T18:41:59.411218Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind 
Binding# {5.0/10512024018850180300@[0:0:0]} 2025-12-23T18:41:59.412597Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [5:248:20] SessionId# [3:91:4] Cookie# 3874103875690815867 2025-12-23T18:41:59.412761Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 5 Cookie# 3874103875690815867 SessionId# [3:91:4] Binding# {2.1/7520778170622503978@[3:109:2]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.5" Port: 19001 NodeId: 5 } Meta { Fingerprint: "\3403\207\365\032>ClusterStateGeneration=0 msgGeneration=0 Info->ClusterStateGuid=2 msgGuid=0 2025-12-23T18:41:59.527058Z 1 00h00m30.180952s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:41:59.527096Z 1 00h00m30.180952s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-12-23T18:41:59.527129Z 1 00h00m30.180952s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-12-23T18:41:59.527192Z 1 00h00m30.180952s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:59.527251Z 1 00h00m30.180952s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 0 2025-12-23T18:41:59.527306Z 1 00h00m30.180952s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 2 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:59.527333Z 1 00h00m30.180952s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=0 Info->ClusterStateGuid=0 clusterStateGuid=2 2025-12-23T18:41:59.527416Z 1 00h00m30.180952s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 2 2025-12-23T18:41:59.535181Z 1 00h00m38.386346s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:59.535276Z 1 00h00m38.386346s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:59.535319Z 1 00h00m38.386346s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:59.535361Z 1 00h00m38.386346s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:59.535403Z 1 00h00m38.386346s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:59.535478Z 1 00h00m38.386346s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:59.535538Z 1 00h00m38.386346s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.535602Z 1 00h00m38.386346s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 
72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.535635Z 1 00h00m38.386346s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.535668Z 1 00h00m38.386346s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.535709Z 1 00h00m38.386346s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.538462Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:59.538555Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:41:59.538607Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:59.538661Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:41:59.538704Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-12-23T18:41:59.538782Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-12-23T18:41:59.538841Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:59.538930Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:59.538973Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:59.548670Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:59.548775Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:41:59.548824Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:59.548901Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:41:59.548942Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-12-23T18:41:59.548972Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-12-23T18:41:59.549054Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 
TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:59.549119Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 1 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:59.549169Z 1 00h00m50.300000s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=0 2025-12-23T18:41:59.549258Z 1 00h00m50.300000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:334} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 0 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-anonymous |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageUpdateSig [GOOD] Test command err: RandomSeed# 6781202818594379446 2025-12-23T18:41:59.786799Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [9:152:1] Cookie# 5588691179068861852 2025-12-23T18:41:59.786874Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [9:152:1] Inserted# false Subscription# {SessionId# [9:152:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:59.786935Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:750} TEvNodeConfigUnbind NodeId# 1 Cookie# 5588691179068861852 SessionId# [9:152:1] Binding# {4.1/15019800098807882615@[9:83:4]} 2025-12-23T18:41:59.787004Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:759} UnbindNode NodeId# 1 Reason# explicit unbind request 2025-12-23T18:41:59.787049Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.6:19001/6 2025-12-23T18:41:59.787126Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2025-12-23T18:41:59.787167Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2025-12-23T18:41:59.787192Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2025-12-23T18:41:59.787219Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 2025-12-23T18:41:59.787271Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2025-12-23T18:41:59.787302Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2025-12-23T18:41:59.787326Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound 
RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2025-12-23T18:41:59.787395Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [9:152:1] SubscriptionCookie# 0} 2025-12-23T18:41:59.788305Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [8:149:1] Cookie# 2101025092411604394 2025-12-23T18:41:59.788349Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [8:149:1] Inserted# false Subscription# {SessionId# [8:149:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:59.796673Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 2101025092411604394 SessionId# [8:149:1] Binding# {1.9/2101025092411604394@[8:149:1]} Record# {RootNodeId: 1 } 2025-12-23T18:41:59.796788Z 8 00h00m00.000000s :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008491s 2025-12-23T18:41:59.796860Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 12446098852363541852 2025-12-23T18:41:59.796945Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:59.797020Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 12446098852363541852 SessionId# [2:131:1] Binding# {1.9/12446098852363541852@[2:131:1]} Record# {RootNodeId: 1 } 2025-12-23T18:41:59.797193Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [7:77:4] Cookie# 4693538354546740236 2025-12-23T18:41:59.797245Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [7:77:4] Inserted# false Subscription# {SessionId# [7:77:4] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:59.797296Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 4 Cookie# 4693538354546740236 SessionId# [7:77:4] Binding# {4.1/4693538354546740236@[7:77:4]} Record# {RootNodeId: 9 } 2025-12-23T18:41:59.797335Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [9:83:4] Cookie# 15019800098807882615 2025-12-23T18:41:59.797366Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [9:83:4] Inserted# false Subscription# {SessionId# [9:83:4] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:59.797427Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 4 Cookie# 15019800098807882615 SessionId# [9:83:4] Binding# {4.1/15019800098807882615@[9:83:4]} Record# {RootNodeId: 9 } 2025-12-23T18:41:59.797489Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:403} AbortBinding Binding# {4.1/15019800098807882615@[9:83:4]} Reason# binding cycle 2025-12-23T18:41:59.797559Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:125} AbortAllScatterTasks Binding# {4.1/15019800098807882615@[9:83:4]} 2025-12-23T18:41:59.797622Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:345} 
UnsubscribeInterconnect NodeId# 4 Subscription# {SessionId# [9:83:4] SubscriptionCookie# 0} 2025-12-23T18:41:59.797675Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-12-23T18:41:59.797736Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:188} Initiated bind NodeId# 3 Binding# {3.0/15019800098807882616@[0:0:0]} SessionId# [0:0:0] 2025-12-23T18:41:59.797800Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [6:74:4] Cookie# 5844552239905759993 2025-12-23T18:41:59.797835Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 4 SessionId# [6:74:4] Inserted# false Subscription# {SessionId# [6:74:4] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:59.797900Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 4 Cookie# 5844552239905759993 SessionId# [6:74:4] Binding# {4.1/5844552239905759993@[6:74:4]} Record# {RootNodeId: 9 } 2025-12-23T18:41:59.798661Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [9:104:3] SessionId# [0:0:0] Cookie# 6 2025-12-23T18:41:59.798715Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 3 SessionId# [9:104:3] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2025-12-23T18:41:59.798773Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {3.0/15019800098807882616@[0:0:0]} 2025-12-23T18:41:59.798842Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [9:276:20] SessionId# [4:82:8] Cookie# 15019800098807882615 2025-12-23T18:41:59.798890Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 9 SessionId# [4:82:8] Inserted# false Subscription# {SessionId# [4:82:8] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-12-23T18:41:59.799108Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 9 Cookie# 15019800098807882615 SessionId# [4:82:8] Binding# {2.9/6081208924876694140@[4:112:2]} Record# {DeletedBoundNodeIds { Host: "127.0.0.6" Port: 19001 NodeId: 6 } DeletedBoundNodeIds { Host: "127.0.0.7" Port: 19001 NodeId: 7 } DeletedBoundNodeIds { Host: "127.0.0.4" Port: 19001 NodeId: 4 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.8" Port: 19001 NodeId: 8 } DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } DeletedBoundNodeIds { Host: "127.0.0.5" Port: 19001 NodeId: 5 } DeletedBoundNodeIds { Host: "127.0.0.3" Port: 19001 NodeId: 3 } } RootNodeId# 9 StorageConfigGeneration# 0 KnownNode# true 2025-12-23T18:41:59.799191Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 9 SessionId# [4:82:8] Inserted# false Subscription# {SessionId# [4:82:8] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-12-23T18:41:59.799261Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 9 NodeId# 127.0.0.6:19001/6 2025-12-23T18:41:59.799311Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 9 NodeId# 127.0.0.7:19001/7 2025-12-23T18:41:59.799337Z 4 00h00m00.000000s :BS_NODE DEBUG: 
{NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 9 NodeId# 127.0.0.4:19001/4 2025-12-23T18:41:59.799363Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 9 NodeId# 127.0.0.1:19001/1 2025-12-23T18:41:59.799410Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 9 NodeId# 127.0.0.8:19001/8 2025-12-23T18:41:59.799466Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 9 NodeId# 127.0.0.2:19001/2 2025-12-23T18:41:59.799500Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 9 NodeId# 127.0.0.5:19001/5 2025-12-23T18:41:59.799532Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 9 NodeId# 127.0.0.3:19001/3 2025-12-23T18:41:59.799621Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [3:109:2] Cookie# 488114974350050366 2025-12-23T18:41:59.799654Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [3:109:2] Inserted# false Subscription# {SessionId# [3:109:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:59.799725Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 488114974350050366 SessionId# [3:109:2] Binding# {2.9/488114974350050366@[3:109:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:59.799767Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [4:112:2] Cookie# 6081208924876694140 2025-12-23T18:41:59.799808Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [4:112:2] Inserted# false Subscription# {SessionId# [4:112:2] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-12-23T18:41:59.799854Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 6081208924876694140 SessionId# [4:112:2] Binding# {2.9/6081208924876694140@[4:112:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:59.799908Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [5:115:2] Cookie# 440086599401688819 2025-12-23T18:41:59.799940Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [5:115:2] Inserted# false Subscription# {SessionId# [5:115:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:41:59.799993Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 2 Cookie# 440086599401688819 SessionId# [5:115:2] Binding# {2.9/440086599401688819@[5:115:2]} Record# {RootNodeId: 1 } 2025-12-23T18:41:59.800976Z 4 ... 
-12-23T18:41:59.869343Z 1 00h00m00.372552s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:59.869372Z 1 00h00m00.372552s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:59.869402Z 1 00h00m00.372552s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:59.869428Z 1 00h00m00.372552s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:59.869453Z 1 00h00m00.372552s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:59.869484Z 1 00h00m00.372552s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.869515Z 1 00h00m00.372552s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.869554Z 1 00h00m00.372552s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.869601Z 1 00h00m00.372552s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.869636Z 1 00h00m00.372552s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.869931Z 1 00h00m00.815662s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:59.869984Z 1 00h00m00.815662s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:59.870014Z 1 00h00m00.815662s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:59.870042Z 1 00h00m00.815662s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:59.870074Z 1 00h00m00.815662s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:59.870127Z 1 00h00m00.815662s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:59.870176Z 1 00h00m00.815662s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.870208Z 1 00h00m00.815662s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.870247Z 1 00h00m00.815662s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.870300Z 1 00h00m00.815662s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.870331Z 1 
00h00m00.815662s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.871613Z 1 00h00m01.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 65538 Sender# [0:0:0] SessionId# [0:0:0] Cookie# 0 2025-12-23T18:41:59.872167Z 1 00h00m01.808228s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:59.872257Z 1 00h00m01.808228s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:59.872293Z 1 00h00m01.808228s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:59.872320Z 1 00h00m01.808228s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:59.872347Z 1 00h00m01.808228s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:59.872389Z 1 00h00m01.808228s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:59.872430Z 1 00h00m01.808228s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.872465Z 1 00h00m01.808228s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.872505Z 1 00h00m01.808228s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.872541Z 1 00h00m01.808228s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.872572Z 1 00h00m01.808228s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.874515Z 1 00h00m03.972021s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:59.874576Z 1 00h00m03.972021s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:59.874612Z 1 00h00m03.972021s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:59.874651Z 1 00h00m03.972021s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:59.874702Z 1 00h00m03.972021s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:59.874755Z 1 00h00m03.972021s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:59.874789Z 1 00h00m03.972021s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.874822Z 1 00h00m03.972021s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 
72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.874847Z 1 00h00m03.972021s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.874872Z 1 00h00m03.972021s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.874897Z 1 00h00m03.972021s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.879159Z 1 00h00m08.472710s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:41:59.879242Z 1 00h00m08.472710s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:41:59.879280Z 1 00h00m08.472710s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:41:59.879327Z 1 00h00m08.472710s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:41:59.879358Z 1 00h00m08.472710s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:41:59.879388Z 1 00h00m08.472710s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:41:59.879429Z 1 00h00m08.472710s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.879504Z 1 00h00m08.472710s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.879569Z 1 00h00m08.472710s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.879627Z 1 00h00m08.472710s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.879659Z 1 00h00m08.472710s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:41:59.882115Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-12-23T18:41:59.882214Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:41:59.882263Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:41:59.882320Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:41:59.882357Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-12-23T18:41:59.882387Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 
2025-12-23T18:41:59.882442Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:59.882512Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:59.882577Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:41:59.882624Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig undelivered ringGroup:0 for: 3 2025-12-23T18:41:59.882706Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:297:39] CurrentLeaderTablet: [1:301:41] CurrentGeneration: 2 CurrentStep: 0} |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> DataShardVolatile::UpsertNoLocksArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-dbadmin |94.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |94.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |94.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly >> TStateStorageRingGroupState::TestStateStorageDoubleReply >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly [GOOD] >> TStateStorageRingGroupState::TestStateStorageDoubleReply [GOOD] >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-dbadmin >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestNormalMirror ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageDoubleReply [GOOD] Test command err: RandomSeed# 1400863793463016934 2025-12-23T18:42:01.720810Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [9:276:20] SessionId# [5:63:8] Cookie# 12235719336870890229 2025-12-23T18:42:01.720880Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 9 SessionId# [5:63:8] Inserted# false Subscription# {SessionId# [5:63:8] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-12-23T18:42:01.727614Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 9 Cookie# 12235719336870890229 SessionId# [5:63:8] Binding# {4.1/11328235433531878015@[5:71:4]} 
Record# {BoundNodes { NodeId { Host: "127.0.0.8" Port: 19001 NodeId: 8 } Meta { Fingerprint: "\3403\207\365\032>> TSchemeShardLoginTest::UserLogin >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly [GOOD] Test command err: RandomSeed# 12350585075710813424 2025-12-23T18:42:01.521622Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639238 Sender# [3:249:20] SessionId# [1:133:2] Cookie# 193912827849025008 2025-12-23T18:42:01.532140Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 3 Cookie# 193912827849025008 SessionId# [1:133:2] Binding# {7.0/10382067765405178076@[1:145:6]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032> Record# {DeletedBoundNodeIds { Host: "127.0.0.6" Port: 19001 NodeId: 6 } DeletedBoundNodeIds { Host: "127.0.0.7" Port: 19001 NodeId: 7 } DeletedBoundNodeIds { Host: "127.0.0.9" Port: 19001 NodeId: 9 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.5" Port: 19001 NodeId: 5 } DeletedBoundNodeIds { Host: "127.0.0.3" Port: 19001 NodeId: 3 } } RootNodeId# 1 StorageConfigGeneration# 0 KnownNode# true 2025-12-23T18:42:01.533861Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-12-23T18:42:01.533897Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.6:19001/6 2025-12-23T18:42:01.533937Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.7:19001/7 2025-12-23T18:42:01.533963Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.9:19001/9 2025-12-23T18:42:01.533988Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.1:19001/1 2025-12-23T18:42:01.534013Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.5:19001/5 2025-12-23T18:42:01.534053Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:551} DeleteBound RefererNodeId# 2 NodeId# 127.0.0.3:19001/3 2025-12-23T18:42:01.534120Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [7:277:20] SessionId# [9:32:7] Cookie# 9173956238574735143 2025-12-23T18:42:01.534158Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 7 SessionId# [9:32:7] Inserted# false Subscription# {SessionId# [9:32:7] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:42:01.534214Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 7 Cookie# 9173956238574735143 SessionId# [9:32:7] Binding# {7.1/9173956238574735143@[9:32:7]} Record# {RootNodeId: 3 } 2025-12-23T18:42:01.534255Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [1:133:2] 
SessionId# [0:0:0] Cookie# 6 2025-12-23T18:42:01.534292Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 3 SessionId# [1:133:2] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2025-12-23T18:42:01.534483Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [3:134:1] Cookie# 193912827849025008 2025-12-23T18:42:01.534534Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 1 SessionId# [3:134:1] Inserted# false Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2025-12-23T18:42:01.534592Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 1 Cookie# 193912827849025008 SessionId# [3:134:1] Binding# {1.0/193912827849025008@[3:134:1]} Record# {RootNodeId: 1 } 2025-12-23T18:42:01.538338Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [3:249:20] SessionId# [5:92:3] Cookie# 8767813860750250525 2025-12-23T18:42:01.538402Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 3 SessionId# [5:92:3] Inserted# false Subscription# {SessionId# [5:92:3] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-12-23T18:42:01.538493Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 3 Cookie# 8767813860750250525 SessionId# [5:92:3] Binding# {3.3/8767813860750250525@[5:92:3]} Record# {RootNodeId: 1 } 2025-12-23T18:42:01.556279Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:42:01.556361Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:42:01.556390Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:42:01.556419Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:42:01.556470Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:42:01.556501Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:42:01.556530Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:42:01.556555Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:42:01.556588Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:42:01.556644Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:42:01.556982Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:263:20] SessionId# [7:58:5] Cookie# 9497508734625322683 2025-12-23T18:42:01.557032Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [7:58:5] Inserted# false Subscription# {SessionId# [7:58:5] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-12-23T18:42:01.557134Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} 
TEvNodeConfigReversePush NodeId# 5 Cookie# 9497508734625322683 SessionId# [7:58:5] Binding# {5.3/9497508734625322683@[7:58:5]} Record# {RootNodeId: 1 } 2025-12-23T18:42:01.557197Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [5:263:20] SessionId# [6:55:5] Cookie# 13650367494264703555 2025-12-23T18:42:01.557242Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 5 SessionId# [6:55:5] Inserted# false Subscription# {SessionId# [6:55:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:42:01.557285Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} TEvNodeConfigReversePush NodeId# 5 Cookie# 13650367494264703555 SessionId# [6:55:5] Binding# {5.3/13650367494264703555@[6:55:5]} Record# {RootNodeId: 1 } 2025-12-23T18:42:01.557622Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639239 Sender# [7:277:20] SessionId# [9:32:7] Cookie# 9173956238574735143 2025-12-23T18:42:01.557655Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:373} SubscribeToPeerNode NodeId# 7 SessionId# [9:32:7] Inserted# false Subscription# {SessionId# [9:32:7] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-12-23T18:42:01.557705Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:437} ... E DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:42:01.645111Z 1 00h00m08.176311s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:42:01.645149Z 1 00h00m08.176311s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-12-23T18:42:01.645182Z 1 00h00m08.176311s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-12-23T18:42:01.645240Z 1 00h00m08.176311s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:42:01.645281Z 1 00h00m08.176311s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:42:01.645309Z 1 00h00m08.176311s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:42:01.645336Z 1 00h00m08.176311s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:42:01.645361Z 1 00h00m08.176311s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:42:01.645415Z 1 00h00m08.176311s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:42:01.645442Z 1 00h00m08.176311s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:42:01.645471Z 1 00h00m08.176311s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 
ClusterStateGuid: 0} 2025-12-23T18:42:01.645499Z 1 00h00m08.176311s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:42:01.645542Z 1 00h00m08.176311s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:42:01.645626Z 1 00h00m08.176311s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:3298559226995:0] : 157}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:42:01.645698Z 1 00h00m08.176311s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}, {[1:3298559226995:0] : 157}}}} 2025-12-23T18:42:01.645834Z 1 00h00m08.176311s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:9895628993651:0] : 163}, {[1:7696605738099:0] : 161}, {[1:8796117365875:0] : 162}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2025-12-23T18:42:01.645904Z 1 00h00m08.176311s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037936131 Signature: { Size: 10 Signature: {{[1:8796117365875:0] : 162}, {[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:9895628993651:0] : 163}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:42:01.654948Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-12-23T18:42:01.655069Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-12-23T18:42:01.655144Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:1 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-12-23T18:42:01.655194Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:42:01.655274Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:42:01.655318Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:42:01.655349Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-12-23T18:42:01.655407Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 
Cookie: 4} 2025-12-23T18:42:01.655449Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:42:01.655476Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:42:01.655504Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:42:01.655528Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-12-23T18:42:01.655557Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-12-23T18:42:01.655626Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:42:01.655705Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:42:01.655754Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:42:01.655816Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:42:01.655859Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:42:01.655936Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:42:01.655976Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:42:01.656011Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:42:01.656049Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:42:01.656089Z 1 00h00m10.002048s :STATESTORAGE DEBUG: 
ProxyRequest::HandleUpdateSig ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0} 2025-12-23T18:42:01.656177Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 154}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:42:01.656254Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 154}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:42:01.656350Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037932033 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:3298559226995:0] : 157}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:42:01.656423Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:332:54] CurrentLeaderTablet: [1:340:56] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:7696605738099:0] : 161}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2025-12-23T18:42:01.656503Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037932033 Signature: { Size: 8 Signature: {{[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} 2025-12-23T18:42:01.656594Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037932033 Signature: { Size: 10 Signature: {{[1:8796117365875:0] : 162}, {[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:9895628993651:0] : 163}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent [GOOD] Test command err: RandomSeed# 196692117368989262 2025-12-23T18:42:02.051386Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 131077 Sender# [4:89:3] SessionId# [0:0:0] Cookie# 6 2025-12-23T18:42:02.051469Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:223} TEvNodeConnected NodeId# 3 SessionId# [4:89:3] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2025-12-23T18:42:02.051530Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:259} Continuing bind Binding# {3.0/2346140941702157824@[0:0:0]} 2025-12-23T18:42:02.051640Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} 
StateFunc Type# 268639238 Sender# [1:220:36] SessionId# [8:149:1] Cookie# 11163132844829645177 2025-12-23T18:42:02.058731Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:611} TEvNodeConfigPush NodeId# 1 Cookie# 11163132844829645177 SessionId# [8:149:1] Binding# {7.1/1753765561417366820@[8:29:7]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-system >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-anonymous >> TContinuousBackupWithRebootsTests::Basic |94.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> KqpSinkTx::TIsolationSettingTest+IsOlap-UsePragma [GOOD] >> KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |94.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |94.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> KqpStats::SelfJoin |94.5%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2025-12-23T18:40:24.302218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:24.302322Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:24.605138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:25.588175Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:25.588768Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmp8UUUHD/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:25.589516Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmp8UUUHD/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmp8UUUHD/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7616395707372458774 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:25.736087Z node 9 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 
bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmp8UUUHD/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:40:25.740745Z node 9 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmp8UUUHD/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:40:25.740939Z node 9 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000002:_:0:0:0]: (2147483650) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmp8UUUHD/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:40:25.833631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:40:26.021529Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:26.022150Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmp8UUUHD/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:26.022483Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmp8UUUHD/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmp8UUUHD/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14262605909687544481 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunk ... Weight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:41:47.898721Z node 140 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:41:47.899429Z node 140 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmpgM36sA/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:41:47.899682Z node 140 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmpgM36sA/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmpgM36sA/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10551710409606141186 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:41:47.949658Z node 139 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:41:47.950234Z node 139 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmpgM36sA/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:41:47.950445Z node 139 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmpgM36sA/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmpgM36sA/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13371840289327043860 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:41:48.076593Z node 142 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:41:48.083129Z node 142 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmpgM36sA/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:41:48.083410Z node 142 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmpgM36sA/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmpgM36sA/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8617962223592466288 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:41:48.713061Z node 136 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:48.713165Z node 136 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:48.745016Z node 136 :STATISTICS WARN: tx_init.cpp:298: [72075186233409554] TTxInit::Complete. 
EnableColumnStatistics=false 2025-12-23T18:41:54.019824Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:54.019978Z node 145 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:54.911369Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:59.364763Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:59.364868Z node 154 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:59.501845Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:00.708960Z node 157 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:42:00.709672Z node 157 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmpp1ly0u/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:42:00.710019Z node 157 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmpp1ly0u/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f03/r3tmp/tmpp1ly0u/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6943782434089694845 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> TSchemeShardLoginTest::UserLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLogin |94.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |94.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |94.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut >> TBlobStorageProxyTest::TestNormalMirror [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-anonymous |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TTxDataShardUploadRows::TestUploadRows |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-anonymous |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |94.5%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TConsoleTests::TestAttributes [GOOD] >> TConsoleTests::TestAttributesExtSubdomain >> Cdc::UpdateShardCount [GOOD] >> Cdc::UpdateRetentionPeriod >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-clusteradmin >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> TImmediateControlsConfiguratorTests::TestDynamicMap >> TSchemeShardLoginTest::TestExternalLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-dbadmin >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup [GOOD] >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters >> TTxDataShardUploadRows::RetryUploadRowsToShard >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-ordinaryuser >> Cdc::HugeKey[TopicRunner] [GOOD] >> Cdc::HugeKeyDebezium |94.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |94.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |94.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> KqpScanArrowInChanels::JoinWithParams >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-system |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain [GOOD] >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword >> TJaegerTracingConfiguratorTests::DefaultConfig |94.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |94.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |94.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters [GOOD] >> TSchemeShardLoginTest::ChangeAccountLockoutParameters |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-system >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME [GOOD] >> KqpResultSetFormats::ArrowFormat_Multistatement >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-ordinaryuser >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-clusteradmin |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-dbadmin |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-anonymous >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-system [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-system >> Cdc::UpdateRetentionPeriod [GOOD] >> Cdc::SupportedTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:03.108654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:03.108747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:03.108787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:03.108829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:03.108870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:03.108921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:03.108993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:03.109073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:03.109996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:03.110315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:03.311351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:03.311414Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:03.333701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:03.333972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:03.334114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:03.343023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:03.343289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:03.344367Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:03.344666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:03.348348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:03.348571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:03.349847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:03.349911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:03.350048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:03.350105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:03.350153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:03.350319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:03.607786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.608896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.609009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.609089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:42:03.609160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.609229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.609385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.609459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.609529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.609605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.609675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.609801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.615138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.615265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.615382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 944, LocalPathId: 10] was 0 2025-12-23T18:42:12.395185Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2025-12-23T18:42:12.395220Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2025-12-23T18:42:12.395246Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2025-12-23T18:42:12.395270Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2025-12-23T18:42:12.395325Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2025-12-23T18:42:12.395359Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2025-12-23T18:42:12.395385Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2025-12-23T18:42:12.395425Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2025-12-23T18:42:12.395454Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2025-12-23T18:42:12.395483Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 
2025-12-23T18:42:12.395515Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2025-12-23T18:42:12.395540Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2025-12-23T18:42:12.395567Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2025-12-23T18:42:12.395593Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2025-12-23T18:42:12.395620Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2025-12-23T18:42:12.395668Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2025-12-23T18:42:12.395708Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2025-12-23T18:42:12.395734Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2025-12-23T18:42:12.395760Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2025-12-23T18:42:12.395788Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2025-12-23T18:42:12.395834Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2025-12-23T18:42:12.395887Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2025-12-23T18:42:12.395917Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2025-12-23T18:42:12.395944Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2025-12-23T18:42:12.395969Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2025-12-23T18:42:12.395994Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2025-12-23T18:42:12.396043Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2025-12-23T18:42:12.398016Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.398121Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.398212Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for 
Shards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.398316Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.398390Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.398516Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.398851Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.398981Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.399504Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.399641Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.399951Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.400175Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.400254Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.400362Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.400634Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.400728Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.401069Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.401299Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.401379Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.401453Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.401600Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.401657Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.401711Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:12.412073Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:12.418807Z node 5 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:12.418923Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:12.419563Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:12.419656Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:12.419731Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:12.423209Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:819:2788] sender: [5:877:2058] recipient: [5:15:2062] 2025-12-23T18:42:12.463872Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:12.463971Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-12-23T18:42:12.543740Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:12.556336Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:12.556496Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:12.556541Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:12.556996Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2025-12-23T18:42:12.557058Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:12.557108Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:869:2827], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-12-23T18:42:12.557785Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 0 |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> Cdc::RacyActivateAndEnqueue >> TTxAllocatorClientTest::Boot >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> 
TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] >> KqpUserConstraint::KqpReadNull+UploadNull >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable >> TTxAllocatorClientTest::Boot [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] >> KqpStats::SelfJoin [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TTxDataShardUploadRows::RetryUploadRowsToShard [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2025-12-23T18:42:14.254773Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-12-23T18:42:14.255333Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-12-23T18:42:14.256213Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-12-23T18:42:14.258274Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:14.258884Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-12-23T18:42:14.270899Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:14.271034Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:14.271121Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:14.271232Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-12-23T18:42:14.271411Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:14.271537Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-12-23T18:42:14.271685Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-clusteradmin >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain |94.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |94.6%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |94.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-anonymous >> TestProgram::SimpleFunction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2025-12-23T18:41:09.029700Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125309975941223:2262];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:09.029747Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:41:09.170792Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:09.203889Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125312010393802:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:09.203939Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034ce/r3tmp/tmpDpm6V9/pdisk_1.dat 2025-12-23T18:41:09.322796Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:41:09.594956Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:09.587168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:09.835125Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:41:10.160827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2025-12-23T18:41:10.160969Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:10.167986Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:10.207501Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:41:10.226078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:41:10.270861Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:41:10.288329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:10.288427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:10.301782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:10.301892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:10.308973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:10.316642Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:41:10.319386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:10.327257Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 15246, node 1 2025-12-23T18:41:10.748266Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:41:10.900499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0034ce/r3tmp/yandexyDCUNH.tmp 2025-12-23T18:41:10.900525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0034ce/r3tmp/yandexyDCUNH.tmp 2025-12-23T18:41:10.900697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0034ce/r3tmp/yandexyDCUNH.tmp 2025-12-23T18:41:10.900788Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:11.086616Z INFO: TTestServer started on Port 1400 GrpcPort 15246 2025-12-23T18:41:11.229483Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
TClient is connected to server localhost:1400 PQClient connected to localhost:15246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515270681 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:11.885674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:41:11.983711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:41:14.031208Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125309975941223:2262];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:14.031268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:41:14.207544Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587125312010393802:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:14.207633Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:41:16.484961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125340040713531:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:16.485094Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:16.487460Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125340040713543:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:16.487520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125340040713544:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:16.487700Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:16.495080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:41:16.558219Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125340040713547:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:41:16.770895Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125340040713638:3075] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:41:16.825540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:16.962812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard ... eated: 1 CreateStep: 1766515325743 PathId: [OwnerId: 72057594046644480, LocalPathId: 46] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:42:13.266786Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2885: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [7:7587125526210166879:2120], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : Cluster) IncFrom: 1 To: (Utf8 : Cluster) IncTo: 1 }] } 2025-12-23T18:42:13.266884Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2119: FillEntry for TResolve: self# [7:7587125526210166879:2120], cacheItem# { Subscriber: { Subscriber: [7:7587125551979972094:3137] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1766515326163 PathId: [OwnerId: 72057594046644480, LocalPathId: 48] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:42:13.266950Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587125586339712154:4389], recipient# [7:7587125586339712152:2523], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 46] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:42:13.267100Z node 7 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587125586339712155:4390], recipient# [7:7587125586339712151:2522], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 48] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : Cluster) IncFrom: 1 To: (Utf8 : Cluster) IncTo: 1 }] } 2025-12-23T18:42:13.314689Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:42:13.314734Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.314769Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:42:13.314795Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.314810Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2025-12-23T18:42:13.316533Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:42:13.316533Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:42:13.316557Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.316569Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:42:13.316570Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.316585Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:42:13.316591Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.316601Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.316603Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:42:13.316612Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2025-12-23T18:42:13.362413Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7587125526210166879:2120], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: 
false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:42:13.362610Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7587125526210166879:2120], cacheItem# { Subscriber: { Subscriber: [7:7587125551979971852:3005] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:42:13.362733Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587125586339712163:4393], recipient# [7:7587125586339712162:2525], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:42:13.414937Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:42:13.414983Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.415002Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:42:13.415026Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.415043Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2025-12-23T18:42:13.419034Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:42:13.419074Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.419098Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:42:13.419121Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.419137Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2025-12-23T18:42:13.419214Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:42:13.419228Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.419239Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:42:13.419250Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2025-12-23T18:42:13.419259Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T18:42:13.518089Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2025-12-23T18:42:13.518135Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.518153Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2025-12-23T18:42:13.518177Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.518193Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2025-12-23T18:42:13.526894Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2025-12-23T18:42:13.526940Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.526964Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2025-12-23T18:42:13.526990Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.527006Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2025-12-23T18:42:13.527080Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:42:13.527091Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.527100Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:42:13.527112Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:42:13.527120Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist >> TestProgram::YqlKernel |94.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TestProgram::YqlKernel [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SelfJoin [GOOD] Test command err: Trying to start YDB, gRPC: 23515, MsgBus: 32149 2025-12-23T18:41:35.670894Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125421377370450:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:41:35.673365Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003463/r3tmp/tmpuNfSxo/pdisk_1.dat 2025-12-23T18:41:36.022231Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-12-23T18:41:36.057969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:41:36.058092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:36.160670Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:36.164850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:36.249594Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 23515, node 1 2025-12-23T18:41:36.371684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:41:36.371784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:41:36.371798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:41:36.371925Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:41:36.715607Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:32149 TClient is connected to server localhost:32149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515296217 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:41:37.177898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:41:37.183869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:41:37.209788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:41:37.369940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:41:37.570067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:41:37.703596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:41:39.701740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125438557241481:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:39.701890Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:39.702686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125438557241491:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:39.702731Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:40.041018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:40.117724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:40.157086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:40.199867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:40.245992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:40.328186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:40.364985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:40.425053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:41:40.540112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125442852209655:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:40.540179Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:40.540542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125442852209657:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:40.540577Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:40.541155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125442852209662:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:41:40.546475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:41:40.567100Z node 1 :KQP_WORKLOAD ... TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587125574589063442:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"0","PlanNodeId":3,"LookupKeyColumns":["Key"],"Node Type":"TableLookupJoin","Path":"\/Root\/TwoShard","Columns":["Key"],"E-Rows":"0","Table":"TwoShard","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/TwoShard","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"TwoShard","ReadColumns":["Key (-∞, +∞)"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ReadBytes":{"Count":1,"Sum":48,"Max":48,"Min":48}}],"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":0,"FinishedTasks":1,"Introspections":["1 tasks for a single\/sequential source 
scan"],"IngressRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Mkql":{},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[8,1048576]},"BaseTimeMs":1766515333490,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"FirstMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"Bytes":{"Count":1,"Sum":42,"Max":42,"Min":42,"History":[8,42]}},"Name":"RESULT","Push":{"WaitTimeUs":{"Count":1,"Sum":6730,"Max":6730,"Min":6730,"History":[8,6730]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"FirstMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7}}}],"CpuTimeUs":{"Count":1,"Sum":746,"Max":746,"Min":746,"History":[8,746]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"FirstMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"Bytes":{"Count":1,"Sum":96,"Max":96,"Min":96,"History":[8,96]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"FirstMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"Bytes":{"Count":1,"Sum":96,"Max":96,"Min":96,"History":[8,96]},"WaitTimeUs":{"Count":1,"Sum":6763,"Max":6763,"Min":6763,"History":[8,6763]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"StageDurationUs":0,"ResultRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResultBytes":{"Count":1,"Sum":42,"Max":42,"Min":42},"OutputBytes":{"Count":1,"Sum":42,"Max":42,"Min":42},"UpdateTimeMs":8,"Tasks":1}}],"PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Collect","Stats":{"UseLlvm":"undefined","Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ReadBytes":{"Count":1,"Sum":24,"Max":24,"Min":24}}],"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":1,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":42,"Max":42,"Min":42},"Introspections":["1 tasks same as previous 
stage"],"DurationUs":{"Count":1,"Sum":2000,"Max":2000,"Min":2000},"Mkql":{},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[10,1048576]},"BaseTimeMs":1766515333490,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Bytes":{"Count":1,"Sum":96,"Max":96,"Min":96,"History":[10,96]}},"Name":"6","Push":{"LastMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"PauseMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"WaitTimeUs":{"Count":1,"Sum":7787,"Max":7787,"Min":7787,"History":[10,7787]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":9,"Min":7}}}],"CpuTimeUs":{"Count":1,"Sum":514,"Max":514,"Min":514,"History":[10,514]},"StageDurationUs":2000,"WaitInputTimeUs":{"Count":1,"Sum":3397,"Max":3397,"Min":3397,"History":[10,3397]},"OutputBytes":{"Count":1,"Sum":96,"Max":96,"Min":96},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"FirstMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"Bytes":{"Count":1,"Sum":42,"Max":42,"Min":42,"History":[10,42]}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"FirstMessageMs":{"Count":1,"Sum":7,"Max":7,"Min":7},"Bytes":{"Count":1,"Sum":42,"Max":42,"Min":42,"History":[10,42]},"PauseMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"WaitTimeUs":{"Count":1,"Sum":6149,"Max":6149,"Min":6149,"History":[10,6149]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":7,"Min":5}}}],"UpdateTimeMs":9,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Tasks":1}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":2,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":96,"Max":96,"Min":96},"Introspections":["1 minimum tasks for 
compute"],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"Mkql":{},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[11,1048576]},"BaseTimeMs":1766515333490,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[11,24]}},"Name":"8","Push":{"LastMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":7250,"Max":7250,"Min":7250,"History":[11,7250]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":9,"Min":2}}}],"CpuTimeUs":{"Count":1,"Sum":724,"Max":724,"Min":724,"History":[10,724]},"StageDurationUs":1000,"OutputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Bytes":{"Count":1,"Sum":96,"Max":96,"Min":96,"History":[11,96]}},"Name":"4","Push":{"LastMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Bytes":{"Count":1,"Sum":96,"Max":96,"Min":96,"History":[11,96]},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":7175,"Max":7175,"Min":7175,"History":[11,7175]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":9,"Min":2}}}],"UpdateTimeMs":10,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Tasks":1}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":3,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"Introspections":["1 minimum tasks for 
compute"],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"Mkql":{},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[11,1048576]},"BaseTimeMs":1766515333490,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[11,24]}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":7386,"Max":7386,"Min":7386,"History":[11,7386]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":10,"Min":2}}}],"CpuTimeUs":{"Count":1,"Sum":457,"Max":457,"Min":457,"History":[11,457]},"StageDurationUs":1000,"ResultRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResultBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"OutputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[11,24]}},"Name":"6","Push":{"LastMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"FirstMessageMs":{"Count":1,"Sum":9,"Max":9,"Min":9},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[11,24]},"PauseMessageMs":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitTimeUs":{"Count":1,"Sum":7321,"Max":7321,"Min":7321,"History":[11,7321]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":9,"Min":2}}}],"UpdateTimeMs":10,"Tasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6}}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":325417,"CpuTimeUs":318224},"ProcessCpuTimeUs":441,"TotalDurationUs":367148,"ResourcePoolId":"default","QueuedTimeUs":1430},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"TwoShard","ReadColumns":["Key (-∞, +∞)"],"E-Cost":"0"}],"Node Type":"TableFullScan"},{"Operators":[{"E-Rows":"0","Columns":["Key"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["Key"]}],"Node Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.724,"A-Cpu":0.724,"A-Size":24,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.457,"A-Cpu":1.181,"A-Size":24,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| 
[TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N4->N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":4}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"15","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"4":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":4},"0":{"p":{"i":"2","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] Test command err: 2025-12-23T18:40:20.862024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:20.862087Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:21.167718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:22.416078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:40:22.687811Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:22.688228Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:22.688752Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4579671797060259695 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:22.739885Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:22.740390Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:22.740686Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4275906186538480940 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:22.744115Z node 3 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 
bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:40:22.836830Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:22.837412Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:22.837618Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14497049074184511013 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:22.912881Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:22.913436Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:22.913697Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpu0xaf2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 182494047512799758 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInF ... d to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpzo7FVN/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f09/r3tmp/tmpzo7FVN/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10076148189692376593 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:47.881013Z node 46 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [46:1584:2820] txid# 281474976715659, issues: { message: "Invalid AlterExtSubDomain request: Check failed: path: \'/dc-1/users/tenant-1\', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter)" severity: 1 } 2025-12-23T18:40:47.881339Z node 46 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3596: Cannot configure subdomain for tenant /dc-1/users/tenant-1: GENERIC_ERROR: Invalid AlterExtSubDomain request: Check failed: path: '/dc-1/users/tenant-1', error: path is under operation (id: 
[OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2025-12-23T18:40:47.961157Z node 46 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [46:1609:2842] txid# 281474976715660, issues: { message: "Invalid AlterExtSubDomain request: Check failed: path: \'/dc-1/users/tenant-1\', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter)" severity: 1 } 2025-12-23T18:40:47.961822Z node 46 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3596: Cannot configure subdomain for tenant /dc-1/users/tenant-1: GENERIC_ERROR: Invalid AlterExtSubDomain request: Check failed: path: '/dc-1/users/tenant-1', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2025-12-23T18:40:48.006690Z node 46 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [46:1668:2886] txid# 281474976715661, issues: { message: "Invalid AlterExtSubDomain request: Check failed: path: \'/dc-1/users/tenant-1\', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter)" severity: 1 } 2025-12-23T18:40:48.007098Z node 46 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3596: Cannot configure subdomain for tenant /dc-1/users/tenant-1: GENERIC_ERROR: Invalid AlterExtSubDomain request: Check failed: path: '/dc-1/users/tenant-1', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2025-12-23T18:40:48.081281Z node 46 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [46:1693:2908] txid# 281474976715662, issues: { message: "Invalid AlterExtSubDomain request: Check failed: path: \'/dc-1/users/tenant-1\', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter)" severity: 1 } 2025-12-23T18:40:48.081687Z node 46 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3596: Cannot configure subdomain for tenant /dc-1/users/tenant-1: GENERIC_ERROR: Invalid AlterExtSubDomain request: Check failed: path: '/dc-1/users/tenant-1', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2025-12-23T18:40:50.503629Z node 55 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:50.503745Z node 55 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:50.617370Z node 55 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:53.997050Z node 64 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:53.997126Z node 64 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:54.106330Z node 64 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2025-12-23T18:40:58.091889Z node 73 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:58.091970Z node 73 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:58.216432Z node 73 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:01.969790Z node 82 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:01.969860Z node 82 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:02.096352Z node 82 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:58.923844Z node 91 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:58.923941Z node 91 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:59.107636Z node 91 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:01.402682Z node 92 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:01.406875Z node 92 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:01.556653Z node 92 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:03.810580Z node 93 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:03.810687Z node 93 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:04.052451Z node 93 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:06.262164Z node 94 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:06.262260Z node 94 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:06.395292Z node 94 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:07.993085Z node 95 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:07.993189Z node 95 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:08.128610Z node 95 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:09.816538Z node 96 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:09.816650Z node 96 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:09.954656Z node 96 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:11.526667Z node 97 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:11.526792Z node 97 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:11.657511Z node 97 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:13.198106Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:13.198199Z node 98 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:13.327636Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: 
"O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"3,4\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:3,4"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"3\",\"p\":{\"address\":{\"name\":\"sum\",\"id\":3}},\"o\":\"3\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"3,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"3","p":{"address":{"name":"sum","id":3}},"o":"3","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"3,4","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"3,4","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-anonymous >> Cdc::HugeKeyDebezium [GOOD] >> Cdc::Drop[PqRunner] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows >> TestProgram::YqlKernelStartsWith >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] >> TestProgram::YqlKernelStartsWith [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-anonymous |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption >> TestProgram::NumRowsWithNulls [GOOD] >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } 
} Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:42:10.545105Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:10.684253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:10.697174Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:42:10.697611Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:10.697703Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:11.056564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:11.056708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:11.138201Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515327397297 != 1766515327397301 2025-12-23T18:42:11.152141Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:11.204129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:11.304966Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0047fa/r3tmp/tmpvAkLiG/pdisk_1.dat 2025-12-23T18:42:11.637024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:11.651519Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:11.767648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:11.811942Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:42:11.813062Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:42:11.813363Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:42:11.813613Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:11.885032Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:42:11.885962Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:11.886090Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:11.887966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:42:11.888053Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:42:11.888119Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:42:11.888483Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:11.888636Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:11.888717Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:42:11.899543Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:11.940732Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:42:11.940928Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:42:11.941048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:42:11.941087Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:42:11.941123Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:42:11.941164Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:11.941428Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:42:11.941479Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:42:11.941769Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:42:11.941863Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:42:11.942321Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:42:11.942376Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:42:11.942443Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:42:11.942493Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:42:11.942532Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:42:11.942569Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:42:11.942617Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:42:11.942712Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:42:11.942771Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:42:11.942819Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:42:11.942914Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:42:11.942973Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:42:11.943106Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:42:11.943382Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:42:11.943448Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:42:11.943536Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:42:11.943583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:42:11.943625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:42:11.943662Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:42:11.943716Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:42:11.944013Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:42:11.944060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:42:11.944098Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:42:11.944130Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:42:11.944187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:42:11.944234Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:42:11.944268Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:42:11.944305Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:42:11.944332Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:42:11.945839Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T18:42:11.945908Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:42:17.815494Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:17.815579Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:18.086296Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:18.086441Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:18.108982Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766515334526289 != 1766515334526292 2025-12-23T18:42:18.116475Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:18.163171Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:18.236324Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0047fa/r3tmp/tmpZIqstc/pdisk_1.dat 2025-12-23T18:42:18.572572Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:18.586571Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:18.691946Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:18.715965Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:884:2764] 2025-12-23T18:42:18.716225Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:18.760753Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:18.760956Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:18.762876Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:42:18.762986Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:42:18.763057Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:42:18.763464Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:18.763612Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:18.763774Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:900:2764] in generation 1 2025-12-23T18:42:18.774802Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:18.774936Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:42:18.775087Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:42:18.775189Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:902:2774] 2025-12-23T18:42:18.775232Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:42:18.775278Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:42:18.775318Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:18.775818Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:42:18.775931Z node 2 :TX_DATASHARD DEBUG: 
datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:42:18.776021Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:42:18.776071Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:42:18.776118Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:42:18.776169Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:42:18.776269Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:882:2762], serverId# [2:887:2765], sessionId# [0:0:0] 2025-12-23T18:42:18.776848Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:42:18.777143Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:42:18.777240Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:42:18.779299Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:42:18.790222Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:42:18.790390Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:42:18.912644Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:916:2782], serverId# [2:918:2784], sessionId# [0:0:0] 2025-12-23T18:42:18.913487Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:42:18.913556Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:18.914288Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:42:18.914344Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:42:18.914400Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:42:18.914711Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:42:18.915782Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:42:18.916437Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 
2025-12-23T18:42:18.916591Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:42:18.917214Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:42:18.917736Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:42:18.919744Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:42:18.919804Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:18.920895Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:42:18.920986Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:42:18.922539Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:42:18.922601Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:42:18.922656Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:42:18.922768Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:42:18.922843Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:42:18.922987Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:18.925467Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:42:18.927476Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:42:18.927645Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:42:18.927709Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:42:18.936668Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:952:2810], serverId# [2:953:2811], sessionId# [0:0:0] 2025-12-23T18:42:18.936838Z node 2 :TX_DATASHARD NOTICE: datashard__op_rows.cpp:209: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"10001\",\"t\":\"Filter\"}\nREMOVE:10001",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N5(8):{\"a\":true,\"p\":{\"function\":{\"function\":\"NumRows\",\"need_concatenation\":true},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10002\",\"t\":\"Calculation\"}\n"]; N5[shape=box, label="N6(8):{\"i\":\"10002\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N6[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N6->N1->N2->N0->N3->N4->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":6}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]},{"owner_id":5,"inputs":[{"from":4}]},{"owner_id":6,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Filter"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"6":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"10002","t":"Projection"},"w":8,"id":5},"4":{"p":{"a":true,"p":{"function":{"function":"NumRows","need_concatenation":true},"kernel":{"class_name":"SIMPLE"}},"o":"10002","t":"Calculation"},"w":8,"id":4},"0":{"p":{"i":"2","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Filter; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-clusteradmin |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |94.6%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |94.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> TestProgram::CountWithNulls >> TestProgram::CountWithNulls [GOOD] >> TestProgram::JsonExistsBinary >> TestProgram::JsonExistsBinary [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:42:17.694052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:17.839133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:17.851940Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:42:17.852382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:17.852475Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:18.208932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:18.209117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:18.301318Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515334522959 != 1766515334522963 2025-12-23T18:42:18.312159Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:18.360267Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:18.475233Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003da2/r3tmp/tmpxPOIbx/pdisk_1.dat 2025-12-23T18:42:18.786631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:18.801879Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:18.911272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:19.317906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1070:2896], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:19.318056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1080:2901], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:19.318134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:19.319143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1085:2905], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:19.319341Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:19.324621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:42:19.448989Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1084:2904], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:42:19.512962Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1141:2942] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:42:19.885269Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2061: SelfId: [1:1172:2963], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01kd689w1kewzxwq7fp2nm18g6. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZDYwZThmMmUtMmM2NzEyYTUtZTg3NjAxNjYtZWY5MTc3ZTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2025-12-23T18:42:19.903676Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [1:1172:2963], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01kd689w1kewzxwq7fp2nm18g6. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZDYwZThmMmUtMmM2NzEyYTUtZTg3NjAxNjYtZWY5MTc3ZTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2025-12-23T18:42:19.909424Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [1:1173:2964], TxId: 281474976715660, task: 2. Ctx: { TraceId : 01kd689w1kewzxwq7fp2nm18g6. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZDYwZThmMmUtMmM2NzEyYTUtZTg3NjAxNjYtZWY5MTc3ZTk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-12-23T18:42:19.917390Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=ZDYwZThmMmUtMmM2NzEyYTUtZTg3NjAxNjYtZWY5MTc3ZTk=, ActorId: [1:1068:2894], ActorState: ExecuteState, TraceId: 01kd689w1kewzxwq7fp2nm18g6, Create QueryResponse for error on request, msg: , status: INTERNAL_ERROR, issues: { message: "Read from column index 1: got NULL from NOT NULL column" issue_code: 2012 }{ message: "Query invalidated on scheme/internal error during Data execution" issue_code: 2019 severity: 1 } |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-system |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"a\":true,\"i\":\"2\",\"p\":{\"function\":{\"function\":\"Count\",\"need_concatenation\":true},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"10001\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N4->N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":4}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"4":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":4},"0":{"p":{"a":true,"i":"2","p":{"function":{"function":"Count","need_concatenation":true},"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
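The `graph_constructed=digraph program {...}` payload in the record above is Graphviz DOT text, so it can be rendered directly. A hedged sketch follows, assuming Graphviz's `dot` binary is installed and that one record is pasted in verbatim; the regex mirrors only the records shown in this log.

import re
import subprocess

# Paste a real "graph_constructed" record here; the placeholder keeps the sketch runnable.
LOG_LINE = 'fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[label="example"]; }; '

def extract_dot(line: str) -> str:
    # The DOT text runs from "digraph program {" to the record's closing "};" (assumption).
    match = re.search(r"graph_constructed=(digraph program \{.*\});", line)
    if not match:
        raise ValueError("no graph_constructed record found")
    return match.group(1)

if __name__ == "__main__":
    with open("program.dot", "w") as f:
        f.write(extract_dot(LOG_LINE))
    # Same as running: dot -Tsvg program.dot -o program.svg
    subprocess.run(["dot", "-Tsvg", "program.dot", "-o", "program.svg"], check=True)
    print("wrote program.svg")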
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; >> KqpScanArrowInChanels::JoinWithParams [GOOD] |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith >> TestProgram::YqlKernelEndsWith [GOOD] >> KqpScan::ScanRetryReadRanges [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-anonymous |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: 
"O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |94.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |94.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink >> TestProgram::YqlKernelEquals [GOOD] |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> KqpResultSetFormats::ArrowFormat_Multistatement [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:42:09.993098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:10.109976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:10.121175Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:42:10.121581Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:10.121684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:10.423023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:10.423169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:10.518177Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515326802975 != 1766515326802979 2025-12-23T18:42:10.529771Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:10.583100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:10.669906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0047fb/r3tmp/tmpbhGquG/pdisk_1.dat 2025-12-23T18:42:11.047461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:11.062793Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:11.174368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:11.225042Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:902:2773] 2025-12-23T18:42:11.225263Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:11.276311Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:11.276472Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:11.278424Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:42:11.278515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:42:11.278595Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:42:11.279025Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:11.279346Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:11.279443Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:941:2773] in generation 1 2025-12-23T18:42:11.279860Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:905:2775] 2025-12-23T18:42:11.280097Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:11.292294Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:911:2779] 2025-12-23T18:42:11.292582Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:11.302973Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:11.303173Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:11.305072Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:42:11.305173Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:42:11.305239Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:42:11.305640Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:11.306026Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:11.306089Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:958:2775] in generation 1 2025-12-23T18:42:11.307126Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:11.307206Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:11.308269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-12-23T18:42:11.308317Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2025-12-23T18:42:11.308350Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2025-12-23T18:42:11.308630Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:11.308787Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:11.308823Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:961:2779] in generation 1 2025-12-23T18:42:11.309211Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:913:2781] 2025-12-23T18:42:11.309443Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:11.319986Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:11.320124Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:11.321937Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-12-23T18:42:11.322044Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: 
LoadLockChangeRecords at tablet: 72075186224037890 2025-12-23T18:42:11.322111Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2025-12-23T18:42:11.322473Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:11.322634Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:11.322704Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:967:2781] in generation 1 2025-12-23T18:42:11.333999Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:11.384646Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:42:11.384885Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:42:11.385021Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:972:2814] 2025-12-23T18:42:11.385072Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:42:11.385114Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:42:11.385172Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:11.385752Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:11.385799Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:42:11.385873Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:42:11.385952Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:973:2815] 2025-12-23T18:42:11.385981Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:42:11.386007Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:42:11.386032Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:42:11.386144Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:11.386196Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2025-12-23T18:42:11.386277Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:42:11.386356Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:974:2816] 2025-12-23T18:42:11.386386Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2025-12-23T18:42:11.386427Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-12-23T18:42:11.386454Z 
node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-12-23T18:42:11.386830Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:42:11.386943Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:42:11.387077Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:11.387113Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2025-12-23T18:42:11.387166Z node 1 :TX_DATASHARD ... 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2025-12-23T18:42:22.055637Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:42:22.056203Z node 3 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:71: [ChangeSender][72075186224037889:1][3:932:2794] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 33 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 34 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 35 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 36 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 37 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 38 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 39 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 40 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 41 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 42 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 43 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 44 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 45 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 46 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 47 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 48 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 49 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 50 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 51 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 52 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 53 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 
28 },{ Order: 54 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 55 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 56 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 57 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 58 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 59 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 60 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 61 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 62 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 63 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 64 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 65 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 66 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 67 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 68 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 69 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 70 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 71 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 72 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 73 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 74 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 75 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 76 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 77 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 78 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 79 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 80 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 81 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 82 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 83 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 84 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 85 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 86 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 87 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 88 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 89 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 90 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 91 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 }] } 2025-12-23T18:42:22.056588Z node 3 :CHANGE_EXCHANGE DEBUG: 
change_sender_async_index.cpp:224: [AsyncIndexChangeSenderMain][72075186224037889:1][3:965:2819] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 33 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 34 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 35 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 36 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 37 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 38 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 39 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 40 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 41 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 42 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 43 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 44 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 45 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 46 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 47 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 48 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 49 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 50 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 51 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 52 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 53 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 54 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 55 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 56 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 57 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 58 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 59 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 60 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 61 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 62 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 63 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 64 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 65 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 66 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 67 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 68 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 69 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 70 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 71 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 72 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 73 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 74 PathId: [OwnerId: 72057594046644480, 
LocalPathId: 39] BodySize: 28 },{ Order: 75 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 76 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 77 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 78 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 79 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 80 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 81 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 82 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 83 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 84 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 85 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 86 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 87 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 88 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 89 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 90 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 91 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 }] } 2025-12-23T18:42:22.056804Z node 3 :RPC_REQUEST DEBUG: upload_rows_common_impl.h:1282: Upload rows: got OK from shard 72075186224037889 description: 2025-12-23T18:42:22.056940Z node 3 :RPC_REQUEST DEBUG: upload_rows_common_impl.h:1412: completed with status SUCCESS |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:42:00.888525Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:00.895154Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:00.896223Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:678:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:42:01.053995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:01.056187Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:01.065437Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:683:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:42:01.065957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:01.066274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:01.068434Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:01.068586Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:01.857455Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:02.079469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:02.079619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:02.080519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:02.080677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:02.145052Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:42:02.146156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:02.146571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:02.254177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:42:02.275640Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034de/r3tmp/tmpHk4T2M/pdisk_1.dat 2025-12-23T18:42:03.185342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:03.279655Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:03.279850Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:03.693955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:04.749852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1722:3109], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:04.750029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1732:3114], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:04.750146Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:04.756530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1737:3118], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:04.756863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:04.769523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:42:05.310339Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1736:3117], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:42:05.556379Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1830:3183] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } -- nodeId: 2 -- EvScan [1:1910:3236] -> [2:1865:2432] -- EvScanData from [2:1916:2439]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":3,"VirtualTimestamp":{"Step":3000,"TxId":281474976710661},"Finished":true} 2025-12-23T18:42:08.509752Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976710661] shutting down ... waiting for SysViewsRoster update finished 2025-12-23T18:42:16.554888Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:16.556198Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:16.558917Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:42:16.570252Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:16.571891Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:16.573817Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:42:16.574532Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:16.574726Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:16.576592Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:16.576659Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:16.966467Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:17.074667Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:17.074835Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:17.075329Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:17.075424Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:17.160841Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:42:17.161408Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:17.161882Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:17.248748Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:42:17.275714Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034de/r3tmp/tmpHOS3Tz/pdisk_1.dat 2025-12-23T18:42:18.050222Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:18.111796Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:18.111930Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:18.492258Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:19.119520Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1728:3115], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:19.119677Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1738:3120], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:19.119775Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:19.120970Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1743:3124], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:19.121079Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:19.127318Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:42:19.669869Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1742:3123], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:42:19.777099Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1837:3190] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } -- nodeId: 4 -- EvScan [3:1917:3243] -> [4:1872:2432] -- EvScanData from [4:1921:2439]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2,"VirtualTimestamp":{"Step":3000,"TxId":281474976710661},"Finished":false} -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"format":1},"SeqNo":2,"QueryResultIndex":0,"ChannelId":2,"VirtualTimestamp":{"Step":3000,"TxId":281474976710661},"Finished":true} 2025-12-23T18:42:21.343436Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976710661] shutting down |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_scan/unittest >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-ordinaryuser >> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> Cdc::Drop[PqRunner] [GOOD] >> Cdc::Drop[YdsRunner] >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: 
Trying to start YDB, gRPC: 26944, MsgBus: 17616 2025-12-23T18:39:40.612405Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124926496676813:2246];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:40.612498Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0061d7/r3tmp/tmp1LD5PL/pdisk_1.dat 2025-12-23T18:39:41.416718Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:41.418626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:41.418711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:41.475490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:41.656496Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:41.698634Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:41.740321Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124926496676605:2081] 1766515180556610 != 1766515180556613 TServer::EnableGrpc on GrpcPort 26944, node 1 2025-12-23T18:39:41.765750Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:42.023486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:42.023511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:42.023519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:42.023612Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17616 TClient is connected to server localhost:17616 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515181739 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:43.298283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:43.313178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:43.326398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:43.626007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:43.877807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:44.018510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:45.613801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124926496676813:2246];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:45.613882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:49.468957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124965151384198:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:49.469107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:49.469522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124965151384208:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:49.469566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:49.932740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:49.976783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.022573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.071694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.142703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.220276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.270451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.348023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.474945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124969446352381:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.475031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.475162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124969446352386:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.475222Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124969446352388:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.475334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [W ... info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:09.706883Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 14534, node 12 2025-12-23T18:42:09.814487Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:42:09.814519Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:42:09.814533Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:42:09.814672Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19310 2025-12-23T18:42:10.278575Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19310 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515329754 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:42:10.711333Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:42:10.749645Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:42:10.908529Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:11.255916Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:42:11.388108Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:42:14.247695Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7587125567842213486:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:42:14.249639Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:42:15.846394Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125593612019121:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:15.846682Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:15.847085Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125593612019130:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:15.847155Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:16.006961Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:16.076974Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:16.135585Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:16.223700Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:16.269042Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:16.376568Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:16.447626Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:16.547668Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:16.724844Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125597906987329:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:16.725017Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:16.725465Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125597906987334:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:16.725527Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125597906987335:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:16.725739Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:16.733668Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:42:16.753391Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7587125597906987338:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:42:16.815010Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7587125597906987391:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:42:20.248882Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515340282, txId: 281474976710673] shutting down 2025-12-23T18:42:20.763031Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515340800, txId: 281474976710675] shutting down |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> Cdc::SupportedTypes [GOOD] >> Cdc::StringEscaping >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"10,11\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"10","p":{"address":{"name":"i16","id":10}},"o":"10","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"10,11","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"11","p":{"address":{"name":"float","id":11}},"o":"11","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"10,11","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, 
label="N4(26):{\"i\":\"10,11\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; } FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead >> TestProgram::JsonExists >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-anonymous >> TestProgram::JsonExists [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-ordinaryuser |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } 
FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-ordinaryuser |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:42:10.991264Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:11.107011Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:11.116616Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:42:11.116963Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:11.117023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:11.406129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:11.406307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:11.520700Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515327548207 != 1766515327548211 2025-12-23T18:42:11.540251Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:11.592133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:11.698495Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004800/r3tmp/tmpDbWwFr/pdisk_1.dat 2025-12-23T18:42:12.020288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:12.035347Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:12.141459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:12.186588Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:902:2773] 2025-12-23T18:42:12.186942Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:12.236767Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:12.236904Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:12.238684Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:42:12.238809Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:42:12.238880Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:42:12.239237Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:12.239533Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:12.239606Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:941:2773] in generation 1 2025-12-23T18:42:12.239980Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:905:2775] 2025-12-23T18:42:12.240158Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:12.250084Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:911:2779] 2025-12-23T18:42:12.250313Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:12.258882Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:12.259011Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:12.260423Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:42:12.260510Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:42:12.260564Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:42:12.260845Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:12.261168Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:12.261231Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:958:2775] in generation 1 2025-12-23T18:42:12.262360Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:12.262441Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:12.263781Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-12-23T18:42:12.263846Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2025-12-23T18:42:12.263886Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2025-12-23T18:42:12.264166Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:12.264372Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:12.264420Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:961:2779] in generation 1 2025-12-23T18:42:12.264763Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:913:2781] 2025-12-23T18:42:12.264951Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:12.273659Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:12.273773Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:12.275157Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-12-23T18:42:12.275225Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: 
LoadLockChangeRecords at tablet: 72075186224037890 2025-12-23T18:42:12.275274Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2025-12-23T18:42:12.275575Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:12.275702Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:12.275760Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:967:2781] in generation 1 2025-12-23T18:42:12.287677Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:12.328481Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:42:12.328672Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:42:12.328799Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:972:2814] 2025-12-23T18:42:12.328827Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:42:12.328855Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:42:12.328896Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:12.329347Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:12.329383Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:42:12.329440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:42:12.329487Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:973:2815] 2025-12-23T18:42:12.329504Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:42:12.329519Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:42:12.329534Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:42:12.329588Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:12.329615Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2025-12-23T18:42:12.329655Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:42:12.329695Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:974:2816] 2025-12-23T18:42:12.329710Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2025-12-23T18:42:12.329733Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-12-23T18:42:12.329748Z 
node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-12-23T18:42:12.329990Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:42:12.330071Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:42:12.330166Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:12.330187Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2025-12-23T18:42:12.330224Z node 1 :TX_DATASHARD ... 8:42:23.399989Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:23.400221Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:42:23.400279Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:42:23.400333Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:42:23.400693Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:42:23.400860Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:42:23.401707Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:42:23.401788Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:42:23.402331Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:42:23.402863Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:42:23.405089Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:42:23.405151Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:23.405994Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:42:23.406085Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:42:23.407786Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:42:23.407847Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:42:23.407897Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:42:23.407966Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 
281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:42:23.408021Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:42:23.408147Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:23.409542Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:42:23.411885Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:42:23.412627Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:42:23.412709Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:42:23.424446Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:23.424600Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:23.424725Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:23.426080Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:23.426484Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:23.431176Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:42:23.438390Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:42:23.556903Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:42:23.561113Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:42:23.595874Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:42:23.687748Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1053:2872], serverId# [3:1054:2873], sessionId# [0:0:0] 2025-12-23T18:42:23.688185Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2025-12-23T18:42:23.688336Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-12-23T18:42:23.699234Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:23.783493Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2025-12-23T18:42:23.783624Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=3 2025-12-23T18:42:23.799574Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:23.895082Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2025-12-23T18:42:23.895257Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=3 2025-12-23T18:42:23.906747Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:23.910113Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-12-23T18:42:23.921470Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-12-23T18:42:23.921570Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:23.923151Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:42:23.923473Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:42:23.923680Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:42:23.923724Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:42:23.923769Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037888 for WaitForStreamClearance 2025-12-23T18:42:23.924039Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:42:23.924118Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:42:23.924691Z node 3 
:TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715663, MessageQuota: 1 2025-12-23T18:42:23.925094Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:42:23.925227Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715663, PendingAcks: 0 2025-12-23T18:42:23.925272Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715663, MessageQuota: 0 2025-12-23T18:42:23.971409Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T18:42:23.971487Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715663, at: 72075186224037888 2025-12-23T18:42:23.971704Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:42:23.971751Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:42:23.971797Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037888 for ReadTableScan 2025-12-23T18:42:23.971928Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:42:23.971985Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:42:23.972022Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TestProgram::YqlKernelEndsWithScalar [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] >> TestProgram::JsonValue |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." 
} } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"amet.\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"amet."},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; >> TestProgram::CountUIDByVAT >> TestProgram::CountUIDByVAT [GOOD] >> TestProgram::JsonValue [GOOD] |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-system |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TSchemeShardLoginTest::ChangeAccountLockoutParameters [GOOD] >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb >> BridgeGet::PartRestorationAcrossBridgeOnDiscover ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, 
label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign 
{ Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000 ... 
04\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"2,4\",\"p\":{\"options\":[\"{10001(Count):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[4]},\"o\":\"10001\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"10001,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"2,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N4->N2->N0->N5[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":4},{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"2,4","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"10001,4","t":"Projection"},"w":27,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"a":true,"i":"2,4","p":{"options":["{10001(Count):[2]}"],"type":"AGGREGATION","keys":[4]},"o":"10001","t":"Aggregation"},"w":18,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] 
{BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:42:11.038814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:11.189709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:11.202707Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:42:11.203197Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:11.203325Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:11.532610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:11.532911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:11.620108Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515327888390 != 1766515327888394 2025-12-23T18:42:11.639358Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:11.689439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:11.783523Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0047f8/r3tmp/tmpKobBCt/pdisk_1.dat 2025-12-23T18:42:12.121519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:12.137590Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:12.243044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:12.281652Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:42:12.282860Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:42:12.283210Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:42:12.283463Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:12.331454Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:42:12.332281Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:12.332416Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:12.334198Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:42:12.334277Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:42:12.334328Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:42:12.334699Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:12.334866Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:12.334945Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:42:12.345758Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:12.377299Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:42:12.377488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:42:12.377598Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:42:12.377631Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:42:12.377666Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:42:12.377702Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:12.377976Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:42:12.378036Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:42:12.378351Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:42:12.378466Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:42:12.378983Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:42:12.379042Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:42:12.379110Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:42:12.379155Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:42:12.379189Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:42:12.379221Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:42:12.379267Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:42:12.379364Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:42:12.379402Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:42:12.379459Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:42:12.379566Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:42:12.379610Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:42:12.379731Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:42:12.380119Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:42:12.380180Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:42:12.380267Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:42:12.380315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:42:12.380359Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:42:12.380392Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:42:12.380431Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:42:12.380792Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:42:12.380842Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:42:12.380884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:42:12.380916Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:42:12.380975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:42:12.381019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:42:12.381057Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:42:12.381093Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:42:12.381118Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:42:12.382629Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T18:42:12.382690Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... pp:1883: Execution status for [4500:281474976715668] at 72075186224037889 is DelayComplete 2025-12-23T18:42:25.984754Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2025-12-23T18:42:25.984803Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037889 to execution unit CompletedOperations 2025-12-23T18:42:25.984845Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037889 on unit CompletedOperations 2025-12-23T18:42:25.984884Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037889 is Executed 2025-12-23T18:42:25.984916Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2025-12-23T18:42:25.984951Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [4500:281474976715668] at 72075186224037889 has finished 2025-12-23T18:42:25.985000Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:42:25.985033Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037889 2025-12-23T18:42:25.985069Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-12-23T18:42:25.985109Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037889 2025-12-23T18:42:25.998091Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:42:25.998202Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:42:25.998250Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [4500:281474976715668] at 72075186224037889 on unit CompleteOperation 2025-12-23T18:42:25.998333Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [3:1343:3111], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:42:25.998401Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:42:25.998639Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 4500} 2025-12-23T18:42:25.998703Z node 3 :TX_DATASHARD 
DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:42:25.998759Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:42:25.999070Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [3:1343:3111], Recipient [3:1170:2963]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 2025-12-23T18:42:25.999122Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-12-23T18:42:25.999607Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [3:1170:2963], Recipient [3:1170:2963]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:42:25.999651Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:42:25.999709Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-12-23T18:42:25.999749Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:42:25.999791Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [4500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2025-12-23T18:42:25.999827Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2025-12-23T18:42:25.999870Z node 3 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [4500:281474976715668] at 72075186224037890 2025-12-23T18:42:25.999915Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Executed 2025-12-23T18:42:25.999956Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2025-12-23T18:42:25.999991Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2025-12-23T18:42:26.000025Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-12-23T18:42:26.000306Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Continue 2025-12-23T18:42:26.000346Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:42:26.000377Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037890 2025-12-23T18:42:26.000410Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:52: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2025-12-23T18:42:26.000446Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037890 2025-12-23T18:42:26.001148Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [3:1363:3129], Recipient [3:1170:2963]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-12-23T18:42:26.001194Z node 3 
:TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-12-23T18:42:26.001437Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2025-12-23T18:42:26.002188Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T18:42:26.004186Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2025-12-23T18:42:26.004254Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2025-12-23T18:42:26.070663Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2025-12-23T18:42:26.070764Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715668, at: 72075186224037890 2025-12-23T18:42:26.070978Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [3:1170:2963], Recipient [3:1170:2963]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:42:26.071022Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:42:26.071088Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-12-23T18:42:26.071129Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:42:26.071172Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [4500:281474976715668] at 72075186224037890 for ReadTableScan 2025-12-23T18:42:26.071208Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-12-23T18:42:26.071250Z node 3 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [4500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2025-12-23T18:42:26.071299Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Executed 2025-12-23T18:42:26.071342Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit ReadTableScan 2025-12-23T18:42:26.071391Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2025-12-23T18:42:26.071423Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-12-23T18:42:26.071668Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is DelayComplete 2025-12-23T18:42:26.071705Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2025-12-23T18:42:26.071737Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037890 to execution unit CompletedOperations 2025-12-23T18:42:26.071780Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit CompletedOperations 2025-12-23T18:42:26.071836Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Executed 2025-12-23T18:42:26.071866Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit CompletedOperations 2025-12-23T18:42:26.071893Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [4500:281474976715668] at 72075186224037890 has finished 2025-12-23T18:42:26.071928Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:42:26.071959Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2025-12-23T18:42:26.071989Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-12-23T18:42:26.072020Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2025-12-23T18:42:26.084053Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:42:26.084126Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:42:26.084171Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [4500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-12-23T18:42:26.084255Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1343:3111], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:42:26.084342Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridge |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-ordinaryuser >> TConsoleTests::TestSchemeShardErrorForwarding [GOOD] >> TConsoleTests::TestScaleRecommenderPolicies |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnRange >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> 
TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-ordinaryuser |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> TestProgram::YqlKernelContains >> TestProgram::JsonValueBinary >> TestProgram::YqlKernelContains [GOOD] >> TestScript::StepMerging [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink [GOOD] |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-anonymous |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |94.7%| [LD] {RESULT} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-clusteradmin >> Cdc::Drop[YdsRunner] [GOOD] >> Cdc::Drop[TopicRunner] >> TestProgram::JsonValueBinary [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; 
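The parsed program above has the usual column-program shape: ReserveMemory and FetchOriginalData stage the `string` and `substring` columns, AssembleOriginalData materializes them, a Calculation node applies the YQL StringContains block kernel, and the final Projection returns column 15. A minimal sketch of what that Calculation computes row by row (illustrative only, with hypothetical sample rows; this is not the YQL kernel or YDB code):

```python
# Sketch: row-wise "contains" over two input columns, producing a 0/1
# (UInt8-like) output column, as in the graph's Calculation -> Projection step.
string_col    = ["uid_3000001", "hello world", "abc"]   # hypothetical sample rows
substring_col = ["000",         "planet",      "b"]     # hypothetical sample rows

contains_col = [int(sub in s) for s, sub in zip(string_col, substring_col)]
print(contains_col)  # [1, 0, 1]
```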
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging [GOOD] |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> Cdc::StringEscaping [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning >> TestProgram::Like ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:42:11.350701Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:11.468674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:11.480342Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:42:11.480692Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:11.480770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:11.792915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:11.793064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:11.884707Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515328133004 != 1766515328133008 2025-12-23T18:42:11.901634Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:11.948824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:12.059597Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0047f9/r3tmp/tmpiSTVfx/pdisk_1.dat 2025-12-23T18:42:12.372735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:12.393164Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:12.497351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:12.542479Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:891:2767]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:42:12.543801Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:891:2767]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:42:12.544119Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2767] 2025-12-23T18:42:12.544369Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:12.555262Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:893:2769]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:42:12.598376Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: 
StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:891:2767]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:42:12.598818Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:893:2769]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:42:12.599122Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:893:2769] 2025-12-23T18:42:12.599436Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:12.608223Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:893:2769]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:42:12.609211Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:12.609382Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:12.611206Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:42:12.611315Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:42:12.611376Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:42:12.611811Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:12.611990Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:12.612078Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:922:2767] in generation 1 2025-12-23T18:42:12.612498Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:12.612597Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:12.613962Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:42:12.614026Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:42:12.614065Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:42:12.614349Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:12.614448Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:12.614546Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:923:2769] in generation 1 2025-12-23T18:42:12.625482Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:12.674531Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:42:12.674773Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:42:12.674906Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:926:2788] 2025-12-23T18:42:12.674943Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:42:12.674977Z 
node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:42:12.675030Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:12.675361Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:891:2767], Recipient [1:891:2767]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:42:12.675434Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:42:12.675555Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:12.675591Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:42:12.675649Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:42:12.675697Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:927:2789] 2025-12-23T18:42:12.675718Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:42:12.675738Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:42:12.675761Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:42:12.676055Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:893:2769], Recipient [1:893:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:42:12.676088Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:42:12.676233Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:42:12.676346Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:42:12.676503Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:42:12.676567Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:42:12.676616Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:42:12.676652Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:42:12.676681Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:42:12.676714Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:42:12.676755Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:42:12.676796Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:42:12.676870Z node 1 :TX_DATASHARD DEBUG: 
datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:42:12.677004Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:896:2770], Recipient [1:891:2767]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:42:12.677038Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:42:12.677091Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2763], serverId# [1:896:2770], sessionId# [0:0:0] 2025-12-23T18:42:12.677137Z node 1 :TX_DATASHARD DEBUG: datashard__prog ... on plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:42:28.959915Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:10] at 72075186224037888 has finished 2025-12-23T18:42:28.971023Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-12-23T18:42:28.971092Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:10] at 72075186224037888 on unit DirectOp 2025-12-23T18:42:28.971136Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status GENERIC_ERROR 2025-12-23T18:42:29.147030Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [3:67:2114] Handle TEvExecuteKqpTransaction 2025-12-23T18:42:29.147111Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [3:67:2114] TxId# 281474976715662 ProcessProposeKqpTransaction 2025-12-23T18:42:29.150548Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [3:1402:3184], Recipient [3:889:2768]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-12-23T18:42:29.150795Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:42:29.150855Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v23000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v23000/18446744073709551615 ImmediateWriteEdgeReplied# v23000/18446744073709551615 2025-12-23T18:42:29.150900Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v23000/18446744073709551615 2025-12-23T18:42:29.150967Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit CheckRead 2025-12-23T18:42:29.151068Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2025-12-23T18:42:29.151108Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:42:29.151147Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:42:29.151184Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit BuildAndWaitDependencies 
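The repeating `Trying to execute ... on unit X` / `Execution status ... is Executed` / `Advance execution plan ...` pattern in these traces is the datashard pipeline walking one operation through its ordered execution units. A rough conceptual model of that loop, useful only for reading the traces (this is a sketch, not YDB's actual pipeline code; unit names and statuses are copied from the log):

```python
# Conceptual sketch of the execution-unit loop seen in the TX_DATASHARD traces.
from enum import Enum

class Status(Enum):
    EXECUTED = "Executed"             # unit finished, advance to the next unit
    CONTINUE = "Continue"             # async work started (e.g. a scan), resume later
    DELAY_COMPLETE = "DelayComplete"  # finish the unit when the transaction completes

def run_pipeline(op_id, plan, run_unit):
    """Walk an operation through its execution plan, mirroring the trace wording."""
    for unit in plan:
        print(f"Trying to execute {op_id} on unit {unit}")
        status = run_unit(unit)
        print(f"Execution status for {op_id} is {status.value}")
        if status is Status.CONTINUE:
            return  # parked until the async unit calls back into the pipeline
        print(f"Advance execution plan for {op_id} executing on unit {unit}")
    print(f"Execution plan for {op_id} has finished")

# Example mirroring the read above: every unit completes synchronously.
run_pipeline("[0:11]",
             ["CheckRead", "BuildAndWaitDependencies", "ExecuteRead", "CompletedOperations"],
             lambda unit: Status.EXECUTED)
```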
2025-12-23T18:42:29.151237Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:11] at 72075186224037888 2025-12-23T18:42:29.151275Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2025-12-23T18:42:29.151298Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:42:29.151319Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:42:29.151342Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:42:29.151469Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-12-23T18:42:29.151742Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[3:1402:3184], 0} after executionsCount# 1 2025-12-23T18:42:29.151803Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[3:1402:3184], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:42:29.151898Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[3:1402:3184], 0} finished in read 2025-12-23T18:42:29.151967Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2025-12-23T18:42:29.151994Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:42:29.152020Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:42:29.152046Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:42:29.152097Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2025-12-23T18:42:29.152117Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:42:29.152142Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:11] at 72075186224037888 has finished 2025-12-23T18:42:29.152179Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:42:29.152284Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:42:29.152965Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [3:1402:3184], Recipient [3:889:2768]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:42:29.153022Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } 
items { uint32_value: 4 } } 2025-12-23T18:42:29.311449Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [3:67:2114] Handle TEvExecuteKqpTransaction 2025-12-23T18:42:29.311528Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [3:67:2114] TxId# 281474976715663 ProcessProposeKqpTransaction 2025-12-23T18:42:29.315355Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [3:1432:3208], Recipient [3:1114:2929]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 44 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2025-12-23T18:42:29.315544Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-12-23T18:42:29.315611Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037889 CompleteEdge# v8000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v7000/18446744073709551615 ImmediateWriteEdgeReplied# v7000/18446744073709551615 2025-12-23T18:42:29.315672Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037889 changed HEAD read to non-repeatable v23000/18446744073709551615 2025-12-23T18:42:29.315742Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-12-23T18:42:29.315838Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2025-12-23T18:42:29.315880Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-12-23T18:42:29.315921Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-12-23T18:42:29.315954Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-12-23T18:42:29.316005Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2025-12-23T18:42:29.316048Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2025-12-23T18:42:29.316071Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-12-23T18:42:29.316093Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-12-23T18:42:29.316115Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-12-23T18:42:29.316220Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 44 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-12-23T18:42:29.316536Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[3:1432:3208], 0} after executionsCount# 1 2025-12-23T18:42:29.316601Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[3:1432:3208], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 
5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:42:29.316702Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[3:1432:3208], 0} finished in read 2025-12-23T18:42:29.316768Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2025-12-23T18:42:29.316794Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-12-23T18:42:29.316821Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-12-23T18:42:29.316847Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-12-23T18:42:29.316892Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2025-12-23T18:42:29.316913Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-12-23T18:42:29.316941Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2025-12-23T18:42:29.316990Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-12-23T18:42:29.317105Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-12-23T18:42:29.318322Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [3:1432:3208], Recipient [3:1114:2929]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:42:29.318387Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\" ... ?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 
7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TestProgram::YqlKernelStartsWithScalar |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> BridgeGet::PartRestorationAcrossBridge [GOOD] >> TestProgram::YqlKernelStartsWithScalar [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N6(0):{\"p\":{\"v\":\"001\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N0(0):{\"p\":{\"v\":\"uid\"},\"o\":\"16\",\"t\":\"Const\"}\n"]; N2[shape=box, label="N4(15):{\"i\":\"7,16\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"17\",\"t\":\"Calculation\"}\nREMOVE:16"]; N1 -> N2[label="1"]; N4 -> N2[label="2"]; N3[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N10 -> N3[label="1"]; N4[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N3 -> N4[label="1"]; N5[shape=box, label="N7(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"18\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N5[label="1"]; N4 -> N5[label="2"]; N6[shape=box, label="N5(23):{\"i\":\"17\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"19\",\"t\":\"Calculation\"}\nREMOVE:17"]; N2 -> N6[label="1"]; N7[shape=box, label="N8(23):{\"i\":\"18\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"20\",\"t\":\"Calculation\"}\nREMOVE:18"]; N5 -> N7[label="1"]; N8[shape=box, label="N9(54):{\"i\":\"19,20\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"21\",\"t\":\"Calculation\"}\nREMOVE:19,20"]; N6 -> N8[label="1"]; N7 -> N8[label="2"]; N9[shape=box, label="N10(54):{\"i\":\"21\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N8 -> N9[label="1"]; N10[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N1->N10->N3->N4->N2->N6->N0->N5->N7->N8->N9[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1},{"from":4}]},{"owner_id":3,"inputs":[{"from":10}]},{"owner_id":4,"inputs":[{"from":3}]},{"owner_id":5,"inputs":[{"from":0},{"from":4}]},{"owner_id":6,"inputs":[{"from":2}]},{"owner_id":7,"inputs":[{"from":5}]},{"owner_id":8,"inputs":[{"from":6},{"from":7}]},{"owner_id":9,"inputs":[{"from":8}]},{"owner_id":10,"inputs":[]}],"nodes":{"1":{"p":{"p":{"v":"uid"},"o":"16","t":"Const"},"w":0,"id":1},"3":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":3},"8":{"p":{"i":"19,20","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"21","t":"Calculation"},"w":54,"id":8},"2":{"p":{"i":"7,16","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"17","t":"Calculation"},"w":15,"id":2},"0":{"p":{"p":{"v":"001"},"o":"15","t":"Const"},"w":0,"id":0},"5":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"18","t":"Calculation"},"w":15,"id":5},"9":{"p":{"i":"21","t":"Projection"},"w":54,"id":9},"7":{"p":{"i":"18","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"20","t":"Calculation"},"w":23,"id":7},"4":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":4},"10":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":10},"6":{"p":{"i":"17","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"19","t":"Calculation"},"w":23,"id":6}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow11BooleanTypeE; >> Cdc::RacyCreateAndSend [GOOD] >> Cdc::RacySplitAndDropTable |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-dbadmin |94.7%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/tx_allocator_client/ut/unittest >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"Lorem\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"Lorem"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridge [GOOD] Test command err: RandomSeed# 16694607377172631160 originalGroupIndex# 0 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 0 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 1 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 1 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 2 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 2 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-system [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-anonymous >> TTxAllocatorClientTest::ZeroRange |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-dbadmin |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] >> TTxAllocatorClientTest::InitiatingRequest >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink >> TTxAllocatorClientTest::InitiatingRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2025-12-23T18:42:33.329938Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-12-23T18:42:33.330568Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-12-23T18:42:33.331544Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-12-23T18:42:33.333759Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.334405Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-12-23T18:42:33.349035Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.349152Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.349260Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.349348Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-12-23T18:42:33.349516Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.349645Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-12-23T18:42:33.349794Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-12-23T18:42:33.350841Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2025-12-23T18:42:33.351526Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.351595Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.351711Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-12-23T18:42:33.351760Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 2025-12-23T18:42:33.352015Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-12-23T18:42:33.352220Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-12-23T18:42:33.352460Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-12-23T18:42:33.352667Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. 
Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-12-23T18:42:33.352849Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2025-12-23T18:42:33.353390Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.353479Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.353591Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2025-12-23T18:42:33.353633Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 5000 to# 10000 2025-12-23T18:42:33.353841Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-12-23T18:42:33.354037Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-12-23T18:42:33.354218Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-12-23T18:42:33.354516Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-12-23T18:42:33.354681Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2025-12-23T18:42:33.355128Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.355285Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.355397Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2025-12-23T18:42:33.355440Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 10000 to# 15000 2025-12-23T18:42:33.355654Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> KqpUserConstraint::KqpReadNull-UploadNull >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-ordinaryuser |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.7%| [TA] $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::SplitEmptyTwice [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:03.428602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:03.428729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:03.428780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:03.428823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:03.428871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:03.428910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:03.428966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:03.429087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:03.430085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: 
ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:03.430411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:03.603493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:03.603572Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:03.616059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:03.616304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:03.616471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:03.633204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:03.633457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:03.634356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:03.634672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:03.639925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:03.640206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:03.641674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:03.641751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:03.641884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:03.641946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:03.642002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:03.642178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:03.913623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.914909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.915068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.915158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.915274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.915367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.915510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.915609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.915703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:42:03.915785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.915857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.915948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.916038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.916123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:03.916213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
42:31.257718Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2025-12-23T18:42:31.257750Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2025-12-23T18:42:31.257946Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.258062Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.258174Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.258382Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.258469Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.258639Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.259121Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.259268Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.259767Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.259882Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.260186Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.260369Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.260520Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.260654Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.260978Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.261083Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.261566Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.261887Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.262001Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 
2025-12-23T18:42:31.262083Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.262268Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.262345Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.262412Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:42:31.272709Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:31.279510Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:31.279625Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:31.279762Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:31.279845Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:31.279916Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:31.280233Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:781:2750] sender: [5:837:2058] recipient: [5:15:2062] 2025-12-23T18:42:31.319890Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:31.319969Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-12-23T18:42:31.498469Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2025-12-23T18:42:31.498662Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:31.498718Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:31.499034Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:31.499096Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:831:2789], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-12-23T18:42:31.499928Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 0 2025-12-23T18:42:33.500945Z node 
5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:33.512042Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:33.521486Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:33.521657Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2025-12-23T18:42:33.522319Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:33.522565Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 283us result status StatusSuccess 2025-12-23T18:42:33.523173Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 4 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx9G8sHogQ4LW1ZK6SZFy\nMCXzpdCAusvAh5UctRSOxJouGlyv5ve8uzgaxiRpy1cn65s3hLjsziOJdg5Jy/A1\nZrJnaP0PCioFM9kuJEc5t1wf8nXVyk7oGDtQw5bsT8r27W1mBDo3Ktrgpfu/nqWw\nEzIevTUP2zcAW0fBL07001/WLAbFE6PF/KPeVlltjBWeev4TgC0XLYNo++B/sr0o\nEdNFiSR4YOikksl6HhUvS1Be/ILDt0NAcZGHf3ns7shnpeYvRFRMl4tP2PeRkkNg\n7PCB7oKJpPLG1l9nbEK25Sf/zASZI2jUnTdS7ZuCkprxgL8PyuaD7P+le8jEDPam\nOQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1766601748759 } PublicKeys { KeyId: 2 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArQM83V2Qvt/IybHgXc9z\nkzbKH42ADr9huLAGWshwGuEWVYA9htdnNQpR3tehTFwtt0seE8NE4jLSGxGEdX5Y\nRYKQWkaPOkX6oAtqLrEG8auPNjEomIC6ODJ12lwadrPm9K6XKe/rWiRcFqj7HZrj\nP1mu4u+Kw8fCOpoYXeLnRyU5qdczL7N+xBbvmFyrroCkJ21iPnlrYbrF53sjAF+o\nrz0SaAAAFqHS2BoYL8342p0dP2S08ndMFj+E/XkVPd1Va8NayA5gyQ4I4mCgOwJ3\nDYBHAVTd5IO1vtA2Jz8SnihtPJEBAUyxRUI0JbOUfPSlkTxtc6dVKpKNi0WDiAGj\nfQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1766601749112 } PublicKeys { KeyId: 3 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2e5PgktD5d7t8NLxiMCa\nqCN+6KQ1k+KyAyriKBrY2ftzMIB8NxuOlOc3isbOXgGznNnmlyrfkx2wr/C9jAZb\nauH1OlK8M5IaOGzLosBG58ir94/rcSHbOVVhFnc1M9inQRxZy5PDNUJwl+V+Xf23\nod/JPLYpke2Y+iky6EXTvvM/0hgs8NiN2SHKGUMiBp78xXtS8G94YbqbeasN5KxK\nkI2bFh+j2ee8riaPr7nLlUdgvNdItg7vTHMwBOO2GPcLPbQDM3vrtkrYtiW2dC/9\nwHo0tEucy8jrc3knkwgm/bNrHCth/onuTwd0QWn/afyOsdFfci0s5+o/tVQeBrr1\n7QIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1766601751494 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2025-12-23T18:42:33.928774Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-12-23T18:42:33.929215Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-12-23T18:42:33.929861Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-12-23T18:42:33.931566Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.932100Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-12-23T18:42:33.943067Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.943158Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.943235Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.943310Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-12-23T18:42:33.943453Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.943570Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-12-23T18:42:33.943695Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-12-23T18:42:33.944520Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2025-12-23T18:42:33.945035Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.945099Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.945184Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-12-23T18:42:33.945220Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TConsoleTests::TestScaleRecommenderPolicies [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> 
KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] Test command err: Trying to start YDB, gRPC: 23441, MsgBus: 7783 2025-12-23T18:39:41.870110Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124932327692722:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:41.870345Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:39:41.927262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0061d6/r3tmp/tmp5kisin/pdisk_1.dat 2025-12-23T18:39:42.702479Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:42.795483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:42.795575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:42.960532Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:42.990407Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124932327692510:2081] 1766515181817906 != 1766515181817909 2025-12-23T18:39:43.040134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:43.040849Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:43.050398Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 23441, node 1 2025-12-23T18:39:43.476137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:43.476157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:43.476166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:43.480229Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7783 TClient is connected to server localhost:7783 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515183111 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:44.769877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:44.809369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:39:44.822909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:45.157956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:45.592348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:45.805780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:39:46.857355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124932327692722:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:46.857448Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:49.623876Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124966687432765:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:49.624020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:49.626396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124966687432775:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:49.626480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.299051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.349384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.397720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.432779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.484057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.549873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.638958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.754320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:50.977018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124970982400964:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.977097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.977397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124970982400970:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:50.977449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124970982400969:2494], DatabaseId: /Root, PoolId: default, ... e access permissions } 2025-12-23T18:42:18.915503Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125604776902189:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:18.915818Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587125604776902191:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:18.915901Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:18.922203Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:42:18.950269Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7587125604776902193:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:42:19.007060Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7587125609071869540:3785] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=360;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=248;columns=2; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; Trying to start YDB, gRPC: 21974, MsgBus: 2089 2025-12-23T18:42:24.267054Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7587125631312010416:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:42:24.267142Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0061d6/r3tmp/tmpy7zkcf/pdisk_1.dat 2025-12-23T18:42:24.304542Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:42:24.483798Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:24.506858Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:42:24.507498Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:24.507661Z node 13 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:24.530998Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21974, node 13 2025-12-23T18:42:24.691696Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:42:24.691735Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:42:24.691748Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:42:24.691899Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:42:25.095277Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2089 2025-12-23T18:42:25.282268Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:2089 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515344629 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:42:25.736348Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:42:25.767223Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:42:29.266784Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7587125631312010416:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:42:29.266931Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:42:30.833813Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7587125657081815517:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:30.835586Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7587125657081815513:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:30.835778Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:30.838961Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7587125657081815537:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:30.839116Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:30.844414Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:42:30.863891Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7587125657081815526:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:42:30.960723Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7587125657081815580:2861] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1232;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2128;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2128;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1232;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=560;columns=4; |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |94.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |94.8%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false >> TSchemeShardLoginFinalize::NoPublicKeys >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DropColumn >> TContinuousBackupWithRebootsTests::Basic [GOOD] |94.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-anonymous >> TWebLoginService::AuditLogEmptySIDsLoginSuccess >> TWebLoginService::AuditLogLoginSuccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 2025-12-23T18:40:23.913158Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125114488347697:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:23.913353Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004f02/r3tmp/tmp4ATGf7/pdisk_1.dat 2025-12-23T18:40:24.303621Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:40:24.383531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:24.383675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:24.412072Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125114488347592:2081] 1766515223902055 != 1766515223902058 2025-12-23T18:40:24.426209Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:24.435304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:24.436224Z node 1 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:21291) connection closed with error: Connection refused 2025-12-23T18:40:24.437042Z node 1 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2025-12-23T18:40:24.504923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:40:24.939601Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:27.821568Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125130515071074:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:27.821649Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004f02/r3tmp/tmpWRTYZG/pdisk_1.dat 2025-12-23T18:40:27.863091Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:40:27.960946Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:27.964498Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587125130515071025:2081] 1766515227819326 != 1766515227819329 2025-12-23T18:40:27.969793Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:27.969932Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:27.972289Z node 2 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#28,[::1]:9828) connection closed with error: Connection refused 2025-12-23T18:40:27.973129Z node 2 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2025-12-23T18:40:27.990087Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:28.154665Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:40:28.869187Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:31.720190Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587125148796063261:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:31.720277Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004f02/r3tmp/tmpLgxQuG/pdisk_1.dat 2025-12-23T18:40:31.734949Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:40:31.824432Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:31.826907Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587125148796063235:2081] 1766515231719220 != 1766515231719223 2025-12-23T18:40:31.844101Z node 3 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:21614) connection closed with error: Connection refused 2025-12-23T18:40:31.844516Z node 3 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2025-12-23T18:40:31.863753Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:31.863817Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:31.870678Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:31.997834Z node 3 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:40:32.726020Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:35.249301Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587125163738789266:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:35.249510Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004f02/r3tmp/tmpzoWfbo/pdisk_1.dat 2025-12-23T18:40:35.292319Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:40:35.410713Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:35.412454Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:35.412733Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:35.444010Z node 4 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#25,[::1]:16839) connection closed with error: Connection refused 2025-12-23T18:40:35.445088Z node 4 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2025-12-23T18:40:35.454567Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:35.564799Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:40:36.243607Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:38.978136Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587125175481541315:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:38.978172Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004f02/r3tmp/tmpqR06Wp/pdisk_1.dat 2025-12-23T18:40:38.990654Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:40:39.073840Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:39.100859Z node 5 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#30,[::1]:27454) connection closed with error: Connection refused 
2025-12-23T18:40:39.101538Z node 5 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2025-12-23T18:40:39.112734Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:39.112832Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:39.119131Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:39.269945Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:40:39.982609Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:42.925844Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587125194006802879:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:42.925900Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004f02/r3tmp/tmppeOWJI/pdisk_1.dat 2025-12-23T18:40:42.969909Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:40:43.043077Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:43.055747Z nod ... 
,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:41:57.769511Z node 18 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:41:57.994057Z node 18 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:41:58.490305Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:04.486506Z node 19 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7587125546609590489:2136];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:42:04.487044Z node 19 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004f02/r3tmp/tmpDQx5C6/pdisk_1.dat 2025-12-23T18:42:04.623535Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:42:05.014849Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:42:05.242966Z node 19 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [19:7587125546609590391:2081] 1766515324468917 != 1766515324468920 2025-12-23T18:42:05.273218Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:05.276573Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:05.276676Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:05.279753Z node 19 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#30,[::1]:19911) connection closed with error: Connection refused 2025-12-23T18:42:05.290421Z node 19 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2025-12-23T18:42:05.310253Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:05.527341Z node 19 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:05.575373Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:42:11.503855Z node 20 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7587125575601140035:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:42:11.503929Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004f02/r3tmp/tmpBThxz4/pdisk_1.dat 2025-12-23T18:42:11.545685Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:42:11.661463Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:11.662309Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:11.665068Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:11.685876Z node 20 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#32,[::1]:2092) connection closed with error: Connection refused 2025-12-23T18:42:11.688609Z node 20 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2025-12-23T18:42:11.705938Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:11.784680Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:42:12.517522Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:17.660040Z node 21 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7587125602557868561:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:42:17.660225Z node 21 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004f02/r3tmp/tmpXpoe54/pdisk_1.dat 2025-12-23T18:42:17.745203Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:42:17.850619Z node 21 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:17.875335Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:17.875477Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:17.876688Z node 21 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#34,[::1]:9358) connection closed with error: Connection refused 2025-12-23T18:42:17.877462Z node 21 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2025-12-23T18:42:17.909504Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:17.958860Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:42:18.673256Z node 21 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:23.575569Z node 22 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7587125627277501004:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:42:23.575651Z node 22 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004f02/r3tmp/tmp6lj28b/pdisk_1.dat 2025-12-23T18:42:23.617763Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:42:23.721189Z node 22 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [22:7587125627277500977:2081] 1766515343574660 != 1766515343574663 2025-12-23T18:42:23.735829Z node 22 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:23.739477Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:23.739562Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:23.742001Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:23.744315Z node 22 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#36,[::1]:27013) connection closed with error: Connection refused 2025-12-23T18:42:23.744950Z node 22 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2025-12-23T18:42:23.869175Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:42:24.583680Z node 22 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:29.857938Z node 23 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7587125653808516158:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:42:29.858021Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004f02/r3tmp/tmpAywcx8/pdisk_1.dat 2025-12-23T18:42:29.881250Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:42:29.997231Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2025-12-23T18:42:29.999471Z node 23 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [23:7587125653808516128:2081] 1766515349856900 != 1766515349856903 2025-12-23T18:42:30.024159Z node 23 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#38,[::1]:7617) connection closed with error: Connection refused 2025-12-23T18:42:30.025857Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:30.025996Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:30.026165Z node 23 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2025-12-23T18:42:30.051026Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:30.111020Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:42:30.866817Z node 23 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> TBlobStorageProxyTest::TestDoubleFailure >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true >> TBlobStorageProxyTest::TestQuadrupleGroups >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-dbadmin >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true >> TWebLoginService::AuditLogEmptySIDsLoginSuccess [GOOD] >> TWebLoginService::AuditLogAdminLoginSuccess >> TSchemeShardLoginFinalize::NoPublicKeys [GOOD] >> TSchemeShardLoginFinalize::InvalidPassword >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> TWebLoginService::AuditLogLoginBadPassword ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::Basic [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:113:2144] 2025-12-23T18:42:04.375481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:04.375579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-12-23T18:42:04.375635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:04.375678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:04.375720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:04.376351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:04.376413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:04.376478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:04.377881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:04.378287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:04.677964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:42:04.678105Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:04.679239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:04.696023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:04.696168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:04.696414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:04.711534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:04.712100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:04.712970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:04.713987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:04.720119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:04.720378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:04.721758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:04.721830Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:04.722014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:04.722087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:04.722148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:04.722356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:05.023280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.024469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.024621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.024724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.024812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.024952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.025041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.025162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.025239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.025339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.025415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.025532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.025598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.025708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:217:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:05.025840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# ... , LocalPathId: 39] was 5 2025-12-23T18:42:36.459768Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:42:36.459848Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:42:36.459881Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:42:36.459918Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-12-23T18:42:36.459955Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:42:36.460023Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-12-23T18:42:36.460240Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:42:36.460318Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:42:36.460352Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:42:36.460383Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-12-23T18:42:36.463699Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:42:36.467931Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:42:36.468408Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:42:36.468607Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:42:36.468660Z node 16 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:42:36.468830Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2025-12-23T18:42:36.468880Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-12-23T18:42:36.468935Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2025-12-23T18:42:36.468984Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-12-23T18:42:36.469037Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-12-23T18:42:36.469095Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-12-23T18:42:36.469182Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:42:36.469241Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:42:36.469426Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:42:36.469479Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2025-12-23T18:42:36.469507Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:1 2025-12-23T18:42:36.469547Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:42:36.469581Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2025-12-23T18:42:36.469610Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:2 2025-12-23T18:42:36.469672Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2025-12-23T18:42:36.470076Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:42:36.470200Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:42:36.470416Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:42:36.470490Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2025-12-23T18:42:36.470578Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:42:36.470641Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2025-12-23T18:42:36.470688Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:42:36.470962Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:42:36.471034Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:42:36.471259Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:42:36.471314Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:42:36.474450Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:42:36.474928Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:42:36.474998Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:42:36.475622Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:42:36.475742Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:42:36.475803Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [16:1161:3077] TestWaitNotification: OK eventTxId 104 2025-12-23T18:42:36.476614Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:42:36.476894Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 326us result status StatusPathDoesNotExist 2025-12-23T18:42:36.477105Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true 
CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:42:36.477766Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:42:36.478016Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 289us result status StatusPathDoesNotExist 2025-12-23T18:42:36.478215Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |94.8%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-dbadmin >> KqpScan::ScanPg [GOOD] >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword >> TWebLoginService::AuditLogLoginBadPassword [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false >> TPQTest::Read_From_Different_Zones_What_Was_Written_With_Gaps [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> TBlobStorageProxyTest::TestSingleFailureMirror >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TSchemeShardLoginFinalize::InvalidPassword [GOOD] >> TSchemeShardLoginFinalize::Success >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false [GOOD] >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously >> TWebLoginService::AuditLogLdapLoginBadBind >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false >> TWebLoginService::AuditLogLdapLoginBadUser >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-anonymous >> TWebLoginService::AuditLogCreateModifyUser >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-system |94.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] Test command err: 2025-12-23T18:40:23.088396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:23.088461Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:23.279673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:24.481880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:40:24.655387Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:24.656007Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpnraqVn/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:24.656688Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpnraqVn/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpnraqVn/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12198555998906587815 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:24.693885Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:24.694436Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpnraqVn/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:24.694717Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpnraqVn/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpnraqVn/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15498792358524903679 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:24.766619Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:24.767099Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpnraqVn/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:24.767297Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpnraqVn/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpnraqVn/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13811582628025039234 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:24.822209Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:24.822675Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpnraqVn/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:24.822977Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpnraqVn/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpnraqVn/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15479169286006099922 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:24.826010Z node 2 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 ... disk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12890846533019526532 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:42:31.276578Z node 146 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:42:31.277184Z node 146 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:42:31.277390Z node 146 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7287392402001786686 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:42:31.336607Z node 147 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:42:31.337447Z node 147 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:42:31.337806Z node 147 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3503360709141085100 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:42:31.387727Z node 151 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:42:31.388449Z node 151 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:42:31.388810Z node 151 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13038289811160126634 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:42:31.438614Z node 149 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:42:31.439346Z node 149 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:42:31.439687Z node 149 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f05/r3tmp/tmpmCS7u3/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14638376423557146946 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:42:31.924411Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:31.924512Z node 145 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:32.032209Z node 145 :STATISTICS WARN: tx_init.cpp:298: [72075186233409554] TTxInit::Complete. 
EnableColumnStatistics=false 2025-12-23T18:42:35.883019Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:35.883120Z node 154 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:36.031977Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink [GOOD] >> TBlobStorageProxyTest::TestPersistence >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-clusteradmin [GOOD] >> TWebLoginService::AuditLogCreateModifyUser [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD] >> Cdc::RacySplitAndDropTable [GOOD] >> TWebLoginService::AuditLogLogout >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false >> TSchemeShardLoginTest::BanUnbanUser >> Cdc::ShouldDeliverChangesOnSplitMerge >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-system >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 >> Cdc::RenameTable >> TWebLoginService::AuditLogLogout [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:42:01.689261Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:01.690345Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:01.699247Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:678:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:42:01.906766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:01.908449Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:01.916355Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:683:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:42:01.916798Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:01.917051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:01.918950Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:01.919110Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:02.441461Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:02.585479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:02.585636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:02.586547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:02.586626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:02.644789Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:42:02.645825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:02.646167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:02.780439Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:42:02.797986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034d2/r3tmp/tmpPEtqT0/pdisk_1.dat 2025-12-23T18:42:03.818005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:03.903826Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:03.903984Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:04.005847Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-12-23T18:42:04.006179Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-12-23T18:42:04.006378Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:42:04.006764Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:42:04.308479Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:223:2182] Handle TEvProposeTransaction 2025-12-23T18:42:04.308545Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:223:2182] TxId# 281474976710657 ProcessProposeTransaction 2025-12-23T18:42:04.308654Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:223:2182] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:1563:3023] 2025-12-23T18:42:04.743410Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:1563:3023] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:42:04.743564Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:1563:3023] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:42:04.744341Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:1563:3023] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-12-23T18:42:04.744459Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:1563:3023] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:42:04.745112Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:1563:3023] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:42:04.745300Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:1563:3023] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 
2025-12-23T18:42:04.745438Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:1563:3023] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-12-23T18:42:04.753353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:04.753978Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:1563:3023] txid# 281474976710657 HANDLE EvClientConnected 2025-12-23T18:42:04.768209Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:1563:3023] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-12-23T18:42:04.768333Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:1563:3023] txid# 281474976710657 SEND to# [1:1489:2979] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2025-12-23T18:42:04.950714Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1614:2393] 2025-12-23T18:42:04.951244Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:05.070119Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:05.070277Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:05.071953Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:42:05.072038Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:42:05.072097Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:42:05.072452Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:05.072771Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:05.072870Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1638:2393] in generation 1 2025-12-23T18:42:05.099679Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:42:05.172618Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:42:05.172842Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:42:05.172969Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:1641:2410] 2025-12-23T18:42:05.173028Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:42:05.173065Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:42:05.173118Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:42:05.173867Z node 2 :TX_DATASHARD DEBUG: 
datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:42:05.173975Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:42:05.174074Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:42:05.174142Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:42:05.174199Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:42:05.174273Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:42:05.174474Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clien ... ode 3 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710662, task: 3. pass away 2025-12-23T18:42:23.395509Z node 3 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710662;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:42:23.397431Z node 3 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710662, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:42:23.397635Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [3:1911:3215] TxId: 281474976710662. Ctx: { TraceId: 01kd689ypw21z063jpv6bs6aj7, Database: , SessionId: ydb://session/3?node_id=3&id=NmY4NTdmNDktNmE3NzkzODYtOWVhY2I1NTgtYjNmNzI2ODA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [3:1918:3244], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 395754 DurationUs: 3000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 393577 FinishTimeMs: 1766515343395 InputRows: 1 InputBytes: 6 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 95 BuildCpuTimeUs: 393482 HostName: "ghrun-ou4bsyjxme" NodeId: 3 StartTimeMs: 1766515343392 CreateTimeMs: 1766515342734 UpdateTimeMs: 1766515343395 } MaxMemoryUsage: 1048576 } 2025-12-23T18:42:23.397689Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710662. Ctx: { TraceId: 01kd689ypw21z063jpv6bs6aj7, Database: , SessionId: ydb://session/3?node_id=3&id=NmY4NTdmNDktNmE3NzkzODYtOWVhY2I1NTgtYjNmNzI2ODA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [3:1918:3244] 2025-12-23T18:42:23.397820Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [3:1911:3215] TxId: 281474976710662. Ctx: { TraceId: 01kd689ypw21z063jpv6bs6aj7, Database: , SessionId: ydb://session/3?node_id=3&id=NmY4NTdmNDktNmE3NzkzODYtOWVhY2I1NTgtYjNmNzI2ODA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:42:23.397906Z node 3 :KQP_EXECUTER TRACE: kqp_executer_impl.h:1400: ActorId: [3:1911:3215] TxId: 281474976710662. Ctx: { TraceId: 01kd689ypw21z063jpv6bs6aj7, Database: , SessionId: ydb://session/3?node_id=3&id=NmY4NTdmNDktNmE3NzkzODYtOWVhY2I1NTgtYjNmNzI2ODA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState 2025-12-23T18:42:23.397974Z node 3 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [3:1911:3215] TxId: 281474976710662. Ctx: { TraceId: 01kd689ypw21z063jpv6bs6aj7, Database: , SessionId: ydb://session/3?node_id=3&id=NmY4NTdmNDktNmE3NzkzODYtOWVhY2I1NTgtYjNmNzI2ODA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval: ComputeTime: 0.679584s ReadRows: 100 ReadBytes: 800 ru: 453 rate limiter was not found force flag: 1 2025-12-23T18:42:23.398962Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976710661] shutting down 2025-12-23T18:42:23.399134Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:223:2182] Handle TEvProposeTransaction 2025-12-23T18:42:23.399163Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:223:2182] TxId# 0 ProcessProposeTransaction 2025-12-23T18:42:23.399274Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [3:223:2182] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1943:3254] SnapshotReq marker# P0 2025-12-23T18:42:23.400142Z node 3 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [3:1945:3254] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-12-23T18:42:23.400346Z node 3 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [3:1945:3254] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-12-23T18:42:23.400449Z node 3 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [3:1943:3254] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 ... waiting for SysViewsRoster update finished 2025-12-23T18:42:31.883438Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:31.885071Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:31.888129Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:42:31.902520Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:31.904495Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:31.907902Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:42:31.908633Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:31.908837Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:31.911226Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:31.911389Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:32.347700Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:32.477696Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:32.477902Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:32.479971Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:32.480108Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:32.528524Z node 5 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-12-23T18:42:32.529498Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:32.530022Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:32.618430Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:42:32.648477Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034d2/r3tmp/tmpUzEPhV/pdisk_1.dat 2025-12-23T18:42:33.333581Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:33.403166Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:33.403335Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:33.759468Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:34.391570Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1724:3111], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:34.391719Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1734:3116], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:34.392160Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:34.393202Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1740:3121], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:34.393482Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:34.399543Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:42:34.861894Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1738:3119], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:42:34.954147Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:1832:3185] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:42:37.211575Z node 5 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976710661] shutting down |94.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:42:37.261766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:42:37.362724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:42:37.375647Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:42:37.376035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:37.376124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:42:37.670420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:37.670547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:37.751814Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515354336655 != 1766515354336659 2025-12-23T18:42:37.774308Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:37.824057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:37.931257Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003da1/r3tmp/tmp94NbDE/pdisk_1.dat 2025-12-23T18:42:38.221597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:38.237793Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:38.344779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:38.750194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1070:2896], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:38.750294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1080:2901], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:38.750350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:38.751172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1085:2905], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:38.751322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:42:38.755927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:42:38.889786Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1084:2904], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:42:38.969072Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1141:2942] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_scan/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitEmptyTwice [GOOD] Test command err: 2025-12-23T18:38:11.775600Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124547113162482:2163];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:11.776172Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057fd/r3tmp/tmpxRY5rq/pdisk_1.dat 2025-12-23T18:38:12.264399Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:38:12.321710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:12.321942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:12.380082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:12.530896Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124547113162356:2081] 1766515091704267 != 1766515091704270 2025-12-23T18:38:12.556530Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:12.563146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:38:12.747781Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24068 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515092699 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:38:13.397980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:38:13.451415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:38:13.491229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:38:13.969088Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.015s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-12-23T18:38:14.001556Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1155 521 2626)b }, ecr=1.000 2025-12-23T18:38:14.074086Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.010s,wait=0.004s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1621 647 6413)b }, ecr=1.000 2025-12-23T18:38:14.088146Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.012s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2390 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766515093700 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name:... (TRUNCATED) TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766515093700 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name:... (TRUNCATED) 2025-12-23T18:38:16.744391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124547113162482:2163];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:38:16.744485Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; waiting... 
2025-12-23T18:38:19.175291Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.54, eph 1} end=Done, 2 blobs 201r (max 201), put Spent{time=0.039s,wait=0.020s,interrupts=1} Part{ 1 pk, lobs 0 +0, (58340 0 0)b }, ecr=1.000 2025-12-23T18:38:19.519184Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.83, eph 1} end=Done, 2 blobs 789r (max 789), put Spent{time=0.049s,wait=0.017s,interrupts=1} Part{ 1 pk, lobs 0 +0, (51817 0 0)b }, ecr=1.000 2025-12-23T18:38:19.626454Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.594, eph 1} end=Done, 2 blobs 3r (max 3), put Spent{time=0.014s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2025-12-23T18:38:19.739259Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.125, eph 2} end=Done, 2 blobs 455r (max 456), put Spent{time=0.052s,wait=0.011s,interrupts=1} Part{ 1 pk, lobs 0 +0, (131688 0 0)b }, ecr=1.000 2025-12-23T18:38:19.791377Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.139, eph 1} end=Done, 2 blobs 2r (max 2), put Spent{time=0.017s,wait=0.016s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-12-23T18:38:19.890581Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.140, eph 1} end=Done, 2 blobs 2r (max 2), put Spent{time=0.116s,wait=0.099s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-12-23T18:38:19.891951Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.141, eph 1} end=Done, 2 blobs 501r (max 501), put Spent{time=0.117s,wait=0.055s,interrupts=1} Part{ 1 pk, lobs 0 +0, (31966 0 0)b }, ecr=1.000 2025-12-23T18:38:19.907801Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.142, eph 1} end=Done, 2 blobs 1500r (max 1500), put Spent{time=0.071s,wait=0.017s,interrupts=1} Part{ 1 pk, lobs 0 +0, (103070 0 0)b }, ecr=1.000 2025-12-23T18:38:19.954479Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.150, eph 2} end=Done, 2 blobs 1542r (max 1545), put Spent{time=0.092s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (101140 0 0)b }, ecr=1.000 2025-12-23T18:38:20.035938Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.731, eph 1} end=Done, 2 blobs 10001r (max 10001), put Spent{time=0.142s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-12-23T18:38:20.327393Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.196, eph 3} end=Done, 2 blobs 706r (max 707), put Spent{time=0.068s,wait=0.013s,interrupts=1} Part{ 1 pk, lobs 0 +0, (204220 0 0)b }, ecr=1.000 2025-12-23T18:38:20.486511Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.215, eph 3} end=Done, 2 blobs 2295r (max 2298), put Spent{time=0.076s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (150463 0 0)b }, ecr=1.000 2025-12-23T18:38:20.575202Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1113, eph 2} end=Done, 2 blobs 3r (max 5), put Spent{time=0.016s,wait=0.011s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2025-12-23T18:38:20.692761Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.281, eph 4} end=Done, 2 blobs 960r (max 961), put Spent{time=0.049s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (277568 0 0)b }, ecr=1.000 2025-12-23T18:38:20.747372Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.301, eph 2} end=Done, 2 blobs 2r (max 3), put Spent{time=0.032s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-12-23T18:38:20.771487Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.303, eph 2} end=Done, 2 blobs 2r (max 3), put Spent{time=0.056s,wait=0.035s,interrupts=1} Part{ 1 pk, lobs 0 
+0, (181 0 0)b }, ecr=1.000 2025-12-23T18:38:20.785561Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.299, eph 2} end=Done, 2 blobs 3006r (max 3006), put Spent{time=0.075s,wait=0.018s,interrupts=1} Part{ 1 pk, lobs 0 +0, (206360 0 0)b }, ecr=1.000 2025-12-23T18:38:20.825139Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.305, eph 2} end=Done, 2 blobs 1004r (max 1004), put Spent{time=0.107s,wait=0.025s,interrupts=1} Part{ 1 pk, lobs 0 +0, (63855 0 0)b }, ecr=1.000 2025-12-23T18:38:20.869070Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.315, eph 4} end=Done, 2 blobs 3054r (max 3054), put Spent{time=0.102s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (200176 0 0)b }, ecr=1.000 2025-12-23T18:38:20.975098Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1239, eph 2} end=Done, 2 blob ... H_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-12-23T18:42:30.556083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587125657151001758 RawX2: 4503608217307453 } TabletId: 72075186224037893 State: 4 2025-12-23T18:42:30.556156Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:42:30.556463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587125657151001769 RawX2: 4503608217307454 } TabletId: 72075186224037895 State: 4 2025-12-23T18:42:30.556501Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:42:30.556630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587125657151001576 RawX2: 4503608217307434 } TabletId: 72075186224037891 State: 4 2025-12-23T18:42:30.556655Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:42:30.556781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587125652856033961 RawX2: 4503608217307374 } TabletId: 72075186224037889 State: 4 2025-12-23T18:42:30.556822Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:42:30.557182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587125657151001575 RawX2: 4503608217307433 } TabletId: 72075186224037890 State: 4 2025-12-23T18:42:30.557221Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:42:30.557426Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587125657151001757 RawX2: 4503608217307452 } TabletId: 72075186224037894 State: 4 2025-12-23T18:42:30.557675Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:42:30.557916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:42:30.557981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:42:30.558089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:42:30.558116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:42:30.558192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:42:30.558217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:42:30.558405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:42:30.558436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:42:30.558504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:42:30.558528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:42:30.558593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:42:30.558625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:42:30.560228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-12-23T18:42:30.560482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2025-12-23T18:42:30.560677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2025-12-23T18:42:30.560832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted 
for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2025-12-23T18:42:30.560962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-12-23T18:42:30.561093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2025-12-23T18:42:30.561195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-12-23T18:42:30.561304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:42:30.561472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:42:30.561610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2025-12-23T18:42:30.561726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-12-23T18:42:30.561883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2025-12-23T18:42:30.564205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:42:30.564243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2025-12-23T18:42:30.564311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:42:30.565568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2025-12-23T18:42:30.565587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-12-23T18:42:30.565627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:8 2025-12-23T18:42:30.565637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-12-23T18:42:30.565657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:42:30.565666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-12-23T18:42:30.565802Z node 2 :HIVE WARN: hive_impl.cpp:519: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-12-23T18:42:30.565828Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found 2025-12-23T18:42:30.565843Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-12-23T18:42:30.565860Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-12-23T18:42:30.565876Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-12-23T18:42:30.565893Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-12-23T18:42:30.567127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2025-12-23T18:42:30.567142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-12-23T18:42:30.567184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:42:30.567196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-12-23T18:42:30.567233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2025-12-23T18:42:30.567261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-12-23T18:42:30.570450Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogCreateModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:37.293237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:37.293323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:37.293369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:37.293429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:37.293478Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:37.293510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:37.293569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:37.293660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:37.294506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:37.294808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:37.383950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:37.384014Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:37.389937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:37.390116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:37.390245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:37.393622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:37.393821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:37.394550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:37.394789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:37.399087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:37.399271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:37.400497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:37.400559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:37.400678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:37.400746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:37.400810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:37.400943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.412692Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:42:37.525889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:42:37.526073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.526236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:42:37.526281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:42:37.526447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:42:37.526507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:37.529213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:37.529369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:42:37.529502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.529549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:42:37.529580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:42:37.529611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:42:37.531615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.531669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-12-23T18:42:37.531711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:42:37.533367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.533422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.533461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:42:37.533546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:42:37.537129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:42:37.538849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:42:37.539040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:42:37.540113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:37.540246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:37.540303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:42:37.540575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:42:37.540629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:42:37.540801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:42:37.540872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:42:37.542652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:37.542699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... ration: MODIFY USER, path: /MyRoot 2025-12-23T18:42:40.404896Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:40.404964Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:40.405148Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:40.405190Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-12-23T18:42:40.405655Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:42:40.405749Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:42:40.405786Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:42:40.405823Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:42:40.405864Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:42:40.405953Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-12-23T18:42:40.408100Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 AUDIT LOG buffer(6): 2025-12-23T18:42:40.307420Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T18:42:40.352365Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-12-23T18:42:40.369383Z: component=schemeshard, tx_id=102, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2025-12-23T18:42:40.378868Z: component=schemeshard, tx_id=103, remote_address={none}, subject={none}, 
sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[blocking] 2025-12-23T18:42:40.387378Z: component=schemeshard, tx_id=104, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2025-12-23T18:42:40.400524Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] AUDIT LOG checked line: 2025-12-23T18:42:40.400524Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2025-12-23T18:42:40.411753Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user1" Password: "password1" CanLogin: false } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:42:40.419212Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-12-23T18:42:40.419431Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2025-12-23T18:42:40.419488Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:42:40.419546Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2025-12-23T18:42:40.419598Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:42:40.419688Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:42:40.419779Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-12-23T18:42:40.419828Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:42:40.419878Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2025-12-23T18:42:40.419934Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-12-23T18:42:40.419980Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-12-23T18:42:40.422842Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:40.423019Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2025-12-23T18:42:40.423257Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:40.423329Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:40.423580Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:40.423642Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-12-23T18:42:40.424355Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-12-23T18:42:40.424493Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-12-23T18:42:40.424550Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-12-23T18:42:40.424615Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-12-23T18:42:40.424675Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:42:40.424797Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-12-23T18:42:40.427560Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 AUDIT LOG buffer(7): 2025-12-23T18:42:40.307420Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T18:42:40.352365Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-12-23T18:42:40.369383Z: component=schemeshard, tx_id=102, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2025-12-23T18:42:40.378868Z: component=schemeshard, tx_id=103, 
remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[blocking] 2025-12-23T18:42:40.387378Z: component=schemeshard, tx_id=104, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2025-12-23T18:42:40.400524Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2025-12-23T18:42:40.419026Z: component=schemeshard, tx_id=106, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] AUDIT LOG checked line: 2025-12-23T18:42:40.419026Z: component=schemeshard, tx_id=106, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:37.378128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:37.378214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:37.378259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:37.378302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:37.378342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:37.378378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:37.378454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:37.378556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:37.379505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:37.379810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:37.474242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:37.474305Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:37.487654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:37.487916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:37.488052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:37.495482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:37.495748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:37.496631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:37.496912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:37.499279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:37.499514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:37.500848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:37.500919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:37.501064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:37.501142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:37.501203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:37.501359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.509649Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:42:37.660348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:42:37.660627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.660871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:42:37.660940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:42:37.661191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:42:37.661428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:37.664090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:37.664376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:42:37.664608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.664687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:42:37.664742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:42:37.664784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:42:37.667175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.667257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:42:37.667302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:42:37.670681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.670770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.670833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:42:37.670928Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:42:37.683602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:42:37.685748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:42:37.685954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:42:37.687017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:37.687185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:37.687267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:42:37.687650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:42:37.687721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:42:37.687916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:42:37.688031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:42:37.690408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:37.690447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
done id#101:0 progress is 1/1 2025-12-23T18:42:40.687450Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:42:40.687503Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:42:40.687550Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:42:40.687616Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:42:40.687681Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:42:40.687733Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:42:40.687775Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:42:40.687820Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-12-23T18:42:40.687864Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-12-23T18:42:40.688190Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [5:272:2262] Bootstrap 2025-12-23T18:42:40.689569Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [5:272:2262] Become StateWork (SchemeCache [5:278:2268]) 2025-12-23T18:42:40.690647Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [5:272:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:42:40.693794Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:40.693934Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-12-23T18:42:40.694179Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:40.694231Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:40.694456Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:40.694508Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:42:40.695586Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:40.695723Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:40.695773Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:42:40.695825Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-12-23T18:42:40.695884Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:42:40.696012Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:42:40.696580Z node 5 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-12-23T18:42:40.697758Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-12-23T18:42:40.698119Z node 5 :HTTP WARN: login_page.cpp:102: 127.0.0.1:0 POST /login 2025-12-23T18:42:40.699754Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:40.699803Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-12-23T18:42:40.750169Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:40.760034Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:40.760188Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:40.760237Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:40.760495Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2025-12-23T18:42:40.760559Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:40.760605Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-12-23T18:42:40.761487Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-12-23T18:42:40.762157Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:40.762366Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 244us result status StatusSuccess 2025-12-23T18:42:40.762958Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1X5hb3dCflIy/xmQIGyx\nyr4C1xaGygdDJs9LXpfu17iZzI6RQzYgDsAdPKYximJC09q0n6FhErWpC+Ky9BoY\nFThNeH+DuWdmbYOlZu5Sv8ytBoWqcIaPpcrF/NWTl0KggcbIt/+0u8rppBdQChTT\nOujbEsinqfEBWV09HGz7qhPxPMFfBvu2J1dPFCvZ5Go14VRdgv4aV020G9vVPdKO\nQaIjXhRe3DCh0cdW+sAZfYSmgf+1T2cf+ncPoEyGpwkIPN8RJcPKPxtsPXY16RQ8\nST1YVLikXxgm7ohrLbS9nVVCOMFkOM12Ppt/K5hb/S5iWIy+Fe7BL9dZDsIxO/3S\n4QIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1766601760745 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:40.763480Z node 5 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout 2025-12-23T18:42:40.763545Z node 5 :HTTP ERROR: login_page.cpp:326: Logout: No ydb_session_id cookie 2025-12-23T18:42:40.764079Z node 5 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout 2025-12-23T18:42:40.764848Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (589A015B): Token is not in correct format 2025-12-23T18:42:40.764930Z node 5 :HTTP ERROR: login_page.cpp:326: Logout: Token is not in correct format 2025-12-23T18:42:40.765314Z node 5 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout AUDIT LOG buffer(4): 2025-12-23T18:42:40.657998Z: 
component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T18:42:40.687056Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-12-23T18:42:40.754302Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzY2NTU4NTYwLCJpYXQiOjE3NjY1MTUzNjAsInN1YiI6InVzZXIxIn0.**, login_user_level=admin 2025-12-23T18:42:40.766584Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzY2NTU4NTYwLCJpYXQiOjE3NjY1MTUzNjAsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2025-12-23T18:42:40.766584Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzY2NTU4NTYwLCJpYXQiOjE3NjY1MTUzNjAsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-dbadmin |94.8%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest |94.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-ordinaryuser >> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet >> TSchemeShardLoginTest::BanUnbanUser [GOOD] >> TSchemeShardLoginTest::BanUserWithWaiting >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> TSchemeShardLoginFinalize::Success [GOOD] >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-ordinaryuser >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] >> TPQTest::IncompleteProxyResponse >> TBlobStorageProxyTest::TestBlockPersistence >> TBlobStorageProxyTest::TestEmptyDiscover ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] Test command err: 2025-12-23T18:42:38.000847Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002efe/r3tmp/tmpz0aJ75//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2025-12-23T18:42:38.001996Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002efe/r3tmp/tmpz0aJ75//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2025-12-23T18:42:38.021240Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:42:38.022544Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:42:41.330703Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002efe/r3tmp/tmpsJw5La//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2025-12-23T18:42:41.331642Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002efe/r3tmp/tmpsJw5La//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2025-12-23T18:42:41.338241Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:42:41.338393Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:36.650512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:36.650620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:36.650669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:36.650711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:36.650793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:36.650833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:36.651022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:36.651121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:36.652010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:36.652249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:36.740450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:36.740517Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:36.747046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:36.747357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:36.747507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:36.751187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:36.751424Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:36.752215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:36.752501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:36.755337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:36.755558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:36.756575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:36.756638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:36.756814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:36.756862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:36.756908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:36.757072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:36.876796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.877604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.877688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.877819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.877866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.877904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.877957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.878001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.878041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.878102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.878159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:42:36.878227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.878271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.878320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.878369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
18:42:42.700420Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:42:42.700469Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:42:42.700519Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:42:42.700572Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:42:42.700682Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:42:42.703249Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:42:42.703761Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:42.704214Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [5:672:2662] Bootstrap 2025-12-23T18:42:42.705557Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [5:672:2662] Become StateWork (SchemeCache [5:677:2667]) 2025-12-23T18:42:42.705957Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:42.706147Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 225us result status StatusSuccess 2025-12-23T18:42:42.706604Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 
200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:42.707129Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [5:672:2662] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:42:42.708899Z node 5 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 2025-12-23T18:42:42.709682Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:42.709723Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-12-23T18:42:42.774641Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: 
TTxLogin Complete, with error: Cannot find user: user1, at schemeshard: 72057594046678944 2025-12-23T18:42:42.774752Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:42.774797Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:42.775006Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:42.775058Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-12-23T18:42:42.775566Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 0 2025-12-23T18:42:42.775907Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:42.776107Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 232us result status StatusSuccess 2025-12-23T18:42:42.776609Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0cj5xknNkK52eogYiBPW\nP+3DmvhzS3f4jXUdnRv3Z81nAIKK4BZbiRUMOuzDJbTY8EbS4IWtGuECyJK0eyas\nsI+aIaVTYGxbynt7zwbBVPkzBg9pe1FQGdGWPnOpHg9rsBwQWVFLBo4qMq3dhU7X\nZq9kZeUtR6DcIZv7IEFnjjSc4BhiVWb3fga6BYp70ZzfcZf6qxk8PjrGLFv6HfVm\n2D0wm51CNYv3hfw3pun6peuhRebKTH/HAKm3HQmp/h0UVf+5iaGgH6RFclGSyVKh\n3pKRU3RsR7jPEXHtUm/6Mj65d1gFyEqr0HeR9e+S0pL9A3QhgwNXqT1NpDRSFyBt\nLQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1766601762772 } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2025-12-23T18:42:39.639053Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002ef9/r3tmp/tmpUCZ7P1//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2025-12-23T18:42:39.649025Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 
LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:37.007934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:37.008013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:37.008044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:37.008073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:37.008115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:37.008141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:37.008189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:37.008266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:37.008927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:37.009143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:37.076738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:37.076828Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:37.083447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:37.083669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:37.083817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:37.087183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:37.087413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:37.088207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:37.088444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:37.090508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:37.090656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:37.091942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:37.092012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:37.092157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:37.092217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:37.092278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:37.092455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.219381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:42:37.220444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.220568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.220673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.220745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.220919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.221013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.221086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.221160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.221270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.221340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.221418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.221512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.221617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.221713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ne_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:42:43.022734Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 14] 2025-12-23T18:42:43.022779Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 28] name: top_queries_by_cpu_time_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:42:43.022800Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 28] 2025-12-23T18:42:43.022835Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 37] name: top_queries_by_duration_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:42:43.022861Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 37] 2025-12-23T18:42:43.022889Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 25] name: top_queries_by_duration_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:42:43.022912Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 25] 2025-12-23T18:42:43.022951Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 15] name: top_queries_by_read_bytes_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:42:43.022985Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 15] 2025-12-23T18:42:43.023015Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 16] name: top_queries_by_read_bytes_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:42:43.023035Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 16] 2025-12-23T18:42:43.023060Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 10] name: top_queries_by_request_units_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:42:43.023080Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2025-12-23T18:42:43.023105Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 29] name: top_queries_by_request_units_one_minute type: 
EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:42:43.023127Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 29] 2025-12-23T18:42:43.023162Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:43.023183Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:42:43.023525Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-12-23T18:42:43.023642Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2025-12-23T18:42:43.023679Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:42:43.023712Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2025-12-23T18:42:43.023746Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:42:43.023824Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:42:43.023878Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-12-23T18:42:43.023917Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:42:43.023979Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2025-12-23T18:42:43.024030Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-12-23T18:42:43.024067Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-12-23T18:42:43.026456Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:43.026550Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-12-23T18:42:43.026714Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:43.026806Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:43.026983Z node 5 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:43.027028Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-12-23T18:42:43.027488Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 106 2025-12-23T18:42:43.027585Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 106 2025-12-23T18:42:43.027623Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-12-23T18:42:43.027656Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-12-23T18:42:43.027695Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:42:43.027779Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-12-23T18:42:43.029831Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-12-23T18:42:43.030485Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:43.030701Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 244us result status StatusSuccess 2025-12-23T18:42:43.031256Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dir1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000038 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:41:59.550786Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:41:59.555847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:41:59.556707Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:678:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:41:59.861986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:41:59.878628Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:41:59.891387Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:683:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:41:59.891883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:41:59.892167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:41:59.899912Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:41:59.900104Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:42:00.675922Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:00.876453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:00.876616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:00.877444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:42:00.877537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:42:00.926959Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:42:00.927850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:00.928177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:42:01.095875Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:42:01.108793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034e5/r3tmp/tmpFgk9Sz/pdisk_1.dat 2025-12-23T18:42:02.097995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:42:02.171471Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:02.171603Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:42:02.275322Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-12-23T18:42:02.275677Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:42:02.275869Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-12-23T18:42:02.276016Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:42:02.582128Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:223:2182] Handle TEvProposeTransaction 2025-12-23T18:42:02.582203Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:223:2182] TxId# 281474976710657 ProcessProposeTransaction 2025-12-23T18:42:02.582326Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:223:2182] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:1562:3022] 2025-12-23T18:42:02.792499Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:1562:3022] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:42:02.792654Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:1562:3022] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:42:02.795720Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:1562:3022] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-12-23T18:42:02.795941Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:1562:3022] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:42:02.796536Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:1562:3022] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:42:02.796744Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:1562:3022] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 
2025-12-23T18:42:02.796914Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:1562:3022] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-12-23T18:42:02.800106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:42:02.800975Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:1562:3022] txid# 281474976710657 HANDLE EvClientConnected 2025-12-23T18:42:02.808772Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:1562:3022] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-12-23T18:42:02.808862Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:1562:3022] txid# 281474976710657 SEND to# [1:1489:2979] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2025-12-23T18:42:02.920985Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1639:3079] 2025-12-23T18:42:02.921278Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:02.977818Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1641:3080] 2025-12-23T18:42:02.978068Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:02.994265Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:02.994861Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:02.996839Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:42:02.996921Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:42:02.996984Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:42:02.997415Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:02.997840Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:02.997930Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:1730:3079] in generation 1 2025-12-23T18:42:02.998645Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1644:3082] 2025-12-23T18:42:02.999057Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:42:03.026583Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:03.026729Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:03.028279Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-12-23T18:42:03.028357Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037894 2025-12-23T18:42:03.028426Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037894 
2025-12-23T18:42:03.028754Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:42:03.029200Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:42:03.029305Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037894 persisting started state actor id [1:1749:3080] in generation 1 2025-12-23T18:42:03.036586Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:42:03.036891Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:42:03.038453Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037892 2025-12-23T18:42:03.038537Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037892 2025-12-23T18:42:03.038600Z node 1 :TX_DATASHARD DEBUG: datashard.cpp: ... 1474976710665, task: 3. pass away 2025-12-23T18:42:41.341617Z node 5 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710665;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:42:41.343432Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710665, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:42:41.422375Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:06.009000Z, after 1.511488s 2025-12-23T18:42:41.422995Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:326: ActorId: [5:2167:3238] TxId: 281474976710665. Ctx: { TraceId: 01kd68ae7cb1rczhknha7q95qz, Database: , SessionId: ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send TEvStreamData to [5:1930:3238], seqNo: 1, nRows: 1 2025-12-23T18:42:41.423392Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [5:2167:3238] TxId: 281474976710665. Ctx: { TraceId: 01kd68ae7cb1rczhknha7q95qz, Database: , SessionId: ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [5:2177:3378], task: 4, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 514736 Tasks { TaskId: 4 StageId: 2 CpuTimeUs: 512936 FinishTimeMs: 1766515361339 InputRows: 1 InputBytes: 6 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 170 BuildCpuTimeUs: 512766 HostName: "ghrun-ou4bsyjxme" NodeId: 5 StartTimeMs: 1766515361339 CreateTimeMs: 1766515359604 CurrentWaitOutputTimeUs: 82 UpdateTimeMs: 1766515361339 } MaxMemoryUsage: 1048576 } 2025-12-23T18:42:41.423498Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [5:2167:3238] TxId: 281474976710665. Ctx: { TraceId: 01kd68ae7cb1rczhknha7q95qz, Database: , SessionId: ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [5:2176:3377], CA [5:2177:3378], 2025-12-23T18:42:41.424180Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [5:2167:3238] TxId: 281474976710665. Ctx: { TraceId: 01kd68ae7cb1rczhknha7q95qz, Database: , SessionId: ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [5:2176:3377], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 105727 DurationUs: 51000 Tasks { TaskId: 3 StageId: 1 CpuTimeUs: 100007 FinishTimeMs: 1766515361340 InputRows: 2 InputBytes: 12 OutputRows: 1 OutputBytes: 6 ComputeCpuTimeUs: 252 BuildCpuTimeUs: 99755 HostName: "ghrun-ou4bsyjxme" NodeId: 5 StartTimeMs: 1766515361289 CreateTimeMs: 1766515359504 UpdateTimeMs: 1766515361341 } MaxMemoryUsage: 1048576 } 2025-12-23T18:42:41.424272Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710665. Ctx: { TraceId: 01kd68ae7cb1rczhknha7q95qz, Database: , SessionId: ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [5:2176:3377] 2025-12-23T18:42:41.424338Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [5:2167:3238] TxId: 281474976710665. Ctx: { TraceId: 01kd68ae7cb1rczhknha7q95qz, Database: , SessionId: ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Waiting for: CA [5:2177:3378], 2025-12-23T18:42:41.424425Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 4000 2025-12-23T18:42:41.424532Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037890: waitStep# 4000 readStep# 4000 observedStep# 4000 ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } format: FORMAT_VALUE } SeqNo: 1 QueryResultIndex: 0 ChannelId: 4 VirtualTimestamp { Step: 3500 TxId: 281474976710663 } Finished: true 2025-12-23T18:42:41.427543Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:515: TxId: 281474976710665, send ack to channelId: 4, seqNo: 1, enough: 0, freeSpace: 100, to: [5:2181:3378] 2025-12-23T18:42:41.427656Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_channels.cpp:179: TxId: 281474976710665, task: 4. Received channel data ack for channelId: 4, seqNo: 1, lastSentSeqNo: 1, freeSpace: 100, early finish: 0 2025-12-23T18:42:41.427754Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_channels.cpp:207: TxId: 281474976710665, task: 4. PeerState, peerState:(freeSpace:100;inFlightBytes:0;inFlightCount:0;), sentSeqNo: 1, ackSeqNo: 1 2025-12-23T18:42:41.427814Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_channels.cpp:220: TxId: 281474976710665, task: 4. Resume compute actor 2025-12-23T18:42:41.427996Z node 5 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [5:2177:3378], TxId: 281474976710665, task: 4. Ctx: { TraceId : 01kd68ae7cb1rczhknha7q95qz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:42:41.428044Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:2000: SelfId: [5:2177:3378], TxId: 281474976710665, task: 4. Ctx: { TraceId : 01kd68ae7cb1rczhknha7q95qz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Poll inputs 2025-12-23T18:42:41.428094Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:2015: SelfId: [5:2177:3378], TxId: 281474976710665, task: 4. Ctx: { TraceId : 01kd68ae7cb1rczhknha7q95qz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Poll sources 2025-12-23T18:42:41.428160Z node 5 :KQP_COMPUTE TRACE: dq_sync_compute_actor_base.h:37: SelfId: [5:2177:3378], TxId: 281474976710665, task: 4. Ctx: { TraceId : 01kd68ae7cb1rczhknha7q95qz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Resume execution, run status: Finished 2025-12-23T18:42:41.428194Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:405: SelfId: [5:2177:3378], TxId: 281474976710665, task: 4. Ctx: { TraceId : 01kd68ae7cb1rczhknha7q95qz. CheckpointId : . 
RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. ProcessOutputsState.Inflight: 0 2025-12-23T18:42:41.428227Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:445: SelfId: [5:2177:3378], TxId: 281474976710665, task: 4. Ctx: { TraceId : 01kd68ae7cb1rczhknha7q95qz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Do not drain channelId: 4, finished 2025-12-23T18:42:41.428268Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710665, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [1] 2025-12-23T18:42:41.428294Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710665, task: 4. Tasks execution finished 2025-12-23T18:42:41.428325Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [5:2177:3378], TxId: 281474976710665, task: 4. Ctx: { TraceId : 01kd68ae7cb1rczhknha7q95qz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:42:41.428435Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710665, task: 4. pass away 2025-12-23T18:42:41.428551Z node 5 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710665;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:42:41.432059Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710665, taskId: 4. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:42:41.432579Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [5:2167:3238] TxId: 281474976710665. Ctx: { TraceId: 01kd68ae7cb1rczhknha7q95qz, Database: , SessionId: ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [5:2177:3378], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 515941 DurationUs: 89000 Tasks { TaskId: 4 StageId: 2 CpuTimeUs: 512947 FinishTimeMs: 1766515361428 InputRows: 1 InputBytes: 6 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 181 BuildCpuTimeUs: 512766 HostName: "ghrun-ou4bsyjxme" NodeId: 5 StartTimeMs: 1766515361339 CreateTimeMs: 1766515359604 UpdateTimeMs: 1766515361428 } MaxMemoryUsage: 1048576 } 2025-12-23T18:42:41.432669Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976710665. Ctx: { TraceId: 01kd68ae7cb1rczhknha7q95qz, Database: , SessionId: ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [5:2177:3378] 2025-12-23T18:42:41.432898Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [5:2167:3238] TxId: 281474976710665. Ctx: { TraceId: 01kd68ae7cb1rczhknha7q95qz, Database: , SessionId: ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:42:41.432990Z node 5 :KQP_EXECUTER TRACE: kqp_executer_impl.h:1400: ActorId: [5:2167:3238] TxId: 281474976710665. Ctx: { TraceId: 01kd68ae7cb1rczhknha7q95qz, Database: , SessionId: ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState 2025-12-23T18:42:41.433067Z node 5 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [5:2167:3238] TxId: 281474976710665. Ctx: { TraceId: 01kd68ae7cb1rczhknha7q95qz, Database: , SessionId: ydb://session/3?node_id=5&id=OTM0ODI1MGEtMzQzYjQ5MDMtYjQxNzMwNjctOTQyMzkzZTA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval: ComputeTime: 0.036989s ReadRows: 100 ReadBytes: 800 ru: 100 rate limiter was not found force flag: 1 ... 
response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 1045 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_scan/unittest >> Cdc::DropColumn [GOOD] >> Cdc::DropIndex >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-dbadmin >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-dbadmin >> TPQTest::IncompleteProxyResponse [GOOD] >> TPQTest::SmallMsgCompactificationWithRebootsTest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-clusteradmin >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVCollectVGetRace >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestDoubleGroups >> TBlobStorageProxyTest::TestVPutVGetPersistence >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] |94.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |94.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |94.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |94.8%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscover |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBlockPersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] >> TBlobStorageProxyTest::TestVGetNoData |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:36.056965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:36.057059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:36.057135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:36.057182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:36.057219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:36.057258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:36.057310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:36.057410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:36.058261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 
2025-12-23T18:42:36.058580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:36.143146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:36.143215Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:36.149468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:36.149674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:36.149800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:36.153063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:36.153288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:36.154040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:36.154299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:36.156206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:36.156356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:36.157178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:36.157221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:36.157298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:36.157344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:36.157388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:36.157522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:36.303050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.304317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { 
Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.304464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.304547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.304639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.304792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.304892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.304970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.305038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.305120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.305190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.305296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.305396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.305485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:36.305569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-12-23T18:42:42.178867Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:42.183599Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:42.183775Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:42.183837Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:42.184452Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:42.184520Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:42.184573Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-12-23T18:42:42.185126Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2025-12-23T18:42:42.185398Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:42.185485Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:42.190472Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:42.192420Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:42.192643Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:42.192704Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:42.200385Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:42.202702Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:42.203196Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:42.203447Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 285us result status StatusSuccess 2025-12-23T18:42:42.204000Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwqjH5aBldd1sIJm+klkz\ngTMVbnGL3RQukcqzE+kialk4Zb4Bt/e4xIFdaSCju0GbBWWmdAC3Gk/eoefnbKJm\nh6/5lF6UR/2zhqw/WHbNd/v0wcD6tyb3zjHBJReJteY1qG0kXM/XGg+gDrwiyMoW\nEtIcn6b/XXxQ4rF4du2bZH5WWg4pmALmVFEAJz+t+n2lMWuA+EXTQIPHcOXkYzyr\nWIS1Z3JyWq2GStu38nBYcIqXnT7KsIkmafpPn/WixBBhUHacTFHioTMw/CO/8vfY\nqlJtfz7ngYFTZphkzPif+7VFkBK2GnF5qFEbSiC9S1DkgtyYun636jxQNkW7BVjR\nVwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1766601762173 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:46.207214Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:46.212355Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:46.215615Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:46.215967Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:46.216342Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:46.216445Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin 
Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:46.221847Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:46.224202Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:46.224591Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:46.224681Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:46.229864Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:46.234185Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:46.234703Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:46.234862Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:46.240319Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:46.246519Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2025-12-23T18:42:46.247175Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:46.247444Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 311us result status StatusSuccess 2025-12-23T18:42:46.248068Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwqjH5aBldd1sIJm+klkz\ngTMVbnGL3RQukcqzE+kialk4Zb4Bt/e4xIFdaSCju0GbBWWmdAC3Gk/eoefnbKJm\nh6/5lF6UR/2zhqw/WHbNd/v0wcD6tyb3zjHBJReJteY1qG0kXM/XGg+gDrwiyMoW\nEtIcn6b/XXxQ4rF4du2bZH5WWg4pmALmVFEAJz+t+n2lMWuA+EXTQIPHcOXkYzyr\nWIS1Z3JyWq2GStu38nBYcIqXnT7KsIkmafpPn/WixBBhUHacTFHioTMw/CO/8vfY\nqlJtfz7ngYFTZphkzPif+7VFkBK2GnF5qFEbSiC9S1DkgtyYun636jxQNkW7BVjR\nVwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1766601762173 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-ordinaryuser >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:37.955972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:37.956082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:37.956136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:37.956194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:37.956258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:37.956296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-12-23T18:42:37.956378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:37.956471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:37.957510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:37.957868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:38.044501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:38.044606Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:38.052642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:38.052897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:38.053043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:38.057325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:38.057532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:38.058392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:38.058618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:38.060809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:38.061028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:38.062226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:38.062289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:38.062431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:38.062490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:38.062563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:38.062699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:38.196290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.197261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.197398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.197488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.197531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.197627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.197688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.197734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.197779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.197822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.197864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.197922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.197970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.198051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:38.198106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
LocalPathId: 2] 2025-12-23T18:42:46.671664Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 16] 2025-12-23T18:42:46.671694Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 10] name: top_queries_by_request_units_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:42:46.671723Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2025-12-23T18:42:46.671751Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 29] name: top_queries_by_request_units_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:42:46.671774Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 29] 2025-12-23T18:42:46.671807Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:46.671841Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:42:46.671882Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 39] name: DirSub1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:42:46.671904Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:42:46.672423Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 107:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-12-23T18:42:46.672561Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2025-12-23T18:42:46.672605Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-12-23T18:42:46.672649Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2025-12-23T18:42:46.672686Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-12-23T18:42:46.672746Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:42:46.672808Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: false 2025-12-23T18:42:46.672858Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-12-23T18:42:46.672902Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2025-12-23T18:42:46.672941Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 107, publications: 1, subscribers: 0 2025-12-23T18:42:46.672981Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 107, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-12-23T18:42:46.676086Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 107, response: Status: StatusSuccess TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:46.676260Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE USER, path: /MyRoot 2025-12-23T18:42:46.676562Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:46.676627Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:46.676901Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:46.676964Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 107, path id: 1 2025-12-23T18:42:46.677747Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 107 2025-12-23T18:42:46.677907Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 107 2025-12-23T18:42:46.677966Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 107 2025-12-23T18:42:46.678023Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-12-23T18:42:46.678078Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:42:46.678228Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 107, subscribers: 0 2025-12-23T18:42:46.681858Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 TestModificationResult got TxId: 107, wait until txId: 107 2025-12-23T18:42:46.682722Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:46.683036Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 351us result status StatusSuccess 2025-12-23T18:42:46.683600Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 2 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirSub1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:46.684587Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1/DirSub1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:46.684860Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1/DirSub1" took 263us result status StatusSuccess 2025-12-23T18:42:46.685314Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1/DirSub1" PathDescription { Self { Name: "DirSub1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:46.686293Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:46.686417Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: Cannot find user: user1, at schemeshard: 72057594046678944 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-clusteradmin >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> TBlobStorageProxyTest::TestPartialGetBlock >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-ordinaryuser >> TPQTest::SmallMsgCompactificationWithRebootsTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:37.249185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:37.249304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:37.249349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:37.249392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:37.249438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:37.249475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: 
type TxMergeTablePartition, limit 10000 2025-12-23T18:42:37.249534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:37.249652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:37.250613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:37.251044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:37.346389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:37.346479Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:37.354598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:37.354804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:37.354933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:37.362341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:37.362546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:37.363432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:37.363767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:37.366663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:37.366958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:37.368380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:37.368459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:37.368616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:37.368678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:37.368734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:37.368914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.518437Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.519673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.519823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.519904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.520004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.520073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.520166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.520250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.520337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.520439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.520509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.520623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.520698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.520796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.520888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:43.254105Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-12-23T18:42:43.254341Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:43.254395Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:43.254602Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:43.254649Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:42:43.255246Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:43.255389Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:43.255442Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:42:43.255495Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-12-23T18:42:43.255551Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:42:43.255674Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:42:43.257597Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-12-23T18:42:43.257980Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:43.258028Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-12-23T18:42:43.366960Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:43.370524Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:43.370681Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-12-23T18:42:43.370732Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:43.371351Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:43.371411Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:43.371460Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-12-23T18:42:43.372064Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2025-12-23T18:42:43.372369Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:43.372473Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:43.377450Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:43.381938Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:43.382217Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:43.382296Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:43.386816Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:43.388921Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:43.389326Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:43.389422Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:43.397836Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:43.401826Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:43.405147Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user1" CanLogin: false } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:42:43.405690Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose 
status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-12-23T18:42:43.405808Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:42:43.405848Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:42:43.405894Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:42:43.405927Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:42:43.405987Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:42:43.406048Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:42:43.406092Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:42:43.406135Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:42:43.406174Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 1, subscribers: 0 2025-12-23T18:42:43.406213Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-12-23T18:42:43.408910Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSuccess TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:43.409043Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2025-12-23T18:42:43.409275Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:43.409335Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:43.409532Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:43.409582Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-12-23T18:42:43.410157Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:42:43.410284Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 
2025-12-23T18:42:43.410337Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:42:43.410383Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-12-23T18:42:43.410448Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:42:43.410570Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:42:43.412758Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:42:47.415225Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:47.415383Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: account is blocked, at schemeshard: 72057594046678944 |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestSingleFailure [GOOD] >> TBlobStorageProxyTest::TestGetMultipart >> TBlobStorageProxyTest::TestBlock >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-dbadmin >> Cdc::RenameTable [GOOD] >> Cdc::ResolvedTimestamps >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> TBlobStorageProxyTest::TestProxyPutInvalidSize |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror >> TBlobStorageProxyTest::TestDoubleEmptyGet >> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveTable [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveIndex |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2025-12-23T18:42:46.906141Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002f0c/r3tmp/tmpyWkww6//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2025-12-23T18:42:46.941995Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyGetSingleTimeout |94.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |94.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |94.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::SmallMsgCompactificationWithRebootsTest [GOOD] Test command err: 2025-12-23T18:38:26.044361Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:108:2057] recipient: [1:106:2138] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:113:2057] recipient: [1:106:2138] 2025-12-23T18:38:26.126138Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:38:26.130630Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:38:26.131001Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:38:26.131107Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:38:26.131149Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:38:26.131200Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:38:26.131246Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:26.131298Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:154:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:159:2057] recipient: [1:152:2172] Leader for TabletID 72057594037927937 is [1:112:2142] sender: [1:182:2057] recipient: [1:14:2061] 2025-12-23T18:38:26.149865Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:181:2193], now have 1 active actors on pipe 2025-12-23T18:38:26.150022Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:38:26.170102Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 
20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2025-12-23T18:38:26.173490Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2025-12-23T18:38:26.173667Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:38:26.174735Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2025-12-23T18:38:26.174936Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:38:26.175432Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:38:26.175834Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2025-12-23T18:38:26.178535Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:38:26.178586Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2025-12-23T18:38:26.178620Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:188:2142] 2025-12-23T18:38:26.178666Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:38:26.178730Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:38:26.180318Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:38:26.181685Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:38:26.181783Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:26.181831Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:38:26.181894Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:38:26.181932Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2025-12-23T18:38:26.181969Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:26.182012Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:38:26.182099Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-12-23T18:38:26.182144Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:38:26.182179Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:38:26.182240Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-12-23T18:38:26.182272Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-12-23T18:38:26.182320Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2025-12-23T18:38:26.182374Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2025-12-23T18:38:26.182422Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:26.182712Z node 1 :PERSQUEUE DEBUG: 
partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:38:26.182867Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:38:26.182931Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:38:26.183160Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:38:26.183397Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:38:26.186376Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:38:26.186482Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:38:26.186568Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:38:26.186617Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:26.186674Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:38:26.186717Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:38:26.186778Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:38:26.186830Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:38:26.187196Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:195:2201], now have 1 active actors on pipe Set pipe for create session: [1:199:2204] 2025-12-23T18:38:26.188327Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:199:2204], now have 1 active actors on pipe Leader for TabletID 72057594037927938 is [1:158:2176] sender: [1:200:2057] recipient: [1:14:2061] 2025-12-23T18:38:26.188618Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T1 ... 
UE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 50 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2025-12-23T18:42:48.540091Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2025-12-23T18:42:48.540121Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2025-12-23T18:42:48.540161Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 27:0 isTruncatedBlob 0 2025-12-23T18:42:48.540302Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 27:0 isTruncatedBlob 0 hasNonZeroParts 0 isMiddlePartOfMessage 0 2025-12-23T18:42:48.541599Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:350: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Send EvRead (Compact state) from offset: 50:0 2025-12-23T18:42:48.541816Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 14 Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer offset 50 partno 0 count 4294967295 size 4294967295 endOffset 67 max time lag 0ms effective offset 50 2025-12-23T18:42:48.542142Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 14 added 2 blobs, size 5223238 count 17 last offset 61, current partition end offset: 67 2025-12-23T18:42:48.542173Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 14. Send blob request. 2025-12-23T18:42:48.542232Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 50 partno 0 count 11 parts_count 0 source 1 size 3379747 accessed 4 times before, last time 1970-01-01T00:00:00.000000Z 2025-12-23T18:42:48.542269Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 61 partno 0 count 6 parts_count 0 source 1 size 1843491 accessed 2 times before, last time 1970-01-01T00:00:00.000000Z 2025-12-23T18:42:48.542310Z node 159 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 14. All 2 blobs are from cache. 2025-12-23T18:42:48.542368Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 50 partno 0 count 11 parts 0 suffix '0' 2025-12-23T18:42:48.542417Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72057594037927937' partition 0 offset 61 partno 0 count 6 parts 0 suffix '0' 2025-12-23T18:42:48.542495Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2025-12-23T18:42:48.542640Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 50 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2025-12-23T18:42:48.542704Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 52 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2025-12-23T18:42:48.542762Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 54 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2025-12-23T18:42:48.542802Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 56 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2025-12-23T18:42:48.542843Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 58 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2025-12-23T18:42:48.542881Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 60 totakecount 11 count 1 size 307240 from pos 0 cbcount 1 2025-12-23T18:42:48.542968Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 61 totakecount 6 count 2 size 614475 from pos 0 cbcount 2 2025-12-23T18:42:48.543107Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2025-12-23T18:42:48.543145Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2025-12-23T18:42:48.543186Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 50:0 isTruncatedBlob 0 2025-12-23T18:42:48.543547Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 50 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2025-12-23T18:42:48.543830Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 51 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2025-12-23T18:42:48.544068Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 52 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2025-12-23T18:42:48.544247Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 53 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2025-12-23T18:42:48.544449Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 54 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2025-12-23T18:42:48.544649Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: 
Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 55 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2025-12-23T18:42:48.544808Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 56 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2025-12-23T18:42:48.545140Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 57 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2025-12-23T18:42:48.545328Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 58 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2025-12-23T18:42:48.545521Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 59 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2025-12-23T18:42:48.545698Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 60 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2025-12-23T18:42:48.545761Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 isTruncatedBlob 0 hasNonZeroParts 1 isMiddlePartOfMessage 0 2025-12-23T18:42:48.548287Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000027_00000_0000000023_00000 2025-12-23T18:42:48.548398Z node 159 :PERSQUEUE DEBUG: partition.cpp:4522: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2025-12-23T18:42:48.548651Z node 159 :PERSQUEUE DEBUG: partition.cpp:4530: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2025-12-23T18:42:48.548736Z node 159 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:42:48.548872Z node 159 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 50 partNo 0 count 11 size 167 2025-12-23T18:42:48.548921Z node 159 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000027_00000_0000000023_00000(+) to d0000000000_00000000000000000027_00000_0000000023_00000(+) 2025-12-23T18:42:48.553968Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 27 count 23 actorID [159:138:2142] 2025-12-23T18:42:48.554056Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. 
Partition 0 offset 50 count 11 size 3379747 actorID [159:138:2142] is actual 1 2025-12-23T18:42:48.554148Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 50 count 11 size 167 actorID [159:138:2142] 2025-12-23T18:42:48.554267Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 27 partno 0 count 23 parts 0 suffix '0' size 263 2025-12-23T18:42:48.554334Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 50 partno 0 count 11 parts 0 suffix '0' size 3379747 2025-12-23T18:42:48.556197Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 50 partno 0 count 11 parts 0 suffix '0' size 167 2025-12-23T18:42:48.556614Z node 159 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:42:48.556674Z node 159 :PERSQUEUE DEBUG: partition.cpp:2161: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2025-12-23T18:42:48.556720Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response 2025-12-23T18:42:48.562519Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [159:303:2290], now have 1 active actors on pipe 2025-12-23T18:42:48.562688Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:42:48.562754Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-12-23T18:42:48.562888Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 61 for user __ydb_compaction_consumer 2025-12-23T18:42:48.563295Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [159:305:2292], now have 1 active actors on pipe Got start offset = 50 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-dbadmin >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardSecretTest::AsyncCreateDifferentSecrets >> TSchemeShardSecretTest::AlterExistingSecretMultipleTImes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 
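Note on the login tests in this run: the node 5 trace above shows repeated TTxLoginFinalize completions "with error: Invalid password", then an ESchemeOpAlterLogin ModifyUser { CanLogin: false } operation, after which the next TTxLogin is denied with "account is blocked"; the AccountLockoutAndAutomaticallyUnlock output that follows shows the other path, where repeated invalid passwords alone lead to "too many failed password attempts" and the lock later clears on its own. The sketch below is only an illustration of that bookkeeping under assumed values (attempt threshold and auto-unlock delay are made up here); it is not the schemeshard implementation, which per the source locations in the log lives in schemeshard__login.cpp and schemeshard__login_finalize.cpp.

    import time

    class AccountLockout:
        """Illustrative model: N consecutive failed logins lock the account,
        and the lock expires on its own after a fixed delay (both values assumed)."""

        def __init__(self, max_failures=4, unlock_after_s=5.0):
            self.max_failures = max_failures      # assumed threshold
            self.unlock_after_s = unlock_after_s  # assumed auto-unlock delay
            self.failed = {}                      # user -> (failure count, last failure time)

        def check(self, user, now=None):
            """Return True if the user may attempt a login right now."""
            now = time.monotonic() if now is None else now
            count, last = self.failed.get(user, (0, 0.0))
            if count < self.max_failures:
                return True
            if now - last >= self.unlock_after_s:
                self.failed.pop(user, None)       # automatic unlock after the interval
                return True
            return False                          # "too many failed password attempts"

        def record(self, user, ok, now=None):
            """Record the outcome of a login attempt."""
            now = time.monotonic() if now is None else now
            if ok:
                self.failed.pop(user, None)
            else:
                count, _ = self.failed.get(user, (0, 0.0))
                self.failed[user] = (count + 1, now)

The ModifyUser CanLogin: false case in the node 5 run is a separate, explicit block set by the ALTER LOGIN operation, which is presumably why its final attempt reports "account is blocked" rather than the failed-attempts message.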
2025-12-23T18:42:36.965902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:36.966021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:36.966087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:36.966132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:36.966178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:36.966228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:36.966300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:36.966386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:36.967380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:36.967691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:37.041263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:37.041310Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:37.047669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:37.047894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:37.048033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:37.051619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:37.051822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:37.052645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:37.052886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:37.055144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:37.055354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: 
[RootShredManager] Stop 2025-12-23T18:42:37.056494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:37.056556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:37.056704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:37.056763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:37.056808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:37.056965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:37.214804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.215980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.216120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.216198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.216261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.216418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.216540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.216609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.216684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.216781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.216880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.216959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.217032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.217116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:37.217213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 6678944 2025-12-23T18:42:45.866995Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:45.870136Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:45.870344Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:45.870389Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:45.870832Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:45.870899Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:45.870959Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:213:2213], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-12-23T18:42:45.871653Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2025-12-23T18:42:45.871996Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:45.872105Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:45.877360Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:45.880014Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:45.880409Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:45.880513Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin 
Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:45.885978Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:45.888388Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:45.888805Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:45.888896Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:45.909969Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:45.912507Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:45.913032Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:45.913169Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2025-12-23T18:42:45.913604Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:45.913708Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2025-12-23T18:42:45.914220Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:45.914473Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 304us result status StatusSuccess 2025-12-23T18:42:45.915110Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } 
PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2QzOIgkkwuQIhF9z/mD6\nUlLZS736fUefvVJuYmIH/rlzwrN0E31zUY9RBtGAphXPuZfx2hieJ5VV1fKjE/9N\nknzs9v75OJUBRZSp6Lb0goVpf2w8nEm3uj95vI8UUnC9KybMyYUBcnIS+ArRQJwF\n2QQdztjT1oLYfiuJYDR1OatdM7enX7C3Uj+G3NAgvg0e9cCm5vf9EBXNyn1S5JA0\ne1YFvv4S/K2o6OAvdC8je8mwNpTdJwQLajGvXAQ3aErOfwbRRnzMqasVAZ296yS4\nKY69IEYq3iVnBH9hBblSeFj3CmD8zaD+Taf5O2pM99rlWDE6Xsj7buNq8D/wbMvE\nEQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1766601765861 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:49.916405Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:49.929979Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:49.936814Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:49.937521Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2025-12-23T18:42:49.938056Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2025-12-23T18:42:49.938196Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2025-12-23T18:42:49.944614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2025-12-23T18:42:49.967791Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2025-12-23T18:42:49.968638Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:49.968933Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 352us result status StatusSuccess 2025-12-23T18:42:49.969615Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true 
CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2QzOIgkkwuQIhF9z/mD6\nUlLZS736fUefvVJuYmIH/rlzwrN0E31zUY9RBtGAphXPuZfx2hieJ5VV1fKjE/9N\nknzs9v75OJUBRZSp6Lb0goVpf2w8nEm3uj95vI8UUnC9KybMyYUBcnIS+ArRQJwF\n2QQdztjT1oLYfiuJYDR1OatdM7enX7C3Uj+G3NAgvg0e9cCm5vf9EBXNyn1S5JA0\ne1YFvv4S/K2o6OAvdC8je8mwNpTdJwQLajGvXAQ3aErOfwbRRnzMqasVAZ296yS4\nKY69IEYq3iVnBH9hBblSeFj3CmD8zaD+Taf5O2pM99rlWDE6Xsj7buNq8D/wbMvE\nEQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1766601765861 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-system >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob |94.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |94.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |94.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |94.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |94.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |94.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |94.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |94.9%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |94.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-clusteradmin >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail >> TSchemeShardSecretTest::EmptySecretName |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardSecretTest::CreateNotInDatabase >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TSchemeShardSecretTest::CreateSecret >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] Test command err: 2025-12-23T18:42:45.444490Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002ef3/r3tmp/tmp9793rg//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2025-12-23T18:42:45.447474Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:42:48.440608Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002ef3/r3tmp/tmp9793rg//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2025-12-23T18:42:48.449892Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 
HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:42:49.931276Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002ef3/r3tmp/tmp9793rg//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2025-12-23T18:42:49.937237Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 
0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:42:50.741640Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002ef3/r3tmp/tmp9793rg//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2025-12-23T18:42:50.764245Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 
BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:42:51.328677Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002ef3/r3tmp/tmp9793rg//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2025-12-23T18:42:51.386686Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 
LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TSchemeShardSecretTest::AsyncCreateDifferentSecrets [GOOD] >> TSchemeShardSecretTest::AsyncCreateSameSecret >> TBlobStorageProxyTest::TestGetFail [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-system |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingObject >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TSchemeShardSecretTest::AlterExistingSecretMultipleTImes [GOOD] >> TSchemeShardSecretTest::AlterUnexistingSecret |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSecretTest::EmptySecretName [GOOD] >> TSchemeShardSecretTest::DropUnexistingSecret >> TSchemeShardSecretTest::InheritPermissionsWithDifferentInheritanceTypes >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSecretTest::CreateSecretInSubdomain |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] |94.9%| [TM] {BAZEL_UPLOAD} 
ydb/core/blobstorage/dsproxy/ut_fat/unittest |94.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |94.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred >> TSchemeShardSecretTest::CreateNotInDatabase [GOOD] >> TSchemeShardSecretTest::AsyncDropSameSecret >> Cdc::DropIndex [GOOD] >> Cdc::InitialScan |94.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardSecretTest::AsyncCreateSameSecret [GOOD] >> TSchemeShardSecretTest::AsyncAlterSameSecret |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardSecretTest::AlterUnexistingSecret [GOOD] >> TSchemeShardSecretTest::AlterNotASecret >> TSchemeShardSecretTest::CreateSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs >> TSchemeShardSecretTest::DefaultDescribeSecret >> TSchemeShardSecretTest::CreateSecretOverExistingObject [GOOD] >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions >> TSchemeShardSecretTest::DropUnexistingSecret [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] |94.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |94.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |94.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] >> TSchemeShardSecretTest::AsyncDropSameSecret [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink >> TSchemeShardSecretTest::CreateSecretInSubdomain [GOOD] >> TSchemeShardSecretTest::CreateSecretInheritPermissions >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-dbadmin |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup |94.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSecretTest::AsyncAlterSameSecret [GOOD] >> TSchemeShardSecretTest::InheritPermissionsWithDifferentInheritanceTypes [GOOD] >> TSchemeShardSecretTest::ReadOnlyMode >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs [GOOD] >> TSchemeShardSecretTest::AlterNotASecret [GOOD] >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-clusteradmin |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DropUnexistingSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:53.303330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:53.303423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:53.303466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:53.303500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:53.303548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:53.303580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:53.303632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:53.303763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:53.304596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:53.304846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:53.391296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:53.391359Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:53.395441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:53.395569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 
2025-12-23T18:42:53.395665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:53.397866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:53.398001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:53.398522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:53.398708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:53.400137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:53.400274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:53.401222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:53.401271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:53.401352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:53.401388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:53.401467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:53.401583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:53.523974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.525009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.525150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.525227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.525289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.525352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.525417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.525519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.525620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.525680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.525734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.525838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.525940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.526015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.526080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2:55.576672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:42:55.576788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:42:55.581681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:42:55.581817Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:42:55.582120Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:55.582236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:55.582281Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:42:55.582519Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:42:55.582573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:42:55.582734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:42:55.582827Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:42:55.584794Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:55.584905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:55.585116Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:55.585165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:42:55.585502Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:42:55.585556Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:42:55.585670Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:42:55.585709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:42:55.585788Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:42:55.585828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:42:55.585871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:42:55.585914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:42:55.585952Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:42:55.585983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:42:55.586058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:42:55.586099Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:42:55.586134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:42:55.586690Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:42:55.586826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:42:55.586878Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:42:55.586922Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:42:55.586963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:42:55.587044Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:42:55.590581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:42:55.591040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:55.591458Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:673:2663] Bootstrap 2025-12-23T18:42:55.592564Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:673:2663] Become StateWork (SchemeCache [2:678:2668]) 2025-12-23T18:42:55.592802Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:55.592981Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/test-secret" took 196us result status StatusPathDoesNotExist 2025-12-23T18:42:55.593163Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/test-secret" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:42:55.593669Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:673:2663] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:42:55.596932Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResults wait txId: 101 2025-12-23T18:42:55.599842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropSecret Drop { Name: "test-secret" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:42:55.599993Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_secret.cpp:120: [72057594046678944] TDropSecret Propose, opId: 101:0, path: /MyRoot/test-secret 2025-12-23T18:42:55.600102Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/test-secret', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-12-23T18:42:55.607334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at 
schemeshard: 72057594046678944 2025-12-23T18:42:55.607585Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/test-secret', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: DROP SECRET, path: /MyRoot/test-secret TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:42:55.607889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:42:55.607937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:42:55.608270Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:42:55.608355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:42:55.608390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:689:2679] TestWaitNotification: OK eventTxId 101 |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] Test command err: 2025-12-23T18:42:53.790217Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002ee8/r3tmp/tmpkPHWLd//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2025-12-23T18:42:53.846250Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002ee8/r3tmp/tmpkPHWLd//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2025-12-23T18:42:53.846633Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:42:53.847029Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncDropSameSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:53.619940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:53.620007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:53.620040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:53.620069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:53.620095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:53.620119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:53.620156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:53.620221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:53.620923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:53.621175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:53.723410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:53.723467Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:53.729801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:53.730035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:53.730164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:53.733535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:53.733719Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:53.734513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:53.734760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:53.736815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:53.737033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:53.738181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:53.738243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:53.738362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:53.738406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:53.738505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:53.738673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:53.905077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.906113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.906236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.906306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.906370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.906439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.906513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.906606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.906697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.906887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.906966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.907086Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.907205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.907298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:53.907401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
te, at schemeshard: 72057594046678944 2025-12-23T18:42:56.194256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-12-23T18:42:56.194302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2025-12-23T18:42:56.194338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 FAKE_COORDINATOR: Erasing txId 103 2025-12-23T18:42:56.194789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:42:56.194840Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:42:56.194936Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:42:56.194971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:42:56.195007Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:42:56.195042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:42:56.195079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-12-23T18:42:56.195120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:42:56.195179Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:42:56.195219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:42:56.195283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:42:56.195343Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2025-12-23T18:42:56.195376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-12-23T18:42:56.195404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2025-12-23T18:42:56.195436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2025-12-23T18:42:56.196333Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:42:56.196431Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:42:56.196469Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:42:56.196504Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-12-23T18:42:56.196549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:42:56.197380Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:42:56.197452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:42:56.197481Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:42:56.197506Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:42:56.197532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:42:56.198937Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:42:56.199015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:42:56.199043Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:42:56.199071Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2025-12-23T18:42:56.199105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:42:56.199177Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-12-23T18:42:56.199549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:42:56.199599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:42:56.199696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:42:56.201542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:42:56.203594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:42:56.203732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:42:56.203839Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 105 TestModificationResults wait txId: 105 TestModificationResult got TxId: 104, wait until txId: 105 TestModificationResults wait txId: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 105 2025-12-23T18:42:56.204266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:42:56.204315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 TestWaitNotification wait txId: 106 2025-12-23T18:42:56.204407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-12-23T18:42:56.204431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-12-23T18:42:56.204903Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:42:56.205041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:42:56.205087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:765:2755] 2025-12-23T18:42:56.205236Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-12-23T18:42:56.205340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:42:56.205374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:765:2755] TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 2025-12-23T18:42:56.206059Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:56.206270Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 264us result status StatusPathDoesNotExist 2025-12-23T18:42:56.206437Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/dir\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/dir/test-secret" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/dir" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSecretTest::DefaultDescribeSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncAlterSameSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:52.269031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:52.269173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:52.269227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:52.269275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:52.269328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:52.269364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:52.269452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:52.269555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:52.270586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:52.270958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:52.383267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:52.383376Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:52.391219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:52.391549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:52.391756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:52.395862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:52.396105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:52.397019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:52.397324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:52.400258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:52.400471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:52.401778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:52.401847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:52.401995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:52.402049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:52.402158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:52.402317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:52.573434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.574674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.574902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.574981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.575057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.575130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.575200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.575337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.575412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.575477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.575556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.575654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.575755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.575842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.575932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
n_alter_secret.cpp:31: [72057594046678944] TAlterSecret TPropose operationId# 103:0 ProgressState 2025-12-23T18:42:56.530779Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-12-23T18:42:56.530913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:42:56.531808Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateAlter)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-12-23T18:42:56.531982Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateAlter), operation: ALTER SECRET, path: /MyRoot/dir/test-secret 2025-12-23T18:42:56.533269Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-12-23T18:42:56.533392Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2025-12-23T18:42:56.533764Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:56.533911Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 12884904047 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:56.533971Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_secret.cpp:44: [72057594046678944] TAlterSecret TPropose operationId# 103:0HandleReply TEvOperationPlan: step# 5000040 2025-12-23T18:42:56.534082Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2025-12-23T18:42:56.534260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:42:56.536302Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:56.536382Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:42:56.536574Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:56.536625Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2210], at schemeshard: 72057594046678944, txId: 103, path id: 39 FAKE_COORDINATOR: Erasing txId 103 2025-12-23T18:42:56.537039Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:42:56.537100Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:42:56.537229Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:42:56.537278Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:42:56.537327Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:42:56.537366Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:42:56.537413Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-12-23T18:42:56.537467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:42:56.537510Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:42:56.537549Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:42:56.537635Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:42:56.537688Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-12-23T18:42:56.537758Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2025-12-23T18:42:56.538337Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:42:56.538456Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:42:56.538503Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:42:56.538547Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2025-12-23T18:42:56.538610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:42:56.538710Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-12-23T18:42:56.544380Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-12-23T18:42:56.544728Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:42:56.544778Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-12-23T18:42:56.544848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:42:56.544865Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:42:56.545270Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:42:56.545343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:42:56.545373Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:763:2752] 2025-12-23T18:42:56.545429Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:42:56.545539Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:42:56.545558Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:763:2752] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2025-12-23T18:42:56.545896Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2025-12-23T18:42:56.546074Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 207us result status StatusSuccess 2025-12-23T18:42:56.546362Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 4 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value-new" Version: 1 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 18542, MsgBus: 20285 ... waiting for SysViewsRoster update finished 2025-12-23T18:39:18.523366Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:18.628820Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:39:18.637443Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:39:18.637702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:39:18.637766Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:18.951283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:18.951424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:19.029518Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515154967899 != 1766515154967903 2025-12-23T18:39:19.043392Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:19.092530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:19.313502Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004113/r3tmp/tmprllodT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18542, node 1 2025-12-23T18:39:19.930424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:19.930516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:19.930572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:19.931004Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:19.943725Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:20285 TClient is connected to server localhost:20285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:39:20.415967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:20.465231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:20.809127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:21.247023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:21.585758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:39:22.944631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1908:3515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:22.945350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:22.946171Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1982:3534], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:22.946298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:23.027316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:23.186075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:23.503468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:23.808864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:24.078657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:24.426318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:24.721152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:25.044389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:25.581814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:25.581991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:25.582528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:25.582626Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2802:4182], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:25.582698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't hav ... in __push_back_slow_path /-S/contrib/libs/cxxsupp/libcxx/include/vector:1541:47 #7 0x000021b819c9 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/vector:1569:13 #8 0x000021b819c9 in grpc_core::Server::Start() /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:702:17 #9 0x000021b90aee in grpc_server_start /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1715:37 #10 0x000021bb5297 in grpc::Server::Start(grpc::ServerCompletionQueue**, unsigned long) /-S/contrib/libs/grpc/src/cpp/server/server_cc.cc:1214:3 #11 0x000021199f64 in grpc::ServerBuilder::BuildAndStart() /-S/contrib/libs/grpc/src/cpp/server/server_builder.cc:445:11 #12 0x000021189519 in NYdbGrpc::TGRpcServer::Start() /-S/ydb/library/grpc/server/grpc_server.cpp:249:23 #13 0x00003f83c025 in NKikimr::Tests::TServer::EnableGRpc(NYdbGrpc::TServerOptions const&, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:813:21 #14 0x00003f8403f7 in NKikimr::Tests::TServer::EnableGRpc(unsigned short, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:817:9 #15 0x00004e0c39ae in NKikimr::NKqp::TKikimrRunner::TKikimrRunner(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:239:17 #16 0x00001b2b4ff9 in NKikimr::NKqp::NTestSuiteKqpLocksTricky::TTestCaseTestNoLocksIssueInteractiveTx::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp:137:23 #17 0x00001b28e487 in operator() /-S/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp:33:1 #18 0x00001b28e487 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp:33:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #19 0x00001b28e487 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp:33:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #20 0x00001b28e487 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #21 0x00001b28e487 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #22 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #23 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #24 0x00001bf315f9 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #25 0x00001beff667 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #26 0x00001b28d58c in NKikimr::NKqp::NTestSuiteKqpLocksTricky::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp:33:1 #27 0x00001bf00e1f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #28 0x00001bf2b45c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #29 0x7fe926753d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x00001b748f1d in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021b9a4c5 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021b9a4c5 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021b9a4c5 in allocate 
/-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021b9a4c5 in __allocate_at_least > *> > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021b9a4c5 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:354:25 #6 0x000021b9a4c5 in std::__y1::deque>, std::__y1::allocator>>>::__add_back_capacity() /-S/contrib/libs/cxxsupp/libcxx/include/deque:2186:51 #7 0x000021b9738c in push_back /-S/contrib/libs/cxxsupp/libcxx/include/deque:1594:5 #8 0x000021b9738c in push /-S/contrib/libs/cxxsupp/libcxx/include/queue:385:57 #9 0x000021b9738c in grpc_core::Server::RealRequestMatcher::MatchOrQueue(unsigned long, grpc_core::Server::CallData*) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:345:18 #10 0x000021b8f86e in grpc_core::Server::CallData::PublishNewRpc(void*, y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1505:7 #11 0x0000212e6b61 in Run /-S/contrib/libs/grpc/src/core/lib/iomgr/closure.h:303:5 #12 0x0000212e6b61 in grpc_core::FilterStackCall::BatchControl::PostCompletion() /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:1332:5 #13 0x0000212e83f4 in grpc_core::FilterStackCall::BatchControl::ReceivingStreamReady(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:1403:5 #14 0x000021303791 in operator() /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:1764:9 #15 0x000021303791 in grpc_core::FilterStackCall::StartBatch(grpc_op const*, unsigned long, void*, bool)::$_1::__invoke(void*, y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:1764:9 #16 0x00002120027e in exec_ctx_run /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:45:3 #17 0x00002120027e in grpc_core::ExecCtx::Flush() /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:72:9 #18 0x0000213cad24 in end_worker /-S/contrib/libs/grpc/src/core/lib/iomgr/ev_epoll1_linux.cc #19 0x0000213cad24 in pollset_work(grpc_pollset*, grpc_pollset_worker**, grpc_core::Timestamp) /-S/contrib/libs/grpc/src/core/lib/iomgr/ev_epoll1_linux.cc:1049:3 #20 0x0000213c68f8 in pollset_work(grpc_pollset*, grpc_pollset_worker**, grpc_core::Timestamp) /-S/contrib/libs/grpc/src/core/lib/iomgr/ev_posix.cc:249:7 #21 0x0000213ea638 in grpc_pollset_work(grpc_pollset*, grpc_pollset_worker**, grpc_core::Timestamp) /-S/contrib/libs/grpc/src/core/lib/iomgr/pollset.cc:48:10 #22 0x0000213a51d8 in cq_next(grpc_completion_queue*, gpr_timespec, void*) /-S/contrib/libs/grpc/src/core/lib/surface/completion_queue.cc:1036:29 #23 0x000021bede6e in grpc::CompletionQueue::AsyncNextInternal(void**, bool*, gpr_timespec) /-S/contrib/libs/grpc/src/cpp/common/completion_queue_cc.cc:166:15 #24 0x00002248c13f in Next /-S/contrib/libs/grpc/include/grpcpp/completion_queue.h:182:13 #25 0x00002248c13f in NYdbGrpc::Dev::PullEvents(grpc::CompletionQueue*) /-S/ydb/public/sdk/cpp/src/library/grpc/client/grpc_client_low.cpp:212:18 #26 0x00001ba7a85e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #27 0x00001ba7a85e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #28 0x00001ba7a85e in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13 #29 0x00001ba7adbc in Execute /-S/util/thread/factory.h:15:13 #30 0x00001ba7adbc in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41 #31 0x00001ba6eda4 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) 
/-S/util/system/thread.cpp:245:20 #32 0x00001b70ca36 in asan_thread_start(void*) /-S/contrib/libs/clang20-rt/lib/asan/asan_interceptors.cpp:239:28 Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x00001b748f1d in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021205bc1 in grpc_core::internal::StatusAllocHeapPtr(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/gprpp/status_helper.cc:427:25 #2 0x00002133aca2 in grpc_core::CallCombiner::Cancel(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/iomgr/call_combiner.cc:233:25 #3 0x0000212e149e in grpc_core::FilterStackCall::CancelWithError(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:1037:18 #4 0x0000212dcbbc in grpc_core::Call::CancelWithStatus(grpc_status_code, char const*) /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:366:3 #5 0x0000212fecc3 in grpc_call_cancel_with_status /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:3499:30 #6 0x000021bde7e6 in grpc::ServerContextBase::TryCancel() const /-S/contrib/libs/grpc/src/cpp/server/server_context.cc:347:7 #7 0x00002118617c in NYdbGrpc::TGrpcServiceProtectiable::StopService() /-S/ydb/library/grpc/server/grpc_server.cpp:64:26 #8 0x00002118c98e in NYdbGrpc::TGRpcServer::Stop() /-S/ydb/library/grpc/server/grpc_server.cpp:278:18 #9 0x00001b27cb26 in Shutdown /-S/ydb/core/testlib/test_client.h:443:33 #10 0x00001b27cb26 in ShutdownGRpc /-S/ydb/core/testlib/test_client.h:395:30 #11 0x00001b27cb26 in NKikimr::NKqp::TKikimrRunner::~TKikimrRunner() /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:178:17 #12 0x00001b2b8124 in NKikimr::NKqp::NTestSuiteKqpLocksTricky::TTestCaseTestNoLocksIssueInteractiveTx::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp:250:5 #13 0x00001b28e487 in operator() /-S/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp:33:1 #14 0x00001b28e487 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp:33:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #15 0x00001b28e487 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp:33:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #16 0x00001b28e487 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #17 0x00001b28e487 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #18 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #19 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #20 0x00001bf315f9 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #21 0x00001beff667 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #22 0x00001b28d58c in NKikimr::NKqp::NTestSuiteKqpLocksTricky::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp:33:1 #23 0x00001bf00e1f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #24 0x00001bf2b45c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #25 0x7fe926753d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) SUMMARY: AddressSanitizer: 428360 byte(s) leaked 
in 5250 allocation(s). |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:54.032688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:54.032833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:54.032876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:54.032939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:54.032988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:54.033026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:54.033090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:54.033201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:54.034388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:54.034946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:54.159321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:54.159399Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:54.175535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:54.175823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:54.176001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:54.183848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:54.184065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2025-12-23T18:42:54.184911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:54.185163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:54.187438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:54.187626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:54.189019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:54.189091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:54.189223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:54.189266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:54.189370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:54.189526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:54.378015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.379154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.379316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.379408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.379472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.379535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.379606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.379727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.379814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.379882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.379964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.380065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.380154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.380244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.380333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... eration IsReadyToDone TxId: 101 ready parts: 3/3 2025-12-23T18:42:56.873318Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2025-12-23T18:42:56.873342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-12-23T18:42:56.873362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: false 2025-12-23T18:42:56.873383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-12-23T18:42:56.873413Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:42:56.873440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:42:56.873500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:42:56.873527Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2025-12-23T18:42:56.873542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:1 2025-12-23T18:42:56.873564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:42:56.873578Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 
2025-12-23T18:42:56.873592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:2 2025-12-23T18:42:56.873610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2025-12-23T18:42:56.873631Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 4, subscribers: 0 2025-12-23T18:42:56.873660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-12-23T18:42:56.873700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 6 2025-12-23T18:42:56.873718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2025-12-23T18:42:56.873735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2025-12-23T18:42:56.875405Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:56.875501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:56.875538Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:42:56.875609Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-12-23T18:42:56.875663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:42:56.876357Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:56.876439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:56.876476Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:42:56.876507Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2025-12-23T18:42:56.876539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:42:56.877907Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:56.877973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:56.877994Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:42:56.878017Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2025-12-23T18:42:56.878053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:42:56.878378Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:56.878423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:56.878441Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:42:56.878460Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2025-12-23T18:42:56.878479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:42:56.878530Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:42:56.880671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:42:56.880777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:42:56.884003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:42:56.884104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:42:56.884279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:42:56.884311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:42:56.884639Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:42:56.884720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:42:56.884750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:721:2710] TestWaitNotification: OK eventTxId 101 2025-12-23T18:42:56.885102Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir1/dir2/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2025-12-23T18:42:56.885263Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir1/dir2/test-secret" took 210us result status StatusSuccess 2025-12-23T18:42:56.885503Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir1/dir2/test-secret" PathDescription { Self { Name: "test-secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value" Version: 0 } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSecretTest::CreateSecretInheritPermissions [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] |94.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:54.596194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:54.596292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:54.596336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:54.596380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:54.596428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:54.596458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:54.596517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:54.596622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:54.597579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:54.597878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:54.700099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:54.700173Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:54.707102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:54.707377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:54.707608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:54.715487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:54.715744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:54.716614Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:54.716851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:54.719373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:54.719576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:54.720821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:54.720890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:54.721019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:54.721070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:54.721176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:54.721319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:54.887475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.888309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.888416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.888471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:42:54.888515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.888557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.888602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.888684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.888737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.888811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.888851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.888921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.888983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.889038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:54.889097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ration: 2 LocalPathId: 40 Version: 6 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:42:57.115637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 6 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:42:57.115671Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:42:57.115721Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 6 2025-12-23T18:42:57.115760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:42:57.117487Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:42:57.117576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:42:57.117608Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:42:57.117704Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at 
schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 5 2025-12-23T18:42:57.117743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:42:57.117835Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-12-23T18:42:57.122450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:42:57.122571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:42:57.124167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-12-23T18:42:57.124441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:42:57.124491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:42:57.124990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:42:57.125099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:42:57.125146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:846:2835] TestWaitNotification: OK eventTxId 105 2025-12-23T18:42:57.125722Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:57.125959Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/secret" took 253us result status StatusSuccess 2025-12-23T18:42:57.126361Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/secret" PathDescription { Self { Name: "secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 104 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 1 EffectiveACLVersion: 3 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:57.126959Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/subdir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:57.127115Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/subdir/secret" took 167us result status StatusSuccess 2025-12-23T18:42:57.127435Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/subdir/secret" PathDescription { Self { Name: "secret" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 1 EffectiveACLVersion: 3 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 
MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:57.127963Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/subdir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:57.128127Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/subdir" took 187us result status StatusSuccess 2025-12-23T18:42:57.128527Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/subdir" PathDescription { Self { Name: "subdir" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\004\032\005user2 \003(\001" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "secret" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AlterNotASecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] 
recipient: [1:115:2145] 2025-12-23T18:42:52.411155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:52.411284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:52.411356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:52.411395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:52.411436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:52.411476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:52.411549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:52.411647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:52.412605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:52.412923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:52.512203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:52.512292Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:52.547707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:52.548055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:52.548216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:52.563361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:52.563611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:52.564503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:52.564765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:52.575701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:52.575952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:52.577519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:52.577607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:52.577762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:52.577817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:52.577967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:52.578161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:52.776474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.778189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.778348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.778434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.778506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.778576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.778657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.778796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.778879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.778948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.779015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.779104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.779193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.779283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:52.779391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:56.841404Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:42:56.841515Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:56.841551Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:42:56.841598Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:213:2213], at schemeshard: 72057594046678944, txId: 101, path id: 38 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:42:56.841886Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:42:56.841940Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:42:56.842079Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:42:56.842121Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:42:56.842194Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:42:56.842244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:42:56.842295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:42:56.842344Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:42:56.842391Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, 
operation id: 101:0 2025-12-23T18:42:56.842436Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:42:56.842537Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:42:56.842586Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-12-23T18:42:56.842633Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:42:56.842664Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2025-12-23T18:42:56.843505Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:56.843604Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:56.843656Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:42:56.843712Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:42:56.843776Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:42:56.844456Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:56.844529Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:42:56.844557Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:42:56.844586Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:42:56.844630Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:42:56.844713Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:42:56.847947Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:42:56.848046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:42:56.848267Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:42:56.848324Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:42:56.848751Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:42:56.848856Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:42:56.848901Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:712:2701] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-12-23T18:42:56.852803Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSecret AlterSecret { Name: "dir" Value: "" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:42:56.853503Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_secret.cpp:113: [72057594046678944] TAlterSecret Propose, path: /MyRoot/dir, opId: 102:0 2025-12-23T18:42:56.853690Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:42:56.856241Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/dir\', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-12-23T18:42:56.856506Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), operation: ALTER SECRET, path: /MyRoot/dir TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:42:56.856817Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:42:56.856866Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:42:56.857273Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at 
schemeshard: 72057594046678944 2025-12-23T18:42:56.857374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:42:56.857434Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:719:2708] TestWaitNotification: OK eventTxId 102 2025-12-23T18:42:56.857916Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2025-12-23T18:42:56.858109Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 221us result status StatusSuccess 2025-12-23T18:42:56.858519Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-clusteradmin |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |94.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |94.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |94.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSchemeShardSecretTest::ReadOnlyMode [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingSecret [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretInheritPermissions [GOOD] Test command err: Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:55.073439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:55.073601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:55.073646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:55.073690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:55.073735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:55.073768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:55.073827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:55.073929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:55.074929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:55.075256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:55.169567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:55.169636Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:55.184073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:55.184319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:55.184482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:55.191744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:55.191966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:55.192804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:55.193049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-12-23T18:42:55.196424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:55.196619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:55.197872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:55.197939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:55.198068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:55.198114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:55.198222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:55.198370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:55.375948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.377227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.377394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.377485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.377566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.377650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.377734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.377866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.377958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.378055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.378147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.378254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.378367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.378468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.378573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... rivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:57.915149Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/secret" took 199us result status StatusSuccess 2025-12-23T18:42:57.915461Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/secret" PathDescription { Self { Name: "secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user2 \003(\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 40 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:57.915836Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:57.915933Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 116us result status StatusSuccess 2025-12-23T18:42:57.916249Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "secret" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at 
schemeshard: 72057594046678944 2025-12-23T18:42:57.916851Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:57.916978Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 145us result status StatusSuccess 2025-12-23T18:42:57.917334Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user2 \003(\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:57.917878Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:57.917999Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 140us result status StatusSuccess 2025-12-23T18:42:57.918449Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "secret" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |94.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> 
TSchemeShardSecretTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:42:55.017897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:55.018166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:55.018210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:55.018250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:55.018283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:55.018332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:55.018396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:55.018472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:55.019601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:55.019873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:55.107322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:55.107373Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:55.113563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:55.114088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:55.114244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:55.118917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:55.119096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:55.119869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:55.120079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:55.121977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:55.122170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:55.123332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:55.123411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:55.123523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:55.123568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:55.123672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:55.123791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:55.264044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.264780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.264858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.264906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.265162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.265221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.265266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.265352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.265408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.265455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.265489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.265545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:42:55.265603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.265658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:55.265730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... : true 2025-12-23T18:42:58.619985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-12-23T18:42:58.620202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:42:58.620584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000039 2025-12-23T18:42:58.621179Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:58.621323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 8589936750 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:42:58.621387Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_secret.cpp:66: [72057594046678944] TCreateSecret::TPropose, opId: 103:0HandleReply TEvOperationPlan: step# 5000039 2025-12-23T18:42:58.621564Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2025-12-23T18:42:58.621754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:42:58.621826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 
FAKE_COORDINATOR: Erasing txId 103 2025-12-23T18:42:58.625325Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:58.625369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:42:58.625511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:42:58.625653Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:58.625690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:840:2797], at schemeshard: 72057594046678944, txId: 103, path id: 38 2025-12-23T18:42:58.625727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:840:2797], at schemeshard: 72057594046678944, txId: 103, path id: 39 2025-12-23T18:42:58.626031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:42:58.626074Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:42:58.626184Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:42:58.626221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:42:58.626264Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:42:58.626303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:42:58.626343Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-12-23T18:42:58.626391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:42:58.626430Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:42:58.626473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:42:58.626560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:42:58.626604Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-12-23T18:42:58.626776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2025-12-23T18:42:58.626822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2025-12-23T18:42:58.627689Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:42:58.627787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:42:58.627826Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:42:58.627872Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2025-12-23T18:42:58.627928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:42:58.629022Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:42:58.629107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:42:58.629139Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:42:58.629170Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2025-12-23T18:42:58.629202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:42:58.629275Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-12-23T18:42:58.636056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:42:58.639741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:42:58.640174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:42:58.640248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:42:58.640732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:42:58.640849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:42:58.640896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:876:2831] TestWaitNotification: OK eventTxId 103 2025-12-23T18:42:58.641475Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-name" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:58.641719Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-name" took 286us result status StatusSuccess 2025-12-23T18:42:58.642112Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-name" PathDescription { Self { Name: "test-name" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-name" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-clusteradmin |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretOverExistingSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:42:56.196671Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:56.196737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:56.196770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:56.196792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:56.196818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:56.196843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:56.196879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:56.196953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:56.197644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:56.197948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:56.311327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:42:56.311369Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:56.328527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:56.328767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:56.329013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:56.343519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:56.343797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:56.344755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:56.345022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:56.350682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:56.350937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 
2025-12-23T18:42:56.352622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:56.352708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:56.352945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:56.353012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:56.353197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:56.353382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:56.537287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.538481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.538658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.538824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.538897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.538978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.539057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.539170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.539284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.539388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.539485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.539595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.539678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.539772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:56.539875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 12-23T18:42:58.915155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2025-12-23T18:42:58.915211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2025-12-23T18:42:58.915970Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:42:58.916050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:42:58.916087Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:42:58.916123Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2025-12-23T18:42:58.916167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:42:58.916974Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:42:58.917048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:42:58.917076Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:42:58.917103Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 39], version: 3 2025-12-23T18:42:58.917130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:42:58.917196Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:42:58.919831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:42:58.920793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:42:58.920954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:42:58.920986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:42:58.921231Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:42:58.921327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:42:58.921356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:733:2722] TestWaitNotification: OK eventTxId 102 2025-12-23T18:42:58.921667Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:42:58.921806Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 169us result status StatusSuccess 2025-12-23T18:42:58.922062Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 
SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-12-23T18:42:58.924828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/dir" OperationType: ESchemeOpCreateSecret CreateSecret { Name: "test-secret" Value: "test-value-new" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:42:58.925082Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_secret.cpp:152: [72057594046678944] TCreateSecret Propose, path: /MyRoot/dir/test-secret, opId: 103:0 2025-12-23T18:42:58.925138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_secret.cpp:160: [72057594046678944] TCreateSecret Propose, path: /MyRoot/dir/test-secret, opId: 103:0, secretDescription (without secret parts): Name: "test-secret" 2025-12-23T18:42:58.925268Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:42:58.927146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-12-23T18:42:58.927380Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges), operation: CREATE SECRET, path: /MyRoot/dir/test-secret TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:42:58.927603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:42:58.927633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:42:58.927899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:42:58.927965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:42:58.927993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- 
TTxNotificationSubscriber for txId 103: satisfy waiter [2:741:2730] TestWaitNotification: OK eventTxId 103 2025-12-23T18:42:58.928374Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2025-12-23T18:42:58.928536Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 180us result status StatusSuccess 2025-12-23T18:42:58.928858Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value-init" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest |94.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots >> TPQCDTest::TestPrioritizeLocalDatacenter >> TestShred::ShredManualLaunch >> TPQCDTest::TestDiscoverClusters |94.9%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation [GOOD] Test command err: Trying to start YDB, gRPC: 27798, MsgBus: 24971 ... 
waiting for SysViewsRoster update finished 2025-12-23T18:39:17.341377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:17.471720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:39:17.485878Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:39:17.486311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:39:17.486427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:17.816613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:17.816757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:17.990184Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515153300714 != 1766515153300718 2025-12-23T18:39:18.021737Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:18.070186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:18.257821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004120/r3tmp/tmp5TOw5p/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27798, node 1 2025-12-23T18:39:18.800580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:18.800645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:18.800676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:18.800990Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:18.815489Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24971 TClient is connected to server localhost:24971 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:39:19.568480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:19.652693Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:923:2788], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.652880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.652996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2793], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.653969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:937:2796], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.654153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.659211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:19.768042Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:938:2797], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:39:19.818712Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:994:2834] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:20.114509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:20.446175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:23.927004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:29.052789Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Write conflict with concurrent transaction.;tx_id=281474976715669; 2025-12-23T18:39:29.077226Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 281474976715669 at tablet 72075186224037989 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2025-12-23T18:39:29.077455Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 281474976715669 at tablet 72075186224037989 Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2025-12-23T18:39:29.077928Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [1:9059:8429], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [1:8823:8429]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[1:9059:8429].{
: Error: Write conflict with concurrent transaction., code: 2001 } 2025-12-23T18:39:29.078426Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:8848:8429], SessionActorId: [1:8823:8429], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Write conflict with concurrent transaction., code: 2001 . sessionActorId=[1:8823:8429]. 2025-12-23T18:39:29.079870Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=MTI0MGE1YzctZjNiM2IyZDUtMTliNzJjYzEtYzEzYzVjYTg=, ActorId: [1:8823:8429], ActorState: ExecuteState, TraceId: 01kd684myyb0wyp2qmzj5267c9, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:9053:8429] from: [1:8848:8429] 2025-12-23T18:39:29.080160Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:9053:8429] TxId: 281474976715665. Ctx: { TraceId: 01kd684myyb0wyp2qmzj5267c9, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MTI0MGE1YzctZjNiM2IyZDUtMTliNzJjYzEtYzEzYzVjYTg=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Write conflict with concurrent transaction., code: 2001 } } 2025-12-23T18:39:29.080630Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=MTI0MGE1YzctZjNiM2IyZDUtMTliNzJjYzEtYzEzYzVjYTg=, ActorId: [1:8823:8429], ActorState: ExecuteState, TraceId: 01kd684myyb0wyp2qmzj5267c9, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } } GRpc shutdown warning: left infly: 1, spent: 3.440809 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 7.031349 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 10.663397 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 14.319355 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 17.890693 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 21.508272 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 25.106728 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: left infly: 1, spent: 28.807392 sec. GRpcShutdownDeadline: 30 GRpc shutdown warning: fai ... ver.cpp:249:23 #13 0x00003f83c025 in NKikimr::Tests::TServer::EnableGRpc(NYdbGrpc::TServerOptions const&, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:813:21 #14 0x00003f8403f7 in NKikimr::Tests::TServer::EnableGRpc(unsigned short, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:817:9 #15 0x00004e0c39ae in NKikimr::NKqp::TKikimrRunner::TKikimrRunner(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:239:17 #16 0x00001b3ca739 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #17 0x00001b3ca739 in NKikimr::NKqp::TTableDataModificationTester::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:38:18 #18 0x00001b5b4d5f in NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TTestCaseTPragmaSettingOltp::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:634:16 #19 0x00001b565ce7 in operator() /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1 #20 0x00001b565ce7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #21 0x00001b565ce7 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #22 0x00001b565ce7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #23 0x00001b565ce7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #24 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #25 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #26 0x00001bf315f9 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #27 0x00001beff667 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #28 0x00001b56503c in NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TCurrentTest::Execute() 
/-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1 #29 0x00001bf00e1f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #30 0x00001bf2b45c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #31 0x7f9a4013cd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x00001b748f1d in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021205bc1 in grpc_core::internal::StatusAllocHeapPtr(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/gprpp/status_helper.cc:427:25 #2 0x00002133aca2 in grpc_core::CallCombiner::Cancel(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/iomgr/call_combiner.cc:233:25 #3 0x0000212e149e in grpc_core::FilterStackCall::CancelWithError(y_absl::lts_y_20250127::Status) /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:1037:18 #4 0x0000212dcbbc in grpc_core::Call::CancelWithStatus(grpc_status_code, char const*) /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:366:3 #5 0x0000212fecc3 in grpc_call_cancel_with_status /-S/contrib/libs/grpc/src/core/lib/surface/call.cc:3499:30 #6 0x000021bde7e6 in grpc::ServerContextBase::TryCancel() const /-S/contrib/libs/grpc/src/cpp/server/server_context.cc:347:7 #7 0x00002118617c in NYdbGrpc::TGrpcServiceProtectiable::StopService() /-S/ydb/library/grpc/server/grpc_server.cpp:64:26 #8 0x00002118c98e in NYdbGrpc::TGRpcServer::Stop() /-S/ydb/library/grpc/server/grpc_server.cpp:278:18 #9 0x00001b27cb26 in Shutdown /-S/ydb/core/testlib/test_client.h:443:33 #10 0x00001b27cb26 in ShutdownGRpc /-S/ydb/core/testlib/test_client.h:395:30 #11 0x00001b27cb26 in NKikimr::NKqp::TKikimrRunner::~TKikimrRunner() /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:178:17 #12 0x00001b5b4d9d in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:70:5 #13 0x00001b5b4d9d in reset /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:286:7 #14 0x00001b5b4d9d in ~unique_ptr /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:255:71 #15 0x00001b5b4d9d in ~TTableDataModificationTester /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:15:7 #16 0x00001b5b4d9d in ~TPragmaSetting /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:529:11 #17 0x00001b5b4d9d in NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TTestCaseTPragmaSettingOltp::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:635:5 #18 0x00001b565ce7 in operator() /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1 #19 0x00001b565ce7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #20 0x00001b565ce7 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #21 0x00001b565ce7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #22 0x00001b565ce7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #23 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #24 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #25 0x00001bf315f9 in TColoredProcessor::Run(std::__y1::function, TBasicString> 
const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #26 0x00001beff667 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #27 0x00001b56503c in NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1 #28 0x00001bf00e1f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #29 0x00001bf2b45c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #30 0x7f9a4013cd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) Indirect leak of 8 byte(s) in 1 object(s) allocated from: #0 0x00001b748f1d in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021b85219 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021b85219 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021b85219 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021b85219 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021b85219 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:354:25 #6 0x000021b85219 in __push_back_slow_path /-S/contrib/libs/cxxsupp/libcxx/include/vector:1541:47 #7 0x000021b85219 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/vector:1557:13 #8 0x000021b85219 in grpc_core::Server::RegisterCompletionQueue(grpc_completion_queue*) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:794:8 #9 0x000021199a67 in grpc::ServerBuilder::BuildAndStart() /-S/contrib/libs/grpc/src/cpp/server/server_builder.cc:396:5 #10 0x000021189519 in NYdbGrpc::TGRpcServer::Start() /-S/ydb/library/grpc/server/grpc_server.cpp:249:23 #11 0x00003f83c025 in NKikimr::Tests::TServer::EnableGRpc(NYdbGrpc::TServerOptions const&, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:813:21 #12 0x00003f8403f7 in NKikimr::Tests::TServer::EnableGRpc(unsigned short, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:817:9 #13 0x00004e0c39ae in NKikimr::NKqp::TKikimrRunner::TKikimrRunner(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:239:17 #14 0x00001b3ca739 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #15 0x00001b3ca739 in NKikimr::NKqp::TTableDataModificationTester::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:38:18 #16 0x00001b5b4d5f in NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TTestCaseTPragmaSettingOltp::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:634:16 #17 0x00001b565ce7 in operator() /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1 #18 0x00001b565ce7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #19 0x00001b565ce7 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #20 0x00001b565ce7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #21 0x00001b565ce7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #22 
0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #23 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #24 0x00001bf315f9 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #25 0x00001beff667 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #26 0x00001b56503c in NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:17:1 #27 0x00001bf00e1f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #28 0x00001bf2b45c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #29 0x7f9a4013cd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) SUMMARY: AddressSanitizer: 428565 byte(s) leaked in 5232 allocation(s). |94.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/fqrun >> TestShred::ManualLaunch3Cycles |94.9%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |94.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TestShred::Run3CyclesForTables >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-dbadmin >> TTxDataShardMiniKQL::CrossShard_5_AllToAll >> TTxDataShardMiniKQL::ReadSpecialColumns >> TTxDataShardMiniKQL::WriteKeyTooLarge >> TTxDataShardMiniKQL::CrossShard_1_Cycle >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-system |94.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxDataShardMiniKQL::WriteEraseRead |94.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |94.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] |95.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> TTxDataShardMiniKQL::Write >> TTxDataShardMiniKQL::ReadConstant >> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD] >> TTxDataShardMiniKQL::SelectRange >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] >> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteValueTooLarge >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> TestShred::ShredWithCopyTable >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize >> Cdc::ResolvedTimestamps [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] Test command err: 2025-12-23T18:42:58.609806Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:42:58.661542Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:42:58.707771Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:42:58.740741Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:42:58.742614Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:42:58.742729Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest >> TestShred::ShredWithMerge >> TestShred::ShredWithSplit >> TTxDataShardMiniKQL::WriteEraseRead [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan+UseSink >> TestShred::SimpleTestForTables >> TestShred::SimpleTestForTopic >> TTxDataShardMiniKQL::SelectRange [GOOD] >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey >> TTxDataShardMiniKQL::ReadConstant [GOOD] >> 
TTxDataShardMiniKQL::ReadAfterWrite >> TTxDataShardMiniKQL::Write [GOOD] >> TTxDataShardMiniKQL::TableStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] Test command err: 2025-12-23T18:42:59.181647Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:42:59.207316Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:42:59.223827Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:42:59.756443Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:0:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:42:59.756638Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:2:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:0:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-12-23T18:42:59.756723Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:3:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:0:0] targetVDisk# [3e000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest >> Cdc::ShouldBreakLocksOnConcurrentMoveIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentDropIndex >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-clusteradmin |95.0%| [TA] $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {RESULT} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx |95.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] >> TTxDataShardMiniKQL::ReadAfterWrite [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] >> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms >> Cdc::InitialScan [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] Test command err: 2025-12-23T18:43:01.608955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:01.609005Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:01.610514Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:43:01.629245Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:43:01.629581Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:43:01.629839Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:43:01.688112Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:43:01.696101Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:43:01.696191Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:43:01.697984Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:43:01.698137Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:43:01.698210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:43:01.698547Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:43:01.698628Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:43:01.698704Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:43:01.796009Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:43:01.834638Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:43:01.834925Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:43:01.835044Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:43:01.835084Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 
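
For orientation, the "Test command err:" blocks in this output are the captured stderr of the individual unittest binaries, and the registar.cpp / utmain.cpp frames in the stack traces above belong to library/cpp/testing/unittest, the framework that registers and runs suites such as TTxDataShardMiniKQL. Below is a minimal sketch of that registration pattern; the suite and case names are illustrative and not taken from the YDB sources.

    // Minimal sketch (illustrative names, not from the YDB sources) of a case
    // declared with library/cpp/testing/unittest, the framework behind the
    // NUnitTest::TTestBase::Run / NUnitTest::RunMain frames shown above.
    #include <library/cpp/testing/unittest/registar.h>
    #include <util/stream/output.h>

    Y_UNIT_TEST_SUITE(TExampleSuite) {
        Y_UNIT_TEST(ReadAfterWrite) {
            // Anything the case writes to stderr ends up under the
            // "Test command err:" header in the ya make output.
            Cerr << "booting test fixture (illustrative)" << Endl;

            const int written = 42;
            const int read = written; // a real case would round-trip through a datashard
            UNIT_ASSERT_VALUES_EQUAL(read, written);
        }
    }
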
2025-12-23T18:43:01.835124Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:43:01.835179Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:01.835443Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:01.835498Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:01.835898Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:43:01.836012Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:43:01.836160Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:01.836225Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:01.836271Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:43:01.836313Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:43:01.836349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:43:01.836383Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:43:01.836445Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:43:01.836541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:01.836591Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:01.836635Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:43:01.845287Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:43:01.845373Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:01.845501Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:01.845729Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:43:01.845783Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 
9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:43:01.845849Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:43:01.845919Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:01.845962Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:43:01.846013Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:43:01.846059Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:01.846427Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:43:01.846479Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:43:01.846521Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:01.846578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:01.846641Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:43:01.846674Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:01.846713Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:43:01.846773Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:01.846802Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:43:01.862236Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:01.862336Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:01.862382Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:01.862428Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:43:01.862547Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:43:01.863280Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:01.863361Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:01.863416Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:43:01.863571Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender 
[1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:43:01.863609Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:43:01.863773Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:01.863852Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:43:01.863918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:43:01.863962Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:43:01.869823Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:43:01.869913Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:01.870173Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:01.870207Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:01.870263Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:01.870297Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:43:01.870340Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:43:01.870379Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:43:01 ... 
ine.cpp:1937: Add [0:7] at 9437184 to execution unit BuildAndWaitDependencies 2025-12-23T18:43:04.132390Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit BuildAndWaitDependencies 2025-12-23T18:43:04.132435Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-12-23T18:43:04.132485Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 9437184 2025-12-23T18:43:04.132522Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2025-12-23T18:43:04.132547Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit BuildAndWaitDependencies 2025-12-23T18:43:04.132571Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit BlockFailPoint 2025-12-23T18:43:04.132594Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit BlockFailPoint 2025-12-23T18:43:04.132620Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2025-12-23T18:43:04.132645Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit BlockFailPoint 2025-12-23T18:43:04.132668Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit ExecuteDataTx 2025-12-23T18:43:04.132690Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:04.133204Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:7] at tablet 9437184 with status COMPLETE 2025-12-23T18:43:04.133273Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:7] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 3, SelectRangeBytes: 46, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:43:04.133329Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2025-12-23T18:43:04.133354Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit ExecuteDataTx 2025-12-23T18:43:04.133379Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:04.133407Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit FinishPropose 2025-12-23T18:43:04.133450Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 7 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-12-23T18:43:04.133507Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is DelayComplete 2025-12-23T18:43:04.133542Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:04.133575Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit 
CompletedOperations 2025-12-23T18:43:04.133608Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit CompletedOperations 2025-12-23T18:43:04.133650Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2025-12-23T18:43:04.133672Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit CompletedOperations 2025-12-23T18:43:04.133699Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 9437184 has finished 2025-12-23T18:43:04.133755Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:04.133792Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 9437184 on unit FinishPropose 2025-12-23T18:43:04.133830Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:04.137511Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2025-12-23T18:43:04.137572Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2025-12-23T18:43:04.137900Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:310:2291], Recipient [3:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:04.137936Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:04.137977Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:309:2290], serverId# [3:310:2291], sessionId# [0:0:0] 2025-12-23T18:43:04.138126Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\342\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\002\203\004\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?6\003?4e\005\001\013?:\003?8m\005\001\003?<\002\003?>\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? 
\005?\034?r\000\006\000?\036\003?x\005?&\006\ 2025-12-23T18:43:04.138168Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:04.138245Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:04.138945Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit CheckDataTx 2025-12-23T18:43:04.139037Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2025-12-23T18:43:04.139075Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx 2025-12-23T18:43:04.139110Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies 2025-12-23T18:43:04.139141Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies 2025-12-23T18:43:04.139178Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-12-23T18:43:04.139230Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:8] at 9437184 2025-12-23T18:43:04.139268Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2025-12-23T18:43:04.139304Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies 2025-12-23T18:43:04.139325Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit BlockFailPoint 2025-12-23T18:43:04.139351Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit BlockFailPoint 2025-12-23T18:43:04.139377Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2025-12-23T18:43:04.139397Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit BlockFailPoint 2025-12-23T18:43:04.139418Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit ExecuteDataTx 2025-12-23T18:43:04.139442Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:04.144085Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:8] at tablet 9437184 with status COMPLETE 2025-12-23T18:43:04.144182Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:43:04.144245Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2025-12-23T18:43:04.144275Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx 2025-12-23T18:43:04.144303Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 
9437184 to execution unit FinishPropose 2025-12-23T18:43:04.144333Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit FinishPropose 2025-12-23T18:43:04.144378Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-12-23T18:43:04.144459Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is DelayComplete 2025-12-23T18:43:04.144493Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:04.144529Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit CompletedOperations 2025-12-23T18:43:04.144561Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit CompletedOperations 2025-12-23T18:43:04.144604Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2025-12-23T18:43:04.144626Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations 2025-12-23T18:43:04.144659Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:8] at 9437184 has finished 2025-12-23T18:43:04.144744Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:04.144780Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:8] at 9437184 on unit FinishPropose 2025-12-23T18:43:04.144824Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] Test command err: 2025-12-23T18:42:55.439734Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002edb/r3tmp/tmpAqkdao//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2025-12-23T18:42:55.446966Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:42:57.408980Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002edb/r3tmp/tmpAqkdao//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2025-12-23T18:42:57.471132Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 
HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:42:59.071951Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002edb/r3tmp/tmpAqkdao//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2025-12-23T18:42:59.090903Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 
0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:43:00.807519Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002edb/r3tmp/tmpAqkdao//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 4 2025-12-23T18:43:00.858070Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 4 VDISK[0:_:0:3:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 
BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:43:02.562156Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002edb/r3tmp/tmpAqkdao//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 5 2025-12-23T18:43:02.591401Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 5 VDISK[0:_:0:4:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 
LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:43:03.647933Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/002edb/r3tmp/tmpAqkdao//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 6 2025-12-23T18:43:03.649816Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 6 VDISK[0:_:0:5:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 
LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TestShred::SimpleTestForAllSupportedObjects >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-system >> KqpPg::JoinWithQueryService+StreamLookup >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] Test command err: 2025-12-23T18:43:02.730141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:02.730200Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:02.731704Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:43:02.746963Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:43:02.747307Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:43:02.747591Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:43:02.799426Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:43:02.806867Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 
2025-12-23T18:43:02.806981Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:43:02.808970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:43:02.809113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:43:02.809181Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:43:02.809556Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:43:02.809648Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:43:02.809728Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:43:02.910445Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:43:02.947523Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:43:02.947723Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:43:02.947849Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:43:02.947912Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:43:02.947948Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:43:02.947984Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:02.948135Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.948187Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.948508Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:43:02.948642Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:43:02.948796Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:02.948861Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:02.948905Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:43:02.948944Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:43:02.948976Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:43:02.949009Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:43:02.949056Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:43:02.949159Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender 
[1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.949206Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.949249Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:43:02.956175Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:43:02.956238Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:02.956354Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:02.956614Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:43:02.956654Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:43:02.956694Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:43:02.956752Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:02.956783Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:43:02.956810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:43:02.956835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:02.957127Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:43:02.957174Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:43:02.957214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:02.957247Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:02.957293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:43:02.957312Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:02.957339Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:43:02.957362Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:02.957381Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit 
WaitForPlan 2025-12-23T18:43:02.969217Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:02.969280Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:02.969314Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:02.969367Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:43:02.969448Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:43:02.969944Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:228:2224], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.970005Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.970061Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:227:2223], serverId# [1:228:2224], sessionId# [0:0:0] 2025-12-23T18:43:02.970205Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:43:02.970245Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:43:02.970358Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:02.970395Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:43:02.970446Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:43:02.970485Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:43:02.974193Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:43:02.974260Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:02.974434Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.974471Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.974516Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:02.974545Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:43:02.974593Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:43:02.974633Z node 1 :TX_DATASHARD DEBUG: 
datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:43:02.974697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 25-12-23T18:43:04.934691Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2025-12-23T18:43:04.934859Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2025-12-23T18:43:04.935023Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [3:240:2232], Recipient [3:240:2232]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:04.935088Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:04.935462Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:43:04.935560Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:43:04.935689Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5936: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 240 RawX2: 12884904120 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2025-12-23T18:43:04.935776Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [3:26:2073], Recipient [3:240:2232]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-12-23T18:43:04.935827Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-12-23T18:43:04.935867Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-12-23T18:43:04.935904Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:04.935975Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [3:26:2073], Recipient [3:240:2232]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2025-12-23T18:43:04.936004Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-12-23T18:43:04.936063Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-12-23T18:43:04.936131Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:04.936170Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:04.936209Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:43:04.936252Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:43:04.936289Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:43:04.936328Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:43:04.936371Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
9437184 2025-12-23T18:43:04.936475Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [3:286:2269], Recipient [3:240:2232]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [3:290:2273] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-12-23T18:43:04.936505Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-12-23T18:43:04.936578Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [3:127:2152], Recipient [3:240:2232]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-12-23T18:43:04.936613Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-12-23T18:43:04.936649Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-12-23T18:43:04.936721Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-12-23T18:43:04.949268Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [3:286:2269], Recipient [3:240:2232]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [3:286:2269] ServerId: [3:290:2273] } 2025-12-23T18:43:04.949336Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-12-23T18:43:05.007231Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2025-12-23T18:43:05.007307Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2025-12-23T18:43:05.007579Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:296:2277], Recipient [3:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:05.007616Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:05.007661Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:294:2276], serverId# [3:296:2277], sessionId# [0:0:0] 2025-12-23T18:43:05.007854Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003? 
p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0 2025-12-23T18:43:05.007890Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:05.007965Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:05.008617Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-12-23T18:43:05.008701Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2025-12-23T18:43:05.008741Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-12-23T18:43:05.008775Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-12-23T18:43:05.008813Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-12-23T18:43:05.008853Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T18:43:05.008905Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 9437184 2025-12-23T18:43:05.008944Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2025-12-23T18:43:05.008970Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-12-23T18:43:05.008993Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BlockFailPoint 2025-12-23T18:43:05.009016Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BlockFailPoint 2025-12-23T18:43:05.009038Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2025-12-23T18:43:05.009057Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BlockFailPoint 2025-12-23T18:43:05.009081Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-12-23T18:43:05.009103Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:05.009491Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-12-23T18:43:05.009552Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:43:05.009611Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2025-12-23T18:43:05.009636Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 
2025-12-23T18:43:05.009659Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:05.009686Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-12-23T18:43:05.009724Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-12-23T18:43:05.009783Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayComplete 2025-12-23T18:43:05.009810Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:05.009844Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-12-23T18:43:05.009885Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-12-23T18:43:05.009927Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2025-12-23T18:43:05.009947Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-12-23T18:43:05.009975Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2025-12-23T18:43:05.010028Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:05.010060Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-12-23T18:43:05.010098Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageMultiShard |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] |95.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-dbadmin >> TestShred::ShredManualLaunch [GOOD] |95.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |95.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |95.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |95.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |95.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |95.0%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2025-12-23T18:43:01.726496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:01.726551Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:01.730645Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:43:01.746251Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:43:01.746641Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:43:01.747013Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:43:01.803667Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:43:01.814129Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:43:01.814234Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:43:01.816488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:43:01.816642Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:43:01.816704Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:43:01.817107Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:43:01.817202Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:43:01.817291Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:43:01.915314Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:43:01.948833Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:43:01.949031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:43:01.949128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:43:01.949161Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:43:01.949194Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:43:01.949224Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:01.949412Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:01.949467Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:01.949759Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:43:01.949864Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:43:01.950002Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:01.950050Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:01.950086Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:43:01.950121Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:43:01.950151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:43:01.950182Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:43:01.950219Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:43:01.950311Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:01.950367Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:01.950407Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:43:01.960646Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:43:01.960722Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:01.960823Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:01.961025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:43:01.961073Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:43:01.961122Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:43:01.961179Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:01.961218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:43:01.961250Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit 
StoreSchemeTx 2025-12-23T18:43:01.961282Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:01.961617Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:43:01.961666Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:43:01.961705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:01.961741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:01.961791Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:43:01.961816Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:01.961849Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:43:01.961882Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:01.961904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:43:01.975516Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:01.975594Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:01.975629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:01.975666Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:43:01.975735Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:43:01.976276Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:228:2224], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:01.976335Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:01.976381Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:227:2223], serverId# [1:228:2224], sessionId# [0:0:0] 2025-12-23T18:43:01.976496Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:43:01.976527Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:43:01.976640Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:01.976683Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:43:01.976734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance 
execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:43:01.976770Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:43:01.984566Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:43:01.984650Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:01.984896Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:01.984939Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:01.984997Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:01.985035Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:43:01.985069Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:43:01.985105Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:43:01.985159Z node 1 :TX_DATASHARD TRACE: dat ... [0:2] at 9437184 on unit FinishPropose 2025-12-23T18:43:05.496804Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:43:05.496840Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:05.496887Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-12-23T18:43:05.496929Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-12-23T18:43:05.496992Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2025-12-23T18:43:05.497026Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-12-23T18:43:05.497068Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2025-12-23T18:43:05.521795Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:05.521874Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-12-23T18:43:05.521983Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 5 ms, status: COMPLETE 2025-12-23T18:43:05.522126Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:06.069445Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 
103 RawX2: 12884904025 } 2025-12-23T18:43:06.069520Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2025-12-23T18:43:06.069835Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:305:2285], Recipient [3:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:06.069869Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:06.069917Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:304:2284], serverId# [3:305:2285], sessionId# [0:0:0] 2025-12-23T18:43:06.275449Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2025-12-23T18:43:06.277676Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:06.277861Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:06.337005Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2025-12-23T18:43:06.337137Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2025-12-23T18:43:06.337205Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2025-12-23T18:43:06.337276Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2025-12-23T18:43:06.337327Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit BuildAndWaitDependencies 2025-12-23T18:43:06.337379Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-12-23T18:43:06.337468Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 9437184 2025-12-23T18:43:06.337524Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2025-12-23T18:43:06.337550Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 
executing on unit BuildAndWaitDependencies 2025-12-23T18:43:06.337575Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit BlockFailPoint 2025-12-23T18:43:06.337599Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit BlockFailPoint 2025-12-23T18:43:06.337624Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2025-12-23T18:43:06.337647Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit BlockFailPoint 2025-12-23T18:43:06.337669Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2025-12-23T18:43:06.337692Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:06.337766Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-12-23T18:43:06.337838Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:126: Operation [0:3] at 9437184 requested 46269686 more memory 2025-12-23T18:43:06.337890Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Restart 2025-12-23T18:43:06.338054Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:06.338122Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:06.338202Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-12-23T18:43:06.365310Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:3] at 9437184 exceeded memory limit 50463990 and requests 403711920 more for the next try 2025-12-23T18:43:06.369224Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 3 released its data 2025-12-23T18:43:06.369353Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Restart 2025-12-23T18:43:06.369755Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:06.369797Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:06.420206Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 3 at 9437184 restored its data 2025-12-23T18:43:06.420305Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-12-23T18:43:06.516400Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2025-12-23T18:43:06.516520Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, 
SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:43:06.516614Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:06.516659Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 2025-12-23T18:43:06.516717Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:06.516769Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit FinishPropose 2025-12-23T18:43:06.516814Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is DelayComplete 2025-12-23T18:43:06.516853Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:06.516898Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit CompletedOperations 2025-12-23T18:43:06.516935Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit CompletedOperations 2025-12-23T18:43:06.516985Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2025-12-23T18:43:06.517015Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2025-12-23T18:43:06.517058Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 9437184 has finished 2025-12-23T18:43:06.593717Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:06.593789Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:3] at 9437184 on unit FinishPropose 2025-12-23T18:43:06.593845Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-12-23T18:43:06.593944Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:06.661197Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T18:43:06.661278Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-12-23T18:43:06.666700Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:238:2231], Recipient [3:240:2232]: NKikimr::TEvTablet::TEvFollowerGcApplied >> KqpPg::EmptyQuery+useSink |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> KqpPg::CreateTableBulkUpsertAndRead >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2025-12-23T18:43:00.565277Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125785583247937:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:00.565387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:43:00.657253Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004669/r3tmp/tmpniWZ4O/pdisk_1.dat 2025-12-23T18:43:00.946909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:00.985075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:00.985204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:01.032923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:01.079628Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:01.080939Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125785583247900:2081] 1766515380563916 != 1766515380563919 TServer::EnableGrpc on GrpcPort 64430, node 1 2025-12-23T18:43:01.223099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:01.234289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/004669/r3tmp/yandexsGe1t9.tmp 2025-12-23T18:43:01.234322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/004669/r3tmp/yandexsGe1t9.tmp 2025-12-23T18:43:01.258950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/004669/r3tmp/yandexsGe1t9.tmp 2025-12-23T18:43:01.259153Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31356 PQClient connected to localhost:64430 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:43:01.585010Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515381106 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:01.650568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:01.667575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-12-23T18:43:01.692520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:43:04.331773Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125802763118043:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.331966Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.335124Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125802763118056:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.335206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125802763118057:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.335556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.339558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:04.372025Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125802763118060:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-12-23T18:43:04.575389Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125802763118126:2591] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:04.609005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:04.728569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:04.762906Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125802763118141:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:43:04.765053Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YzJlNjEwNDItMTA0MDczMzgtNTAyZDdmNzctYjk4NWRiNDQ=, ActorId: [1:7587125802763118041:2324], ActorState: ExecuteState, TraceId: 01kd68b7zv6cwvne4r8aspt2j3, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:43:04.785930Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:43:04.808413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-12-23T18:43:05.566065Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125785583247937:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:05.566166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredManualLaunch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:43:00.842896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:00.843010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:00.843057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:00.843105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:00.843160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:00.843193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:00.843258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:00.843355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:00.844360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:00.844643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:00.923770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:00.923853Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:00.934072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:00.934385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:00.934549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:00.943314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:00.943589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:43:00.944461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:00.944734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:00.955745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:00.956030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:00.957390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:00.957486Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:00.957669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:00.957720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:00.957825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:00.957990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:01.137816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.138918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.139096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.139206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.139280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.139377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.139457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.139561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.139672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.139748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.139819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.139941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.140034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.140134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.140233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 1 TabletId: 72075186233409550 Status: OK 2025-12-23T18:43:04.950593Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5464: StateWork, processing event TEvDataShard::TEvVacuumResult 2025-12-23T18:43:04.950633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__tenant_shred_manager.cpp:546: TTxCompleteShredShard Execute at schemestard: 72075186233409546 2025-12-23T18:43:04.950688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__tenant_shred_manager.cpp:309: [TenantShredManager] [Finished] Shred is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 32], tabletId# 72075186233409550, shardIdx# 72075186233409546:5 in# 79 ms, next wakeup in# 14.921000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2025-12-23T18:43:04.950757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__tenant_shred_manager.cpp:326: [TenantShredManager] Shred in shards is completed. Send response to root schemeshard 2025-12-23T18:43:04.950787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__tenant_shred_manager.cpp:349: [TenantShredManager] Complete: Generation# 1 2025-12-23T18:43:04.954034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__tenant_shred_manager.cpp:571: TTxCompleteShredShard Complete at schemestard: 72075186233409546, NeedResponseComplete# false 2025-12-23T18:43:04.954242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__tenant_shred_manager.cpp:571: TTxCompleteShredShard Complete at schemestard: 72075186233409546, NeedResponseComplete# true 2025-12-23T18:43:04.954470Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:3046:4659], Recipient [1:694:2678]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:04.954503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:04.954525Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:43:04.954645Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125514, Sender [1:861:2812], Recipient [1:694:2678]: NKikimrScheme.TEvTenantShredResponse PathId { OwnerId: 72057594046678944 LocalId: 38 } Generation: 1 Status: COMPLETED 2025-12-23T18:43:04.954668Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5467: StateWork, processing event TEvSchemeShard::TEvTenantShredResponse 2025-12-23T18:43:04.954723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:591: TTxCompleteShredTenant Execute at schemeshard: 72057594046678944 2025-12-23T18:43:04.954794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:312: [RootShredManager] 
[Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38] in# 81 ms, next wakeup# 599.919000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-12-23T18:43:04.954859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. Send request to BS controller 2025-12-23T18:43:04.955044Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877760, Sender [1:3045:4658], Recipient [1:861:2812]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:3046:4659] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-12-23T18:43:04.955068Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5342: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-12-23T18:43:04.955094Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6116: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409546 2025-12-23T18:43:04.956319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-12-23T18:43:04.956360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:04.956694Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877760, Sender [1:3050:4663], Recipient [1:694:2678]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:3051:4664] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-12-23T18:43:04.956759Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5342: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-12-23T18:43:04.956789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6116: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-12-23T18:43:04.956931Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:696:2679], Recipient [1:694:2678]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-12-23T18:43:04.956966Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:04.957010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:04.957058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:04.957089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-12-23T18:43:04.957134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:04.957178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:05.941371Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:694:2678]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:05.941446Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:05.941492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:05.941696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:694:2678], Recipient [1:694:2678]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:05.941737Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:05.941928Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:696:2679], Recipient [1:694:2678]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-12-23T18:43:05.941973Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:05.942004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:05.942075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:05.942126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-12-23T18:43:05.942182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:05.942258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:06.573294Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:694:2678]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:06.573385Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:06.573564Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:694:2678]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:06.573607Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:06.573640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:06.573772Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:694:2678], Recipient [1:694:2678]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:06.573814Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:06.573992Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender 
[1:696:2679], Recipient [1:694:2678]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-12-23T18:43:06.574030Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:06.574064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:06.574126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:06.574166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-12-23T18:43:06.574232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2025-12-23T18:43:06.576729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-12-23T18:43:06.577468Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:3100:4713], Recipient [1:694:2678]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:06.577533Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:06.577569Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:43:06.577720Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125519, Sender [1:676:2666], Recipient [1:694:2678]: NKikimrScheme.TEvShredInfoRequest 2025-12-23T18:43:06.577756Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-12-23T18:43:06.577797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8213: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2025-12-23T18:43:00.647784Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125785577672370:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:00.647871Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b6/r3tmp/tmp09zbhf/pdisk_1.dat 2025-12-23T18:43:00.958854Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:00.992767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:00.992915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:01.050020Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [1:7587125785577672335:2081] 1766515380644035 != 1766515380644038 2025-12-23T18:43:01.057693Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:01.070672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31562, node 1 2025-12-23T18:43:01.306876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:01.378503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0046b6/r3tmp/yandexjOy6G4.tmp 2025-12-23T18:43:01.378533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0046b6/r3tmp/yandexjOy6G4.tmp 2025-12-23T18:43:01.447087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0046b6/r3tmp/yandexjOy6G4.tmp 2025-12-23T18:43:01.447353Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19008 PQClient connected to localhost:31562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:43:01.667361Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515381127 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:01.701213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... 2025-12-23T18:43:01.799198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:43:01.823133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-12-23T18:43:04.478777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125802757542456:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.478999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.479423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125802757542494:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.479471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125802757542495:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.479565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.483438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:04.494398Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125802757542498:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-12-23T18:43:04.550621Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125802757542563:2592] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:04.805084Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125802757542578:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:43:04.805619Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OGNkOTM1NTEtNWViMzU4MTMtNDA4M2I1YzktNzQzYjFhZWQ=, ActorId: [1:7587125802757542454:2322], ActorState: ExecuteState, TraceId: 01kd68b8393pzt6arj2nysmmxj, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:43:04.806504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:04.808340Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:43:04.893496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:04.982958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-12-23T18:43:05.647967Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125785577672370:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:05.648116Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-clusteradmin >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] >> KqpPg::ReadPgArray >> KqpPg::ReadPgArray [GOOD] >> KqpPg::TableArrayInsert+useSink >> KqpPg::NoTableQuery+useSink >> KqpPg::InsertFromSelect_Simple+useSink >> TestShred::SimpleTestForTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2025-12-23T18:43:04.570104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:04.570167Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:04.571832Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:43:04.587309Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:43:04.587664Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:43:04.587951Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:43:04.637361Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:43:04.644545Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:43:04.644641Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:43:04.646431Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:43:04.646597Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:43:04.646656Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:43:04.647091Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:43:04.647176Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:43:04.647258Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:43:04.728369Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:43:04.771469Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:43:04.771659Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:43:04.771763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:43:04.771800Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:43:04.771833Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:43:04.771866Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:04.772016Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:04.772086Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:04.772362Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:43:04.772450Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:43:04.772607Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:04.772660Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:04.772698Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:43:04.772733Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:43:04.772777Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:43:04.772816Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:43:04.772861Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:43:04.772968Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:04.773033Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:04.773077Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:43:04.780427Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:43:04.780498Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:04.780605Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:04.780789Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:43:04.780835Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:43:04.780884Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:43:04.780941Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:04.780981Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:43:04.781019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:43:04.781052Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:04.781437Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:43:04.781496Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:43:04.781537Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:04.781572Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:04.781623Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:43:04.781652Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:04.781684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:43:04.781717Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:04.781740Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:43:04.793715Z node 1 
:TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:04.793785Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:04.793834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:04.793886Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:43:04.793971Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:43:04.794528Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:04.794592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:04.794637Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:43:04.794780Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-12-23T18:43:04.794814Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:43:04.794933Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:04.794999Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2025-12-23T18:43:04.795040Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:43:04.795080Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:43:04.803407Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:43:04.803487Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:04.803785Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:04.803837Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:04.803896Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:04.803936Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:43:04.803972Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:43:04.804031Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2:1] in PlanQueue unit at 9437184 
2025-12-23T18:43:04.804069Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-12-23T18:43:04. ... ressTransaction} at tablet 9437185 (3 by [3:372:2317]) from queue queue_transaction 2025-12-23T18:43:07.512635Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:372:2317]) to queue queue_transaction 2025-12-23T18:43:07.512673Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_transaction from 16.938813 to 33.877626 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:372:2317])) 2025-12-23T18:43:07.512749Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 97002198b}, Memory{0 dyn 97002198} 2025-12-23T18:43:07.512795Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:07.512822Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:07.513618Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 5 at 9437184 restored its data 2025-12-23T18:43:07.757572Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [6:5] at tablet 9437184 with status COMPLETE 2025-12-23T18:43:07.757637Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:43:07.757710Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:07.757738Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2025-12-23T18:43:07.757762Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [6:5] at 9437184 to execution unit CompleteOperation 2025-12-23T18:43:07.757790Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2025-12-23T18:43:07.758032Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is DelayComplete 2025-12-23T18:43:07.758061Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2025-12-23T18:43:07.758089Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [6:5] at 9437184 to execution unit CompletedOperations 2025-12-23T18:43:07.758113Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit CompletedOperations 2025-12-23T18:43:07.758145Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is Executed 2025-12-23T18:43:07.758168Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2025-12-23T18:43:07.758194Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [6:5] at 9437184 has finished 2025-12-23T18:43:07.758222Z node 3 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:07.758248Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:43:07.758275Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:43:07.758306Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:43:07.758418Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-12-23T18:43:07.758469Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 97002198b}, Memory{0 dyn 0} 2025-12-23T18:43:07.758640Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:261:2231]) (release resources {0, 97002198}) 2025-12-23T18:43:07.758700Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_transaction from 33.877626 to 16.938813 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:261:2231])) 2025-12-23T18:43:07.758836Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2025-12-23T18:43:07.758871Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437185 on unit ExecuteDataTx 2025-12-23T18:43:07.759888Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 5 at 9437185 restored its data 2025-12-23T18:43:07.963387Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [6:5] at tablet 9437185 with status COMPLETE 2025-12-23T18:43:07.963504Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [6:5] at 9437185: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:43:07.963609Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437185 is ExecutedNoMoreRestarts 2025-12-23T18:43:07.963676Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437185 executing on unit ExecuteDataTx 2025-12-23T18:43:07.963731Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [6:5] at 9437185 to execution unit CompleteOperation 2025-12-23T18:43:07.963790Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437185 on unit CompleteOperation 2025-12-23T18:43:07.964094Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437185 is DelayComplete 2025-12-23T18:43:07.964138Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437185 executing on unit CompleteOperation 2025-12-23T18:43:07.964191Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [6:5] at 9437185 to execution unit CompletedOperations 2025-12-23T18:43:07.964234Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 
9437185 on unit CompletedOperations 2025-12-23T18:43:07.964270Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437185 is Executed 2025-12-23T18:43:07.964298Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437185 executing on unit CompletedOperations 2025-12-23T18:43:07.964341Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [6:5] at 9437185 has finished 2025-12-23T18:43:07.964387Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:07.964431Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2025-12-23T18:43:07.964481Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2025-12-23T18:43:07.964526Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2025-12-23T18:43:07.964719Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-12-23T18:43:07.964810Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 97002198b}, Memory{0 dyn 0} 2025-12-23T18:43:07.965006Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:372:2317]) (release resources {0, 97002198}) 2025-12-23T18:43:07.965071Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_transaction from 16.938813 to 0.000000 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:372:2317])) 2025-12-23T18:43:07.977627Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:10} commited cookie 1 for step 9 2025-12-23T18:43:07.977691Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-12-23T18:43:07.977728Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2025-12-23T18:43:07.977814Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:43:07.977896Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-12-23T18:43:07.977939Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-12-23T18:43:07.978271Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [3:350:2317], Recipient [3:460:2402]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-12-23T18:43:07.978328Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:43:07.978405Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2025-12-23T18:43:07.978509Z node 3 
:TABLET_EXECUTOR DEBUG: Leader{9437184:3:10} commited cookie 1 for step 9 2025-12-23T18:43:07.978551Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:43:07.978583Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2025-12-23T18:43:07.978627Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:43:07.978676Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-12-23T18:43:07.978704Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:07.978967Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [3:239:2231], Recipient [3:460:2402]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-12-23T18:43:07.979005Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:43:07.979041Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> TestShred::SimpleTestForTopic [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForTables [GOOD] >> KqpPg::InsertNoTargetColumns_Simple+useSink Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:43:03.837841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:03.837940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:03.837979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:03.838023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:03.838064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:03.838096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:03.838174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:03.838257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:03.839181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:03.839493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:03.919389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:03.919460Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:03.927434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:03.927684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:03.927838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:03.932016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:03.932257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:43:03.932999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:03.933213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:03.935409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:03.935634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:03.936894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:03.936982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:03.937131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:03.937180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:03.937284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:03.937419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-12-23T18:43:04.082162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.083255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.083399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.083494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.083573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.083645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.083713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.083820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.083898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.083990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.084064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.084166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.084260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.084350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:04.084438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
UG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-12-23T18:43:08.070200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:08.070664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877760, Sender [1:2777:4454], Recipient [1:703:2687]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:2778:4455] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-12-23T18:43:08.070705Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5342: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-12-23T18:43:08.070758Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6116: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-12-23T18:43:08.070881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:706:2689], Recipient [1:703:2687]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-12-23T18:43:08.070914Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:08.070954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:08.071018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:08.071058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-12-23T18:43:08.071141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:08.071208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:08.450495Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:703:2687]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.450586Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.450690Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:870:2821]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.450734Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.450841Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1441:3330]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.450871Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.450953Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:1441:3330], Recipient [1:1441:3330]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.450986Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.451065Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:703:2687], Recipient [1:703:2687]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.451088Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.451145Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:870:2821], Recipient [1:870:2821]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.451171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.483084Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:703:2687]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:08.483168Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:08.483213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:08.483480Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:706:2689], Recipient [1:703:2687]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-12-23T18:43:08.483521Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:08.483565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:08.483664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:08.483737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-12-23T18:43:08.483813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:08.483876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:08.754919Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:870:2821]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.754989Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.755054Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1441:3330]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.755077Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.755123Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:703:2687]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.755149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.755245Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:870:2821], Recipient [1:870:2821]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.755302Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.755383Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:1441:3330], Recipient [1:1441:3330]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.755431Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.755500Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:703:2687], Recipient [1:703:2687]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.755524Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.788942Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:703:2687]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:08.789019Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:08.789048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:08.789250Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:706:2689], Recipient [1:703:2687]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-12-23T18:43:08.789293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:08.789326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:08.789404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:08.789458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-12-23T18:43:08.789527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.934000s, Timestamp# 1970-01-01T00:00:05.204000Z 2025-12-23T18:43:08.789566Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2025-12-23T18:43:08.796069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-12-23T18:43:08.796850Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:2799:4476], Recipient [1:703:2687]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:08.796920Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:08.796973Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:43:08.797182Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125519, Sender [1:686:2676], Recipient [1:703:2687]: NKikimrScheme.TEvShredInfoRequest 2025-12-23T18:43:08.797238Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-12-23T18:43:08.797282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8213: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForTopic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:43:03.614861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:03.614958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:03.615020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:03.615063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:03.615100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:03.615135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:03.615219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:03.615325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# 
false 2025-12-23T18:43:03.616180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:03.616487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:03.711367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:03.711419Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:03.722521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:03.722721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:03.723047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:03.734486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:03.734706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:43:03.735430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:03.735628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:03.739668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:03.739874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:03.741105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:03.741189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:03.741385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:03.741432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:03.741484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:03.741611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:03.905181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.906259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.906410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.906501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.906582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.906664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.906860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.906973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.907072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.907145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.907241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.907332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.907426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.907515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.907624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
UG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-12-23T18:43:07.863147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:07.863561Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877760, Sender [1:2048:3860], Recipient [1:697:2680]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:2049:3861] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-12-23T18:43:07.863609Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5342: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-12-23T18:43:07.863640Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6116: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-12-23T18:43:07.863778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:705:2686], Recipient [1:697:2680]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-12-23T18:43:07.863812Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:07.863852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:07.863932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:07.863981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-12-23T18:43:07.864044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:07.864101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:08.411021Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:697:2680]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.411090Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.411152Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:864:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.411171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.411231Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1254:3136]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.411251Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:08.411302Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:864:2815], Recipient [1:864:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.411322Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.411367Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:1254:3136], Recipient [1:1254:3136]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.411384Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.411416Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:697:2680], Recipient [1:697:2680]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.411432Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:08.434041Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:697:2680]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:08.434119Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:08.434165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:08.434461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:705:2686], Recipient [1:697:2680]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-12-23T18:43:08.434499Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:08.434528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:08.434590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:08.434634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-12-23T18:43:08.434688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:08.434728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:09.005794Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:864:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:09.005881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:09.005949Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1254:3136]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:09.005976Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:09.006028Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:697:2680]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:09.006053Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:09.006111Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:1254:3136], Recipient [1:1254:3136]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:09.006140Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:09.006225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:697:2680], Recipient [1:697:2680]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:09.006268Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:09.006329Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:864:2815], Recipient [1:864:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:09.006353Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:09.027173Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:697:2680]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:09.027275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:09.027325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:09.027578Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:705:2686], Recipient [1:697:2680]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-12-23T18:43:09.027612Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:09.027646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:09.027724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:09.027763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-12-23T18:43:09.027830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.979000s, Timestamp# 1970-01-01T00:00:05.158000Z 2025-12-23T18:43:09.027872Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2025-12-23T18:43:09.030263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-12-23T18:43:09.030949Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:2070:3882], Recipient [1:697:2680]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:09.031009Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:09.031053Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:43:09.031210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125519, Sender [1:682:2671], Recipient [1:697:2680]: NKikimrScheme.TEvShredInfoRequest 2025-12-23T18:43:09.031244Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-12-23T18:43:09.031304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8213: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-anonymous >> TestShred::ManualLaunch3Cycles [GOOD] >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC >> KqpPg::CreateTableSerialColumns+useSink >> KqpPg::TypeCoercionInsert-useSink >> KqpPg::TypeCoercionBulkUpsert >> KqpPg::JoinWithQueryService+StreamLookup [GOOD] >> KqpPg::Insert_Serial+useSink >> TestShred::ShredWithSplit [GOOD] >> TestShred::ShredWithMerge [GOOD] >> TPQCDTest::TestDiscoverClusters [GOOD] >> TestShred::Run3CyclesForTables [GOOD] >> TestShred::Run3CyclesForAllSupportedObjects >> Cdc::ResolvedTimestampsMultiplePartitions [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder >> ObjectStorageListingTest::FilterListing >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system >> KqpPg::EmptyQuery+useSink [GOOD] >> KqpPg::EmptyQuery-useSink >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-dbadmin [GOOD] >> TestShred::ShredWithCopyTable [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithSplit [GOOD] Test command err: 2025-12-23T18:43:03.146494Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] Leader for TabletID 72057594046678944 is [1:71:2106] sender: [1:75:2058] recipient: [1:61:2102] 2025-12-23T18:43:03.178782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2025-12-23T18:43:03.178899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:03.178951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:03.179003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:03.179047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:03.179092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:03.179158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:03.179239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:03.180162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:03.180465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:03.273959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:03.274025Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:03.281290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:03.282366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:03.282553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:03.289187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:03.289465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:43:03.290183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:03.290523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:03.291770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:03.291958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:03.293098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:03.293160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:03.293291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:03.293343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:03.293390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:03.293549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:03.474427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.475667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.475826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.475906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.475996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.476085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:43:03.476185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.476280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.476384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.476457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.476572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.476687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.476793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.476869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.476935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.s ... ::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.591210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.623249Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.623348Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.623460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:407:2401], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.623484Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.633964Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.634034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.634130Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.634152Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.666439Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.666510Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.666627Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:407:2401], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.666673Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.677168Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.677249Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.677351Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.677386Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.710027Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.710113Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.710252Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:407:2401], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.710286Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.721462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269553162, Sender [1:1377:3244], Recipient [1:500:2462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409550 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1200 Memory: 89229 Storage: 5024539 } ShardState: 2 UserTablePartOwners: 72075186233409550 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-12-23T18:43:10.721549Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5284: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-12-23T18:43:10.721606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] state 'Ready' dataSize 5019511 rowCount 49 cpuUsage 0.12 2025-12-23T18:43:10.721726Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] raw table stats: DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 
0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:43:10.721761Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-12-23T18:43:10.722022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269553162, Sender [1:1380:3246], Recipient [1:500:2462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1079 Memory: 89253 Storage: 5127032 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-12-23T18:43:10.722060Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5284: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-12-23T18:43:10.722093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] state 'Ready' dataSize 5121950 rowCount 50 cpuUsage 0.1079 2025-12-23T18:43:10.722181Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] raw table stats: DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:43:10.733994Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:10.734063Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:10.734088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:10.734262Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:406:2400], Recipient [1:407:2401]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-12-23T18:43:10.734286Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:10.734312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle 
TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:10.734375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:10.734403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-12-23T18:43:10.734470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 29.997500s, Timestamp# 1970-01-01T00:01:10.002500Z 2025-12-23T18:43:10.734502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 20 s 2025-12-23T18:43:10.735067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-12-23T18:43:10.737134Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:1851:3616], Recipient [1:407:2401]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:10.737186Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:10.737221Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:43:10.737383Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125519, Sender [1:392:2392], Recipient [1:407:2401]: NKikimrScheme.TEvShredInfoRequest 2025-12-23T18:43:10.737414Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-12-23T18:43:10.737446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8213: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithMerge [GOOD] Test command err: 2025-12-23T18:43:03.166307Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] Leader for TabletID 72057594046678944 is [1:71:2106] sender: [1:75:2058] recipient: [1:61:2102] 2025-12-23T18:43:03.202581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:03.202691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:03.202731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:03.202851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:03.202897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:03.203107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:03.203188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:03.203260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:03.204213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:03.204622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:03.303402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:03.303465Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:03.311041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:03.312215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:03.312423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:03.318728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:03.324352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:43:03.325075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:03.325462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:03.326469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:03.326670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:03.327859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:03.327920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:03.328037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:03.328089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:03.328128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2025-12-23T18:43:03.328302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:03.492665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.493868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.494018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.494099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.494186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.494276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.494375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.494480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.494590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.494679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.494853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.494978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.495079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.495154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.495224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.s ... 
MeasureSelfResponseTime 2025-12-23T18:43:10.403208Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.403284Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.403345Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.403366Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.435045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.435125Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.435219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:407:2401], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.435242Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.445691Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.445758Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.445834Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.445864Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.477979Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.478054Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.478145Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:407:2401], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.478165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.488565Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.488641Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.488722Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.488747Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.520662Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.520743Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.520863Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:407:2401], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.520896Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.531316Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.531417Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.531515Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.531544Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.567970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.568048Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:10.568194Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:407:2401], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.568228Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:10.579304Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269553162, Sender [1:1582:3400], Recipient [1:500:2462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 
PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1631 Memory: 90453 Storage: 10149823 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2025-12-23T18:43:10.579376Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5284: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-12-23T18:43:10.579453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] state 'Ready' dataSize 10141461 rowCount 99 cpuUsage 0.1631 2025-12-23T18:43:10.579573Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] raw table stats: DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:43:10.579620Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-12-23T18:43:10.590115Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:10.590199Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:10.590233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:10.590514Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:406:2400], Recipient [1:407:2401]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-12-23T18:43:10.590558Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:10.590585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:10.590653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:10.590688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-12-23T18:43:10.590776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 29.997500s, Timestamp# 1970-01-01T00:01:10.002500Z 2025-12-23T18:43:10.590832Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 20 s 2025-12-23T18:43:10.591432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-12-23T18:43:10.594797Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:1897:3656], Recipient [1:407:2401]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:10.594866Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:10.594913Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:43:10.595064Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125519, Sender [1:392:2392], Recipient [1:407:2401]: NKikimrScheme.TEvShredInfoRequest 2025-12-23T18:43:10.595106Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-12-23T18:43:10.595141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8213: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [GOOD] Test command err: 2025-12-23T18:43:00.723598Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125785103092620:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:00.724220Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:43:00.762261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00466c/r3tmp/tmp2X8cFj/pdisk_1.dat 2025-12-23T18:43:01.034818Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:01.096917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:01.097037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:01.168680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:01.196292Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:01.199079Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification 
cookie mismatch for subscription [1:7587125785103092516:2081] 1766515380703478 != 1766515380703481 TServer::EnableGrpc on GrpcPort 8618, node 1 2025-12-23T18:43:01.292783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:01.379495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/00466c/r3tmp/yandexcImbAx.tmp 2025-12-23T18:43:01.379550Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/00466c/r3tmp/yandexcImbAx.tmp 2025-12-23T18:43:01.379735Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/00466c/r3tmp/yandexcImbAx.tmp 2025-12-23T18:43:01.379841Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62180 PQClient connected to localhost:8618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:43:01.737635Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515381281 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:01.778365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:01.803383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-12-23T18:43:01.837515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:43:04.649367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125802282962653:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.649473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125802282962664:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.649539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.650179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125802282962668:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.650274Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:04.654190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:04.672722Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125802282962667:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-12-23T18:43:04.846153Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125802282962735:2589] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:04.877092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:04.970152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:05.030437Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125802282962745:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:43:05.033615Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MTY1YzgzZmQtOGY4NDVhNDMtNGExM2JjMTAtMjJkNzZiNGU=, ActorId: [1:7587125802282962636:2324], ActorState: ExecuteState, TraceId: 01kd68b8a76a7fdjt4t9cyry23, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:43:05.036408Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:43:05.046992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-12-23T18:43:05.722899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125785103092620:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:05.723020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TestShred::SimpleTestForAllSupportedObjects [GOOD] |95.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |95.0%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |95.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |95.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpPg::NoTableQuery+useSink [GOOD] >> KqpPg::NoTableQuery-useSink >> 
TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithCopyTable [GOOD] Test command err: 2025-12-23T18:43:02.830651Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] Leader for TabletID 72057594046678944 is [1:71:2106] sender: [1:75:2058] recipient: [1:61:2102] 2025-12-23T18:43:02.862639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:02.862713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:02.862760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:02.862802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:02.862828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:02.862863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:02.862919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:02.863003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:02.863837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:02.864181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:02.953742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:02.953813Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:02.961961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:02.971441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:02.971734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:02.978458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:02.986899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState 
with owners number: 0 2025-12-23T18:43:02.987759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:02.988290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:02.989510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:02.989727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:02.990949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:02.991012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:02.991140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:02.991191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:02.991238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:02.991428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:03.167067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.168200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.168340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.168412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 
281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.168502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.168587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.168686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.168794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.168894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.168978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.169083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.169200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.169308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.169385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:03.169453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.s ... e_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409546:7 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 33], pathId map=SimpleCopy, is column=0, is olap=0, RowCount 50, DataSize 5121950 2025-12-23T18:43:11.289788Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409552, followerId 0 2025-12-23T18:43:11.289828Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:235: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:7 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:50.000000Z at schemeshard 72075186233409546 2025-12-23T18:43:11.289857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-12-23T18:43:11.289925Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2025-12-23T18:43:11.303127Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:43:11.303208Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:43:11.303243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409546, queue size# 0 2025-12-23T18:43:11.325102Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.325196Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.325344Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:407:2401], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.325380Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.335920Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.336000Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.336100Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.336133Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.368661Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.368746Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.368902Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:407:2401], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.368939Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.379441Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.379545Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.379645Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.379677Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.412103Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.412170Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.412229Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:407:2401], Recipient [1:407:2401]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.412252Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.422759Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.422841Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.422943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.422974Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.455447Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.455526Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.455653Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:407:2401], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.455689Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.466147Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.466218Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.466324Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.466348Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.498637Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.498724Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.499022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:407:2401], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.499058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.509578Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:407:2401]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:11.509681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:11.509717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:11.509981Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:406:2400], Recipient [1:407:2401]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-12-23T18:43:11.510031Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:11.510063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:11.510115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:11.510157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-12-23T18:43:11.510238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 14.999500s, Timestamp# 1970-01-01T00:01:25.000500Z 2025-12-23T18:43:11.510283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 35 s 2025-12-23T18:43:11.510920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-12-23T18:43:11.514148Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:2108:3822], Recipient [1:407:2401]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:11.514213Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:11.514254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:43:11.514440Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125519, Sender [1:392:2392], Recipient [1:407:2401]: NKikimrScheme.TEvShredInfoRequest 2025-12-23T18:43:11.514478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-12-23T18:43:11.514522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8213: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> Cdc::InitialScan_WithTopicSchemeTx [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForAllSupportedObjects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is 
[1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:43:05.611949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:05.612059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:05.612104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:05.612149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:05.612191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:05.612224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:05.612277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:05.612359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:05.613301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:05.613589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:05.715161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:05.715238Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:05.727293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:05.727559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:05.727728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:05.732139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:05.732333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:43:05.733173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:05.733431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:05.735695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:05.735930Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:05.737198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:05.737273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:05.737405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:05.737452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:05.737563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:05.737697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:05.884653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.885790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.885939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.886019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.886083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.886150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.886223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.886335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.886441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.886505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.886585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.886684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.886800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.886910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:05.887004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... UG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-12-23T18:43:10.960377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:10.960780Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877760, Sender [1:3110:4723], Recipient [1:694:2678]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:3111:4724] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-12-23T18:43:10.960832Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5342: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-12-23T18:43:10.960881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6116: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-12-23T18:43:10.961056Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:696:2679], Recipient [1:694:2678]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-12-23T18:43:10.961115Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:10.961155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:10.961215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:10.961253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-12-23T18:43:10.961308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:10.961366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:11.472641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient 
[1:694:2678]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.472727Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.472809Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1432:3295]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.472838Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.472892Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:861:2812]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.472942Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:11.473024Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:694:2678], Recipient [1:694:2678]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.473056Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.473170Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:1432:3295], Recipient [1:1432:3295]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.473201Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.473264Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:861:2812], Recipient [1:861:2812]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.473294Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:11.525273Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:694:2678]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:11.525361Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:11.525410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2025-12-23T18:43:11.525659Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:696:2679], Recipient [1:694:2678]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-12-23T18:43:11.525712Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:11.525743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:11.525811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at 
schemeshard: 72057594046678944 2025-12-23T18:43:11.525853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-12-23T18:43:11.525907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:11.525971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:12.018480Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:694:2678]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:12.018558Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:12.018617Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1432:3295]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:12.018634Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:12.018665Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:861:2812]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:12.018690Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:12.018729Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:694:2678], Recipient [1:694:2678]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:12.018784Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:12.018885Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:1432:3295], Recipient [1:1432:3295]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:12.018915Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:12.018985Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:861:2812], Recipient [1:861:2812]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:12.019002Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:12.073592Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:694:2678]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:12.073685Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:12.073720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 
2025-12-23T18:43:12.074018Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [1:696:2679], Recipient [1:694:2678]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-12-23T18:43:12.074062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:12.074103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:12.074178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:12.074214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-12-23T18:43:12.074278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.920000s, Timestamp# 1970-01-01T00:00:05.213000Z 2025-12-23T18:43:12.074324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2025-12-23T18:43:12.080237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-12-23T18:43:12.081086Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:3130:4743], Recipient [1:694:2678]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:12.081155Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:12.081195Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:43:12.081354Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125519, Sender [1:676:2666], Recipient [1:694:2678]: NKikimrScheme.TEvShredInfoRequest 2025-12-23T18:43:12.081382Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-12-23T18:43:12.081417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8213: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> DataShardVolatile::DistributedUpsertRestartAfterPlan+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan-UseSink >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-system |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest 
>> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-anonymous >> KqpPg::InsertNoTargetColumns_Simple+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Simple-useSink |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> Cdc::ShouldBreakLocksOnConcurrentDropIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentCancelBuildIndex |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system [GOOD] >> KqpPg::Insert_Serial+useSink [GOOD] >> KqpPg::Insert_Serial-useSink >> KqpPg::CreateTableSerialColumns+useSink [GOOD] >> KqpPg::CreateTableSerialColumns-useSink |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> BridgeGet::PartRestorationAcrossBridgeOnDiscover [GOOD] |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpPg::EmptyQuery-useSink [GOOD] >> KqpPg::DuplicatedColumns+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:50.227778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:50.227875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:50.227928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:50.227992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:50.228030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:50.228065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:50.228141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:50.228210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:50.229108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:50.229391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:50.365453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:50.365525Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:50.366448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:50.386684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:50.386883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:50.387077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:50.402549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:50.402881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:50.403628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:50.404012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:50.407073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:50.407263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:50.408554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:50.408620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:50.408786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:50.408831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:50.408873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:50.409036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.416193Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for 
TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-12-23T18:40:50.544346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:50.544581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.544800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:40:50.544867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:40:50.545334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:40:50.545403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:50.547747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:50.547956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:40:50.548137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.548184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:40:50.548222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:40:50.548257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:40:50.552020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.552071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:40:50.552100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:40:50.559963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.560027Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.560089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:50.560141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:40:50.563991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:40:50.568118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:40:50.568343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:40:50.569627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:50.569779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:50.569840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:50.570116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:40:50.570174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:50.570361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:40:50.570452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:50.575885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard ... 
as 3 2025-12-23T18:43:12.663187Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2025-12-23T18:43:12.663217Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2025-12-23T18:43:12.663246Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2025-12-23T18:43:12.663287Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2025-12-23T18:43:12.663310Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2025-12-23T18:43:12.664910Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:12.665001Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:12.665036Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:43:12.665069Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2025-12-23T18:43:12.665107Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2025-12-23T18:43:12.666254Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:12.666365Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:12.666402Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:43:12.666435Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2025-12-23T18:43:12.666470Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2025-12-23T18:43:12.667799Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 
72057594046678944, cookie: 202 2025-12-23T18:43:12.667892Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:12.667928Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:43:12.667960Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2025-12-23T18:43:12.667993Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2025-12-23T18:43:12.669175Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:12.669267Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:12.669300Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:43:12.669332Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2025-12-23T18:43:12.669365Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2025-12-23T18:43:12.669432Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2025-12-23T18:43:12.672492Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:43:12.672651Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:43:12.674713Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:43:12.674892Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2025-12-23T18:43:12.676362Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2025-12-23T18:43:12.676411Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2025-12-23T18:43:12.678251Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2025-12-23T18:43:12.678399Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2025-12-23T18:43:12.678445Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2692:4680] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2025-12-23T18:43:12.680054Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2025-12-23T18:43:12.680104Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2025-12-23T18:43:12.680235Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2025-12-23T18:43:12.680264Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2025-12-23T18:43:12.680328Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2025-12-23T18:43:12.680357Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2025-12-23T18:43:12.680420Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2025-12-23T18:43:12.680478Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2025-12-23T18:43:12.680540Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2025-12-23T18:43:12.680569Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2025-12-23T18:43:12.682532Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2025-12-23T18:43:12.682781Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2025-12-23T18:43:12.682822Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2695:4683] 2025-12-23T18:43:12.683117Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2025-12-23T18:43:12.683335Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2025-12-23T18:43:12.683362Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2695:4683] 2025-12-23T18:43:12.683476Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2025-12-23T18:43:12.683586Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 
72057594046678944 2025-12-23T18:43:12.683671Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2025-12-23T18:43:12.683694Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2695:4683] 2025-12-23T18:43:12.683816Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2025-12-23T18:43:12.683871Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2025-12-23T18:43:12.683893Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2695:4683] 2025-12-23T18:43:12.683986Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2025-12-23T18:43:12.684014Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2695:4683] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnDiscover [GOOD] Test command err: RandomSeed# 16642976524206021539 readBody# 1 mask1# 0 mask2# 0 mask3# 0 *** performing bridge discover maxId#[0:0:0:0:0:0:0] readBody# 1 mask1# 0 mask2# 0 mask3# 1 *** performing bridge discover maxId#[100501:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 2 *** performing bridge discover maxId#[100502:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 3 *** performing bridge discover maxId#[100503:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 4 *** performing bridge discover maxId#[100504:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 5 *** performing bridge discover maxId#[100505:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 6 *** performing bridge discover maxId#[100506:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 7 *** performing bridge discover maxId#[100507:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 0 *** performing bridge discover maxId#[100508:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 1 *** performing bridge discover maxId#[100509:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 2 *** performing bridge discover maxId#[100510:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 3 *** performing bridge discover maxId#[100511:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 
mask1# 0 mask2# 1 mask3# 4 *** performing bridge discover maxId#[100512:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 5 *** performing bridge discover maxId#[100513:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 6 *** performing bridge discover maxId#[100514:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 7 *** performing bridge discover maxId#[100515:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 0 *** performing bridge discover maxId#[100516:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 1 *** performing bridge discover maxId#[100517:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 2 *** performing bridge discover maxId#[100518:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 3 *** performing bridge discover maxId#[100519:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 4 *** performing bridge discover maxId#[100520:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 5 *** performing bridge discover maxId#[100521:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 6 *** performing bridge discover maxId#[100522:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 7 *** performing bridge discover maxId#[100523:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 0 *** performing bridge discover maxId#[100524:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 1 *** performing bridge discover maxId#[100525:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 2 *** performing bridge discover maxId#[100526:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 3 *** performing bridge discover maxId#[100527:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 4 *** performing bridge discover maxId#[100528:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 5 *** performing bridge discover maxId#[100529:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 6 *** performing bridge discover maxId#[100530:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 7 *** performing bridge discover maxId#[100531:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 0 *** performing bridge discover maxId#[100532:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 1 *** performing bridge discover maxId#[100533:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 
*** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 2 *** performing bridge discover maxId#[100534:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 3 *** performing bridge discover maxId#[100535:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 4 *** performing bridge discover maxId#[100536:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 5 *** performing bridge discover maxId#[100537:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 6 *** performing bridge discover maxId#[100538:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 7 *** performing bridge discover maxId#[100539:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 0 *** performing bridge discover maxId#[100540:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 1 *** performing bridge discover maxId#[100541:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 2 *** performing bridge discover maxId#[100542:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 3 *** performing bridge discover maxId#[100543:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 4 *** performing bridge discover maxId#[100544:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 5 *** performing bridge discover maxId#[100545:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 6 *** performing bridge discover maxId#[100546:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 7 *** performing bridge discover maxId#[100547:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 0 *** performing bridge discover maxId#[100548:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 1 *** performing bridge discover maxId#[100549:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 2 *** performing bridge discover maxId#[100550:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 3 *** performing bridge discover maxId#[100551:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 4 *** performing bridge discover maxId#[100552:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 5 *** performing bridge discover maxId#[100553:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 6 *** performing bridge discover maxId#[100554:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 7 *** performing bridge discover maxId#[100555:3:1:0:0:5:0] *** reading 
from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 0 *** performing bridge discover maxId#[100556:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 1 *** performing bridge discover maxId#[100557:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 2 *** performing bridge discover maxId#[100558:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 3 *** performing bridge discover maxId#[100559:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 4 *** performing bridge discover maxId#[100560:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 5 *** performing bridge discover maxId#[100561:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 6 *** performing bridge discover maxId#[100562:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 7 *** performing bridge discover maxId#[100563:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 0 *** performing bridge discover maxId#[100564:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 1 *** performing bridge discover maxId#[100565:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 2 *** performing bridge discover maxId#[100566:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 3 *** performing bridge discover maxId#[100567:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 4 *** performing bridge discover maxId#[100568:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 5 *** performing bridge discover maxId#[100569:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 6 *** performing bridge discover maxId#[100570:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 7 *** performing bridge discover maxId#[100571:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 1 mask3# 0 *** performing bridge discover maxId#[100572:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 1 mask3# 1 *** performing bridge discover maxId#[100573:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# ... 
iscover maxId#[101450:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 6 mask3# 7 *** performing bridge discover maxId#[101451:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 0 *** performing bridge discover maxId#[101452:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 1 *** performing bridge discover maxId#[101453:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 2 *** performing bridge discover maxId#[101454:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 3 *** performing bridge discover maxId#[101455:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 4 *** performing bridge discover maxId#[101456:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 5 *** performing bridge discover maxId#[101457:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 6 *** performing bridge discover maxId#[101458:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 7 *** performing bridge discover maxId#[101459:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 0 *** performing bridge discover maxId#[101460:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 1 *** performing bridge discover maxId#[101461:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 2 *** performing bridge discover maxId#[101462:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 3 *** performing bridge discover maxId#[101463:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 4 *** performing bridge discover maxId#[101464:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 5 *** performing bridge discover maxId#[101465:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 6 *** performing bridge discover maxId#[101466:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 7 *** performing bridge discover maxId#[101467:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 0 *** performing bridge discover maxId#[101468:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 1 *** performing bridge discover maxId#[101469:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 2 *** performing bridge discover maxId#[101470:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 3 *** performing bridge discover maxId#[101471:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 
mask3# 4 *** performing bridge discover maxId#[101472:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 5 *** performing bridge discover maxId#[101473:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 6 *** performing bridge discover maxId#[101474:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 7 *** performing bridge discover maxId#[101475:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 0 *** performing bridge discover maxId#[101476:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 1 *** performing bridge discover maxId#[101477:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 2 *** performing bridge discover maxId#[101478:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 3 *** performing bridge discover maxId#[101479:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 4 *** performing bridge discover maxId#[101480:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 5 *** performing bridge discover maxId#[101481:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 6 *** performing bridge discover maxId#[101482:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 7 *** performing bridge discover maxId#[101483:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 0 *** performing bridge discover maxId#[101484:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 1 *** performing bridge discover maxId#[101485:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 2 *** performing bridge discover maxId#[101486:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 3 *** performing bridge discover maxId#[101487:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 4 *** performing bridge discover maxId#[101488:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 5 *** performing bridge discover maxId#[101489:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 6 *** performing bridge discover maxId#[101490:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 7 *** performing bridge discover maxId#[101491:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 0 *** performing bridge discover maxId#[101492:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 1 *** performing bridge discover maxId#[101493:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 
2 readBody# 0 mask1# 7 mask2# 4 mask3# 2 *** performing bridge discover maxId#[101494:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 3 *** performing bridge discover maxId#[101495:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 4 *** performing bridge discover maxId#[101496:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 5 *** performing bridge discover maxId#[101497:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 6 *** performing bridge discover maxId#[101498:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 7 *** performing bridge discover maxId#[101499:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 0 *** performing bridge discover maxId#[101500:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 1 *** performing bridge discover maxId#[101501:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 2 *** performing bridge discover maxId#[101502:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 3 *** performing bridge discover maxId#[101503:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 4 *** performing bridge discover maxId#[101504:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 5 *** performing bridge discover maxId#[101505:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 6 *** performing bridge discover maxId#[101506:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 7 *** performing bridge discover maxId#[101507:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 0 *** performing bridge discover maxId#[101508:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 1 *** performing bridge discover maxId#[101509:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 2 *** performing bridge discover maxId#[101510:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 3 *** performing bridge discover maxId#[101511:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 4 *** performing bridge discover maxId#[101512:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 5 *** performing bridge discover maxId#[101513:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 6 *** performing bridge discover maxId#[101514:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 7 *** performing bridge discover maxId#[101515:3:1:0:0:5:0] *** reading from i# 0 *** 
reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 0 *** performing bridge discover maxId#[101516:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 1 *** performing bridge discover maxId#[101517:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 2 *** performing bridge discover maxId#[101518:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 3 *** performing bridge discover maxId#[101519:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 4 *** performing bridge discover maxId#[101520:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 5 *** performing bridge discover maxId#[101521:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 6 *** performing bridge discover maxId#[101522:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 7 *** performing bridge discover maxId#[101523:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> ObjectStorageListingTest::FilterListing [GOOD] |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter >> KqpPg::NoTableQuery-useSink [GOOD] >> KqpPg::PgCreateTable |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:43:14.413630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:14.502596Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:43:14.512741Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:14.513070Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:14.513127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:14.780994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:14.781151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:14.866353Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515391666235 != 1766515391666239 2025-12-23T18:43:14.879434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:14.928058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:15.027198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002be8/r3tmp/tmpMYQOD5/pdisk_1.dat 2025-12-23T18:43:15.324932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:15.340090Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:15.447328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:15.487306Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:43:15.487602Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:43:15.542723Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:43:15.542948Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:43:15.544818Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:43:15.544917Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:43:15.544986Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:43:15.545404Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:43:15.545582Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:43:15.545682Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:43:15.556599Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:43:15.599792Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:43:15.600061Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:43:15.600208Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:43:15.600251Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:43:15.600294Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:43:15.600332Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:43:15.600930Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:43:15.601057Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:43:15.601582Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:43:15.601632Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:15.601680Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:43:15.601725Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:43:15.601810Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:43:15.601954Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:43:15.602263Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:43:15.602366Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:43:15.604464Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:43:15.615463Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:43:15.615646Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:43:15.734677Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:43:15.740627Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:43:15.740750Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:43:15.742106Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:43:15.742179Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:43:15.742239Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:43:15.742590Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:43:15.742831Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:43:15.743340Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:43:15.743437Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:43:15.745995Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:43:15.746607Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:15.749288Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:43:15.749353Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:43:15.749908Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:43:15.749999Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:43:15.751965Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:43:15.752031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:43:15.752092Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:43:15.752177Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:43:15.752236Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:43:15.752361Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:43:15.765117Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:43:15.766846Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:43:15.766928Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:43:15.767790Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:43:15.777099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:15.777209Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:15.777280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:15.778236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:15.778336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:15.782122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:15.787925Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:43:15.905802Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:43:15.908858Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:15.996342Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:16.369571Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1053:2872], serverId# [1:1054:2873], sessionId# [0:0:0] 2025-12-23T18:43:16.370143Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2025-12-23T18:43:16.370405Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-12-23T18:43:16.381502Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:43:16.387552Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1061:2879], serverId# [1:1062:2880], sessionId# [0:0:0] 2025-12-23T18:43:16.387891Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-12-23T18:43:16.388089Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2025-12-23T18:43:16.388337Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:1061:2879], serverId# [1:1062:2880], sessionId# [0:0:0] 2025-12-23T18:43:16.390555Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1067:2885], serverId# [1:1068:2886], sessionId# [0:0:0] 2025-12-23T18:43:16.390841Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-12-23T18:43:16.391061Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2025-12-23T18:43:16.391238Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:1067:2885], serverId# [1:1068:2886], sessionId# [0:0:0] |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:49.485862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:49.485974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:49.486020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:49.486061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:49.486122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:49.486152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:49.486208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:49.486284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:49.487241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:49.487564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:49.617115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:49.617205Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:49.618063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:49.633877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:49.634059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:49.634241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:49.643273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:49.643573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2025-12-23T18:40:49.644406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:49.644856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:49.648047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:49.648251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:49.649493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:49.649577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:49.649712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:49.649764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:49.649809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:49.649983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.657876Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-12-23T18:40:49.804543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:49.804790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.805045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:40:49.805102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:40:49.805375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:40:49.805432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:49.807995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:49.808254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:40:49.808525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.808589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:40:49.808627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:40:49.808681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:40:49.810921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.810986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:40:49.811029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:40:49.812882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.812930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.812985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:49.813043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:40:49.816857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:40:49.819252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:40:49.819457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:40:49.820662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: 
TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:49.820829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:49.820883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:49.821176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:40:49.821230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:49.821404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:40:49.821486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:49.823697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard ... as 3 2025-12-23T18:43:14.855339Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2025-12-23T18:43:14.855371Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2025-12-23T18:43:14.855403Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2025-12-23T18:43:14.855429Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2025-12-23T18:43:14.855457Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2025-12-23T18:43:14.857235Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:14.857336Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:14.857377Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:43:14.857417Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2025-12-23T18:43:14.857459Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2025-12-23T18:43:14.858973Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:14.859072Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:14.859104Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:43:14.859137Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2025-12-23T18:43:14.859175Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2025-12-23T18:43:14.860491Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:14.860580Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:14.860615Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:43:14.860648Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2025-12-23T18:43:14.860686Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2025-12-23T18:43:14.862029Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:14.862120Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:14.862155Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:43:14.862188Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2025-12-23T18:43:14.862223Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2025-12-23T18:43:14.862298Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2025-12-23T18:43:14.865777Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:43:14.865918Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:43:14.867996Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:43:14.868086Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2025-12-23T18:43:14.869238Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2025-12-23T18:43:14.869275Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2025-12-23T18:43:14.870443Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2025-12-23T18:43:14.870566Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2025-12-23T18:43:14.870603Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2692:4680] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2025-12-23T18:43:14.872051Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2025-12-23T18:43:14.872098Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2025-12-23T18:43:14.872184Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2025-12-23T18:43:14.872211Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2025-12-23T18:43:14.872273Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2025-12-23T18:43:14.872300Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2025-12-23T18:43:14.872361Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2025-12-23T18:43:14.872388Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2025-12-23T18:43:14.872444Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- 
TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2025-12-23T18:43:14.872471Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2025-12-23T18:43:14.874400Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2025-12-23T18:43:14.874555Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2025-12-23T18:43:14.874587Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2695:4683] 2025-12-23T18:43:14.874796Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2025-12-23T18:43:14.874981Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2025-12-23T18:43:14.875006Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2695:4683] 2025-12-23T18:43:14.875098Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2025-12-23T18:43:14.875166Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2025-12-23T18:43:14.875224Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2025-12-23T18:43:14.875244Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2695:4683] 2025-12-23T18:43:14.875381Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2025-12-23T18:43:14.875438Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2025-12-23T18:43:14.875460Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2695:4683] 2025-12-23T18:43:14.875553Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2025-12-23T18:43:14.875571Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2695:4683] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithoutLoginPlaceholders [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV4List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV6List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] >> 
TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-ordinaryuser >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute >> KqpPg::InsertNoTargetColumns_Simple-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial-useSink |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |95.0%| [PY] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/objcopy_e64be2702e6aadcfe4f62214e0.o |95.0%| [PY] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/objcopy_e64be2702e6aadcfe4f62214e0.o >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD >> KqpPg::Insert_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText+useSink >> KqpPg::DuplicatedColumns+useSink [GOOD] >> KqpPg::DuplicatedColumns-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:43:00.935323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:00.935418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:00.935457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:00.935499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:00.935541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:00.935570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:00.935635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:00.935707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:00.936581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:00.936869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:01.030553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:01.030620Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:01.049743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:01.049958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:01.050137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:01.068962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:01.069190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:43:01.069843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:01.070075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:01.074299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:01.074490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:01.075786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:01.075848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:01.076011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:01.076063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:01.076112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:01.076230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:01.245532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.246554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.246700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.246819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.246903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.246979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.247094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.247186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.247253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:43:01.247335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.247393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.247453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.247555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.247664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.247759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
nt [2:695:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 100 Completed: false Progress10k: 0 2025-12-23T18:43:17.647582Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:17.647620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:17.647679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:17.647719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:646: TTxCompleteShredBSC Unknown generation#100, Expected gen# 52 at schemestard: 72057594046678944 2025-12-23T18:43:17.647798Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2025-12-23T18:43:17.648176Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [2:703:2685], Recipient [2:695:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: false Progress10k: 0 2025-12-23T18:43:17.648213Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:17.648235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:17.648271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:17.648300Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-12-23T18:43:17.649596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-12-23T18:43:17.649674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:17.649731Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:18.181534Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:18.181629Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:18.181746Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:695:2679], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:18.181784Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:18.192303Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1439:3300]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:18.192404Z node 2 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:18.192483Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:862:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:18.192514Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:18.192586Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:862:2814], Recipient [2:862:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:18.192621Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:18.192715Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:1439:3300], Recipient [2:1439:3300]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:18.192746Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:18.255722Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:18.255820Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:18.255881Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2025-12-23T18:43:18.256163Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [2:703:2685], Recipient [2:695:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: false Progress10k: 5000 2025-12-23T18:43:18.256207Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:18.256242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:18.256324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:18.256374Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-12-23T18:43:18.256449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:18.256520Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:18.629918Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:18.630013Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:18.630096Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:695:2679], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:18.630128Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:18.640752Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1439:3300]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:18.640845Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:18.640929Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:862:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:18.640968Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:18.641061Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:1439:3300], Recipient [2:1439:3300]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:18.641101Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:18.641201Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:862:2814], Recipient [2:862:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:18.641232Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:18.704289Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:18.704396Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:18.704435Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2025-12-23T18:43:18.704737Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [2:703:2685], Recipient [2:695:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: true Progress10k: 10000 2025-12-23T18:43:18.704778Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:18.704814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:18.704898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:18.704939Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 
2025-12-23T18:43:18.704990Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 101, duration# 2 s 2025-12-23T18:43:18.707375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-12-23T18:43:18.708209Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [2:4800:6085], Recipient [2:695:2679]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:18.708281Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:18.708330Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:43:18.708418Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125519, Sender [2:3953:5408], Recipient [2:695:2679]: NKikimrScheme.TEvShredInfoRequest 2025-12-23T18:43:18.708447Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-12-23T18:43:18.708509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8213: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> LdapAuthProviderTest::LdapServerIsUnavailable >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-anonymous >> KqpPg::CreateTableSerialColumns-useSink [GOOD] >> KqpPg::DropIndex >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-ordinaryuser >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] >> test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood >> Cdc::InitialScan_TopicAutoPartitioning [GOOD] >> Cdc::InitialScanDebezium >> KqpPg::TypeCoercionBulkUpsert [GOOD] >> KqpPg::TypeCoercionInsert+useSink >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute >> TestShred::Run3CyclesForAllSupportedObjects [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-dbadmin |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD >> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD] >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy |95.0%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> ObjectStorageListingTest::ListingNoFilter [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad >> TSchemeShardTest::ManyDirs [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups [GOOD] |95.0%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a 
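[editorial note] The TEvControllerShredResponse entries above report shred progress as Progress10k (5000 is logged as "Progress data shred in BSC 50%", 10000 together with Completed: true is logged as "Data shred in BSC is completed"), which suggests the field is expressed in 1/10000 units. The following is a minimal illustrative sketch of that apparent mapping only; the function and parameter names are ours and this is not the YDB implementation.

    # Sketch of the Progress10k -> percent mapping visible in the log above.
    # Names are hypothetical; only the 1/10000-units assumption is taken from the log.
    def shred_progress_percent(progress_10k: int) -> float:
        """Progress10k appears to be progress in basis points (1/10000)."""
        return progress_10k / 100.0

    def describe_shred_response(generation: int, completed: bool, progress_10k: int) -> str:
        if completed:
            return f"generation {generation}: data shred in BSC is completed"
        return (f"generation {generation}: progress data shred in BSC "
                f"{shred_progress_percent(progress_10k):.0f}%")

    if __name__ == "__main__":
        print(describe_shred_response(101, False, 5000))   # matches the "50%" log line
        print(describe_shred_response(101, True, 10000))   # matches the "completed" log line

When the response is not yet completed, the log shows the schemeshard rescheduling another request to BSC (ScheduleRequestToBSC: Interval# 1.000000s), so the 50% and 100% responses above are two iterations of that polling loop.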
|95.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |95.0%| [AR] {RESULT} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::Run3CyclesForAllSupportedObjects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:43:01.439254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:01.439357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:01.439396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:01.439441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:01.439478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:01.439506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:01.439559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:01.439644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:01.440507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:01.440822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:01.535452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:01.535528Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:01.555960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:01.556263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:01.556426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:01.566585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-12-23T18:43:01.566844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:43:01.568143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:01.568397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:01.571073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:01.571309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:01.572484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:01.572562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:01.572698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:01.572741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:01.572840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:01.572987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:01.751243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.752311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.752498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.752590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.752657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.752751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.752833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.752911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.752980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.753041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.753142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:43:01.753219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.753317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.753417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:01.753564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... schemeshard__root_shred_manager.cpp:591: TTxCompleteShredTenant Execute at schemeshard: 72057594046678944 2025-12-23T18:43:21.151348Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38] in# 81 ms, next wakeup# 593.919000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-12-23T18:43:21.151422Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. 
Send request to BS controller 2025-12-23T18:43:21.155786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-12-23T18:43:21.155851Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2025-12-23T18:43:21.156091Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [2:703:2685], Recipient [2:695:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-12-23T18:43:21.156136Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:21.156168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:21.156235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:21.156267Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-12-23T18:43:21.156322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:21.156374Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:21.581908Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:862:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:21.581990Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:21.582069Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:21.582100Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:21.582158Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1440:3302]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:21.582187Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:21.582246Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:695:2679], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:21.582279Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:21.582352Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:1440:3302], Recipient [2:1440:3302]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 
2025-12-23T18:43:21.582381Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:21.582438Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:862:2814], Recipient [2:862:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:21.582467Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:21.603419Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:21.603507Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:21.603542Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2025-12-23T18:43:21.603896Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [2:703:2685], Recipient [2:695:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-12-23T18:43:21.603945Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:21.603978Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:21.604055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:21.604102Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-12-23T18:43:21.604163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:21.604214Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:21.984319Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:862:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:21.984408Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:21.984470Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:21.984496Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:21.984540Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1440:3302]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:21.984565Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, 
processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:21.984621Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:695:2679], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:21.984649Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:21.984711Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:1440:3302], Recipient [2:1440:3302]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:21.984728Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:21.984767Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:862:2814], Recipient [2:862:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:21.984782Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:22.005780Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:22.005863Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:22.005897Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2025-12-23T18:43:22.006131Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [2:703:2685], Recipient [2:695:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-12-23T18:43:22.006170Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:22.006200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:22.006272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:22.006304Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-12-23T18:43:22.006349Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.918000s, Timestamp# 1970-01-01T00:00:11.218000Z 2025-12-23T18:43:22.006387Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 3, duration# 2 s 2025-12-23T18:43:22.008400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-12-23T18:43:22.008916Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [2:4821:6106], Recipient [2:695:2679]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:22.008982Z node 2 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:22.009016Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:43:22.009175Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125519, Sender [2:680:2670], Recipient [2:695:2679]: NKikimrScheme.TEvShredInfoRequest 2025-12-23T18:43:22.009201Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-12-23T18:43:22.009229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8213: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-anonymous |95.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |95.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |95.0%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost >> DataShardVolatile::DistributedUpsertRestartAfterPlan-UseSink [GOOD] >> DataShardVolatile::CompactedVolatileChangesCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:43:20.209802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:20.331258Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:43:20.346340Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
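[editorial note] The rest of this section alternates between ">> Suite::Test [STATUS]" markers and verbatim test stderr. If you only need a quick tally of completed tests from a chunk like this, a small scraper along the following lines is enough; the regex is inferred from the lines visible in this log (e.g. ">> TestShred::Run3CyclesForAllSupportedObjects [GOOD]") and is an assumption, not a format guaranteed by the ya tool.

    # Rough helper for skimming ya-make test output: counts ">> Name [STATUS]" markers.
    # The pattern and helper names are assumptions based on this log, not an official API.
    import re
    import sys
    from collections import Counter

    MARKER = re.compile(r">>\s+(?P<name>\S+)\s+\[(?P<status>[A-Z]+)\]")

    def summarize(log_text: str) -> Counter:
        statuses = Counter()
        for match in MARKER.finditer(log_text):
            statuses[match.group("status")] += 1
        return statuses

    if __name__ == "__main__":
        for status, count in summarize(sys.stdin.read()).most_common():
            print(f"{status}: {count}")

Usage would be along the lines of `python3 summarize.py < ya_log.txt`; test names that appear without a bracketed status (still scheduled or running) are deliberately ignored by the pattern.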
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:20.346710Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:20.346829Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:20.712656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:20.712787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:20.790906Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515397207022 != 1766515397207026 2025-12-23T18:43:20.807914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:20.855409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:20.961757Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002a89/r3tmp/tmpsIkwF4/pdisk_1.dat 2025-12-23T18:43:21.266435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:21.281333Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:21.385898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:21.418570Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:43:21.418823Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:43:21.457067Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:43:21.457244Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:43:21.459159Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:43:21.459261Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:43:21.459319Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:43:21.459721Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:43:21.459918Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:43:21.460026Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:43:21.470943Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:43:21.500223Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:43:21.500461Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:43:21.500610Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:43:21.500650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:43:21.500696Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:43:21.500738Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:43:21.501281Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:43:21.501386Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:43:21.501856Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:43:21.501912Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:21.501962Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:43:21.502008Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:43:21.502072Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:43:21.502219Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:43:21.502505Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:43:21.502606Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:43:21.504551Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:43:21.515436Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:43:21.515579Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:43:21.633970Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:43:21.638541Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:43:21.638636Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:43:21.639818Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:43:21.639888Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:43:21.639938Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:43:21.640184Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:43:21.640332Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:43:21.640626Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:43:21.640691Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:43:21.654543Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:43:21.655031Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:21.656874Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:43:21.656937Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:43:21.657353Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:43:21.657419Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:43:21.658947Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:43:21.659004Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:43:21.659051Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:43:21.659101Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:43:21.659147Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:43:21.659224Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:43:21.670959Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:43:21.672807Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T18:43:21.672912Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:43:21.673880Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T18:43:21.684107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:21.684257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:21.684347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:21.685535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:21.685653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:21.690526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:21.696813Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:43:21.813052Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:43:21.816008Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:21.894689Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:22.246148Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1053:2872], serverId# [1:1054:2873], sessionId# [0:0:0] 2025-12-23T18:43:22.246673Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2025-12-23T18:43:22.246888Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-12-23T18:43:22.258069Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:43:22.263791Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1061:2879], serverId# [1:1062:2880], sessionId# [0:0:0] 2025-12-23T18:43:22.264036Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-12-23T18:43:22.264204Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2025-12-23T18:43:22.264423Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:1061:2879], serverId# [1:1062:2880], sessionId# [0:0:0] |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpPg::DuplicatedColumns-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-clusteradmin >> TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase >> KqpPg::InsertNoTargetColumns_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-12-23T18:42:08.817859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:08.817961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:08.818016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:08.818060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:08.818094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:08.818127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:08.818178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:08.818257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:08.819300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:08.819626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:08.955297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:42:08.955391Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:08.956409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:08.968150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:08.968289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:08.968470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:08.981342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:08.982109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:08.982962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:08.983376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:08.988758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:08.988986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:08.991028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:08.991112Z node 
1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:08.991332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:08.991383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:08.991448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:08.991703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:09.169328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.170423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.170554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.170633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.170723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.170861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } 
TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.170942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.171019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.171093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.171179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.171263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.171341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.171432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.171524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:09.171616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# ... 83647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "3_continuousBackupImpl" TopicPath: "/MyRoot/Table/3_continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409555 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409556 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 50 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:43:22.580989Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:43:22.581204Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl1" took 223us result status StatusSuccess 2025-12-23T18:43:22.581582Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl1" PathDescription { Self { Name: "IncrBackupImpl1" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false 
IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:43:22.582673Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:43:22.582913Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl2" took 283us result status StatusSuccess 2025-12-23T18:43:22.583395Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl2" PathDescription { Self { Name: "IncrBackupImpl2" PathId: 46 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000042 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: 
"__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 46 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:43:22.584559Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:43:22.584775Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl3" took 236us result status StatusSuccess 2025-12-23T18:43:22.585178Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl3" PathDescription { Self { Name: "IncrBackupImpl3" PathId: 49 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 
NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 49 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.0%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... 
results_accumulator.log} >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> KqpPg::DropIndex [GOOD] >> KqpPg::CreateUniqPgColumn+useSink >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad >> KqpPg::InsertValuesFromTableWithDefaultText+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText-useSink >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-ordinaryuser >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-clusteradmin >> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD] >> Cdc::SequentialSplitMerge >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] >> TSchemeShardTest::FindSubDomainPathId >> Cdc::ShouldBreakLocksOnConcurrentCancelBuildIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentDropStream >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-ordinaryuser >> LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn >> test_sql_negative.py::test[watermarks-bad_column-default.txt] >> TSchemeShardTest::FindSubDomainPathId [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test |95.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-system >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-ordinaryuser >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> 
LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> BasicUsage::WriteSessionSwitchDatabases [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system |95.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stability/tool/tool |95.0%| [LD] {RESULT} $(B)/ydb/tests/stability/tool/tool |95.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/tool >> KqpPg::InsertFromSelect_NoReorder+useSink [GOOD] >> KqpPg::DropTablePg >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:131:2155] sender: [1:132:2058] recipient: [1:114:2144] 2025-12-23T18:42:35.076733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:42:35.076864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:35.076932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:42:35.076978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:42:35.077018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:42:35.077063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:42:35.077123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:42:35.077243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:42:35.078232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:35.078522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:42:35.213036Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:42:35.213138Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:42:35.214079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:42:35.226331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:42:35.226477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:42:35.226722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:42:35.245022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:42:35.246006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:42:35.246856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:42:35.247301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:42:35.255327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:35.255527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:42:35.257302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:42:35.257400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:42:35.257640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:42:35.257700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:42:35.257748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:42:35.257991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:42:35.431742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.432902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.433059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.433151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.433255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.433351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.433436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.433544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.433618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.433720Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.433815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.433901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.433983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.434064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2216] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:42:35.434187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# ... 
:27.514605Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2025-12-23T18:43:27.515128Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2025-12-23T18:43:27.515201Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710757:0 ProgressState 2025-12-23T18:43:27.515390Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2025-12-23T18:43:27.515441Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2025-12-23T18:43:27.515505Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2025-12-23T18:43:27.515554Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2025-12-23T18:43:27.515605Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710757, ready parts: 3/3, is published: true 2025-12-23T18:43:27.515702Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:1380:3205] message: TxId: 281474976710757 2025-12-23T18:43:27.515776Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2025-12-23T18:43:27.515836Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:0 2025-12-23T18:43:27.515887Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710757:0 2025-12-23T18:43:27.516050Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:43:27.516107Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:1 2025-12-23T18:43:27.516137Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710757:1 2025-12-23T18:43:27.516176Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:43:27.516207Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:2 2025-12-23T18:43:27.516234Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710757:2 2025-12-23T18:43:27.516308Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2025-12-23T18:43:27.516666Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:43:27.516920Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: 
TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:43:27.516978Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2025-12-23T18:43:27.517072Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:43:27.517129Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2025-12-23T18:43:27.517182Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:43:27.517461Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2025-12-23T18:43:27.517630Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:43:27.517695Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2025-12-23T18:43:27.521142Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:43:27.521324Z node 16 :CONTINUOUS_BACKUP ERROR: schemeshard_backup_incremental__progress.cpp:189: TIncrementalBackup::TTxProgress: Incremental backup with id# 0 not found 2025-12-23T18:43:28.241470Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:43:28.241793Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 390us result status StatusPathDoesNotExist 2025-12-23T18:43:28.241996Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:43:28.242723Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:43:28.242987Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 287us result status StatusPathDoesNotExist 2025-12-23T18:43:28.243143Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:43:28.243803Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:43:28.244073Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 290us result status StatusSuccess 2025-12-23T18:43:28.244596Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 
LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad >> KqpPg::InsertValuesFromTableWithDefault+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault-useSink >> KqpPg::CreateTableBulkUpsertAndRead [GOOD] >> KqpPg::CopyTableSerialColumns+useSink |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-system >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] >> Cdc::InitialScanDebezium [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn >> test_sql_streaming.py::test[hop-GroupByHop-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2025-12-23T18:40:53.629990Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1766515253629949 2025-12-23T18:40:54.115294Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125246817439146:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:54.115348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:40:54.214191Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125244457382794:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:54.214246Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:40:54.241716Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056fa/r3tmp/tmpku6Iph/pdisk_1.dat 2025-12-23T18:40:54.256715Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:40:54.517104Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:54.566355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:54.618622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:54.618759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:54.620575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:54.620629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:54.703600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:54.704942Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:40:54.707604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:54.772323Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:54.774925Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:54.783110Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 29434, node 1 2025-12-23T18:40:54.979571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0056fa/r3tmp/yandexywbOfy.tmp 2025-12-23T18:40:54.979616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0056fa/r3tmp/yandexywbOfy.tmp 2025-12-23T18:40:54.979849Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0056fa/r3tmp/yandexywbOfy.tmp 2025-12-23T18:40:54.980010Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:40:55.122032Z INFO: TTestServer started on Port 6667 GrpcPort 29434 2025-12-23T18:40:55.128081Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:55.239614Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6667 PQClient connected to localhost:29434 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515254833 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:40:55.655673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-12-23T18:40:59.114890Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125246817439146:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:59.114970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:59.180476Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125265932219525:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:59.180568Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125265932219492:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:59.180684Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:40:59.191739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:40:59.215368Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587125244457382794:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:59.215433Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:40:59.224602Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587125265932219529:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-12-23T18:40:59.331146Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587125265932219570:2142] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:40:59.708815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:59.718172Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587125265932219576:2312], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:40:59.718701Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=NTliZjc5YjUtMzBmZjNlZGUtYzAwOGU1NzEtYzk3ZjY2ZjE=, ActorId: [2:7587125265932219487:2298], ActorState: ExecuteState, TraceId: 01kd687ds32qs8kd8pdvf94egv, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:40:59.721023Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:40:59.722765Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125268292277043:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and ... impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-12-23T18:43:26.943792Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-12-23T18:43:26.943849Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2023: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 1 size 98 offset: -1 2025-12-23T18:43:26.943890Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037892][Partition][0][StateIdle] Received TPartition::TEvWrite 2025-12-23T18:43:26.943935Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:43:26.943949Z node 4 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:43:26.943979Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:43:26.943993Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:43:26.944011Z node 4 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:43:26.944148Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2025-12-23T18:43:26.945024Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2025-12-23T18:43:26.945061Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:43:26.945076Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:43:26.945093Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:43:26.945571Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1704: [72075186224037892][Partition][0][StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 2 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000? size 160 WTime 1766515406945 2025-12-23T18:43:26.945784Z node 4 :PERSQUEUE DEBUG: read.h:275: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:43:26.945898Z node 4 :PERSQUEUE DEBUG: read.h:313: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 160 2025-12-23T18:43:26.951076Z node 4 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. 
Partition 0 offset 2 count 1 size 160 actorID [4:7587125383688069016:2394] 2025-12-23T18:43:26.951154Z node 4 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037892][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:43:26.951221Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037892][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:43:26.951279Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2025-12-23T18:43:26.951318Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-12-23T18:43:26.951347Z node 4 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' size 160 2025-12-23T18:43:26.951492Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:43:26.951508Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:26.951522Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:43:26.951540Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:26.951556Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:43:26.951587Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:43:26.951624Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1198: [PQ: 72075186224037892] Topic 'rt3.dc1--test-topic' counters. CacheSize 480 CachedBlobs 3 2025-12-23T18:43:26.951662Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-12-23T18:43:26.952227Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-12-23T18:43:26.952931Z :DEBUG: [/Root] TraceId [] SessionId [src_id|84686a07-d637cd3e-4c527fc-222492d8_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-12-23T18:43:26.953094Z :DEBUG: [/Root] TraceId [] SessionId [src_id|84686a07-d637cd3e-4c527fc-222492d8_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 5000000 } min_queue_wait_time { nanos: 1000000 } max_queue_wait_time { nanos: 1000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-12-23T18:43:26.953127Z :DEBUG: [/Root] TraceId [] SessionId [src_id|84686a07-d637cd3e-4c527fc-222492d8_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 
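The entries above trace a single topic write through the partition actor up to the point where the client's write session receives its acknowledgement; a few entries later the server closes the stream and the session logs "OnReadDone gRpcStatusCode: 1 ... Cancelled on the server side" followed by CLIENT_CANCELLED. For reference, a minimal standalone sketch (assuming grpc++ is installed; this is an illustration, not part of the test code) showing that numeric status 1 is grpc::StatusCode::CANCELLED:

    #include <grpcpp/grpcpp.h>
    #include <iostream>

    int main() {
        // Numeric gRPC status 1 is CANCELLED; this is the code the write session
        // prints as "gRpcStatusCode: 1" when the server cancels the stream.
        grpc::Status st(grpc::StatusCode::CANCELLED, "Cancelled on the server side");
        std::cout << "code=" << st.error_code()              // prints 1
                  << " message=" << st.error_message() << "\n";
        return 0;
    }

Because the session treats this as a fatal error ("Write session will not restart after a fatal error" below), it aborts instead of retrying.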
2025-12-23T18:43:26.953152Z :DEBUG: [/Root] TraceId [] SessionId [src_id|84686a07-d637cd3e-4c527fc-222492d8_0] MessageGroupId [src_id] Write session: acknoledged message 1 2025-12-23T18:43:26.954577Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: src_id|84686a07-d637cd3e-4c527fc-222492d8_0 grpc read done: success: 0 data: 2025-12-23T18:43:26.954602Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: src_id|84686a07-d637cd3e-4c527fc-222492d8_0 grpc read failed 2025-12-23T18:43:26.954783Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 4 sessionId: src_id|84686a07-d637cd3e-4c527fc-222492d8_0 2025-12-23T18:43:26.954812Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: src_id|84686a07-d637cd3e-4c527fc-222492d8_0 is DEAD 2025-12-23T18:43:26.955133Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:43:26.955551Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [3:7587125812141692855:3156] destroyed 2025-12-23T18:43:26.955598Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2025-12-23T18:43:26.955632Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:43:26.955650Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:26.955664Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:43:26.955685Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:26.955698Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:43:26.956022Z :DEBUG: [/Root] TraceId [] SessionId [src_id|84686a07-d637cd3e-4c527fc-222492d8_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2025-12-23T18:43:26.956100Z :ERROR: [/Root] TraceId [] SessionId [src_id|84686a07-d637cd3e-4c527fc-222492d8_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2025-12-23T18:43:26.956135Z :ERROR: [/Root] TraceId [] SessionId [src_id|84686a07-d637cd3e-4c527fc-222492d8_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2025-12-23T18:43:26.956164Z :INFO: [/Root] TraceId [] SessionId [src_id|84686a07-d637cd3e-4c527fc-222492d8_0] MessageGroupId [src_id] Write session will now close 2025-12-23T18:43:26.956230Z :DEBUG: [/Root] TraceId [] SessionId [src_id|84686a07-d637cd3e-4c527fc-222492d8_0] MessageGroupId [src_id] Write session: aborting 2025-12-23T18:43:26.995922Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:43:26.995968Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:26.995983Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:43:26.996006Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:26.996032Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:43:27.049470Z :DEBUG: [/Root] TraceId [] SessionId [src_id|84686a07-d637cd3e-4c527fc-222492d8_0] MessageGroupId [src_id] Write session: destroy 2025-12-23T18:43:27.096276Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:43:27.096315Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:27.096330Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:43:27.096350Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:27.096368Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:43:27.196618Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:43:27.196675Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:27.196691Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:43:27.196715Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:27.196734Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |95.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultText-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError |95.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] |95.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2025-12-23T18:43:02.580595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:02.580672Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:02.585094Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:43:02.619366Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:43:02.619807Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:43:02.620177Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:43:02.681345Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:43:02.701660Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:43:02.701777Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:43:02.703933Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:43:02.704114Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:43:02.704182Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:43:02.704629Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:43:02.704714Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:43:02.704808Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:43:02.808168Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:43:02.842093Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:43:02.842341Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:43:02.842485Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:43:02.842528Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:43:02.842566Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:43:02.842621Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:02.847049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient 
[1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.847159Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.847646Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:43:02.847763Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:43:02.847944Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:02.848005Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:02.848051Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:43:02.848099Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:43:02.848134Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:43:02.848170Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:43:02.848214Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:43:02.848314Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.848361Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.848409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:43:02.862499Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:43:02.862601Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:02.862760Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:02.863045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:43:02.863112Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:43:02.863178Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:43:02.863251Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:02.863329Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:43:02.863390Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:43:02.863457Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:02.863866Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:43:02.863945Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:43:02.864007Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:02.864060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:02.864139Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:43:02.864186Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:02.864226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:43:02.864266Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:02.864293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:43:02.889325Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:02.889409Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:02.889453Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:02.889507Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:43:02.889604Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:43:02.890268Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.890329Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.890381Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:43:02.890557Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:43:02.890591Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:43:02.890793Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:02.890850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 
2025-12-23T18:43:02.890922Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:43:02.890976Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:43:02.895751Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:43:02.895845Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:02.896183Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.896241Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.896322Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:02.896376Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:43:02.896419Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:43:02.896470Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:43:02.896535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... sion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-12-23T18:43:29.949734Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2025-12-23T18:43:29.949802Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:43:29.949867Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:29.949899Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2025-12-23T18:43:29.949928Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1002] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:29.949957Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2025-12-23T18:43:29.949990Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1002] at 9437184 is DelayComplete 2025-12-23T18:43:29.950017Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:29.950045Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1002] at 9437184 to execution unit CompletedOperations 
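The TX_DATASHARD trace above follows the execution-unit pipeline pattern: an operation is tried on a unit, the unit reports a status (Executed, DelayComplete, ExecutedNoMoreRestarts, or "not ready"), and the plan advances to the next unit (CheckSchemeTx -> StoreSchemeTx -> FinishPropose -> WaitForPlan -> PlanQueue for the scheme tx, CheckDataTx -> BuildAndWaitDependencies -> ExecuteDataTx -> FinishPropose -> CompletedOperations for the data tx). Below is a deliberately simplified, hypothetical C++ sketch of that advance loop; the unit names are taken from the trace, but the real pipeline in ydb/core/tx/datashard/datashard_pipeline.cpp is considerably more involved:

    #include <iostream>
    #include <string>
    #include <vector>

    // Simplified model of the execution-unit statuses seen in the trace.
    enum class EStatus { Executed, DelayComplete, NotReady };

    struct TUnit {
        std::string Name;
        EStatus (*Run)();  // in the real code each unit is a class with Execute/Complete
    };

    int main() {
        // Unit chain for an immediate data transaction, as it appears in the log.
        std::vector<TUnit> plan = {
            {"CheckDataTx",              [] { return EStatus::Executed; }},
            {"BuildAndWaitDependencies", [] { return EStatus::Executed; }},
            {"ExecuteDataTx",            [] { return EStatus::Executed; }},
            {"FinishPropose",            [] { return EStatus::DelayComplete; }},
            {"CompletedOperations",      [] { return EStatus::Executed; }},
        };

        for (const auto& unit : plan) {
            std::cout << "Trying to execute op on unit " << unit.Name << "\n";
            EStatus st = unit.Run();
            if (st == EStatus::NotReady) {
                // e.g. WaitForPlan parks the operation until TEvPlanStep arrives
                std::cout << "  not ready, operation waits for an event\n";
                break;
            }
            std::cout << "  status "
                      << (st == EStatus::Executed ? "Executed" : "DelayComplete")
                      << ", advancing execution plan to the next unit\n";
        }
        return 0;
    }

A DelayComplete status corresponds to the "Complete execution for ... on unit FinishPropose" entries that appear only after the transaction commits, which is why the trace shows those lines separately from the main advance loop.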
2025-12-23T18:43:29.950073Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2025-12-23T18:43:29.950114Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1002] at 9437184 is Executed 2025-12-23T18:43:29.950137Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2025-12-23T18:43:29.950162Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1002] at 9437184 has finished 2025-12-23T18:43:29.969839Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:29.969911Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2025-12-23T18:43:29.969954Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-12-23T18:43:29.970036Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2025-12-23T18:43:29.975450Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2025-12-23T18:43:29.975510Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2025-12-23T18:43:29.976399Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:4553:6470], Recipient [3:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:29.976445Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:29.976485Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:4552:6469], serverId# [3:4553:6470], sessionId# [0:0:0] 2025-12-23T18:43:29.976927Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2025-12-23T18:43:29.976965Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:29.977057Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:29.977606Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2025-12-23T18:43:29.977667Z node 3 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2025-12-23T18:43:29.977699Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit CheckDataTx 2025-12-23T18:43:29.977730Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2025-12-23T18:43:29.977759Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2025-12-23T18:43:29.977795Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-12-23T18:43:29.977848Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1003] at 9437184 2025-12-23T18:43:29.977880Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2025-12-23T18:43:29.977902Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2025-12-23T18:43:29.977925Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit BlockFailPoint 2025-12-23T18:43:29.977948Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit BlockFailPoint 2025-12-23T18:43:29.977974Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2025-12-23T18:43:29.977996Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit BlockFailPoint 2025-12-23T18:43:29.978020Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2025-12-23T18:43:29.978042Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:29.978083Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-12-23T18:43:29.978423Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2025-12-23T18:43:29.978478Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:43:29.978561Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:29.978594Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2025-12-23T18:43:29.978621Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:29.978653Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] 
at 9437184 on unit FinishPropose 2025-12-23T18:43:29.978686Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is DelayComplete 2025-12-23T18:43:29.978711Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:29.980005Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit CompletedOperations 2025-12-23T18:43:29.980068Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2025-12-23T18:43:29.980120Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2025-12-23T18:43:29.980147Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2025-12-23T18:43:29.980174Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1003] at 9437184 has finished 2025-12-23T18:43:30.020158Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T18:43:30.020220Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-12-23T18:43:30.022046Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:30.022104Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1003] at 9437184 on unit FinishPropose 2025-12-23T18:43:30.022143Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-12-23T18:43:30.022218Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:30.025956Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:238:2231], Recipient [3:240:2232]: NKikimr::TEvTablet::TEvFollowerGcApplied .2025-12-23T18:43:30.029263Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:4567:6483], Recipient [3:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:30.029334Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:30.029388Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:4566:6482], serverId# [3:4567:6483], sessionId# [0:0:0] 2025-12-23T18:43:30.029874Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553160, Sender [3:4565:6481], Recipient [3:240:2232]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1719 LastUpdateTime: 1719 KeyAccessSample { } } >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-dbadmin |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts |95.1%| 
[TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute >> KqpPg::InsertFromSelect_Simple+useSink [GOOD] >> KqpPg::InsertFromSelect_Simple-useSink >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-ordinaryuser >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system [GOOD] >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] >> TestShred::SchemeShardCounterDoesNotConsistWithBscCounter >> KqpPg::TableArrayInsert+useSink [GOOD] >> KqpPg::TableArrayInsert-useSink >> Cdc::SequentialSplitMerge [GOOD] >> Cdc::MustNotLoseSchemaSnapshot >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword >> TPQCDTest::TestUnavailableWithoutNetClassifier >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx >> KqpPg::CreateUniqPgColumn+useSink [GOOD] >> KqpPg::CreateUniqPgColumn-useSink >> DataShardVolatile::CompactedVolatileChangesCommit [GOOD] >> DataShardVolatile::CompactedVolatileChangesAbort >> KqpPg::InsertValuesFromTableWithDefault-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:37:45.019605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:37:45.019719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:45.019765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:37:45.019799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:37:45.019836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
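The schemeshard configuration notices in this block express every interval in seconds: Timeout# 600.000000s is 10 minutes, RoundInterval# 172800.000000s is 2 days, and the ShredInterval# 604800.000000s reported by the RootShredManager in the next entry is 7 days. A small standalone helper (an illustration only, not part of the test) for turning those values into something readable:

    #include <cstdio>

    // Convert an interval in seconds, as printed in the schemeshard config
    // notices, into days/hours/minutes/seconds.
    static void PrintDuration(double seconds) {
        long s = static_cast<long>(seconds);
        long days = s / 86400;
        long hours = (s % 86400) / 3600;
        long minutes = (s % 3600) / 60;
        std::printf("%.0fs = %ldd %ldh %ldm %lds\n",
                    seconds, days, hours, minutes, s % 60);
    }

    int main() {
        PrintDuration(600);     // BackgroundCompactionQueue Timeout -> 0d 0h 10m 0s
        PrintDuration(172800);  // RoundInterval                     -> 2d 0h 0m 0s
        PrintDuration(604800);  // RootShredManager ShredInterval    -> 7d 0h 0m 0s
        return 0;
    }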
2025-12-23T18:37:45.019871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:37:45.019952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:37:45.020078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:37:45.021049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:37:45.021373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:37:45.137313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:37:45.137367Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:45.145565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:37:45.145827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:37:45.145992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:37:45.150011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:37:45.150225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:37:45.151024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:37:45.151235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:37:45.153476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:45.153710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:37:45.154999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:37:45.155061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:37:45.155190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:37:45.155235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:37:45.155275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:37:45.155412Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:37:45.393575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.394620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.394949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.395070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.395192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.395259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.395329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.395401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.395473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.395560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.395644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.395712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.395775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.395847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:37:45.395977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 10:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:43:30.889585Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 39 2025-12-23T18:43:30.890088Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:43:30.890136Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:43:30.890351Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:43:30.890397Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:43:30.890445Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:43:30.890497Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:43:30.890545Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:43:30.890601Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:43:30.890663Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:43:30.890710Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:43:30.890964Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:43:30.891031Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:43:30.891079Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2025-12-23T18:43:30.891117Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2025-12-23T18:43:30.892000Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:43:30.892072Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:43:30.892108Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:43:30.892167Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at 
schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:43:30.892222Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:43:30.893053Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:43:30.893118Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:43:30.893145Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:43:30.893167Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-12-23T18:43:30.893191Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:43:30.893254Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:43:30.896536Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:43:30.896624Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:43:30.896916Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:43:30.896975Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:43:30.897548Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:43:30.897691Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:43:30.897762Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [17:877:2829] TestWaitNotification: OK eventTxId 102 2025-12-23T18:43:30.898471Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:43:30.898857Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 434us result status 
StatusSuccess 2025-12-23T18:43:30.899521Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:43:30.900255Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:43:30.900531Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 310us result status StatusSuccess 2025-12-23T18:43:30.901067Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: 
METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:43:31.262266Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__find_subdomain_path_id.cpp:20: FindTabletSubDomainPathId for tablet 72075186233409546 >> Cdc::ShouldBreakLocksOnConcurrentDropStream [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildSyncIndex |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood >> KqpPg::CopyTableSerialColumns+useSink [GOOD] >> KqpPg::CopyTableSerialColumns-useSink >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-ordinaryuser >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-dbadmin >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-system >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:112:2143] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:112:2143] Leader for TabletID 9437184 is [1:134:2156] sender: [1:135:2057] recipient: [1:112:2143] 2025-12-23T18:43:01.605408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to 
console configs 2025-12-23T18:43:01.605472Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:01.606634Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:43:01.616213Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:43:01.616489Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:43:01.616711Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:43:01.626198Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:43:01.663021Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:43:01.664224Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:43:01.665970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:43:01.666118Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:43:01.666197Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:43:01.666599Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:43:01.667881Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:43:01.667981Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:203:2156] in generation 2 Leader for TabletID 9437184 is [1:134:2156] sender: [1:214:2057] recipient: [1:14:2061] 2025-12-23T18:43:01.766764Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:43:01.807711Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:43:01.807961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:43:01.808079Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-12-23T18:43:01.808179Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:43:01.808220Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:43:01.808268Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:01.808541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:01.808595Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:01.808949Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:43:01.809057Z node 1 :TX_DATASHARD DEBUG: 
datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:43:01.809173Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:01.809220Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:01.809278Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:43:01.809320Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:43:01.809356Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:43:01.809394Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:43:01.809439Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:43:01.809557Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:215:2214], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:01.809605Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:01.809656Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:215:2214], sessionId# [0:0:0] 2025-12-23T18:43:01.825085Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:43:01.825177Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:01.825273Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:01.825537Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:43:01.825594Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:43:01.825652Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:43:01.825722Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:01.825760Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:43:01.825799Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:43:01.825835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:01.826227Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 
2025-12-23T18:43:01.826286Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:43:01.826324Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:01.826376Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:01.826433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:43:01.826465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:01.826502Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:43:01.826535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:01.826561Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:43:01.840739Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:01.840856Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:01.840901Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:01.840946Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:43:01.841031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:43:01.841641Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:01.841701Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:01.841766Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-12-23T18:43:01.841913Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-12-23T18:43:01.841951Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:43:01.842088Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:01.842137Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2025-12-23T18:43:01.842195Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:43:01.842237Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:43:01.850520Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 
9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:43:01.850610Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:01.850999Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:01.851068Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:01.851139Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:01.851184Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:43:01.851222Z node 1 :TX_DATASHARD TRACE: datashard_pipelin ... rd_impl.h:3153: StateWork, received event# 269877761, Sender [21:297:2278], Recipient [21:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:35.180369Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:35.180436Z node 21 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [21:296:2277], serverId# [21:297:2278], sessionId# [0:0:0] 2025-12-23T18:43:35.180728Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [21:103:2137], Recipient [21:238:2230]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 90194315353 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? 
?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2025-12-23T18:43:35.180777Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:35.180914Z node 21 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:35.182197Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-12-23T18:43:35.182325Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2025-12-23T18:43:35.182381Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-12-23T18:43:35.182434Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-12-23T18:43:35.182485Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-12-23T18:43:35.182537Z node 21 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T18:43:35.182631Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 9437184 2025-12-23T18:43:35.182688Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2025-12-23T18:43:35.182718Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-12-23T18:43:35.182765Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BlockFailPoint 2025-12-23T18:43:35.182799Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BlockFailPoint 2025-12-23T18:43:35.182832Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2025-12-23T18:43:35.182866Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BlockFailPoint 2025-12-23T18:43:35.182892Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-12-23T18:43:35.182925Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:35.182983Z node 21 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T18:43:35.183051Z node 21 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:126: Operation [0:2] at 9437184 requested 132390 more memory 2025-12-23T18:43:35.183102Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2025-12-23T18:43:35.183488Z node 21 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:35.183554Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:35.183616Z node 21 
:TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T18:43:35.184886Z node 21 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 132518 and requests 1060144 more for the next try 2025-12-23T18:43:35.185101Z node 21 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 2 released its data 2025-12-23T18:43:35.185172Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2025-12-23T18:43:35.185470Z node 21 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:35.185513Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:35.186551Z node 21 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 2 at 9437184 restored its data 2025-12-23T18:43:35.186626Z node 21 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T18:43:35.187293Z node 21 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 1192662 and requests 9541296 more for the next try 2025-12-23T18:43:35.187430Z node 21 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 2 released its data 2025-12-23T18:43:35.187477Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2025-12-23T18:43:35.187674Z node 21 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:35.187712Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:35.188428Z node 21 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 2 at 9437184 restored its data 2025-12-23T18:43:35.188486Z node 21 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T18:43:35.189045Z node 21 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 10733958 and requests 85871664 more for the next try 2025-12-23T18:43:35.189162Z node 21 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 2 released its data 2025-12-23T18:43:35.189203Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2025-12-23T18:43:35.189384Z node 21 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:35.189448Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-12-23T18:43:35.190080Z node 21 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 2 at 9437184 restored its data 2025-12-23T18:43:35.190134Z node 21 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T18:43:35.495668Z node 21 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed 
operation [0:2] at tablet 9437184 with status COMPLETE 2025-12-23T18:43:35.495803Z node 21 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:43:35.495920Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:35.495971Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-12-23T18:43:35.496037Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:35.496102Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-12-23T18:43:35.496241Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:43:35.496293Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:35.496350Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-12-23T18:43:35.496408Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-12-23T18:43:35.496483Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2025-12-23T18:43:35.496530Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-12-23T18:43:35.496589Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2025-12-23T18:43:35.510331Z node 21 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:35.510423Z node 21 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-12-23T18:43:35.510524Z node 21 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-12-23T18:43:35.510646Z node 21 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:35.511838Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [21:302:2283], Recipient [21:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:35.511926Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:35.511989Z node 21 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [21:301:2282], serverId# [21:302:2283], sessionId# [0:0:0] 2025-12-23T18:43:35.512185Z node 21 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268830214, Sender [21:300:2281], Recipient [21:238:2230]: NKikimrTabletBase.TEvGetCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> 
TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:50.597709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:50.597834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:50.597885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:50.597927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:50.597962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:50.597996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:50.598050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:50.598107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:50.599189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:50.599505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:50.724460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:50.724535Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:50.725455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:50.741601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:50.741764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:50.741941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:50.750152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:50.750439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:50.751260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:50.751620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:50.754529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:50.754758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:50.756055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:50.756125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:50.756273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:50.756323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:50.756368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:50.756528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.764573Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-12-23T18:40:50.905898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:50.906171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.906412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:40:50.906486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:40:50.906777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:40:50.906844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:50.909364Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:50.909622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:40:50.909848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.909933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:40:50.909975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:40:50.910010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:40:50.912072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.912132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:40:50.912181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:40:50.913972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.914032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.914093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:50.914147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:40:50.925812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:40:50.928030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:40:50.928243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:40:50.929422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-12-23T18:40:50.929555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:50.929606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:50.929875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:40:50.929928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:50.930103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:40:50.930199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:50.932357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard ... as 3 2025-12-23T18:43:33.055355Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2025-12-23T18:43:33.055392Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2025-12-23T18:43:33.055426Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2025-12-23T18:43:33.055455Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2025-12-23T18:43:33.055483Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2025-12-23T18:43:33.057374Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:33.057489Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:33.057530Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:43:33.057569Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2025-12-23T18:43:33.057616Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 127] was 2 2025-12-23T18:43:33.059047Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:33.059145Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:33.059181Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:43:33.059239Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2025-12-23T18:43:33.059280Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2025-12-23T18:43:33.060738Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:33.060843Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:33.060883Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:43:33.060923Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2025-12-23T18:43:33.060961Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2025-12-23T18:43:33.062170Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:33.062267Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:43:33.062303Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:43:33.062338Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2025-12-23T18:43:33.062378Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 130] was 2 2025-12-23T18:43:33.062454Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2025-12-23T18:43:33.066073Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:43:33.066227Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:43:33.068573Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:43:33.068711Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2025-12-23T18:43:33.070434Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2025-12-23T18:43:33.070485Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2025-12-23T18:43:33.072490Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2025-12-23T18:43:33.072618Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2025-12-23T18:43:33.072659Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2692:4680] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2025-12-23T18:43:33.074195Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2025-12-23T18:43:33.074245Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2025-12-23T18:43:33.074353Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2025-12-23T18:43:33.074386Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2025-12-23T18:43:33.074453Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2025-12-23T18:43:33.074486Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2025-12-23T18:43:33.074550Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2025-12-23T18:43:33.074581Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2025-12-23T18:43:33.074644Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2025-12-23T18:43:33.074676Z node 32 :FLAT_TX_SCHEMESHARD 
DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2025-12-23T18:43:33.076844Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2025-12-23T18:43:33.077052Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2025-12-23T18:43:33.077093Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2695:4683] 2025-12-23T18:43:33.077350Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2025-12-23T18:43:33.077585Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2025-12-23T18:43:33.077621Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2695:4683] 2025-12-23T18:43:33.077759Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2025-12-23T18:43:33.077912Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2025-12-23T18:43:33.078016Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2025-12-23T18:43:33.078049Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2695:4683] 2025-12-23T18:43:33.078187Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2025-12-23T18:43:33.078266Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2025-12-23T18:43:33.078298Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2695:4683] 2025-12-23T18:43:33.078455Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2025-12-23T18:43:33.078488Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2695:4683] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood >> KqpPg::DropTablePg [GOOD] >> KqpPg::DropTablePgMultiple >> TPQCDTest::TestUnavailableWithoutClustersList >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD] >> LdapAuthProviderTest::LdapFetchGroupsWithDelayUpdateSecurityState >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood >> Cdc::InitialScanRacyCompleteAndRequest 
[GOOD] >> Cdc::InitialScanUpdatedRows >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne >> TPQCDTest::TestUnavailableWithoutBoth >> TPQCDTest::TestRelatedServicesAreRunning >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-dbadmin >> TestShred::SchemeShardCounterDoesNotConsistWithBscCounter [GOOD] >> TestShred::Run3CyclesForTopics >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-system >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad >> LdapAuthProviderTest::LdapFetchGroupsWithDelayUpdateSecurityState [GOOD] >> LdapAuthProviderTest::CanGetErrorIfAppropriateLoginProviderIsAbsent >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink >> DataShardVolatile::NotCachingAbortingDeletes+UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system >> Cdc::MustNotLoseSchemaSnapshot [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-clusteradmin >> KqpPg::CopyTableSerialColumns-useSink [GOOD] >> KqpPg::CreateIndex >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::DropTableIfExists ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2025-12-23T18:43:37.965212Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125947814900923:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:37.965323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # 
/home/runner/.ya/build/build_root/8ptz/004651/r3tmp/tmpLArJwX/pdisk_1.dat 2025-12-23T18:43:38.234282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:38.234426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:38.275258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:38.282950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:38.331934Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:38.336062Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125947814900897:2081] 1766515417963427 != 1766515417963430 TServer::EnableGrpc on GrpcPort 28523, node 1 2025-12-23T18:43:38.411885Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/004651/r3tmp/yandexMoZVBE.tmp 2025-12-23T18:43:38.411915Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/004651/r3tmp/yandexMoZVBE.tmp 2025-12-23T18:43:38.412107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/004651/r3tmp/yandexMoZVBE.tmp 2025-12-23T18:43:38.412227Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:38.469971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:38.975233Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:40.858387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125960699803745:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:40.858387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125960699803737:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:40.858493Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:40.858910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125960699803752:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:40.859017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:40.864288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:40.878271Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125960699803751:2368], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-12-23T18:43:40.979082Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125960699803808:2564] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:41.301503Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125960699803827:2376], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:43:41.301971Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=M2U4Zjk1YjYtZGZjOWI5YjAtYmVmYjk0ZTAtZDg5OGE2MWU=, ActorId: [1:7587125960699803735:2363], ActorState: ExecuteState, TraceId: 01kd68cbnr72yh00zr5m7hejh5, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:43:41.322718Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |95.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2025-12-23T18:43:34.404649Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125934475817717:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:34.407900Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004665/r3tmp/tmpgHLCAA/pdisk_1.dat 2025-12-23T18:43:34.622200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:34.627785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:34.627907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:34.659261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:34.721422Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19732, node 1 2025-12-23T18:43:34.808638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:34.808659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:34.808670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:34.808770Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:34.850580Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11721 PQClient connected to localhost:19732 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515414776 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:35.100138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:43:35.120328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:43:35.131668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:43:35.142671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-12-23T18:43:35.412394Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:37.423615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125947360720527:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:37.423719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:37.423900Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125947360720539:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:37.423929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125947360720540:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:37.424032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:37.428142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:37.441781Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125947360720543:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-12-23T18:43:37.612511Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125947360720609:2590] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:37.641176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:37.746446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:37.797417Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125947360720617:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:43:37.798212Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=NzVhMGRmZjgtYWE4N2FiMjctOTc3NDRmYjQtNDczOGNkZA==, ActorId: [1:7587125947360720525:2324], ActorState: ExecuteState, TraceId: 01kd68c8aeakznkccaxrjy3t1r, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:43:37.801109Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:43:37.839059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-12-23T18:43:39.398550Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125934475817717:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:39.398643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] Test command err: 2025-12-23T18:43:19.448746Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125867777949059:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:19.449189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:43:19.528083Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b2/r3tmp/tmpttxxXw/pdisk_1.dat 2025-12-23T18:43:19.889601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:19.889719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:19.895954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:19.943007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 25078, node 1 2025-12-23T18:43:20.010148Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:20.151177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:20.151212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:20.151220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:20.151317Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:20.212429Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:20.398920Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:20.403331Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:20.403387Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:20.404273Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:62253, port: 62253 2025-12-23T18:43:20.404987Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:20.443588Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:20.487078Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:20.487690Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:20.487753Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 
2025-12-23T18:43:20.532115Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:20.577913Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:20.579136Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Hxyg (56E64DA9) () has now valid token of ldapuser@ldap 2025-12-23T18:43:20.579278Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:23.579710Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****Hxyg (56E64DA9) 2025-12-23T18:43:23.579852Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:62253, port: 62253 2025-12-23T18:43:23.579981Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:23.589243Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:23.589693Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:62253 return no entries 2025-12-23T18:43:23.589986Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****Hxyg (56E64DA9) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:62253 return no entries)' 2025-12-23T18:43:24.448004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125867777949059:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:24.448081Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:43:27.582985Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****Hxyg (56E64DA9) 2025-12-23T18:43:31.115303Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125918378928561:2160];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:31.115689Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b2/r3tmp/tmpfiO8iO/pdisk_1.dat 2025-12-23T18:43:31.125770Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:31.203188Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:31.204812Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587125918378928439:2081] 1766515411105446 != 1766515411105449 2025-12-23T18:43:31.247382Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:31.247495Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:31.250511Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28041, node 2 2025-12-23T18:43:31.309131Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:31.349326Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:31.349369Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:31.349377Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:31.349482Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:31.590405Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:31.592724Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:31.592759Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:31.593394Z node 2 :LDAP_AUTH_PROVIDER DEBUG: 
ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24284, port: 24284 2025-12-23T18:43:31.593454Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:31.602421Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:31.602889Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:24284. Server is busy 2025-12-23T18:43:31.603093Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****Fsfg (77A1FA7C) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:24284. Server is busy)' 2025-12-23T18:43:31.603328Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:31.603355Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:31.604122Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24284, port: 24284 2025-12-23T18:43:31.604183Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:31.609825Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:31.610234Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:24284. Server is busy 2025-12-23T18:43:31.610436Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****Fsfg (77A1FA7C) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:24284. Server is busy)' 2025-12-23T18:43:32.118852Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:34.120015Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****Fsfg (77A1FA7C) 2025-12-23T18:43:34.120297Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:34.120350Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:34.121705Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24284, port: 24284 2025-12-23T18:43:34.121778Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:34.128204Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:34.128600Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:24284. 
Server is busy 2025-12-23T18:43:34.128898Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****Fsfg (77A1FA7C) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:24284. Server is busy)' 2025-12-23T18:43:36.111917Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587125918378928561:2160];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:36.112055Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:43:37.125446Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****Fsfg (77A1FA7C) 2025-12-23T18:43:37.125691Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:37.125710Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:37.126523Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24284, port: 24284 2025-12-23T18:43:37.126582Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:37.134343Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:37.175023Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:37.175512Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:37.175559Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:37.223131Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:37.267089Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:37.268040Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Fsfg (77A1FA7C) () has now valid token of ldapuser@ldap 2025-12-23T18:43:41.127876Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****Fsfg (77A1FA7C) 2025-12-23T18:43:41.127967Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24284, port: 24284 2025-12-23T18:43:41.128025Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:41.132727Z node 2 
:LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:41.179125Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:41.179698Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:41.179744Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:41.227077Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:41.275115Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:41.275971Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Fsfg (77A1FA7C) () has now valid token of ldapuser@ldap |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> KqpPg::CreateUniqPgColumn-useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2025-12-23T18:43:39.138008Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125956725850171:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:39.138080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00464e/r3tmp/tmpTw5HlZ/pdisk_1.dat 2025-12-23T18:43:39.344129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:39.349007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:39.349099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:39.406536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:39.407590Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15548, node 1 
2025-12-23T18:43:39.507476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:39.507503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:39.507512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:39.507619Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:39.602197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:40.149021Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:41.758019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125965315785682:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:41.758020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125965315785671:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:41.758090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:41.758308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125965315785686:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:41.758392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:41.762921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:41.773670Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125965315785685:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-12-23T18:43:41.903022Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125965315785742:2559] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:42.319667Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125965315785763:2373], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:43:42.320190Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MmQwYTdhZi1kMWRhYWVmNi0xNjMzYTU5Ni0zMGFhZGQyMg==, ActorId: [1:7587125965315785669:2359], ActorState: ExecuteState, TraceId: 01kd68cchw1fp15e6e1x86969k, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:43:42.333201Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |95.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> KqpPg::TypeCoercionInsert+useSink [GOOD] >> KqpPg::TableSelect+useSink >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-dbadmin >> KqpPg::TypeCoercionInsert-useSink [GOOD] >> KqpPg::V1CreateTable >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] Test command err: 2025-12-23T18:43:22.149304Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125880257004263:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:22.151101Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ad/r3tmp/tmpGAbCRT/pdisk_1.dat 2025-12-23T18:43:22.378557Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:22.385244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:22.385357Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:22.400059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:22.492342Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125880257004233:2081] 1766515402146673 != 1766515402146676 2025-12-23T18:43:22.502037Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3523, node 1 2025-12-23T18:43:22.559507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:22.559536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:22.559543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:22.559654Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:22.654043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:22.655934Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:22.659378Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:22.659413Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:22.662544Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:2580, port: 2580 2025-12-23T18:43:22.662626Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:22.686729Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:22.731301Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:22.775908Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:22.776557Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:22.776598Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:22.820074Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:22.866539Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:22.875250Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****XUmg (AD494103) () has now valid token of ldapuser@ldap 2025-12-23T18:43:25.544412Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125896172507915:2186];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:25.544478Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:43:25.584959Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ad/r3tmp/tmp8ffGCX/pdisk_1.dat 2025-12-23T18:43:25.718843Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:25.739511Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:25.741790Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:25.742514Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587125896172507767:2081] 1766515405515257 != 1766515405515260 2025-12-23T18:43:25.742639Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:25.760927Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2990, node 2 2025-12-23T18:43:25.833931Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:25.833968Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:25.833976Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:25.834090Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:25.908723Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:26.097169Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:26.099980Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:26.100022Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:26.100617Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:20723, port: 20723 2025-12-23T18:43:26.100704Z node 2 
:LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:26.119357Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:26.163219Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:26.211407Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****3xtQ (B2C33EF1) () has now valid token of ldapuser@ldap 2025-12-23T18:43:28.983245Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587125906532492490:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:28.983325Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ad/r3tmp/tmp1qSTpX/pdisk_1.dat 2025-12-23T18:43:28.995635Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:29.057707Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:29.059305Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587125906532492464:2081] 1766515408982323 != 1766515408982326 TServer::EnableGrpc on GrpcPort 1525, node 3 2025-12-23T18:43:29.095249Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:29.095353Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:29.099714Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:29.128421Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:29.128445Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:29.128453Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:29.128524Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:29.221696Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:29.224427Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:29.224449Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:29.224990Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://qqq:30793 ldap://localhost:30793 ldap://localhost:11111, port: 30793 2025-12-23T18:43:29.225037Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:29.253914Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: 
cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:29.299249Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:29.343171Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc= ... ryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:29.390148Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:29.435316Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:29.436652Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Gq2Q (A373964F) () has now valid token of ldapuser@ldap 2025-12-23T18:43:29.439257Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:32.520697Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587125923841616906:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:32.520775Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ad/r3tmp/tmpzWM01r/pdisk_1.dat 2025-12-23T18:43:32.547225Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:32.641112Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:32.676965Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:32.677045Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:32.685456Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62572, node 4 2025-12-23T18:43:32.755558Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:32.755584Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:32.755591Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:32.755687Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:32.852346Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:32.944255Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:32.946727Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:32.946770Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:32.947445Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:19216, port: 19216 2025-12-23T18:43:32.947530Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:32.983924Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:33.031392Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-12-23T18:43:33.075598Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****vRKw (24697E3E) () has now valid token of ldapuser@ldap 2025-12-23T18:43:36.577545Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587125941890325297:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:36.577614Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ad/r3tmp/tmpVQ410c/pdisk_1.dat 2025-12-23T18:43:36.598571Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:36.666222Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:36.667620Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7587125941890325255:2081] 1766515416576908 != 1766515416576911 2025-12-23T18:43:36.704848Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:36.704952Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:36.710044Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19570, node 5 2025-12-23T18:43:36.759161Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:36.759180Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:36.759188Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:36.759293Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:36.784324Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:36.890010Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:36.894064Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:36.894093Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:36.894848Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:15941, port: 15941 2025-12-23T18:43:36.894919Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:36.913851Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:36.959778Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:15941. Invalid credentials 2025-12-23T18:43:36.960297Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****_3GA (B31AD2DF) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:15941. Invalid credentials)' 2025-12-23T18:43:40.014373Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587125958982699472:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:40.014481Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ad/r3tmp/tmpMKEqvw/pdisk_1.dat 2025-12-23T18:43:40.033896Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:40.101488Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:40.103633Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7587125958982699438:2081] 1766515420013129 != 1766515420013132 2025-12-23T18:43:40.139929Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:40.140024Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:40.145199Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24461, node 6 2025-12-23T18:43:40.188548Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:40.188579Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:40.188588Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-12-23T18:43:40.188674Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:40.321524Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:40.337465Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:40.341620Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:40.341659Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:40.342546Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:12277, port: 12277 2025-12-23T18:43:40.342655Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:40.354154Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:40.395191Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:12277. Invalid credentials 2025-12-23T18:43:40.395675Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****9U9Q (32A283FF) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:12277. Invalid credentials)' |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-12-23T18:43:21.437857Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125879154118151:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:21.437993Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ae/r3tmp/tmpbdQC75/pdisk_1.dat 2025-12-23T18:43:21.646550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:21.646686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:21.671914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:21.704944Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:21.705316Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:21.718754Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125879154118125:2081] 
1766515401436269 != 1766515401436272 TServer::EnableGrpc on GrpcPort 28075, node 1 2025-12-23T18:43:21.808962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:21.808995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:21.809008Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:21.809121Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:21.883949Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:21.888141Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:21.888182Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:21.889103Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:26671, port: 26671 2025-12-23T18:43:21.889877Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:21.903866Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:21.951080Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:22.000219Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****3vSg (9921D767) () has now valid token of ldapuser@ldap 2025-12-23T18:43:22.001054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:24.757950Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125888521519892:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:24.758509Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ae/r3tmp/tmpGt20Pu/pdisk_1.dat 2025-12-23T18:43:24.772231Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:24.852987Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:24.897216Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:24.897305Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:24.904346Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 7994, node 2 2025-12-23T18:43:24.959148Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:24.959175Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:24.959189Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:24.959324Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:24.959464Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:25.031820Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:25.035719Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:25.035748Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:25.036658Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:30023, port: 30023 2025-12-23T18:43:25.036790Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:25.050990Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:25.095134Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:25.095672Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:25.095725Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:25.139153Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:25.183383Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:25.184206Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****_mrQ (8CDD2112) () has now valid token of ldapuser@ldap 2025-12-23T18:43:28.517206Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587125906814152557:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:28.517273Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ae/r3tmp/tmp36pO5Q/pdisk_1.dat 2025-12-23T18:43:28.530762Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:28.598535Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:28.598620Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:28.598832Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:28.600213Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587125906814152534:2081] 1766515408516763 != 1766515408516766 2025-12-23T18:43:28.620433Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21131, node 3 2025-12-23T18:43:28.665502Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:28.665534Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:28.665542Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:28.665624Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:28.752845Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:28.785760Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:28.789075Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:28.789106Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:28.789779Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:30255, port: 30255 2025-12-23T18:43:28.789856Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:28.802037Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:28.843350Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****3AFg (8F8FB198) () has now valid token of ldapuser@ldap 2025-12-23T18:43:32.282132Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587125925336404150:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:32 ... 
,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:32.448280Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18479, node 4 2025-12-23T18:43:32.525022Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:32.525053Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:32.525084Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:32.525173Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:32.599868Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:32.622229Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:32.622677Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:32.622700Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:32.623440Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://qqq:2788 ldap://localhost:2788 ldap://localhost:11111, port: 2788 2025-12-23T18:43:32.623559Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:32.644235Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:32.690983Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:32.691480Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:32.691522Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:32.735100Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:32.779055Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:32.779979Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****lckg (09050721) () has now valid token of ldapuser@ldap 2025-12-23T18:43:35.993061Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587125936756565798:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:35.993144Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ae/r3tmp/tmpYgtb24/pdisk_1.dat 2025-12-23T18:43:36.010568Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:36.072892Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:36.075726Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7587125936756565775:2081] 1766515415991969 != 1766515415991972 2025-12-23T18:43:36.099244Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:36.099350Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:36.127224Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32748, node 5 2025-12-23T18:43:36.171951Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:36.171977Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:36.171985Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:36.172074Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:36.223601Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:36.227597Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:36.227628Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:36.228402Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:7052, port: 7052 2025-12-23T18:43:36.228508Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:36.249338Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-12-23T18:43:36.294076Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:36.294576Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:36.294608Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-12-23T18:43:36.339004Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-12-23T18:43:36.387172Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-12-23T18:43:36.388222Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****TLPQ (B324ED21) () has now valid token of ldapuser@ldap 2025-12-23T18:43:36.389833Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:39.851376Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587125954201339877:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:39.852061Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ae/r3tmp/tmpAsVV4N/pdisk_1.dat 2025-12-23T18:43:39.864760Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:39.953970Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:39.955351Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7587125954201339839:2081] 1766515419848327 != 1766515419848330 2025-12-23T18:43:39.970438Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:39.970537Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:39.997020Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62713, node 6 2025-12-23T18:43:40.051642Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:40.051674Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:40.051682Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:40.051773Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:40.124808Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2025-12-23T18:43:40.203953Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:40.208233Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:40.208271Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:40.209084Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14366, port: 14366 2025-12-23T18:43:40.209200Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:40.214989Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-12-23T18:43:40.215104Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:14366. Bad search filter 2025-12-23T18:43:40.215370Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****g5Hw (AA9465A4) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:14366. Bad search filter)' |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> LocalTableWriter::ConsistentWrite >> ReadLoad::ShouldReadIterate >> LdapAuthProviderTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2025-12-23T18:43:39.376419Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125954830733604:2145];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:39.377124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00464d/r3tmp/tmpMJo6ZK/pdisk_1.dat 2025-12-23T18:43:39.615184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:39.619420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:39.619539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:39.625692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:39.729173Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 
3059, node 1 2025-12-23T18:43:39.779798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/00464d/r3tmp/yandexPvppyc.tmp 2025-12-23T18:43:39.779820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/00464d/r3tmp/yandexPvppyc.tmp 2025-12-23T18:43:39.779950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/00464d/r3tmp/yandexPvppyc.tmp 2025-12-23T18:43:39.780015Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:39.793480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3211 PQClient connected to localhost:3059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515419739 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:40.041811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-12-23T18:43:40.377269Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:42.454671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125967715636342:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:42.454781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125967715636333:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:42.455189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:42.456174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125967715636351:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:42.456300Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:42.458522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:42.469353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125967715636348:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-12-23T18:43:42.658406Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125967715636416:2587] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:42.687246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:42.776989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:42.828054Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587125967715636424:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:43:42.828635Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=NGJjMzFhOTUtMzAxNTU0YjMtZTVlZTg3YmEtYmVjOGVkOTk=, ActorId: [1:7587125967715636324:2322], ActorState: ExecuteState, TraceId: 01kd68cd7mf1atsy55fanhy7b5, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:43:42.831100Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:43:42.867723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); |95.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildSyncIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex >> UpsertLoad::ShouldWriteDataBulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-12-23T18:43:19.335959Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125869583275552:2145];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:19.336346Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b3/r3tmp/tmpY5wgYE/pdisk_1.dat 2025-12-23T18:43:19.654842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-12-23T18:43:19.660056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:19.660153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:19.690360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:19.806978Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125869583275435:2081] 1766515399317928 != 1766515399317931 2025-12-23T18:43:19.820557Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:19.842825Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 21081, node 1 2025-12-23T18:43:19.896901Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:19.896932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:19.896941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:19.897036Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:19.971819Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:19.979464Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:19.979510Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:19.981071Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:27276, port: 27276 2025-12-23T18:43:19.981390Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:20.051296Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:20.099071Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:20.148209Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****tIBQ (BF6871D6) () has now valid token of ldapuser@ldap 2025-12-23T18:43:22.768530Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125880715404841:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:22.768604Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b3/r3tmp/tmp2NNL1M/pdisk_1.dat 2025-12-23T18:43:22.795664Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:22.902577Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:22.920426Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:22.920540Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:22.936058Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:22.965310Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 31043, node 2 2025-12-23T18:43:23.036615Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:23.036638Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:23.036645Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:23.036744Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:23.129802Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:23.133043Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:23.133087Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:23.133880Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:11690, port: 11690 2025-12-23T18:43:23.133995Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:23.195049Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:23.239643Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****6umA (3B27F747) () has now valid token of ldapuser@ldap 2025-12-23T18:43:26.360049Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587125900058442398:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:26.360132Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b3/r3tmp/tmpjMU1Gh/pdisk_1.dat 2025-12-23T18:43:26.382840Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:26.438382Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: 
Table profiles were not loaded 2025-12-23T18:43:26.439776Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587125900058442350:2081] 1766515406348526 != 1766515406348529 2025-12-23T18:43:26.469811Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:26.469909Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:26.481467Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22508, node 3 2025-12-23T18:43:26.559356Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:26.559383Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:26.559390Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:26.559467Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:26.616507Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:26.846907Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:26.850191Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:26.850223Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:26.850954Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:6752, port: 6752 2025-12-23T18:43:26.851025Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:26.907242Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:26.955092Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:26.955589Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:26.955655Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:26.999086Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:27.043660Z node 3 :LDAP_AUTH_PROVIDER DEBUG: 
ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:27.046844Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****cAOQ (16D28475) () has now valid token of ldapuser@ldap 2025-12-23T18:43:29.700658Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587125911890202919:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18: ... ileState: Disconnected -> Connecting 2025-12-23T18:43:33.686523Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4563, node 5 2025-12-23T18:43:33.733433Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:33.733452Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:33.733459Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:33.733519Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:33.764176Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:33.867105Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:33.870669Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:33.870715Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:33.871499Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://qqq:3428 ldaps://localhost:3428 ldaps://localhost:11111, port: 3428 2025-12-23T18:43:33.871619Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:33.947346Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:33.991101Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:33.991761Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:33.991824Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:34.035309Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:34.083149Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:34.084403Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****s9Lg (C1CF775E) () has now valid token of ldapuser@ldap 2025-12-23T18:43:37.319341Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587125946402695113:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:37.319400Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b3/r3tmp/tmpLUTBj9/pdisk_1.dat 2025-12-23T18:43:37.342584Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:37.415785Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:37.418016Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7587125946402695082:2081] 1766515417318253 != 1766515417318256 2025-12-23T18:43:37.457709Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:37.457799Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:37.463272Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4476, node 6 2025-12-23T18:43:37.506765Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:37.506794Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:37.506802Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:37.506898Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:37.517560Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:37.732788Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:37.736792Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:37.736831Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:37.737618Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:13062, port: 13062 
2025-12-23T18:43:37.737739Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:37.795394Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-12-23T18:43:37.843084Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:37.843711Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:37.843763Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-12-23T18:43:37.887183Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-12-23T18:43:37.931126Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-12-23T18:43:37.932442Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****B3ew (79B8A449) () has now valid token of ldapuser@ldap 2025-12-23T18:43:41.139102Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7587125962022170259:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:41.139166Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b3/r3tmp/tmpH5a0G7/pdisk_1.dat 2025-12-23T18:43:41.158133Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:41.236492Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:41.246806Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7587125962022170233:2081] 1766515421138072 != 1766515421138075 2025-12-23T18:43:41.270656Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:41.270727Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:41.278439Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28340, node 7 2025-12-23T18:43:41.319553Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-12-23T18:43:41.319581Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:41.319590Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:41.319684Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:41.384833Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:41.388490Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:41.388526Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:41.389252Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:65395, port: 65395 2025-12-23T18:43:41.389359Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:41.447318Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-12-23T18:43:41.447456Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:65395. Bad search filter 2025-12-23T18:43:41.448230Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****Z19w (E17A6C31) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:65395. Bad search filter)' 2025-12-23T18:43:41.449699Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |95.1%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-clusteradmin |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] Test command err: 2025-12-23T18:43:20.493549Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125872038474021:2189];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:20.493807Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046af/r3tmp/tmpiiOjZg/pdisk_1.dat 2025-12-23T18:43:20.802881Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:20.810818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:20.810949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:20.891774Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125872038473855:2081] 1766515400476215 != 1766515400476218 2025-12-23T18:43:20.901417Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:20.904190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20647, node 1 2025-12-23T18:43:20.979510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:20.979537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:20.979544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:20.979638Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:21.025098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:21.059286Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:21.062792Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:21.062828Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:21.064288Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://unavailablehost:25644, port: 25644 2025-12-23T18:43:21.064377Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:21.068854Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:184: Could not 
start TLS. Can't contact LDAP server 2025-12-23T18:43:21.069459Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****10og (34A26194) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2025-12-23T18:43:21.069768Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:21.069795Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:21.070692Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://unavailablehost:25644, port: 25644 2025-12-23T18:43:21.070800Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:21.074627Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:184: Could not start TLS. Can't contact LDAP server 2025-12-23T18:43:21.074784Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****10og (34A26194) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2025-12-23T18:43:23.951151Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125884359413511:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:23.951207Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046af/r3tmp/tmplqTCOC/pdisk_1.dat 2025-12-23T18:43:23.963946Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:24.034087Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:24.035766Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587125884359413485:2081] 1766515403949995 != 1766515403949998 2025-12-23T18:43:24.075320Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:24.075427Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:24.081496Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:24.082319Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32625, node 2 2025-12-23T18:43:24.127269Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:24.127299Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:24.127306Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:24.127417Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2025-12-23T18:43:24.254874Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:24.256713Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:24.256742Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:24.257408Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****uovQ (4D83D041) () has now permanent error message 'Could not login via LDAP (List of ldap server hosts is empty)' 2025-12-23T18:43:27.528323Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587125903916888505:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:27.528408Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046af/r3tmp/tmp04Micu/pdisk_1.dat 2025-12-23T18:43:27.547594Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:27.624742Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61898, node 3 2025-12-23T18:43:27.639301Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:27.639373Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:27.658018Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:27.671999Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:27.672020Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:27.672027Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:27.672101Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:27.835114Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:27.838150Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:27.838177Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:27.838908Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****xVqQ (3AFB3BB0) () has now permanent error message 'Could not login via LDAP (Parameter BaseDn is empty)' 2025-12-23T18:43:27.840241Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:30.877656Z node 4 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587125916842708861:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:30.877722Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046af/r3tmp/tmp3cdWJQ/pdisk_1.dat 2025-12-23T18:43:30.909798Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:30.986045Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:30.990892Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587125916842708835:2081] 1766515410876617 != 1766515410876620 2025-12-23T18:43:31.024745Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:31.024845Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:31.027530Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30713, node 4 2025-12-23T18:43:31.069729Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empt ... node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587125932206174337:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:34.231913Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046af/r3tmp/tmpJPeA0E/pdisk_1.dat 2025-12-23T18:43:34.251898Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:34.306216Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:34.345736Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:34.345821Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 8514, node 5 2025-12-23T18:43:34.348013Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:34.395785Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:34.395818Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:34.395824Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:34.395908Z node 5 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:34.407386Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:34.518988Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:34.519360Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:34.519372Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:34.519864Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****rBOg (63D7F3EE) () has now permanent error message 'Could not login via LDAP (Parameter BindPassword is empty)' 2025-12-23T18:43:38.178915Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587125951835820895:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:38.178978Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046af/r3tmp/tmpWtgD0K/pdisk_1.dat 2025-12-23T18:43:38.218155Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:38.292901Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7587125951835820869:2081] 1766515418177780 != 1766515418177783 2025-12-23T18:43:38.303312Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:38.307459Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:38.307548Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:38.325834Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5308, node 6 2025-12-23T18:43:38.398954Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:38.398988Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:38.398997Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:38.399085Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:38.412471Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:38.578993Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 
2025-12-23T18:43:38.579041Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:38.579119Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root, login state is not available yet, deffer token (eyJh****v1Jw (2A943826)) 2025-12-23T18:43:39.196205Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:39.579652Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:39.579690Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1557: Handle deferred tokens for database: /Root 2025-12-23T18:43:39.580037Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:39.580064Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:39.581079Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:5260, port: 5260 2025-12-23T18:43:39.581168Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:39.639364Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:39.683616Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:39.684125Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:39.684187Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:39.731647Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:39.775145Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:39.776743Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****v1Jw (2A943826) () has now valid token of ldapuser@ldap 2025-12-23T18:43:41.746404Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7587125964145645457:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:41.746472Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:43:41.757377Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046af/r3tmp/tmpAq4tyf/pdisk_1.dat 2025-12-23T18:43:41.855400Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:41.856919Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7587125964145645433:2081] 1766515421745899 != 1766515421745902 2025-12-23T18:43:41.857057Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:41.869116Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:41.869211Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:41.889331Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65451, node 7 2025-12-23T18:43:41.947692Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:41.947718Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:41.947727Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:41.947810Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:42.018499Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:42.018535Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:42.018569Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root, login state is not available yet, deffer token (eyJh****MujQ (483C148D)) 2025-12-23T18:43:42.091676Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:42.753511Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:44.750331Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****MujQ (483C148D) () has now permanent error message 'Login state is not available' 2025-12-23T18:43:44.750423Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:2352: Finish waiting for login providers for 1 databases: /Root, |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink >> ReadLoad::ShouldReadKqp >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom >> 
LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer >> KqpPg::CreateIndex [GOOD] >> KqpPg::CreateNotNullPgColumn >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom >> test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-dbadmin >> UpsertLoad::ShouldWriteDataBulkUpsertBatch >> KqpPg::DropTableIfExists [GOOD] >> KqpPg::DropTableIfExists_GenericQuery >> test_sql_negative.py::test[watermarks-bad_column-default.txt] [FAIL] >> Cdc::InitialScanUpdatedRows [GOOD] >> Cdc::InitialScanAndLimits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:38:58.550938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:38:58.687720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:38:58.703293Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:38:58.703688Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:38:58.703780Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:59.026257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:59.026489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:59.100308Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515135464045 != 1766515135464049 2025-12-23T18:38:59.111302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:59.163718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:59.271099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:38:59.511872Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002faf/r3tmp/tmpGGUB9p/pdisk_1.dat 2025-12-23T18:38:59.764733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:38:59.965097Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:38:59.965175Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:38:59.965310Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:865:2748] 2025-12-23T18:39:00.162457Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:865:2748] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:39:00.162608Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:865:2748] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:39:00.163623Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:39:00.163793Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:865:2748] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:39:00.164306Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:00.164489Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:865:2748] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:39:00.164692Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:865:2748] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:39:00.167323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:00.167979Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:39:00.168875Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:865:2748] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:39:00.168990Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:865:2748] txid# 281474976715657 SEND to# [1:846:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:39:00.207739Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:39:00.209106Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:39:00.209475Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:890:2769] 2025-12-23T18:39:00.209867Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:39:00.266221Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:39:00.267232Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:39:00.267393Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:39:00.269661Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:39:00.269782Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:39:00.269850Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:39:00.270323Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:39:00.270502Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:39:00.270621Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:905:2769] in generation 1 2025-12-23T18:39:00.281721Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:39:00.335355Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:39:00.335659Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:39:00.335855Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:907:2779] 2025-12-23T18:39:00.335903Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:39:00.335970Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:39:00.336012Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:39:00.336311Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:890:2769], Recipient [1:890:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:39:00.336384Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:39:00.336827Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:39:00.336959Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:39:00.337035Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:39:00.337086Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:39:00.337143Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:39:00.337189Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:39:00.337229Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:39:00.337292Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:39:00.337371Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:39:00.337513Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2770], Recipient [1:890:2769]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:39:00.337555Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:39:00.337637Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:888:2768], serverId# [1:892:2770], sessionId# [0:0:0] 2025-12-23T18:39:00.337766Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2770] 2025-12-23T18:39:00.337825Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:39:00.337966Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:39:00.338284Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:39:00.338366Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 ... ted with status OK role: Leader [27:1175:2970] 2025-12-23T18:43:44.562127Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037888] send queued [27:1175:2970] 2025-12-23T18:43:44.562233Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [27:1175:2970] 2025-12-23T18:43:44.562390Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72075186224037888] shutdown pipe due to pending shutdown request [27:1175:2970] 2025-12-23T18:43:44.562493Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72075186224037888] notify reset [27:1175:2970] 2025-12-23T18:43:44.562870Z node 27 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [27:1174:2969], Recipient [27:917:2781]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2025-12-23T18:43:44.563024Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} queued, type NKikimr::NDataShard::TDataShard::TTxCompactTable 2025-12-23T18:43:44.563183Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:43:44.563360Z node 27 :TABLET_EXECUTOR DEBUG: TCompactionLogic PrepareForceCompaction for 72075186224037888 table 1001, mode Full, forced state None, forced mode Full 2025-12-23T18:43:44.563562Z node 27 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186224037888 tableId# 38 localTid# 1001, requested from [27:1174:2969], partsCount# 0, memtableSize# 656, memtableWaste# 3952, memtableRows# 2 2025-12-23T18:43:44.563770Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:43:44.563904Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:43:44.564244Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888: task 1, edge 9223372036854775807/0, generation 0 2025-12-23T18:43:44.564356Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:16} starting compaction 2025-12-23T18:43:44.564858Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} starting Scan{1 on 1001, Compact{72075186224037888.1.16, eph 1}} 2025-12-23T18:43:44.565035Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} started compaction 1 2025-12-23T18:43:44.565145Z node 27 :TABLET_EXECUTOR 
DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888 started compaction 1 generation 0 ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 16987351521151898222 2025-12-23T18:43:44.626650Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} Compact 1 on TGenCompactionParams{1001: gen 0 epoch +inf, 0 parts} step 16, product {tx status + 1 parts epoch 2} done 2025-12-23T18:43:44.627120Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CompactionFinished for 72075186224037888: compaction 1, generation 0 2025-12-23T18:43:44.627285Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 1, state Free, final id 0, final level 0 2025-12-23T18:43:44.627366Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 3, state Free, final id 0, final level 0 2025-12-23T18:43:44.627854Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:02.529381Z 2025-12-23T18:43:44.628037Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} queued, type NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs 2025-12-23T18:43:44.628169Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:43:44.628291Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-12-23T18:43:44.628399Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [27:1174:2969]pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T18:43:44.629177Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} hope 1 -> done Change{17, redo 83b alter 0b annex 0, ~{ 27 } -{ }, 0 gb} 2025-12-23T18:43:44.629343Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} release 4194304b of static, Memory{0 dyn 0} ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 8522186120996105518 ... 
blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 8291127440517997928 ========= Starting an immediate read ========= 2025-12-23T18:43:44.862288Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037888] send [27:1107:2915] 2025-12-23T18:43:44.862407Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [27:1107:2915] 2025-12-23T18:43:44.862814Z node 27 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [27:1200:2977], Recipient [27:917:2781]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-12-23T18:43:44.863080Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-12-23T18:43:44.863244Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:43:44.863406Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:43:44.863521Z node 27 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2502/281474976715662 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T18:43:44.863638Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v2502/18446744073709551615 2025-12-23T18:43:44.863789Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-12-23T18:43:44.863992Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T18:43:44.864098Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:43:44.864194Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:43:44.864277Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:43:44.864345Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2025-12-23T18:43:44.864433Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T18:43:44.864464Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:43:44.864487Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:43:44.864511Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:43:44.864695Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 
72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-12-23T18:43:44.865000Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is DelayComplete 2025-12-23T18:43:44.865066Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:43:44.865162Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:43:44.865247Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:43:44.865309Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T18:43:44.865335Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:43:44.865377Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2025-12-23T18:43:44.865454Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:43:44.865605Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{18, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:43:44.865744Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:43:44.960431Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-12-23T18:43:44.960662Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:43:44.961051Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{16, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-12-23T18:43:44.961229Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:43:44.962221Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} commited cookie 1 for step 17 2025-12-23T18:43:44.962585Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [27:534:2477] 2025-12-23T18:43:44.962704Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [27:534:2477] |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_volatile/unittest >> LocalTableWriter::ConsistentWrite [GOOD] >> UpsertLoad::ShouldCreateTable >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad >> TestShred::Run3CyclesForTopics [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert |95.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_column-default.txt] [FAIL] |95.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2025-12-23T18:43:45.593663Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125981642775582:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:45.593760Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002ff1/r3tmp/tmpbVAD3t/pdisk_1.dat 2025-12-23T18:43:45.833115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:45.833277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:45.873014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:45.891020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:45.892373Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:45.895430Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125981642775551:2081] 1766515425592193 != 1766515425592196 TClient is connected to server localhost:14129 TServer::EnableGrpc on GrpcPort 21717, node 1 2025-12-23T18:43:46.113422Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:46.144563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:46.144589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:46.144599Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:46.144734Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14129 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515425962 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:46.459686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:46.468258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515426571 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... 
(TRUNCATED) 2025-12-23T18:43:46.586172Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handshake: worker# [1:7587125985937743672:2488] 2025-12-23T18:43:46.587563Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:43:46.588003Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:43:46.588051Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Send handshake: worker# [1:7587125985937743672:2488] 2025-12-23T18:43:46.588540Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-12-23T18:43:46.594095Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-12-23T18:43:46.594313Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2025-12-23T18:43:46.594584Z node 1 
:REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587125985937743767:2549] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-12-23T18:43:46.594656Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:46.594801Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587125985937743767:2549] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-12-23T18:43:46.597311Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587125985937743767:2549] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:43:46.597367Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:46.597424Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2025-12-23T18:43:46.597756Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-12-23T18:43:46.598176Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-12-23T18:43:46.598535Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2025-12-23T18:43:46.598658Z node 1 :REPLICATION_SERVICE DEBUG: 
base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2025-12-23T18:43:46.598862Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587125985937743767:2549] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-12-23T18:43:46.600634Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587125985937743767:2549] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:43:46.600676Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:46.600709Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2025-12-23T18:43:46.600936Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:46.601097Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-12-23T18:43:46.601266Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2025-12-23T18:43:46.601403Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587125985937743767:2549] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-12-23T18:43:46.602827Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587125985937743767:2549] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:43:46.602884Z node 1 
:REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:46.602919Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2025-12-23T18:43:46.603267Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587125985937743763:2549] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::Run3CyclesForTopics [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:43:33.849227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:33.849337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:33.849381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:33.849430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:33.849470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:33.849502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:33.849554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:33.849646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:33.850452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:33.850727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:33.920803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot 
subscribe to console configs 2025-12-23T18:43:33.920888Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:33.930623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:33.930872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:33.931006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:33.934770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:33.935008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:43:33.935748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:33.936026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:33.938278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:33.938499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:33.939562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:33.939636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:33.939730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:33.939769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:33.939845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:33.939970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:34.073744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.077709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.077898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.077984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.078055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.078122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.078217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.078294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.078364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.078428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.078519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.078596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.078684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.078782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:34.078873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... .cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 39] in# 16 ms, next wakeup# 593.984000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-12-23T18:43:47.518120Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. 
Send request to BS controller 2025-12-23T18:43:47.520016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2025-12-23T18:43:47.520430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-12-23T18:43:47.520464Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2025-12-23T18:43:47.520699Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [2:703:2685], Recipient [2:695:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-12-23T18:43:47.520736Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:47.520763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:47.520809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:47.520841Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2025-12-23T18:43:47.520910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:47.521027Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:48.198534Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:48.198615Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:48.198704Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:862:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:48.198731Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:48.198802Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1248:3131]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:48.198830Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:48.198889Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:695:2679], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:48.198918Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:48.198990Z node 2 :FLAT_TX_SCHEMESHARD 
TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:862:2814], Recipient [2:862:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:48.199017Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:48.199092Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:1248:3131], Recipient [2:1248:3131]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:48.199128Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:48.209480Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:48.209567Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:48.209599Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2025-12-23T18:43:48.209850Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [2:703:2685], Recipient [2:695:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-12-23T18:43:48.209890Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:48.209922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:48.209992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:48.210036Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2025-12-23T18:43:48.210094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-12-23T18:43:48.210142Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-12-23T18:43:48.881384Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1248:3131]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:48.881477Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:48.881554Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:48.881584Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:48.881639Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient 
[2:862:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:48.881666Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:43:48.881729Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:1248:3131], Recipient [2:1248:3131]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:48.881760Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:48.881832Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:695:2679], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:48.881860Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:48.881915Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [2:862:2814], Recipient [2:862:2814]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:48.881941Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:43:48.892335Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:695:2679]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:48.892426Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2025-12-23T18:43:48.892459Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2025-12-23T18:43:48.892724Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 268637738, Sender [2:703:2685], Recipient [2:695:2679]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-12-23T18:43:48.892769Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5470: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-12-23T18:43:48.892800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8262: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-12-23T18:43:48.892872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2025-12-23T18:43:48.892905Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2025-12-23T18:43:48.892953Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.982000s, Timestamp# 1970-01-01T00:00:11.154000Z 2025-12-23T18:43:48.892988Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 3, duration# 2 s 2025-12-23T18:43:48.895081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-12-23T18:43:48.895788Z node 2 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [2:2241:4038], Recipient [2:695:2679]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:48.895851Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:48.895895Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046678944 2025-12-23T18:43:48.896003Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125519, Sender [2:680:2670], Recipient [2:695:2679]: NKikimrScheme.TEvShredInfoRequest 2025-12-23T18:43:48.896039Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2025-12-23T18:43:48.896078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8213: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterTable >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-clusteradmin >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> test_sql_streaming.py::test[hop-GroupByHop-default.txt] [FAIL] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-clusteradmin >> test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] |95.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink >> UpsertLoad::ShouldWriteKqpUpsert2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:51.334454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:51.334586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:51.334634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:51.334675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:51.334714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:51.334768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:51.334830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:51.334909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:51.335813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:51.336102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:51.470794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:51.470869Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:51.471696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:51.486590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:51.486795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:51.486979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-12-23T18:40:51.494083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:51.494307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:51.495071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:51.495415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:51.498048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:51.498239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:51.499450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:51.499519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:51.499662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:51.499710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:51.499752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:51.499910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:51.506507Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-12-23T18:40:51.642619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:51.645232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:51.645466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:40:51.645525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:40:51.645770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:40:51.645828Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:51.655751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:51.655986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:40:51.656246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:51.656331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:40:51.656384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:40:51.656419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:40:51.658379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:51.658442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:40:51.658487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:40:51.665682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:51.665749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:51.665826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:51.665890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:40:51.669660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:40:51.675176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:40:51.675356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:40:51.676484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:51.676618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:51.676664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:51.677116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:40:51.677174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:51.677344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:40:51.677414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:51.680064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard ... 3, at schemeshard: 72057594046678944, txId: 253 2025-12-23T18:43:47.620247Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 180], version: 5 2025-12-23T18:43:47.620286Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 180] was 2 2025-12-23T18:43:47.622178Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 181 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2025-12-23T18:43:47.622262Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 181 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2025-12-23T18:43:47.622293Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 253 2025-12-23T18:43:47.622324Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 181], version: 2 2025-12-23T18:43:47.622359Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 181] was 2 2025-12-23T18:43:47.623097Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 178 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2025-12-23T18:43:47.623176Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 178 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2025-12-23T18:43:47.623220Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 253 2025-12-23T18:43:47.623252Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 178], version: 2 2025-12-23T18:43:47.623286Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 178] was 2 2025-12-23T18:43:47.623353Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 253, subscribers: 0 2025-12-23T18:43:47.625417Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2025-12-23T18:43:47.625572Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2025-12-23T18:43:47.628000Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2025-12-23T18:43:47.628117Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2025-12-23T18:43:47.628185Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 TestModificationResult got TxId: 253, wait until txId: 253 TestWaitNotification wait txId: 253 2025-12-23T18:43:47.630459Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 253: send EvNotifyTxCompletion 2025-12-23T18:43:47.630509Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 253 2025-12-23T18:43:47.632890Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 253, at schemeshard: 72057594046678944 2025-12-23T18:43:47.632996Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 253: got EvNotifyTxCompletionResult 2025-12-23T18:43:47.633032Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 253: satisfy waiter [32:4231:6217] TestWaitNotification: OK eventTxId 253 TestWaitNotification wait txId: 245 2025-12-23T18:43:47.635017Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 245: send EvNotifyTxCompletion 2025-12-23T18:43:47.635062Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 245 TestWaitNotification wait 
txId: 246 2025-12-23T18:43:47.635162Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 246: send EvNotifyTxCompletion 2025-12-23T18:43:47.635205Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 246 TestWaitNotification wait txId: 247 2025-12-23T18:43:47.635271Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 247: send EvNotifyTxCompletion 2025-12-23T18:43:47.635297Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 247 TestWaitNotification wait txId: 248 2025-12-23T18:43:47.635353Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 248: send EvNotifyTxCompletion 2025-12-23T18:43:47.635380Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 248 TestWaitNotification wait txId: 249 2025-12-23T18:43:47.635442Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 249: send EvNotifyTxCompletion 2025-12-23T18:43:47.635468Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 249 TestWaitNotification wait txId: 250 2025-12-23T18:43:47.635527Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 250: send EvNotifyTxCompletion 2025-12-23T18:43:47.635553Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 250 TestWaitNotification wait txId: 251 2025-12-23T18:43:47.635609Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 251: send EvNotifyTxCompletion 2025-12-23T18:43:47.635637Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 251 TestWaitNotification wait txId: 252 2025-12-23T18:43:47.635693Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 252: send EvNotifyTxCompletion 2025-12-23T18:43:47.635725Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 252 2025-12-23T18:43:47.638378Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 245, at schemeshard: 72057594046678944 2025-12-23T18:43:47.638636Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 245: got EvNotifyTxCompletionResult 2025-12-23T18:43:47.638678Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 245: satisfy waiter [32:4234:6220] 2025-12-23T18:43:47.639079Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 246, at schemeshard: 72057594046678944 2025-12-23T18:43:47.639642Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 247, at schemeshard: 72057594046678944 2025-12-23T18:43:47.639717Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 246: got EvNotifyTxCompletionResult 2025-12-23T18:43:47.639749Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 246: satisfy waiter [32:4234:6220] 2025-12-23T18:43:47.639951Z 
node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 248, at schemeshard: 72057594046678944 2025-12-23T18:43:47.640181Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 247: got EvNotifyTxCompletionResult 2025-12-23T18:43:47.640218Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 247: satisfy waiter [32:4234:6220] 2025-12-23T18:43:47.640295Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 249, at schemeshard: 72057594046678944 2025-12-23T18:43:47.640445Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 250, at schemeshard: 72057594046678944 2025-12-23T18:43:47.640502Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 248: got EvNotifyTxCompletionResult 2025-12-23T18:43:47.640532Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 248: satisfy waiter [32:4234:6220] 2025-12-23T18:43:47.640718Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 251, at schemeshard: 72057594046678944 2025-12-23T18:43:47.640773Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 249: got EvNotifyTxCompletionResult 2025-12-23T18:43:47.640800Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 249: satisfy waiter [32:4234:6220] 2025-12-23T18:43:47.640972Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 252, at schemeshard: 72057594046678944 2025-12-23T18:43:47.641027Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 250: got EvNotifyTxCompletionResult 2025-12-23T18:43:47.641053Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 250: satisfy waiter [32:4234:6220] 2025-12-23T18:43:47.641198Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 251: got EvNotifyTxCompletionResult 2025-12-23T18:43:47.641227Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 251: satisfy waiter [32:4234:6220] 2025-12-23T18:43:47.641369Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 252: got EvNotifyTxCompletionResult 2025-12-23T18:43:47.641401Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 252: satisfy waiter [32:4234:6220] TestWaitNotification: OK eventTxId 245 TestWaitNotification: OK eventTxId 246 TestWaitNotification: OK eventTxId 247 TestWaitNotification: OK eventTxId 248 TestWaitNotification: OK eventTxId 249 TestWaitNotification: OK eventTxId 250 TestWaitNotification: OK eventTxId 251 TestWaitNotification: OK eventTxId 252 >> KqpPg::CreateUniqComplexPgColumn+useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn-useSink >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> LocalTableWriter::ApplyInCorrectOrder >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [FAIL] >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> 
UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom >> LocalTableWriter::WaitTxIds >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TBlobStorageProxyTest::TestEmptyRange >> KqpPg::CreateNotNullPgColumn [GOOD] >> KqpPg::CreateSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:43:50.604790Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:50.731696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:43:50.744624Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:50.745032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:50.745130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:51.054036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:51.054171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:51.134607Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515427488921 != 1766515427488925 2025-12-23T18:43:51.155212Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:51.201963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:51.289680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004339/r3tmp/tmpVnlEGZ/pdisk_1.dat 2025-12-23T18:43:51.587893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:51.602201Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:51.705622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:51.971416Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-12-23T18:43:51.971596Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:951:2809], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2025-12-23T18:43:52.065677Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:951:2809], subTag: 2} TUpsertActor finished in 0.093676s, errors=0 2025-12-23T18:43:52.065778Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# 
[1:952:2810] with tag# 2 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [FAIL] |95.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll >> DataShardVolatile::NotCachingAbortingDeletes-UseSink [GOOD] >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:43:50.542507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:50.646391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:43:50.656484Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:50.656962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:50.657065Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:50.905606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:50.905747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:50.977985Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515427892008 != 1766515427892012 2025-12-23T18:43:50.992639Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:51.037812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:51.141512Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004336/r3tmp/tmpkykzSW/pdisk_1.dat 2025-12-23T18:43:51.429485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:51.444645Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:51.547462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:51.822289Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2025-12-23T18:43:51.822420Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2025-12-23T18:43:51.825328Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 2} started# 5 actors each with inflight# 4 2025-12-23T18:43:51.825393Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 
1 KeyFrom: 12345 2025-12-23T18:43:51.825466Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-12-23T18:43:51.825495Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-12-23T18:43:51.825517Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-12-23T18:43:51.825550Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-12-23T18:43:51.828760Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 1} session: ydb://session/3?node_id=1&id=NjM1MWJhNy1mOTgyOTdkOS0zNTdkMzAwYy0zZjMzMjRmYQ== 2025-12-23T18:43:51.831660Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 2} session: ydb://session/3?node_id=1&id=NzViYmI0ZGEtNGFiNDFlYWYtNzFkYWEzNjMtNDE2ZWQwZjM= 2025-12-23T18:43:51.831734Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 3} session: ydb://session/3?node_id=1&id=YWYzYThmMzgtZGZlMmM0NTgtYzdhZDcyZDktNTRmNjA5Yjk= 2025-12-23T18:43:51.832966Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 4} session: ydb://session/3?node_id=1&id=MjNjM2E4YzctMjM1NDFlM2EtOGQ4OTgzODktNjcxMDE3ZmE= 2025-12-23T18:43:51.834354Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 5} session: ydb://session/3?node_id=1&id=ZmIxOTg2ZDItZjczOWEzMTMtNTRiZDU2NzYtYzY0NDdmZTE= 2025-12-23T18:43:51.837617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:965:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:51.837690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:992:2844], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:51.837738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:993:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:51.837795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:51.837881Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:51.837931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:51.838008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:51.839369Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1009:2861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:51.839482Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:51.844236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:51.909169Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1010:2862] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:43:51.909690Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1015:2867] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:43:51.910223Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1016:2868] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:43:51.910557Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1020:2869] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:43:52.029435Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1003:2855], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:52.029562Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1004:2856], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:52.029615Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1005:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:52.029695Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1006:2858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:52.029745Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1007:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:52.065316Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1097:2926] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:52.565635Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 5} finished in 1766515432.565567s, errors=0 2025-12-23T18:43:52.565816Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1766515432565 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:52.579087Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1170:2964] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:52.641895Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 4} finished in 1766515432.641849s, errors=0 2025-12-23T18:43:52.642208Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1766515432641 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:52.655753Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1221:2986] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:52.722225Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 3} finished in 1766515432.722182s, errors=0 2025-12-23T18:43:52.722509Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1766515432722 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:52.736765Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1272:3008] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:52.801846Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 1} finished in 1766515432.801808s, errors=0 2025-12-23T18:43:52.802105Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1766515432801 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:52.815482Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1323:3030] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:52.878905Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:952:2810], subTag: 2} finished in 1766515432.878866s, errors=0 2025-12-23T18:43:52.879247Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1766515432878 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:52.879315Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 2} finished in 1.054126s, oks# 20, errors# 0 2025-12-23T18:43:52.879413Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:952:2810] with tag# 2 >> KqpPg::DropTableIfExists_GenericQuery [GOOD] >> KqpPg::EquiJoin+useSink |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-system >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> LocalTableWriter::ApplyInCorrectOrder [GOOD] >> LocalTableWriter::WriteTable >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LocalTableWriter::DataAlongWithHeartbeat >> LocalTableWriter::WaitTxIds [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:43:52.045771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:52.132365Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:43:52.142300Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:52.142651Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:52.142726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:52.431800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:52.431940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:52.520990Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515429507014 != 1766515429507018 2025-12-23T18:43:52.535380Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:52.584229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:52.684438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004331/r3tmp/tmpRxeVIv/pdisk_1.dat 2025-12-23T18:43:52.973882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:52.988830Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:53.094591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:53.362906Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-12-23T18:43:53.363021Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-12-23T18:43:53.367223Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 2} started# 5 actors each with inflight# 4 2025-12-23T18:43:53.367343Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-12-23T18:43:53.367450Z 
node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-12-23T18:43:53.367485Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-12-23T18:43:53.367512Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-12-23T18:43:53.367549Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-12-23T18:43:53.371593Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 1} session: ydb://session/3?node_id=1&id=ZWMyODEyZTQtN2IxNzk1ZjItMjE3MTFjZjgtZTZjZjMzNGQ= 2025-12-23T18:43:53.375581Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 2} session: ydb://session/3?node_id=1&id=YjZhNjc0YjEtZjQ2ZWVjZTgtMTRkNjcyNzctMjY3MjE5ZmM= 2025-12-23T18:43:53.375652Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 3} session: ydb://session/3?node_id=1&id=ZWRiNTM0NC1kYzA5ZDUzMy1iNzE1YmNhOS1hNGJhYmVkZQ== 2025-12-23T18:43:53.377577Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 4} session: ydb://session/3?node_id=1&id=ZTVkMmU5ZGItYjVmNTZkZDktYmI1ODhiZmYtOTEyOTNhZGQ= 2025-12-23T18:43:53.379805Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 5} session: ydb://session/3?node_id=1&id=NzkzNTllZDgtM2JmMDNkMzktNDJkNTMxZTgtODczMDNjNTU= 2025-12-23T18:43:53.384402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:965:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:53.384530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:992:2844], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:53.384583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:993:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:53.384644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:53.384731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:53.384791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:53.384873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:53.386559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1009:2861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:53.386727Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:53.393296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:53.470070Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1010:2862] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:43:53.471160Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1015:2867] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:43:53.472206Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1016:2868] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:43:53.472779Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1020:2869] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:43:53.594781Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1003:2855], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:53.594931Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1004:2856], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:53.594999Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1005:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:53.595085Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1006:2858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:53.595151Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1007:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:53.630614Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1097:2926] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:54.122334Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1169:2963] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:54.140209Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 3} finished in 1766515434.140137s, errors=0 2025-12-23T18:43:54.140690Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1766515434140 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:54.190485Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 2} finished in 1766515434.190417s, errors=0 2025-12-23T18:43:54.190834Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1766515434190 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:54.205267Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1220:2985] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:54.271158Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 5} finished in 1766515434.271104s, errors=0 2025-12-23T18:43:54.271491Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1766515434271 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:54.285417Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1271:3007] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:54.348627Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 4} finished in 1766515434.348568s, errors=0 2025-12-23T18:43:54.348926Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1766515434348 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:54.362879Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1322:3029] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:54.428393Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:952:2810], subTag: 1} finished in 1766515434.428341s, errors=0 2025-12-23T18:43:54.428767Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1766515434428 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:54.428871Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 2} finished in 1.062127s, oks# 20, errors# 0 2025-12-23T18:43:54.429062Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:952:2810] with tag# 2 >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:43:49.364835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:49.481348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:43:49.491479Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:49.491795Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:49.491868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:49.801668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:49.801836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:49.885851Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515426513845 != 1766515426513849 2025-12-23T18:43:49.909255Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:49.957205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:50.042955Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00433a/r3tmp/tmph6QdRk/pdisk_1.dat 2025-12-23T18:43:50.364482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:50.380305Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:50.487287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:50.785322Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-12-23T18:43:50.785488Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:951:2809], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2025-12-23T18:43:50.847363Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:951:2809], subTag: 2} TUpsertActor finished in 0.061434s, errors=0 2025-12-23T18:43:50.847472Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:952:2810] with tag# 2 ... 
waiting for SysViewsRoster update finished 2025-12-23T18:43:53.901657Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:53.908704Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:53.909026Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:53.909121Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:54.109233Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:54.109322Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:54.125274Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766515431290213 != 1766515431290216 2025-12-23T18:43:54.131677Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:54.176312Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:54.256357Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00433a/r3tmp/tmpLziape/pdisk_1.dat 2025-12-23T18:43:54.571738Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:54.585328Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:54.688085Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:54.903036Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-12-23T18:43:54.903182Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:951:2809], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" 2025-12-23T18:43:54.959138Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:951:2809], subTag: 2} TUpsertActor finished in 0.055612s, errors=0 2025-12-23T18:43:54.959288Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:952:2810] with tag# 2 |95.1%| [TM] 
{BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::StringEscaping |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] Test command err: 2025-12-23T18:43:52.647937Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126011545961397:2148];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:52.649731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002feb/r3tmp/tmpe8ts1m/pdisk_1.dat 2025-12-23T18:43:52.874196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:52.874316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:52.907088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:52.926064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:52.957948Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:52.959401Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126011545961277:2081] 1766515432643702 != 1766515432643705 TClient is connected to server localhost:16866 TServer::EnableGrpc on GrpcPort 65496, node 1 2025-12-23T18:43:53.139269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:53.139291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:53.139298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:53.139443Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:53.140813Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16866 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515432983 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:53.432403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:53.446270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515433536 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... 
(TRUNCATED) 2025-12-23T18:43:53.560315Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handshake: worker# [1:7587126015840929393:2484] 2025-12-23T18:43:53.560638Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:43:53.560902Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:43:53.560949Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Send handshake: worker# [1:7587126015840929393:2484] 2025-12-23T18:43:53.561337Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-12-23T18:43:53.575874Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-12-23T18:43:53.576103Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-12-23T18:43:53.576298Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126015840929487:2545] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-12-23T18:43:53.576371Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 
38][1:7587126015840929484:2545] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:53.576448Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126015840929487:2545] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-12-23T18:43:53.578641Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126015840929487:2545] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:43:53.578724Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:53.578793Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-12-23T18:43:53.579294Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-12-23T18:43:53.579774Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-12-23T18:43:53.579875Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 },{ Order: 3 BodySize: 48 }] } 2025-12-23T18:43:53.580031Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126015840929487:2545] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 3 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-12-23T18:43:53.581599Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126015840929487:2545] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:43:53.581660Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handle 
NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:53.581699Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015840929484:2545] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3] } 2025-12-23T18:43:53.653892Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WaitTxIds [GOOD] Test command err: 2025-12-23T18:43:52.774040Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126011295625879:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:52.774129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002fea/r3tmp/tmpPpOCNW/pdisk_1.dat 2025-12-23T18:43:53.001418Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:53.019050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:53.019191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:53.024673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:53.114660Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:53.160974Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10527 TServer::EnableGrpc on GrpcPort 28779, node 1 2025-12-23T18:43:53.350071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:53.350105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:53.350115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:53.350206Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10527 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515433172 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:53.654898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:53.666426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:43:53.781721Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515433767 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... 
(TRUNCATED) 2025-12-23T18:43:53.804234Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handshake: worker# [1:7587126015590594058:2548] 2025-12-23T18:43:53.804533Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:43:53.804883Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:43:53.804925Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Send handshake: worker# [1:7587126015590594058:2548] 2025-12-23T18:43:53.805242Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-12-23T18:43:53.810593Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-12-23T18:43:53.810770Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-12-23T18:43:53.810998Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126015590594061:2547] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 
2025-12-23T18:43:53.811073Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:53.811149Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126015590594061:2547] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-12-23T18:43:53.813468Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126015590594061:2547] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:43:53.813538Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:53.813600Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-12-23T18:43:54.806111Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-12-23T18:43:54.806338Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 }] } 2025-12-23T18:43:54.806582Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126015590594061:2547] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-12-23T18:43:54.808682Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126015590594061:2547] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:43:54.808757Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:54.808802Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126015590594057:2547] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> 
ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:43:48.574867Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:48.668481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:43:48.680029Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:48.680374Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:48.680522Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:48.926674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:48.926792Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:48.984382Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515425769949 != 1766515425769953 2025-12-23T18:43:49.005970Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:49.055622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:49.158792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004342/r3tmp/tmptdlKDK/pdisk_1.dat 2025-12-23T18:43:49.457275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:49.472102Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:49.579373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:49.892439Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2025-12-23T18:43:49.894413Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:951:2809], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2025-12-23T18:43:49.944285Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:951:2809], subTag: 1} TUpsertActor finished in 0.049511s, errors=0 2025-12-23T18:43:49.944961Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 
72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-12-23T18:43:49.945120Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:334: ReadIteratorLoadScenario# [1:960:2818] with id# {Tag: 0, parent: [1:951:2809], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-12-23T18:43:49.946423Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:397: ReadIteratorLoadScenario# {Tag: 0, parent: [1:951:2809], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-12-23T18:43:49.946566Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:963:2821] 2025-12-23T18:43:49.946676Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 1} Bootstrap called, sample# 0 2025-12-23T18:43:49.946719Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 1} Connect to# 72075186224037888 called 2025-12-23T18:43:49.947046Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 1} Handle TEvClientConnected called, Status# OK 2025-12-23T18:43:49.954534Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 1} finished in 0.007415s, read# 1000 2025-12-23T18:43:49.954991Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:963:2821] with chunkSize# 0 finished: 0 { DurationMs: 7 OperationsOK: 1000 OperationsError: 0 } 2025-12-23T18:43:49.955164Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:966:2824] 2025-12-23T18:43:49.955245Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 2} Bootstrap called, sample# 0 2025-12-23T18:43:49.955280Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 2} Connect to# 72075186224037888 called 2025-12-23T18:43:49.955578Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-12-23T18:43:50.212781Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 2} finished in 0.257152s, read# 1000 2025-12-23T18:43:50.212904Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:966:2824] with chunkSize# 1 finished: 0 { DurationMs: 257 OperationsOK: 1000 OperationsError: 0 } 2025-12-23T18:43:50.212988Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:969:2827] 2025-12-23T18:43:50.213031Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 3} Bootstrap called, sample# 0 2025-12-23T18:43:50.213055Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 3} Connect to# 72075186224037888 called 2025-12-23T18:43:50.213260Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-12-23T18:43:50.272112Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 3} finished in 0.058798s, read# 1000 
2025-12-23T18:43:50.272241Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:969:2827] with chunkSize# 10 finished: 0 { DurationMs: 58 OperationsOK: 1000 OperationsError: 0 } 2025-12-23T18:43:50.272361Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:972:2830] 2025-12-23T18:43:50.272416Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 4} Bootstrap called, sample# 1000 2025-12-23T18:43:50.272450Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 4} Connect to# 72075186224037888 called 2025-12-23T18:43:50.272679Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-12-23T18:43:50.275056Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 4} finished in 0.001881s, sampled# 1000, iter finished# 1, oks# 1000 2025-12-23T18:43:50.275161Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:507: ReadIteratorLoadScenario# {Tag: 0, parent: [1:951:2809], subTag: 3} received keyCount# 1000 2025-12-23T18:43:50.275294Z node 1 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:552: ReadIteratorLoadScenario# {Tag: 0, parent: [1:951:2809], subTag: 3} started read actor with id# [1:975:2833] 2025-12-23T18:43:50.275336Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:79: TReadIteratorPoints# {Tag: 0, parent: [1:960:2818], subTag: 5} Bootstrap called, will read keys# 1000 2025-12-23T18:43:50.674917Z node 1 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:560: ReadIteratorLoadScenario# {Tag: 0, parent: [1:951:2809], subTag: 3} received point times# 1000, Inflight left# 0 2025-12-23T18:43:50.675113Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:483: headread with inflight# 1 finished: 0 { DurationMs: 399 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 35\n" } 2025-12-23T18:43:50.675300Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:617: ReadIteratorLoadScenario# {Tag: 0, parent: [1:951:2809], subTag: 3} finished in 0.729995s with report: { DurationMs: 7 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 257 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 58 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 399 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 35\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-12-23T18:43:50.675662Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:960:2818] with tag# 3 ... waiting for SysViewsRoster update finished 2025-12-23T18:43:53.885225Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:53.894042Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:53.894391Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:53.894474Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:54.154457Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:54.154609Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:54.174384Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766515431124644 != 1766515431124647 2025-12-23T18:43:54.181562Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:54.227427Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:54.307528Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004342/r3tmp/tmpHboR8Y/pdisk_1.dat 2025-12-23T18:43:54.582663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:54.596245Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:54.698390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:54.923931Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2025-12-23T18:43:54.924302Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:951:2809], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2025-12-23T18:43:54.936428Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:951:2809], subTag: 1} TUpsertActor finished in 0.011810s, errors=0 2025-12-23T18:43:54.936955Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 
TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-12-23T18:43:54.937071Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:334: ReadIteratorLoadScenario# [2:960:2818] with id# {Tag: 0, parent: [2:951:2809], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-12-23T18:43:54.938020Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:397: ReadIteratorLoadScenario# {Tag: 0, parent: [2:951:2809], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-12-23T18:43:54.938116Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:963:2821] 2025-12-23T18:43:54.938207Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 1} Bootstrap called, sample# 0 2025-12-23T18:43:54.938241Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 1} Connect to# 72075186224037888 called 2025-12-23T18:43:54.938442Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 1} Handle TEvClientConnected called, Status# OK 2025-12-23T18:43:54.939035Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 1} finished in 0.000553s, read# 10 2025-12-23T18:43:54.939187Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:963:2821] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-12-23T18:43:54.939287Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:966:2824] 2025-12-23T18:43:54.939327Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 2} Bootstrap called, sample# 0 2025-12-23T18:43:54.939348Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 2} Connect to# 72075186224037888 called 2025-12-23T18:43:54.939507Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-12-23T18:43:54.941075Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 2} finished in 0.001541s, read# 10 2025-12-23T18:43:54.941151Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:966:2824] with chunkSize# 1 finished: 0 { DurationMs: 1 OperationsOK: 10 OperationsError: 0 } 2025-12-23T18:43:54.941205Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:969:2827] 2025-12-23T18:43:54.941231Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 3} Bootstrap called, sample# 0 2025-12-23T18:43:54.941255Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 3} Connect to# 72075186224037888 called 2025-12-23T18:43:54.941405Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-12-23T18:43:54.941955Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 3} finished in 0.000524s, read# 10 2025-12-23T18:43:54.942042Z 
node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:969:2827] with chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-12-23T18:43:54.942126Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:972:2830] 2025-12-23T18:43:54.942167Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 4} Bootstrap called, sample# 10 2025-12-23T18:43:54.942214Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 4} Connect to# 72075186224037888 called 2025-12-23T18:43:54.942396Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-12-23T18:43:54.942691Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [2:960:2818], subTag: 4} finished in 0.000255s, sampled# 10, iter finished# 1, oks# 10 2025-12-23T18:43:54.942772Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:507: ReadIteratorLoadScenario# {Tag: 0, parent: [2:951:2809], subTag: 3} received keyCount# 10 2025-12-23T18:43:54.942875Z node 2 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:552: ReadIteratorLoadScenario# {Tag: 0, parent: [2:951:2809], subTag: 3} started read actor with id# [2:975:2833] 2025-12-23T18:43:54.942911Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:79: TReadIteratorPoints# {Tag: 0, parent: [2:960:2818], subTag: 5} Bootstrap called, will read keys# 10 2025-12-23T18:43:55.297243Z node 2 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:560: ReadIteratorLoadScenario# {Tag: 0, parent: [2:951:2809], subTag: 3} received point times# 1000, Inflight left# 0 2025-12-23T18:43:55.297472Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:483: headread with inflight# 1 finished: 0 { DurationMs: 354 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 30\n" } 2025-12-23T18:43:55.297660Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:617: ReadIteratorLoadScenario# {Tag: 0, parent: [2:951:2809], subTag: 3} finished in 0.360413s with report: { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 1 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 354 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 30\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-12-23T18:43:55.297766Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:960:2818] with tag# 3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2025-12-23T18:43:19.445536Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125866895774430:2204];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:19.445680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b1/r3tmp/tmpP88RMi/pdisk_1.dat 2025-12-23T18:43:19.784486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:19.784597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:19.805940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:19.818837Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125866895774254:2081] 1766515399432338 != 1766515399432341 2025-12-23T18:43:19.838542Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:19.850448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23605, node 1 2025-12-23T18:43:19.953307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:19.953328Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:19.953335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:19.953426Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:19.999357Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:20.003330Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:20.003372Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:20.004766Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:65095, port: 65095 2025-12-23T18:43:20.004900Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:20.068273Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-12-23T18:43:20.113610Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****VA7w (94297C15) () has now valid token of ldapuser@ldap 2025-12-23T18:43:20.126822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:22.843507Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125879481962575:2120];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:22.845867Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b1/r3tmp/tmpPX376d/pdisk_1.dat 2025-12-23T18:43:22.867274Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:22.965988Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:22.971499Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587125879481962481:2081] 1766515402838918 != 1766515402838921 2025-12-23T18:43:22.980641Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:22.980744Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:22.985908Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17244, node 2 2025-12-23T18:43:23.096530Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:23.096554Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:23.096561Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:23.096637Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:23.162475Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:23.244112Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:23.247655Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:23.247711Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:23.248392Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:11862, port: 11862 2025-12-23T18:43:23.248487Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:23.307251Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:11862. Invalid credentials 2025-12-23T18:43:23.307701Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****Bgdw (AA727DB5) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:11862. 
Invalid credentials)' 2025-12-23T18:43:26.351034Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587125897686900644:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:26.351613Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b1/r3tmp/tmpcV98oY/pdisk_1.dat 2025-12-23T18:43:26.375365Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:26.437409Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:26.470426Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:26.470522Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:26.475521Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63254, node 3 2025-12-23T18:43:26.519361Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:26.519390Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:26.519398Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:26.519490Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:26.603202Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:26.606591Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:26.606626Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:26.607426Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:15392, port: 15392 2025-12-23T18:43:26.607497Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:26.671312Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:15392. Invalid credentials 2025-12-23T18:43:26.671952Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****GS8g (92D8CB1C) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:15392. 
Invalid credentials)' 2025-12-23T18:43:26.673876Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:29.787664Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587125911519446237:2157];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:29.789546Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b1/r3tmp/tmpS6zegs/pdisk_1.dat 2025-12-23T18:43:29.817393Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:29.925903Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:29.964229Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:29.964325Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:29.968712Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected T ... 1681757:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:33.477724Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b1/r3tmp/tmpr9RVap/pdisk_1.dat 2025-12-23T18:43:33.488124Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:33.581515Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:33.593902Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7587125929591681650:2081] 1766515413475093 != 1766515413475096 2025-12-23T18:43:33.618909Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:33.618998Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:33.622513Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64646, node 5 2025-12-23T18:43:33.660168Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:33.675512Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:33.675536Z 
node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:33.675544Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:33.675629Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:33.775112Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:33.776945Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:33.776975Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:33.777727Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:19331, port: 19331 2025-12-23T18:43:33.777786Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:33.839331Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:33.883106Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:33.883657Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:33.883712Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:33.931147Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:33.975094Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:33.976227Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****_-WQ (1F52156D) () has now valid token of ldapuser@ldap 2025-12-23T18:43:34.481254Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:37.480177Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****_-WQ (1F52156D) 2025-12-23T18:43:37.480307Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:19331, port: 19331 2025-12-23T18:43:37.480415Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:37.535309Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:37.583147Z node 5 :LDAP_AUTH_PROVIDER DEBUG: 
ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:37.583714Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:37.583752Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:37.627096Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:37.671133Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:37.672341Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****_-WQ (1F52156D) () has now valid token of ldapuser@ldap 2025-12-23T18:43:38.477535Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587125929591681757:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:38.477646Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:43:42.484102Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****_-WQ (1F52156D) 2025-12-23T18:43:44.652329Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587125975724618068:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:44.652801Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b1/r3tmp/tmpxKXnUy/pdisk_1.dat 2025-12-23T18:43:44.666183Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:44.732314Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:44.767841Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:44.767948Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:44.773065Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28945, node 6 2025-12-23T18:43:44.830228Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:44.830260Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:44.830268Z node 6 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:44.830360Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:44.903708Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:45.032955Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:45.036445Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:45.036490Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:45.037325Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:14200, port: 14200 2025-12-23T18:43:45.037447Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:45.091372Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:45.135840Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Q4qQ (D4A00119) () has now valid token of ldapuser@ldap 2025-12-23T18:43:45.657779Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:49.654863Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7587125975724618068:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:49.654960Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:43:49.659136Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****Q4qQ (D4A00119) 2025-12-23T18:43:49.659312Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:14200, port: 14200 2025-12-23T18:43:49.659379Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:49.719292Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:49.768023Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Q4qQ (D4A00119) () has now valid token of ldapuser@ldap 2025-12-23T18:43:52.660777Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****Q4qQ (D4A00119) |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-clusteradmin |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |95.1%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:43:50.804425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:50.922927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:43:50.935618Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:50.935876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:50.935941Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:51.188769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:51.188894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:51.256135Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515428311842 != 1766515428311846 2025-12-23T18:43:51.265672Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:51.309414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:51.411658Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004335/r3tmp/tmpGYhxZf/pdisk_1.dat 2025-12-23T18:43:51.695011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:51.709979Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:51.814061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:52.109350Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2025-12-23T18:43:52.109555Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:951:2809], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2025-12-23T18:43:52.184924Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:951:2809], subTag: 2} TUpsertActor finished in 0.074922s, errors=0 2025-12-23T18:43:52.185067Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# 
[1:952:2810] with tag# 2 ... waiting for SysViewsRoster update finished 2025-12-23T18:43:55.222259Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:55.232401Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:55.232837Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:55.232931Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:55.481524Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:55.481642Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:55.497399Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766515432675432 != 1766515432675435 2025-12-23T18:43:55.503542Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:55.549953Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:55.618612Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004335/r3tmp/tmpiARKgJ/pdisk_1.dat 2025-12-23T18:43:55.955120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:55.969355Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:56.073749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:56.299937Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-12-23T18:43:56.300109Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:951:2809], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2025-12-23T18:43:56.356693Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:951:2809], subTag: 2} TUpsertActor finished in 0.056218s, errors=0 2025-12-23T18:43:56.356784Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# 
[2:952:2810] with tag# 2 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> KqpPg::CreateSequence [GOOD] >> KqpPg::AlterSequence >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad >> LocalTableWriter::SupportedTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:43:54.775115Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:54.883112Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:43:54.894468Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:54.894921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:54.894986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:55.142467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:55.142617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:55.204820Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515432156825 != 1766515432156829 2025-12-23T18:43:55.214060Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:55.256905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:55.346093Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00432f/r3tmp/tmpeuvOoq/pdisk_1.dat 2025-12-23T18:43:55.633646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:55.648129Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:55.752431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:56.034376Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-12-23T18:43:56.034503Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-12-23T18:43:56.038862Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 2} started# 5 actors each with inflight# 4 2025-12-23T18:43:56.038957Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-12-23T18:43:56.039055Z 
node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-12-23T18:43:56.039086Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-12-23T18:43:56.039114Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-12-23T18:43:56.039143Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-12-23T18:43:56.042704Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 1} session: ydb://session/3?node_id=1&id=YTQ5MzAwNWEtMjBlNmM2NjctOGRlMzMwYjAtYWMzNTk3Nzc= 2025-12-23T18:43:56.046410Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 2} session: ydb://session/3?node_id=1&id=ZGRmMWQ4NTgtYWFjNWQ5YWUtODZhOGYxMzctODdjNjIxZDc= 2025-12-23T18:43:56.046490Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 3} session: ydb://session/3?node_id=1&id=ZDg5NDU1MDItOTVjMzc5NmItOWM3YWRiYWMtODlkYzczNzM= 2025-12-23T18:43:56.048255Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 4} session: ydb://session/3?node_id=1&id=ODlhZTRhZmMtZDBhY2FmZjQtZDcxMjYyNGQtNmU2OTBjMTI= 2025-12-23T18:43:56.050209Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 5} session: ydb://session/3?node_id=1&id=OWQ5YTAxNDUtNmE5MDFjYmQtN2IwNGNmMDAtMWQzYzE5ZmQ= 2025-12-23T18:43:56.054355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:965:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:56.054508Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:992:2844], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:56.054568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:993:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:56.054631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:56.054728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:56.054823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:56.054914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:56.056611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1009:2861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:56.056790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:56.063021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:56.102682Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1010:2862] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:43:56.103416Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1015:2867] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:43:56.104144Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1016:2868] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:43:56.104527Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1020:2869] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:43:56.222384Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1003:2855], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:56.222520Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1004:2856], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:56.222563Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1005:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:56.222595Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1006:2858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:56.222663Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1007:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:56.258220Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1097:2926] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:56.818531Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1169:2963] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:56.834827Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 5} finished in 1766515436.834772s, errors=0 2025-12-23T18:43:56.835197Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1766515436834 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:56.883687Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 4} finished in 1766515436.883647s, errors=0 2025-12-23T18:43:56.883990Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1766515436883 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:56.897877Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1220:2985] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:56.965073Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 2} finished in 1766515436.965033s, errors=0 2025-12-23T18:43:56.965349Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1766515436965 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:56.978810Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1271:3007] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:57.044117Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:952:2810], subTag: 1} finished in 1766515437.044077s, errors=0 2025-12-23T18:43:57.044422Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1766515437044 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:57.059335Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1322:3029] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:57.127385Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:952:2810], subTag: 3} finished in 1766515437.127336s, errors=0 2025-12-23T18:43:57.127701Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:951:2809], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1766515437127 OperationsOK: 4 OperationsError: 0 } 2025-12-23T18:43:57.127800Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 2} finished in 1.089177s, oks# 20, errors# 0 2025-12-23T18:43:57.127957Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:952:2810] with tag# 2 |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSchemeShardSubDomainTest::RestartAtInFly >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-system >> LocalTableWriter::WriteTable [GOOD] >> LocalTableWriter::DecimalKeys >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex [GOOD] Test command err: 2025-12-23T18:40:37.763606Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125174683269394:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:37.763704Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002f3f/r3tmp/tmp5rx0io/pdisk_1.dat 2025-12-23T18:40:37.951935Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:38.006702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:38.006844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:38.074896Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:38.080876Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125174683269358:2081] 1766515237761632 != 1766515237761635 2025-12-23T18:40:38.093046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1859, node 1 2025-12-23T18:40:38.186112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:40:38.186146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:40:38.186153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-12-23T18:40:38.186252Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:40:38.219631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:38.223024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:38.227574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:38.250875Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7587125178978237492:2295] 2025-12-23T18:40:38.251135Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:38.265826Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:38.265896Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:38.269775Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:38.269842Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:38.269904Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:38.270284Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:38.270366Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:38.270409Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7587125178978237507:2295] in generation 1 2025-12-23T18:40:38.272230Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:38.308671Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:38.308823Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:38.308872Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7587125178978237509:2296] 2025-12-23T18:40:38.308883Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:38.308899Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:38.308921Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:38.309064Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:38.309131Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:38.309157Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:38.309183Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:38.309223Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:38.309246Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:38.309792Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7587125178978237489:2500], serverId# [1:7587125178978237493:2502], sessionId# [0:0:0] 2025-12-23T18:40:38.310252Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:38.310579Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:40:38.310646Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-12-23T18:40:38.312512Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:38.312735Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:38.312813Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:40:38.315296Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7587125178978237523:2518], serverId# [1:7587125178978237524:2519], sessionId# [0:0:0] 2025-12-23T18:40:38.320280Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1766515238362 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1766515238362 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:40:38.320319Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:38.320484Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:38.320587Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:38.320628Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:40:38.320654Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1766515238362:281474976710657] in PlanQueue unit at 72075186224037888 2025-12-23T18:40:38.320950Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails 
at 72075186224037888 loaded tx from db 1766515238362:281474976710657 keys extracted: 0 2025-12-23T18:40:38.321114Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:40:38.321221Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:38.321269Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:40:38.323391Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:40:38.323923Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:38.325001Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1766515238361 2025-12-23T18:40:38.325031Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:38.325068Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1766515238369 2025-12-23T18:40:38.325150Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1766515238362} 2025-12-23T18:40:38.325191Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:38.325219Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:38.325232Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:38.325256Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:40:38.325307Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1766515238362 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7587125174683269697:2135], exec latency: 2 ms, propose latency: 4 ms 2025-12-23T18:40:38.325351Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-12-23T18:40:38.325387Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:38.33 ... 
1 2025-12-23T18:43:55.687778Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976710759] from 72075186224037891 at tablet 72075186224037891 send result to client [25:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:43:55.687896Z node 25 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037891 Sending notify to schemeshard 72057594046644480 txId 281474976710759 state Ready TxInFly 0 2025-12-23T18:43:55.688072Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-12-23T18:43:55.691990Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710759 datashard 72075186224037891 state Ready 2025-12-23T18:43:55.692141Z node 25 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-12-23T18:43:55.714118Z node 25 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 4500} 2025-12-23T18:43:55.714224Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:43:55.714274Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:43:55.714355Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976710759] from 72075186224037888 at tablet 72075186224037888 send result to client [25:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:43:55.714416Z node 25 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710759 state Ready TxInFly 0 2025-12-23T18:43:55.714504Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:43:55.717597Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710759 datashard 72075186224037888 state Ready 2025-12-23T18:43:55.717679Z node 25 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T18:43:55.765940Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:43:55.766024Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:55.766068Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:43:55.766113Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:55.766152Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:43:55.807986Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:43:55.808072Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:55.808114Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:43:55.808158Z node 25 
:PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:55.808194Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:43:55.839697Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:43:55.839790Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:55.839834Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:43:55.839879Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:55.839933Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:43:55.871516Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:43:55.871633Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:55.871689Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:43:55.871746Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:55.871793Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:43:55.903760Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:43:55.903838Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:55.903880Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:43:55.903923Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:43:55.903958Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:43:56.033056Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:8] at 72075186224037888 2025-12-23T18:43:56.033240Z node 25 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=8; 2025-12-23T18:43:56.033343Z node 25 :TX_DATASHARD INFO: datashard_write_operation.cpp:804: Write transaction 8 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2025-12-23T18:43:56.033629Z node 25 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. 
txid 8 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-12-23T18:43:56.033912Z node 25 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 8 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-12-23T18:43:56.034016Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:43:56.034359Z node 25 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [25:1403:3072], Table: `/Root/Table` ([72057594046644480:38:3]), SessionActorId: [25:1346:3072]Got LOCKS BROKEN for table `/Root/Table`. ShardID=72075186224037888, Sink=[25:1403:3072].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-12-23T18:43:56.034613Z node 25 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [25:1396:3072], SessionActorId: [25:1346:3072], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Table`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[25:1346:3072]. 2025-12-23T18:43:56.035052Z node 25 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=25&id=MThiZWRmNzktNmZiNjc1YTgtYzdjNTg1YjAtOTI5ZTc5ZTM=, ActorId: [25:1346:3072], ActorState: ExecuteState, TraceId: 01kd68ctc49te9g10zp0r48hhh, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [25:1486:3072] from: [25:1396:3072] 2025-12-23T18:43:56.035347Z node 25 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [25:1486:3072] TxId: 281474976715665. Ctx: { TraceId: 01kd68ctc49te9g10zp0r48hhh, Database: , SessionId: ydb://session/3?node_id=25&id=MThiZWRmNzktNmZiNjc1YTgtYzdjNTg1YjAtOTI5ZTc5ZTM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Table`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-12-23T18:43:56.035932Z node 25 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=25&id=MThiZWRmNzktNmZiNjc1YTgtYzdjNTg1YjAtOTI5ZTc5ZTM=, ActorId: [25:1346:3072], ActorState: ExecuteState, TraceId: 01kd68ctc49te9g10zp0r48hhh, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/Table`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } 2025-12-23T18:43:56.037120Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:9] at 72075186224037888 2025-12-23T18:43:56.037209Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:9] at 72075186224037888 2025-12-23T18:43:56.037441Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-12-23T18:43:56.041990Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-12-23T18:43:56.042110Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-12-23T18:43:56.043007Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 3 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-12-23T18:43:56.043585Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 139 count 1 last offset 0, current partition end offset: 1 2025-12-23T18:43:56.043688Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2025-12-23T18:43:56.043850Z node 25 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 139 accessed 0 times before, last time 1970-01-01T00:00:04.000000Z 2025-12-23T18:43:56.044021Z node 25 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2025-12-23T18:43:56.044185Z node 25 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:43:56.044354Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:43:56.044646Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 121 from pos 0 cbcount 1 2025-12-23T18:43:56.045409Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-false >> TStoragePoolsQuotasTest::DifferentQuotasInteraction |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest >> TSchemeShardSubDomainTest::Delete >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-anonymous >> Cdc::ShouldBreakLocksOnConcurrentAlterTable [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAddIndex >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2025-12-23T18:43:22.516657Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125881779724339:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:22.517639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ac/r3tmp/tmpmdPr3k/pdisk_1.dat 2025-12-23T18:43:22.746715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:22.764946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:22.765066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:22.794569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:22.847362Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:22.850801Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125881779724310:2081] 1766515402514839 != 1766515402514842 TServer::EnableGrpc on GrpcPort 16770, node 1 2025-12-23T18:43:22.908342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:22.908359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:22.908365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:22.908452Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:22.970059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:23.026504Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:23.030671Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:23.030706Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:23.031616Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14346, port: 14346 2025-12-23T18:43:23.032464Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:23.050453Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-12-23T18:43:23.099633Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****hNMQ (399A3FE9) () has now valid token of ldapuser@ldap 2025-12-23T18:43:25.675220Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125894033641850:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:25.675319Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ac/r3tmp/tmpv0Bods/pdisk_1.dat 2025-12-23T18:43:25.730907Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:25.810677Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:25.812260Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:25.812349Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:25.812383Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587125894033641819:2081] 1766515405670191 != 1766515405670194 2025-12-23T18:43:25.848043Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15089, node 2 2025-12-23T18:43:25.927536Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:25.927558Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:25.927563Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:25.927658Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
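A few entries further down, the provider binds as cn=invalidRobouser and the server rejects the bind with "Invalid credentials", which the ticket parser then records as a permanent login error. A minimal client-side sketch of that failure path, assuming the python-ldap package; the URI and bind DN mirror the trace, the password is an invented placeholder:

    import ldap

    conn = ldap.initialize("ldap://localhost:63837")  # port taken from this test run
    conn.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
    try:
        # same bind DN the provider uses in the rejected attempt below
        conn.simple_bind_s("cn=invalidRobouser,dc=search,dc=yandex,dc=net", "wrong-password")
    except ldap.INVALID_CREDENTIALS:
        # corresponds to the 'Could not perform initial LDAP bind ... Invalid credentials' entry
        pass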
2025-12-23T18:43:25.972915Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:26.018874Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:26.022253Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:26.022289Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:26.023048Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:63837, port: 63837 2025-12-23T18:43:26.023170Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:26.035096Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:63837. Invalid credentials 2025-12-23T18:43:26.035375Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****ljpw (4B2B3506) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:63837. Invalid credentials)' 2025-12-23T18:43:29.089083Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587125910468234878:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:29.089132Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ac/r3tmp/tmpebGeQu/pdisk_1.dat 2025-12-23T18:43:29.101750Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:29.182909Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587125910468234854:2081] 1766515409088677 != 1766515409088680 2025-12-23T18:43:29.191423Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:29.202792Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:29.202877Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:29.207911Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6297, node 3 2025-12-23T18:43:29.256841Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:29.256876Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:29.256887Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2025-12-23T18:43:29.256972Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:29.265942Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:29.354671Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:29.355168Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:29.355183Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:29.355841Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:61559, port: 61559 2025-12-23T18:43:29.355921Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:29.367420Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:61559. Invalid credentials 2025-12-23T18:43:29.367688Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****VOPA (7205B564) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:61559. Invalid credentials)' 2025-12-23T18:43:32.709222Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587125923330085881:2138];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:32.709418Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ac/r3tmp/tmpGf5xaE/pdisk_1.dat 2025-12-23T18:43:32.751193Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:32.777545Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587125923330085781:2081] 1766515412702621 != 1766515412702624 2025-12-23T18:43:32.831778Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:32.864072Z node 4 :HIVE WARN: ... 
nitialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:43:36.224830Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ac/r3tmp/tmpdzyLMs/pdisk_1.dat 2025-12-23T18:43:36.316321Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:36.318252Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7587125940483424232:2081] 1766515416206732 != 1766515416206735 2025-12-23T18:43:36.329382Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:36.361236Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:36.361336Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:36.371871Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22096, node 5 2025-12-23T18:43:36.455438Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:36.455466Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:36.455474Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:36.455566Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:36.488192Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:36.546877Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:36.550510Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:36.550555Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:36.551290Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:10263, port: 10263 2025-12-23T18:43:36.551365Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:36.556002Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:36.599098Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:36.599625Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 
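The entries that follow show the tree-traversal variant of group resolution: a subtree search for uid=ldapuser requesting memberOf, then a search using the matching-rule-in-chain OID 1.2.840.113556.1.4.1941, followed by repeated (|(entryDn=...)) searches until no new parent groups appear. A rough client-side equivalent of the first two queries, assuming python-ldap and a placeholder bind password (not a value from this run):

    import ldap

    BASE = "dc=search,dc=yandex,dc=net"

    conn = ldap.initialize("ldap://localhost:10263")  # port taken from this test run
    conn.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
    conn.simple_bind_s("cn=robouser," + BASE, "robouser-password")  # placeholder password

    # direct groups of the user, as in the 'search: ... attributes: memberOf' entry
    user = conn.search_s(BASE, ldap.SCOPE_SUBTREE, "(uid=ldapuser)", ["memberOf"])

    # transitive membership via the matching-rule-in-chain filter from the trace;
    # attribute list '1.1' asks for no attributes, only the matching DNs
    chain = conn.search_s(
        BASE,
        ldap.SCOPE_SUBTREE,
        "(member:1.2.840.113556.1.4.1941:=uid=ldapuser," + BASE + ")",
        ["1.1"],
    )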
2025-12-23T18:43:36.599703Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:36.643146Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:36.687155Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:36.688119Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****vqvg (8E79B85A) () has now valid token of ldapuser@ldap 2025-12-23T18:43:37.214112Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:41.209212Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587125940483424326:2134];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:41.209374Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:43:41.216320Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****vqvg (8E79B85A) 2025-12-23T18:43:41.216426Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:10263, port: 10263 2025-12-23T18:43:41.216506Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:41.221362Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:41.267053Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:41.267642Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:41.267691Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:41.315091Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:41.359045Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:41.359927Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****vqvg (8E79B85A) () has now valid 
token of ldapuser@ldap 2025-12-23T18:43:44.217698Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****vqvg (8E79B85A) 2025-12-23T18:43:47.376968Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587125989796973944:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:47.377018Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046ac/r3tmp/tmpOkLcLs/pdisk_1.dat 2025-12-23T18:43:47.403265Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:47.490396Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:47.528310Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:47.528399Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:47.540759Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3395, node 6 2025-12-23T18:43:47.600110Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:47.600135Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:47.600142Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:47.600219Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:47.699947Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:47.704910Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:47.705386Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:47.705403Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:47.706254Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:17793, port: 17793 2025-12-23T18:43:47.706352Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:47.734976Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:47.779343Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****MEmQ (27632EA8) () has now valid token of ldapuser@ldap 2025-12-23T18:43:48.384888Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:51.381080Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****MEmQ (27632EA8) 2025-12-23T18:43:51.381192Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:17793, port: 17793 2025-12-23T18:43:51.381300Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:51.395775Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:51.439592Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****MEmQ (27632EA8) () has now valid token of ldapuser@ldap 2025-12-23T18:43:52.377885Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7587125989796973944:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:52.377996Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:43:55.383345Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****MEmQ (27632EA8) |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] |95.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2025-12-23T18:43:55.655593Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126023711938783:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:55.655934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002fd1/r3tmp/tmpqeZfVp/pdisk_1.dat 2025-12-23T18:43:55.862729Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:55.886206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:55.886356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:55.944276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:55.977822Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:55.979054Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126023711938672:2081] 1766515435652421 != 1766515435652424 2025-12-23T18:43:56.079400Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28003 TServer::EnableGrpc on GrpcPort 16293, node 1 2025-12-23T18:43:56.161623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:56.161663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:56.161671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:56.161769Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28003 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515435993 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:56.418993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:56.427037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515436518 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... 
(TRUNCATED) 2025-12-23T18:43:56.542482Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126028006906883:2549] Handshake: worker# [1:7587126028006906792:2488] 2025-12-23T18:43:56.542877Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126028006906883:2549] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:43:56.543244Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126028006906883:2549] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:43:56.543314Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126028006906883:2549] Send handshake: worker# [1:7587126028006906792:2488] 2025-12-23T18:43:56.543668Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126028006906883:2549] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 35b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 23b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-12-23T18:43:56.543916Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126028006906883:2549] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 35 },{ Order: 3 BodySize: 23 }] } 2025-12-23T18:43:56.544116Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126028006906886:2549] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-12-23T18:43:56.544168Z node 1 :REPLICATION_SERVICE DEBUG: 
base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126028006906883:2549] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:56.544263Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126028006906886:2549] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 35b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 23b }] } 2025-12-23T18:43:56.546366Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126028006906886:2549] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:43:56.546448Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126028006906883:2549] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:56.546493Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126028006906883:2549] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::StringEscaping [GOOD] >> TSchemeShardSubDomainTest::Delete [GOOD] >> TSchemeShardSubDomainTest::DeleteAdd >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] Test command err: 2025-12-23T18:43:55.891421Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126025081946638:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:55.891541Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002fd0/r3tmp/tmpJpuUgj/pdisk_1.dat 2025-12-23T18:43:56.106244Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:56.115622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:56.115747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:56.182675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:56.202327Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:56.203518Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification 
cookie mismatch for subscription [1:7587126025081946605:2081] 1766515435890575 != 1766515435890578 2025-12-23T18:43:56.281166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:32559 TServer::EnableGrpc on GrpcPort 26794, node 1 2025-12-23T18:43:56.389699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:56.389718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:56.389727Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:56.389836Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32559 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515436259 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:56.693850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:56.699951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:43:56.703881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766515436791 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... (TRUNCATED) 2025-12-23T18:43:56.830081Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126029376914817:2548] Handshake: worker# [1:7587126029376914818:2549] 2025-12-23T18:43:56.830512Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126029376914817:2548] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:43:56.831075Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126029376914817:2548] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:43:56.831157Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126029376914817:2548] Send handshake: worker# [1:7587126029376914818:2549] 2025-12-23T18:43:56.831675Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126029376914817:2548] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z 
MessageGroupId: ProducerId: }] } 2025-12-23T18:43:56.857473Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126029376914817:2548] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-12-23T18:43:56.857666Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126029376914817:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-12-23T18:43:56.857872Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126029376914821:2548] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-12-23T18:43:56.857938Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126029376914817:2548] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:56.858050Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126029376914821:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-12-23T18:43:56.860179Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126029376914821:2548] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:43:56.860262Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126029376914817:2548] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:56.860319Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126029376914817:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-12-23T18:43:56.901108Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:43:59.081637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:59.081803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:59.081849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:59.081886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:59.081936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:59.081981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:59.082043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:59.082125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:59.083041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:59.083400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:59.173179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:59.173234Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:59.178917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:59.179181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:59.179323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:59.182445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:59.182648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:43:59.183390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:59.183674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:59.185712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:59.185900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:59.187066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:59.187128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:59.187371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:59.187418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:59.187460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:59.187633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.196354Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:43:59.329128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:43:59.329370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.329600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:43:59.329639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:43:59.329877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:43:59.329937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:59.332431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:59.332676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:43:59.332892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.332942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:43:59.332981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:43:59.333012Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:43:59.335083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.335155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:43:59.335206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:43:59.336914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.336969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.337037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:43:59.337083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:43:59.340603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:43:59.342729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:43:59.342961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:43:59.344092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:59.344213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:43:59.344260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:43:59.344548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:43:59.344602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:43:59.344780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:43:59.344853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:43:59.347063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:59.347108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... CE: schemeshard__init.cpp:1820: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.542457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2025-12-23T18:43:59.542765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.543446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.543604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-12-23T18:43:59.543659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:43:59.543707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:43:59.543736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:43:59.544314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.544414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.544664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-12-23T18:43:59.545562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.546136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.546626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.546760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.547576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.547723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 
2025-12-23T18:43:59.547783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.547875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.548758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.549451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.550256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.550545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.550672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.550757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.550913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.550973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.551050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.558264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:59.560396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:59.560496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:59.560888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:59.560958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:59.561004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:59.563403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2025-12-23T18:43:59.626699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-12-23T18:43:59.626809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:460:2413] sender: [1:524:2058] recipient: [1:15:2062] 2025-12-23T18:43:59.627779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-12-23T18:43:59.627893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-12-23T18:43:59.627931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:522:2461] TestWaitNotification: OK eventTxId 100 2025-12-23T18:43:59.628483Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:43:59.628728Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 307us result status StatusSuccess 2025-12-23T18:43:59.629289Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:43:59.629872Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:43:59.630038Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 171us result status StatusSuccess 2025-12-23T18:43:59.630450Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, 
result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:43:51.634222Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:51.737565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:43:51.746627Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:51.746948Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:51.747047Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:52.012339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:52.012469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:52.087869Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515429019281 != 1766515429019285 2025-12-23T18:43:52.102079Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:52.146679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:52.232609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004334/r3tmp/tmp2qEHvg/pdisk_1.dat 2025-12-23T18:43:52.531027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:52.548041Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:52.651445Z node 1 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:194: TLoad# 0 creates table# BrandNewTable in dir# /Root 2025-12-23T18:43:52.844641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:863:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:52.844763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:52.845079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:881:2751], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:52.845133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:52.859258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:53.173870Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2025-12-23T18:43:53.175664Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:860:2743], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-12-23T18:43:53.187754Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:860:2743], subTag: 1} TUpsertActor finished in 0.011774s, errors=0 2025-12-23T18:43:53.188034Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-12-23T18:43:53.188217Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:860:2743], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-12-23T18:43:53.245849Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:860:2743], subTag: 3} TUpsertActor finished in 0.057385s, errors=0 2025-12-23T18:43:53.245955Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:970:2821] with tag# 3 ... waiting for SysViewsRoster update finished 2025-12-23T18:43:56.677271Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:56.685089Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:56.685545Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:56.685684Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:56.975027Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:56.975221Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:56.993874Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766515433816203 != 1766515433816206 2025-12-23T18:43:57.001084Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:57.043454Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:57.112680Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004334/r3tmp/tmpskhfv9/pdisk_1.dat 2025-12-23T18:43:57.398099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:57.412224Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:57.516973Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:57.741775Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2025-12-23T18:43:57.741868Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:951:2809], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 2025-12-23T18:43:58.147072Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:951:2809], subTag: 2} TUpsertActor finished in 0.404864s, errors=0 2025-12-23T18:43:58.147201Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:952:2810] with tag# 2 2025-12-23T18:43:58.153201Z node 2 :DS_LOAD_TEST NOTICE: 
test_load_actor.cpp:174: TLoad# 0 drops table# table in dir# /Root 2025-12-23T18:43:58.171255Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:994:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:58.171410Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:58.171790Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1004:2855], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:58.171847Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:58.360056Z node 2 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:194: TLoad# 0 creates table# table in dir# /Root 2025-12-23T18:43:58.379587Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1044:2894], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:58.379753Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:58.380166Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1048:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:58.380223Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:58.391476Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:58.440568Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-12-23T18:43:58.669090Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2025-12-23T18:43:58.669864Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:990:2848], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" 2025-12-23T18:43:58.682612Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:990:2848], subTag: 1} TUpsertActor finished in 0.012310s, errors=0 2025-12-23T18:43:58.683045Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-12-23T18:43:58.683332Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:990:2848], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" 2025-12-23T18:43:58.742636Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:990:2848], subTag: 3} TUpsertActor finished in 0.058916s, errors=0 2025-12-23T18:43:58.742754Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:1137:2968] with tag# 3 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] Test command err: 2025-12-23T18:42:33.012133Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-12-23T18:42:33.012636Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-12-23T18:42:33.013412Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-12-23T18:42:33.015213Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.015850Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2025-12-23T18:42:33.025669Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.025755Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.025800Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.026000Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-12-23T18:42:33.026172Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.026281Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2025-12-23T18:42:33.026389Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-12-23T18:42:33.027202Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2025-12-23T18:42:33.027859Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.027907Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:42:33.027989Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-12-23T18:42:33.028022Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> KqpPg::EquiJoin+useSink [GOOD] >> KqpPg::EquiJoin-useSink >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] |95.1%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::StringEscaping [GOOD] Test command err: 2025-12-23T18:43:56.733745Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126026097370044:2147];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:56.733834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002fc9/r3tmp/tmpAU3oOy/pdisk_1.dat 2025-12-23T18:43:56.927687Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:56.959790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:56.959891Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:57.012274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:57.052366Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126026097369924:2081] 1766515436728861 != 1766515436728864 2025-12-23T18:43:57.060218Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:57.108949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6880 TServer::EnableGrpc on GrpcPort 32702, node 1 2025-12-23T18:43:57.295504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:57.295545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:57.295555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:57.295707Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6880 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515437071 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:57.602118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:57.611087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515437701 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... 
(TRUNCATED) 2025-12-23T18:43:57.701398Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126030392338132:2546] Handshake: worker# [1:7587126030392338041:2485] 2025-12-23T18:43:57.701725Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126030392338132:2546] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:43:57.702024Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126030392338132:2546] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:43:57.702058Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126030392338132:2546] Send handshake: worker# [1:7587126030392338041:2485] 2025-12-23T18:43:57.702323Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126030392338132:2546] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-12-23T18:43:57.702495Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126030392338132:2546] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-12-23T18:43:57.702681Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126030392338135:2546] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-12-23T18:43:57.702712Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126030392338132:2546] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:57.702773Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: 
[TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126030392338135:2546] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-12-23T18:43:57.704694Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126030392338135:2546] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:43:57.704750Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126030392338132:2546] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:57.704790Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126030392338132:2546] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-12-23T18:43:57.738209Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas >> TSchemeShardSubDomainTest::ForceDropTwice |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> KqpPg::CreateUniqComplexPgColumn-useSink [GOOD] >> KqpPg::CreateTempTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:43:54.255325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:54.374352Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:43:54.387296Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:54.387698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:54.387808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:54.691635Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:54.691760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:54.749468Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515431384697 != 1766515431384701 2025-12-23T18:43:54.764306Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:54.807097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:54.917054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004330/r3tmp/tmpZjBTR3/pdisk_1.dat 2025-12-23T18:43:55.196826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:55.210814Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:55.314656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:55.596708Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-12-23T18:43:55.596899Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:951:2809], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2025-12-23T18:43:55.693072Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:951:2809], subTag: 2} TUpsertActor finished in 0.095792s, errors=0 2025-12-23T18:43:55.693147Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:952:2810] with tag# 2 ... 
waiting for SysViewsRoster update finished 2025-12-23T18:43:58.786512Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:58.795799Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:58.796199Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:58.796284Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:59.029417Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:59.029519Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:59.045265Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766515436133977 != 1766515436133980 2025-12-23T18:43:59.051332Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:59.096802Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:59.167185Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004330/r3tmp/tmpmHHJNc/pdisk_1.dat 2025-12-23T18:43:59.511347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:59.525935Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:59.630650Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:59.857922Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-12-23T18:43:59.858016Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:951:2809], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" 2025-12-23T18:43:59.946644Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:951:2809], subTag: 2} TUpsertActor finished in 0.088350s, errors=0 2025-12-23T18:43:59.946766Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:952:2810] with tag# 2 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:43:59.640104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:59.640189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:59.640225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:59.640258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:59.640294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:59.640339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:59.640403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:59.640488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:59.641318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:59.641606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:59.723012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:59.723077Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:59.729504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:59.729713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:59.729851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:59.732795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:59.733004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:43:59.733719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:59.733949Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:59.736777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:59.736941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:59.738125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:59.738228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:59.738358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:59.738401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:59.738439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:59.738573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.751942Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:43:59.880268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:43:59.880472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.880694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:43:59.880736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:43:59.880948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:43:59.881008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:59.883931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:59.884158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:43:59.884368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.884420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:43:59.884459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:43:59.884489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:43:59.886855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.886914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:43:59.886941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:43:59.888546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.888584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.888640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:43:59.888701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:43:59.891664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:43:59.893620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:43:59.893820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:43:59.894995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:59.895141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:43:59.895211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:43:59.895498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:43:59.895554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:43:59.895730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:43:59.895809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:43:59.898225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:59.898267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 2025-12-23T18:44:00.953218Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:00.953252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-12-23T18:44:00.953290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-12-23T18:44:00.953587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:44:00.953633Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:44:00.953760Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:44:00.953800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:44:00.953846Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:44:00.953882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:44:00.953921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:44:00.953968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:44:00.954012Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 
2025-12-23T18:44:00.954045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:44:00.954264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-12-23T18:44:00.954312Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:44:00.954354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-12-23T18:44:00.954393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-12-23T18:44:00.955216Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:44:00.955307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:44:00.955345Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:44:00.955396Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-12-23T18:44:00.955443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:00.956227Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:44:00.956320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:44:00.956356Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:44:00.956384Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-12-23T18:44:00.956415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-12-23T18:44:00.956482Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:44:00.959423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:44:00.960709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:44:00.960972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:44:00.961022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:44:00.961461Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:44:00.961565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:44:00.961604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:969:2801] TestWaitNotification: OK eventTxId 102 2025-12-23T18:44:00.962108Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:00.962322Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 248us result status StatusSuccess 2025-12-23T18:44:00.962800Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: 
"!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:00.963371Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:00.963521Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 178us result status StatusSuccess 2025-12-23T18:44:00.963924Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SchemeLimitsRejects >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> LocalTableWriter::SupportedTypes [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-clusteradmin >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] >> LocalTableWriter::DecimalKeys [GOOD] >> 
TSchemeShardSubDomainTest::LS >> TSchemeShardSubDomainTest::Create >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables >> TSchemeShardSubDomainTest::Redefine >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:01.707603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:01.707706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:01.707747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:01.707785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:01.707826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:01.707855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:01.707905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:01.707988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:01.708875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:01.709164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:01.794848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:01.794908Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:01.800291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:01.800502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:01.800645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, 
schemeshardId: 72057594046678944 2025-12-23T18:44:01.803645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:01.803822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:01.804370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:01.804568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:01.806088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:01.806247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:01.807393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:01.807450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:01.807569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:01.807603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:01.807643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:01.807763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:01.814423Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:01.933744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:01.933937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:01.934132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:01.934176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:01.934406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:01.934475Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:01.936455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:01.936688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:01.936875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:01.936926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:01.936961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:01.936994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:01.938708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:01.938804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:01.938851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:01.940350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:01.940386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:01.940428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:01.940460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:01.947739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:01.949182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:01.949378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:01.950187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:01.950296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:01.950339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:01.950637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:01.950727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:01.950914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:01.950987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:01.953082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:01.953126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
72057594046678944 2025-12-23T18:44:02.160425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409546 2025-12-23T18:44:02.161346Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186233409551 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186233409551 2025-12-23T18:44:02.162704Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-12-23T18:44:02.163239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-12-23T18:44:02.163369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-12-23T18:44:02.163787Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2025-12-23T18:44:02.164063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:44:02.164175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2025-12-23T18:44:02.166070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:44:02.166215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-12-23T18:44:02.167640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:02.167693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:02.167800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:44:02.168866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in 
candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:02.168908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:02.168958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:02.169487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-12-23T18:44:02.169539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-12-23T18:44:02.170220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:44:02.170247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:44:02.170476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:44:02.170517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-12-23T18:44:02.172759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-12-23T18:44:02.172800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-12-23T18:44:02.172931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:44:02.172973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:44:02.173060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:44:02.173130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-12-23T18:44:02.175000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:02.175081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-12-23T18:44:02.175346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:44:02.175381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-12-23T18:44:02.175438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:44:02.175452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, 
SendToSchemeshard, txId 103 2025-12-23T18:44:02.175893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:44:02.175994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:44:02.176034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:44:02.176067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:668:2574] 2025-12-23T18:44:02.176180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:44:02.176216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:668:2574] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-12-23T18:44:02.176692Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:02.176876Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 190us result status StatusPathDoesNotExist 2025-12-23T18:44:02.177040Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:44:02.177428Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:02.177639Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 176us result status StatusSuccess 2025-12-23T18:44:02.178034Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-12-23T18:43:25.227299Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125896063205711:2201];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:25.227363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046aa/r3tmp/tmppCJJ64/pdisk_1.dat 2025-12-23T18:43:25.433581Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:25.453126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:25.453233Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:25.523471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:25.561051Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:25.562298Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125896063205532:2081] 1766515405213522 != 1766515405213525 TServer::EnableGrpc on GrpcPort 3577, node 1 2025-12-23T18:43:25.685245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:25.735551Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:25.735571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:25.735578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:25.735691Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:25.872440Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:25.874977Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:25.875013Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:25.876685Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:1149, port: 1149 2025-12-23T18:43:25.876766Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:25.939271Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:25.987083Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:25.987688Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:25.987777Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:26.035097Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:26.087049Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:26.088602Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****ym4g (C1C17B30) () has now valid token of ldapuser@ldap 2025-12-23T18:43:26.228650Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:30.227165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125896063205711:2201];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:30.227296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:43:30.230281Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket 
eyJh****ym4g (C1C17B30) 2025-12-23T18:43:30.230444Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:1149, port: 1149 2025-12-23T18:43:30.230543Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:30.302889Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:30.303375Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:1149 return no entries 2025-12-23T18:43:30.303860Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****ym4g (C1C17B30) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:1149 return no entries)' 2025-12-23T18:43:35.236996Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****ym4g (C1C17B30) 2025-12-23T18:43:36.770199Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125942914800883:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:36.771073Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046aa/r3tmp/tmpQabkSk/pdisk_1.dat 2025-12-23T18:43:36.788157Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:36.851272Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:36.852346Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587125942914800857:2081] 1766515416768953 != 1766515416768956 2025-12-23T18:43:36.889535Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:36.889624Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:36.891371Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11443, node 2 2025-12-23T18:43:36.964239Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:36.964264Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:36.964273Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:36.964361Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:37.038562Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2025-12-23T18:43:37.112260Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:37.116269Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:37.116311Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:37.116986Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:19350, port: 19350 2025-12-23T18:43:37.117055Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:37.171152Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:37.171598Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldaps://localhost:19350. Server is busy 2025-12-23T18:43:37.172010Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****ICXg (3A3E3C9D) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:19350. Server is busy)' 2025-12-23T18:43:37.172289Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:37.172316Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:37.173104Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:19350, port: 19350 2025-12-23T18:43:37.173158Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:37.231181Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:37.231664Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldaps://localhost:19350. Server is busy 2025-12-23T18:43:37.232075Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****ICXg (3A3E3C9D) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:19350. Server is busy)' 2025-12-23T18:43:37.775721Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:38.773752Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****ICXg (3A3E3C9D) 2025-12-23T18:43:38.774081Z node 2 :TICKET_ ... 
eyJh****YRWg (03D71540) () has now valid token of ldapuser@ldap 2025-12-23T18:43:51.247102Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587126004755840456:2084];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:51.247197Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046aa/r3tmp/tmpS63WJ0/pdisk_1.dat 2025-12-23T18:43:51.260161Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:51.334897Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:51.380038Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:51.380146Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:51.385184Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26763, node 4 2025-12-23T18:43:51.422954Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:51.428442Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:51.428487Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:51.428506Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:51.428589Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:51.492001Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:51.495141Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:51.495195Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:51.495972Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24354, port: 24354 2025-12-23T18:43:51.496040Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:51.532544Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:51.575296Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:51.623709Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****sYxg (9F726022) () has now valid token of ldapuser@ldap 2025-12-23T18:43:54.693148Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587126018092670312:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:54.693227Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046aa/r3tmp/tmpffkyNX/pdisk_1.dat 2025-12-23T18:43:54.716688Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:54.766934Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:54.809258Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:54.809343Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:54.814909Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9129, node 5 2025-12-23T18:43:54.842165Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:54.842189Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:54.842203Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:54.842411Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:54.945035Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:54.949164Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:54.949205Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:54.949894Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:8632, port: 8632 2025-12-23T18:43:54.949997Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:54.965312Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:55.007334Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-12-23T18:43:55.055103Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:55.055729Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:55.055784Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-12-23T18:43:55.103195Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-12-23T18:43:55.147152Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-12-23T18:43:55.148464Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****D90Q (F42C80F4) () has now valid token of ldapuser@ldap 2025-12-23T18:43:55.149575Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:58.474527Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587126034583973586:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:58.474583Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046aa/r3tmp/tmpu05d2J/pdisk_1.dat 2025-12-23T18:43:58.487801Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:58.568919Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:58.571415Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7587126034583973559:2081] 1766515438473741 != 1766515438473744 2025-12-23T18:43:58.604223Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:58.604313Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:58.611240Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30481, node 6 2025-12-23T18:43:58.687010Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:58.687046Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:58.687055Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:58.687140Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:58.763141Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2025-12-23T18:43:58.765141Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:58.768890Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:58.768933Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:58.769807Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24082, port: 24082 2025-12-23T18:43:58.769909Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:58.789400Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:58.835355Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-12-23T18:43:58.835478Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:24082. Bad search filter 2025-12-23T18:43:58.836478Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****hPFA (496AE0B5) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:24082. Bad search filter)' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2025-12-23T18:43:58.469530Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126036021270577:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:58.469650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002fc8/r3tmp/tmpUutF91/pdisk_1.dat 2025-12-23T18:43:58.720440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:58.720550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:58.758588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:58.780203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:58.800528Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:58.802239Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126036021270549:2081] 1766515438468081 != 1766515438468084 TClient is connected to server localhost:14801 TServer::EnableGrpc on GrpcPort 4495, node 1 2025-12-23T18:43:59.013334Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:59.014373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:59.014386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:59.014390Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:59.014495Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14801 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515438849 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:59.314022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:59.333196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515439430 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "int32_value" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "u... 
(TRUNCATED) 2025-12-23T18:43:59.446087Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126040316238757:2547] Handshake: worker# [1:7587126040316238667:2487] 2025-12-23T18:43:59.446500Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126040316238757:2547] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:43:59.446913Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126040316238757:2547] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:43:59.446940Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126040316238757:2547] Send handshake: worker# [1:7587126040316238667:2487] 2025-12-23T18:43:59.447759Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126040316238757:2547] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 45b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 44b Offset: 8 SeqNo: 0 CreateTime: 
1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 66b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 71b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 12 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 13 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 14 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 58b Offset: 15 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 16 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 17 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 18 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 76b Offset: 19 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 20 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 21 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 61b Offset: 22 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 23 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 24 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 46b Offset: 25 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 47b Offset: 26 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 50b Offset: 27 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 28 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 29 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 30 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 64b Offset: 31 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 
1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-12-23T18:43:59.448355Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126040316238757:2547] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2025-12-23T18:43:59.448621Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126040316238760:2547] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-12-23T18:43:59.448652Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126040316238757:2547] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:59.448851Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126040316238760:2547] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 
Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2025-12-23T18:43:59.473018Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126040316238760:2547] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:43:59.473086Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126040316238757:2547] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:43:59.473180Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126040316238757:2547] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } 2025-12-23T18:43:59.475884Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> TSchemeShardSubDomainTest::LS [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-EnablePersistentPartitionStats >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-dbadmin >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:02.434463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2025-12-23T18:44:02.434526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:02.434552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:02.434577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:02.434601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:02.434623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:02.434689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:02.434768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:02.435435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:02.435722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:02.516337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:02.516394Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:02.522446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:02.522698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:02.522876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:02.526337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:02.526552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:02.527316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:02.527586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:02.529645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:02.529816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:02.531087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:02.531171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:02.531332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:02.531380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:02.531427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:02.531571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.538526Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:02.675268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:02.675563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.675803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:02.675869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:02.676108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:02.676184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:02.680012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:02.680304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:02.680588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.680654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:02.680698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2025-12-23T18:44:02.680732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:02.683239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.683318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:02.683359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:02.685495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.685547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.685613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:02.685678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:02.689598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:02.692104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:02.692355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:02.693626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:02.693778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:02.693832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:02.694149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:02.694210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:02.694404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:02.694506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:02.696897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:02.696959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... es.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:02.876149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:02.876276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:44:02.876600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-12-23T18:44:02.876647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-12-23T18:44:02.880243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2025-12-23T18:44:02.880361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:44:02.880385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:44:02.880918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:02.881415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:02.881468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:02.881543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:02.881782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:44:02.881825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-12-23T18:44:02.882443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-12-23T18:44:02.882477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 
2025-12-23T18:44:02.882604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6152: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-12-23T18:44:02.883265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:44:02.883304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:44:02.883363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:44:02.883399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-12-23T18:44:02.883553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:02.884987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2025-12-23T18:44:02.885208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-12-23T18:44:02.885246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-12-23T18:44:02.885315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:44:02.885334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-12-23T18:44:02.885412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:44:02.885433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:44:02.885869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-12-23T18:44:02.885980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-12-23T18:44:02.886012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:638:2549] 2025-12-23T18:44:02.886243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:44:02.886326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:44:02.886359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:638:2549] 2025-12-23T18:44:02.886419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 
2025-12-23T18:44:02.886498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:44:02.886538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:638:2549] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-12-23T18:44:02.886989Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:02.887194Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 204us result status StatusPathDoesNotExist 2025-12-23T18:44:02.887369Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:44:02.887812Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:02.887976Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 163us result status StatusPathDoesNotExist 2025-12-23T18:44:02.888095Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:44:02.888498Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-12-23T18:44:02.888675Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 142us result status StatusSuccess 2025-12-23T18:44:02.889092Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD] Test command err: 2025-12-23T18:43:59.268630Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126042609570276:2175];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:59.270687Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002fbf/r3tmp/tmpo0Y7LH/pdisk_1.dat 2025-12-23T18:43:59.511316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:59.511432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:59.548316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:59.564972Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:59.573058Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20682 TServer::EnableGrpc on GrpcPort 12174, node 1 2025-12-23T18:43:59.803644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:59.851242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:59.851286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:59.851296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:59.851417Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20682 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515439626 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:00.107030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:00.116136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766515440200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Decimal(1,0)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 1 DecimalScale: 0 } IsBuildInProgress: false } Columns { Name: "value" Type: "Decimal(35,10)" TypeId: 4865 ... (TRUNCATED) 2025-12-23T18:44:00.206290Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126046904538337:2548] Handshake: worker# [1:7587126046904538247:2488] 2025-12-23T18:44:00.206615Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126046904538337:2548] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:44:00.206936Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126046904538337:2548] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Decimal(1,0) : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:44:00.206970Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126046904538337:2548] Send handshake: worker# [1:7587126046904538247:2488] 2025-12-23T18:44:00.207310Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126046904538337:2548] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 57b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z 
MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-12-23T18:44:00.207552Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126046904538337:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 57 },{ Order: 2 BodySize: 57 },{ Order: 3 BodySize: 57 }] } 2025-12-23T18:44:00.207769Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126046904538340:2548] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-12-23T18:44:00.207830Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126046904538337:2548] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:44:00.207988Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126046904538340:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2025-12-23T18:44:00.209885Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7587126046904538340:2548] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:44:00.209931Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126046904538337:2548] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-12-23T18:44:00.209991Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7587126046904538337:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2025-12-23T18:44:00.274028Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-false [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 
72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:02.906727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:02.906828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:02.906882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:02.906915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:02.906952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:02.906978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:02.907028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:02.907096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:02.907772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:02.908007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:02.982402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:02.982481Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:02.987505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:02.987663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:02.987746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:02.989913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:02.990048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:02.990602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:02.990862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:02.992268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:02.992409Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:02.993263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:02.993309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:02.993439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:02.993491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:02.993523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:02.993635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.998487Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:03.082194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:03.082345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.082489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:03.082535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:03.082695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:03.082762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:03.084334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:03.084536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:03.084692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.084750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:03.084785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:03.084808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:03.086260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.086310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:03.086335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:03.087571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.087620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.087672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:03.087711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:03.090313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:03.091486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:03.091616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:03.092338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:03.092428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:03.092464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-12-23T18:44:03.092648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:03.092680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:03.092798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:03.092876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:03.094562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:03.094596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... meshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:44:03.175933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:03.175971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-12-23T18:44:03.176022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-12-23T18:44:03.176205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.176240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-12-23T18:44:03.176334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:44:03.176372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:03.176421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:44:03.176450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:03.176487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-12-23T18:44:03.176522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:03.176558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2025-12-23T18:44:03.176586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 100:0 2025-12-23T18:44:03.176777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 6 2025-12-23T18:44:03.176826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-12-23T18:44:03.176856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-12-23T18:44:03.176903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-12-23T18:44:03.177639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:03.177721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:03.177755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:44:03.177795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-12-23T18:44:03.177859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:03.178730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:03.178827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:03.178854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:44:03.178882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-12-23T18:44:03.178907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-12-23T18:44:03.178970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-12-23T18:44:03.181795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-12-23T18:44:03.181877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult 
got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-12-23T18:44:03.182067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-12-23T18:44:03.182135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-12-23T18:44:03.182494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-12-23T18:44:03.182570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-12-23T18:44:03.182605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:456:2412] TestWaitNotification: OK eventTxId 100 2025-12-23T18:44:03.183059Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:03.183328Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 249us result status StatusSuccess 2025-12-23T18:44:03.183829Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:03.184438Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:03.184608Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 184us result status StatusSuccess 2025-12-23T18:44:03.185004Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] >> KqpPg::AlterSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence |95.2%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] |95.2%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:03.195779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:03.195852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:03.195880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:03.195906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:03.195942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:03.195969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:03.196012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:03.196080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:03.196810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:03.197101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:03.269001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:03.269060Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:03.273883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:03.274103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:03.274236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:03.277256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:03.277435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:03.277975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-12-23T18:44:03.278237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:03.280111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:03.280265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:03.281428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:03.281482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:03.281619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:03.281661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:03.281702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:03.281861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.288277Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:03.407618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:03.407833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.408058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:03.408098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:03.408308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:03.408397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:03.410447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:03.410653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:03.410839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.410902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:03.410945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:03.410977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:03.412758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.412825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:03.412859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:03.414508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.414559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.414621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:03.414660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:03.417153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:03.418364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:03.418506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:03.419389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:03.419504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:03.419540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:03.419757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:03.419806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:03.419929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:03.420006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:03.422054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:03.422089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... : 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:44:03.604105Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:44:03.604152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-12-23T18:44:03.604197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-12-23T18:44:03.604273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:44:03.606681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at schemeshard 72057594046678944 2025-12-23T18:44:03.606796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:03.606853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:03.606870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:03.607747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:44:03.608185Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 
72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-12-23T18:44:03.608285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:44:03.608356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:03.608603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-12-23T18:44:03.608936Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-12-23T18:44:03.609884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:44:03.610070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:44:03.610455Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-12-23T18:44:03.611001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:44:03.611142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2025-12-23T18:44:03.612077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:03.612120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:03.612223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:44:03.612650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:03.612686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 
72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:03.612733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:03.614015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:44:03.614056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:44:03.614966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:44:03.614994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-12-23T18:44:03.615035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:44:03.615064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:44:03.616309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:03.616396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:44:03.616660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:44:03.616699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:44:03.617042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:44:03.617111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:44:03.617140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:577:2533] TestWaitNotification: OK eventTxId 104 2025-12-23T18:44:03.617623Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:03.617815Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 190us result status StatusPathDoesNotExist 2025-12-23T18:44:03.617985Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 
18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:44:03.618437Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:03.618599Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 163us result status StatusSuccess 2025-12-23T18:44:03.618977Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:43:59.552792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:59.552892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:59.552935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:59.552973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:59.553017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:59.553072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:43:59.553141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:59.553247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:59.554248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:59.554604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:59.648593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:59.648653Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:59.655422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:59.655699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:59.655841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:59.659568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:59.659774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2025-12-23T18:43:59.660787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:59.661070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:59.663590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:59.663784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:59.665110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:59.665198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:59.665404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:59.665464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:59.665515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:59.665669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.840430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.841580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.841726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.841824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.841904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.841971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.842152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.842238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.842316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.842389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.842490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.842599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.842684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.842786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.842883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 18:44:03.731020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.731293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:03.731339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:03.731548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:44:03.731707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:03.731766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 38 2025-12-23T18:44:03.731809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 39 2025-12-23T18:44:03.732200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.732251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 
72057594046678944 2025-12-23T18:44:03.732334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.732374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-12-23T18:44:03.732415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2025-12-23T18:44:03.733102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:03.733220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:03.733264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:44:03.733299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2025-12-23T18:44:03.733336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:44:03.734159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:03.734233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:03.734277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:44:03.734306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2025-12-23T18:44:03.734332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:44:03.734393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-12-23T18:44:03.736820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.736877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:03.737216Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:44:03.737389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:44:03.737432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:44:03.737466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:44:03.737496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:44:03.737528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-12-23T18:44:03.737584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:816:2783] message: TxId: 103 2025-12-23T18:44:03.737621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:44:03.737653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:44:03.737682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:44:03.737774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:44:03.738178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:03.738215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:03.739736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:44:03.740824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:44:03.741686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:03.741742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 0, path id: 38 2025-12-23T18:44:03.741846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:44:03.741889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1138:3072] 2025-12-23T18:44:03.742711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: 
OK eventTxId 103 2025-12-23T18:44:03.744336Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:03.744537Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 224us result status StatusSuccess 2025-12-23T18:44:03.745014Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: 
[1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:03.144847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:03.144938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:03.145002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:03.145046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:03.145090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:03.145119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:03.145176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:03.145264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:03.146214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:03.146519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:03.235216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:03.235279Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:03.241628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:03.241884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:03.242038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:03.245348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:03.245552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:03.246324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:03.246578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:03.248823Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:03.249036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:03.250258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:03.250323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:03.250497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:03.250563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:03.250613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:03.250782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.258097Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:03.399442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:03.399701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.399931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:03.399972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:03.400191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:03.400259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:03.402388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:03.402645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:03.402864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.402923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:03.402967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:03.403005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:03.404975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.405051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:03.405095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:03.406707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.406776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.406843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:03.406910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:03.410657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:03.412391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:03.412610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:03.413735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:03.413863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:03.413917Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:03.414216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:03.414296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:03.414488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:03.414576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:03.416547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:03.416592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... shard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 612 RawX2: 4294969848 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-12-23T18:44:04.059384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 2 2025-12-23T18:44:04.059491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 107:2, at schemeshard: 72057594046678944, message: Source { RawX1: 612 RawX2: 4294969848 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-12-23T18:44:04.059531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:44:04.059603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 612 RawX2: 4294969848 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-12-23T18:44:04.059639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:2, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:04.059665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:2, at schemeshard: 72057594046678944 2025-12-23T18:44:04.059706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-12-23T18:44:04.059735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:2 129 -> 240 2025-12-23T18:44:04.062520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:44:04.065314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:44:04.065394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:44:04.065445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-12-23T18:44:04.065512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-12-23T18:44:04.065585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:44:04.065674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-12-23T18:44:04.065957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-12-23T18:44:04.066002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 107:0 ProgressState 2025-12-23T18:44:04.066106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 2/3 2025-12-23T18:44:04.066146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-12-23T18:44:04.066186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 2/3 2025-12-23T18:44:04.066224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-12-23T18:44:04.066261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2025-12-23T18:44:04.066497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-12-23T18:44:04.066760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2025-12-23T18:44:04.066798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 107:2 ProgressState 2025-12-23T18:44:04.066858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 3/3 2025-12-23T18:44:04.066883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-12-23T18:44:04.066913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 3/3 2025-12-23T18:44:04.066936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-12-23T18:44:04.066969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true 2025-12-23T18:44:04.067028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:487:2436] message: TxId: 107 2025-12-23T18:44:04.067087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-12-23T18:44:04.067140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2025-12-23T18:44:04.067197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 107:0 2025-12-23T18:44:04.067303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-12-23T18:44:04.067357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:1 2025-12-23T18:44:04.067391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 107:1 2025-12-23T18:44:04.067414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-12-23T18:44:04.067431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:2 2025-12-23T18:44:04.067450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 107:2 2025-12-23T18:44:04.067526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-12-23T18:44:04.069479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-12-23T18:44:04.069512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:543:2492] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-12-23T18:44:04.072286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:04.072663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:101: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 
72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7 2025-12-23T18:44:04.072751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-12-23T18:44:04.072787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-12-23T18:44:04.074463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:04.074653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-12-23T18:44:04.075004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-12-23T18:44:04.075043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-12-23T18:44:04.075453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-12-23T18:44:04.075537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-12-23T18:44:04.075566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:733:2653] TestWaitNotification: OK eventTxId 108 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:00.667639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:00.667711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:00.667754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:00.667803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:00.667867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:00.667945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:00.668022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:00.668103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:00.668971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:00.669229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:00.743838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:00.743900Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:00.750699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:00.750982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:00.751154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:00.754819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:00.755040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:00.755933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:00.756159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:00.758505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:00.758712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:00.759782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:00.759836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:00.759948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:00.760000Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:00.760054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:00.760181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:00.909100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.910244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.910380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.910475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.910556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.910648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.910771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.910847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.910921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.911012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.911092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.911195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.911272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.911369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:00.911466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... p:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-12-23T18:44:03.960472Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-12-23T18:44:03.960512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-12-23T18:44:03.960548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-12-23T18:44:03.961608Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:44:03.961693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:44:03.961728Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:44:03.961774Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-12-23T18:44:03.961813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:03.962810Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:44:03.962899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:44:03.962929Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:44:03.962958Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-12-23T18:44:03.962990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 4 2025-12-23T18:44:03.963054Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:44:03.964474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2025-12-23T18:44:03.964583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:03.964628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:03.965265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:44:03.965752Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-12-23T18:44:03.967121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:03.967448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:44:03.968223Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-12-23T18:44:03.968605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:44:03.968815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-12-23T18:44:03.969275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:03.969329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:03.969446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:44:03.970223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:03.970276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:03.970349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:03.971711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:44:03.972570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:44:03.972629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:44:03.974253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:44:03.974309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:44:03.974429Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:03.974572Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:44:03.974888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:44:03.974934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:44:03.975413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:44:03.975504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:44:03.975545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:2103:3707] TestWaitNotification: OK eventTxId 104 2025-12-23T18:44:03.983782Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:03.983981Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 238us result status StatusPathDoesNotExist 2025-12-23T18:44:03.984130Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 
18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:44:03.984752Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:03.984911Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 174us result status StatusPathDoesNotExist 2025-12-23T18:44:03.985032Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 6060, MsgBus: 2715 2025-12-23T18:43:05.997973Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125807335115645:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:05.998068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005128/r3tmp/tmp47L9iF/pdisk_1.dat 2025-12-23T18:43:06.233396Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:06.270357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:06.270504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:06.316520Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125807335115617:2081] 1766515385996673 != 1766515385996676 2025-12-23T18:43:06.321624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-12-23T18:43:06.324564Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6060, node 1 2025-12-23T18:43:06.401371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:06.401399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:06.401633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:06.401771Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:06.421450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2715 TClient is connected to server localhost:2715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515386377 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:06.849542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:07.005993Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:09.172559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125824514985688:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.172679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.172989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125824514985698:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.173043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.220340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:09.361204Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125824514985794:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.361323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.361686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125824514985796:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.361764Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.379624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:09.423134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125824514985874:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.423261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.423324Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125824514985879:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.423512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125824514985881:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.423604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:09.427344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:09.436867Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125824514985882:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:43:09.502968Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125824514985934:2643] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 16770, MsgBus: 4496 2025-12-23T18:43:10.847559Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125829406317450:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:10.847791Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005128/r3tmp/tmpcFXAu8/pdisk_1.dat 2025-12-23T18:43:10.863736Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:10.936110Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:10.946404Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:10.946476Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:10.951158Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16770, node 2 2025-12-23T18:43:11.030633Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:11.030655Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:11.030665Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:11.030784Z node 2 :NET_CLASSI ... 87126026164614566:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:56.953305Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:56.958367Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:56.970935Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7587126026164614565:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:57.067135Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7587126030459581914:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:57.098420Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:7587126030459581931:2332], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-12-23T18:43:57.098890Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=10&id=MTU2M2U1YTAtZTFjMmQ5NWYtY2I2YjQyOWQtMTRiYWUxNWY=, ActorId: [10:7587126026164614548:2322], ActorState: ExecuteState, TraceId: 01kd68cqmqfbfr4a092gd8amy7, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiCreateTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: \"text\"\n" end_position { row: 1 column: 1 } severity: 1 } } }, remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-12-23T18:43:57.190178Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7587126008984744507:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:57.190323Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6575, MsgBus: 8187 2025-12-23T18:43:58.188404Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7587126034181034665:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:58.188484Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005128/r3tmp/tmp3AebIW/pdisk_1.dat 2025-12-23T18:43:58.216932Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:58.320108Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:58.323039Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:58.323213Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:58.347566Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6575, node 11 2025-12-23T18:43:58.449529Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:58.449567Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:58.449580Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:58.449713Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:58.465002Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8187 TClient is connected to server localhost:8187 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515438443 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:59.159847Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:59.195121Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:02.697046Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126051360904708:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:02.697090Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126051360904700:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:02.697219Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:02.697569Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126051360904715:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:02.697702Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:02.702882Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:02.718651Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7587126051360904714:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:44:02.808233Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7587126051360904768:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:02.831676Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [11:7587126051360904785:2332], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-12-23T18:44:02.832093Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=11&id=OTQzN2U4NDQtZGVhOGFiMjYtNmVjYzk4YTUtZmI1ZmVlMTA=, ActorId: [11:7587126051360904698:2322], ActorState: ExecuteState, TraceId: 01kd68cxj88yj041e431d3mh38, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiCreateTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: \"text\"\n" end_position { row: 1 column: 1 } severity: 1 } } }, remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink >> TSchemeShardSubDomainTest::DeleteAndRestart >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] TEST create schemeshard, 0x00007D772FAABD00 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:01.098533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:01.098599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:01.098630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:01.098655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:01.098684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:01.098711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:01.098771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:01.098843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-12-23T18:44:01.099519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:01.099795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:01.175238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:01.175297Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:01.180461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:01.180649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:01.180765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:01.183310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:01.183456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:01.183985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:01.184220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:01.186020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:01.186167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:01.187186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:01.187244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:01.187357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:01.187400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:01.187432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:01.187525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:01.329225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.332580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.332734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.332840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.332929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.332984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.333063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.333130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.333195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.333252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.333309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.333409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.333474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.333561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.333644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 Leader for TabletID 72057594046678944 is [1:891:2840] sender: [1:964:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 2025-12-23T18:44:05.172580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 2025-12-23T18:44:05.183530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 
72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 0 2025-12-23T18:44:05.183638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2025-12-23T18:44:05.183700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:05.183785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2025-12-23T18:44:05.183832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2025-12-23T18:44:05.183854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:05.183881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D772FBD9100, stats written 2 2025-12-23T18:44:05.184222Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:05.184416Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 234us result status StatusSuccess 2025-12-23T18:44:05.184788Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:02.029026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:02.029099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:02.029146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:02.029189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:02.029227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:02.029258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:02.029343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:02.029420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:02.030233Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:02.030543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:02.110406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:02.110460Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:02.115637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:02.115822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:02.115944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:02.118822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:02.119000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:02.119671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:02.119876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:02.121558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:02.121716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:02.122713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:02.122784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:02.122918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:02.122976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:02.123025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:02.123176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.129329Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:02.255458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-12-23T18:44:02.255630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.255800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:02.255836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:02.256018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:02.256074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:02.258404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:02.258721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:02.258980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.259057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:02.259106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:02.259153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:02.261783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.261854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:02.261900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:02.264167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.264224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:02.264297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:02.264375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-12-23T18:44:02.267944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:02.269831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:02.270021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:02.271194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:02.271345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:02.271408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:02.271754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:02.271821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:02.272016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:02.272102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:02.274721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:02.274800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
erId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-12-23T18:44:05.007640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:05.007967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-12-23T18:44:05.008285Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 0 TabletID: 72075186233409556 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 0 TabletID: 72075186233409556 2025-12-23T18:44:05.008789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 15, at schemeshard: 72057594046678944 2025-12-23T18:44:05.009034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 2025-12-23T18:44:05.009353Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 0 TabletID: 72075186233409555 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 0 TabletID: 72075186233409555 Forgetting tablet 72075186233409556 2025-12-23T18:44:05.012863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2025-12-23T18:44:05.013139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 2025-12-23T18:44:05.013403Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-12-23T18:44:05.014281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 Forgetting tablet 72075186233409555 2025-12-23T18:44:05.014686Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 0 TabletID: 72075186233409557 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 0 TabletID: 72075186233409557 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409557 2025-12-23T18:44:05.017578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-12-23T18:44:05.017762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK 
Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:44:05.018118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:44:05.018639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2025-12-23T18:44:05.018877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2025-12-23T18:44:05.020064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-12-23T18:44:05.020202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:05.020259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-12-23T18:44:05.020330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:44:05.020601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:05.020656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:05.020778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:44:05.023406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:44:05.023474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:44:05.023560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:15 2025-12-23T18:44:05.023587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2025-12-23T18:44:05.023714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:14 2025-12-23T18:44:05.023748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2025-12-23T18:44:05.026186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:44:05.026230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:44:05.026299Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:16 2025-12-23T18:44:05.026336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2025-12-23T18:44:05.026554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:05.026698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:05.026828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:05.026879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:05.026973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:05.028931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2025-12-23T18:44:05.029772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2025-12-23T18:44:05.029820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2025-12-23T18:44:05.030922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2025-12-23T18:44:05.031047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2025-12-23T18:44:05.031089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2149:3932] TestWaitNotification: OK eventTxId 139 2025-12-23T18:44:05.032717Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:05.032931Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 226us result status StatusSuccess 2025-12-23T18:44:05.033428Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 4 MaxPaths: 5 MaxChildrenInDir: 4 MaxAclBytesSize: 25 MaxTableColumns: 3 MaxTableColumnNameLength: 10 MaxTableKeyColumns: 1 MaxTableIndices: 20 MaxShards: 6 MaxShardsInPath: 4 MaxConsistentCopyTargets: 1 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 20 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] TEST create schemeshard, 0x00007D0F882A5900 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:01.130489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:01.130560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:01.130589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:01.130617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:01.130651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:01.130676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:01.130727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-12-23T18:44:01.130839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:01.131542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:01.131772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:01.219425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:01.219510Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:01.226339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:01.226614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:01.226829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:01.230541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:01.230726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:01.231525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:01.231755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:01.233821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:01.233999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:01.235451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:01.235529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:01.235677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:01.235724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:01.235785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:01.235926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:01.401650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir 
MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.402724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.402896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.403008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.403094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.403213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.403310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.403413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.403490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.403563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.403667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.403750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.403826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.403932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.404048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 
aitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 Leader for TabletID 72057594046678944 is [1:905:2854] sender: [1:976:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 2025-12-23T18:44:05.306499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 0 2025-12-23T18:44:05.306679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2025-12-23T18:44:05.306737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:05.306927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 2025-12-23T18:44:05.319546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 
0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 2025-12-23T18:44:05.330491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 1 2025-12-23T18:44:05.330629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2025-12-23T18:44:05.330675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:05.330833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D0F883DE100, stats written 2 2025-12-23T18:44:05.331347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:05.333209Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 232us result status StatusSuccess 2025-12-23T18:44:05.333574Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] >> KqpPg::InsertFromSelect_Simple-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:112:2143] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:112:2143] Leader for TabletID 9437184 is [1:134:2156] sender: [1:135:2057] recipient: [1:112:2143] 2025-12-23T18:43:02.118206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:02.118253Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:02.119487Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:43:02.148513Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:43:02.148963Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:43:02.149269Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:43:02.160374Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:43:02.219403Z 
node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:43:02.220728Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:43:02.222626Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:43:02.222715Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:43:02.233126Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:43:02.234131Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:43:02.236010Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:43:02.236126Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:203:2156] in generation 2 Leader for TabletID 9437184 is [1:134:2156] sender: [1:214:2057] recipient: [1:14:2061] 2025-12-23T18:43:02.344295Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:43:02.408765Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:43:02.409043Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:43:02.409175Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2025-12-23T18:43:02.409224Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:43:02.409275Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:43:02.409321Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:02.409637Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.409704Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.410075Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:43:02.410200Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:43:02.410304Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:02.410356Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:02.410432Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:43:02.410528Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:43:02.410568Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:43:02.410616Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:43:02.410673Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437184 2025-12-23T18:43:02.411065Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:215:2214], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.411131Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.411184Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:215:2214], sessionId# [0:0:0] 2025-12-23T18:43:02.428758Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:43:02.428846Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:02.428963Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:02.429218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:43:02.429289Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:43:02.429348Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:43:02.429427Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:02.429476Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:43:02.429520Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:43:02.429587Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:02.429907Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:43:02.429944Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:43:02.429993Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:02.430055Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:02.430120Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:43:02.430156Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:02.430196Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:43:02.430231Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on 
unit WaitForPlan 2025-12-23T18:43:02.430258Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:43:02.445490Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:02.445604Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:02.445645Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:02.445694Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:43:02.445787Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:43:02.446421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.446484Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.446533Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2025-12-23T18:43:02.446690Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-12-23T18:43:02.446726Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:43:02.446927Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:02.446991Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2025-12-23T18:43:02.447047Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:43:02.447095Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:43:02.451305Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:43:02.451393Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:02.451657Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.451720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.451790Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:02.451842Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:43:02.451886Z node 1 :TX_DATASHARD TRACE: datashard_pipelin ... 
HARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2025-12-23T18:44:05.403288Z node 39 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:44:05.403316Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2025-12-23T18:44:05.403349Z node 39 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [7:6] in PlanQueue unit at 9437186 2025-12-23T18:44:05.403378Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit PlanQueue 2025-12-23T18:44:05.403409Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2025-12-23T18:44:05.403436Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit PlanQueue 2025-12-23T18:44:05.403465Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit LoadTxDetails 2025-12-23T18:44:05.403493Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit LoadTxDetails 2025-12-23T18:44:05.404146Z node 39 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437186 loaded tx from db 7:6 keys extracted: 1 2025-12-23T18:44:05.404217Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2025-12-23T18:44:05.404252Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit LoadTxDetails 2025-12-23T18:44:05.404280Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit FinalizeDataTxPlan 2025-12-23T18:44:05.404310Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit FinalizeDataTxPlan 2025-12-23T18:44:05.404353Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2025-12-23T18:44:05.404380Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit FinalizeDataTxPlan 2025-12-23T18:44:05.404408Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit BuildAndWaitDependencies 2025-12-23T18:44:05.404438Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit BuildAndWaitDependencies 2025-12-23T18:44:05.404487Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [7:6] is the new logically complete end at 9437186 2025-12-23T18:44:05.404518Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [7:6] is the new logically incomplete end at 9437186 2025-12-23T18:44:05.404551Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [7:6] at 9437186 2025-12-23T18:44:05.404595Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2025-12-23T18:44:05.404619Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit BuildAndWaitDependencies 2025-12-23T18:44:05.404643Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit BuildDataTxOutRS 2025-12-23T18:44:05.404671Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] 
at 9437186 on unit BuildDataTxOutRS 2025-12-23T18:44:05.404725Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2025-12-23T18:44:05.404756Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit BuildDataTxOutRS 2025-12-23T18:44:05.404789Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit StoreAndSendOutRS 2025-12-23T18:44:05.404833Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit StoreAndSendOutRS 2025-12-23T18:44:05.404866Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2025-12-23T18:44:05.404891Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit StoreAndSendOutRS 2025-12-23T18:44:05.404940Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit PrepareDataTxInRS 2025-12-23T18:44:05.404967Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit PrepareDataTxInRS 2025-12-23T18:44:05.405003Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2025-12-23T18:44:05.405028Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit PrepareDataTxInRS 2025-12-23T18:44:05.405062Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit LoadAndWaitInRS 2025-12-23T18:44:05.405092Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit LoadAndWaitInRS 2025-12-23T18:44:05.405120Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2025-12-23T18:44:05.405146Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit LoadAndWaitInRS 2025-12-23T18:44:05.405171Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit BlockFailPoint 2025-12-23T18:44:05.405200Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit BlockFailPoint 2025-12-23T18:44:05.405228Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2025-12-23T18:44:05.405254Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit BlockFailPoint 2025-12-23T18:44:05.405280Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit ExecuteDataTx 2025-12-23T18:44:05.405308Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit ExecuteDataTx 2025-12-23T18:44:05.405686Z node 39 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [7:6] at tablet 9437186 with status COMPLETE 2025-12-23T18:44:05.405744Z node 39 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [7:6] at 9437186: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:44:05.405794Z node 39 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2025-12-23T18:44:05.405817Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit ExecuteDataTx 2025-12-23T18:44:05.405839Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit CompleteOperation 2025-12-23T18:44:05.405869Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit CompleteOperation 2025-12-23T18:44:05.406068Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is DelayComplete 2025-12-23T18:44:05.406106Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit CompleteOperation 2025-12-23T18:44:05.406141Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit CompletedOperations 2025-12-23T18:44:05.406176Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit CompletedOperations 2025-12-23T18:44:05.406219Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2025-12-23T18:44:05.406251Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit CompletedOperations 2025-12-23T18:44:05.406282Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [7:6] at 9437186 has finished 2025-12-23T18:44:05.406316Z node 39 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:44:05.406348Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2025-12-23T18:44:05.406383Z node 39 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2025-12-23T18:44:05.406418Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2025-12-23T18:44:05.420524Z node 39 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6} 2025-12-23T18:44:05.420610Z node 39 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7} 2025-12-23T18:44:05.420674Z node 39 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:44:05.420715Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [7:6] at 9437184 on unit CompleteOperation 2025-12-23T18:44:05.420783Z node 39 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [39:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:44:05.420836Z node 39 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:44:05.421151Z node 39 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6} 2025-12-23T18:44:05.421184Z node 39 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7} 2025-12-23T18:44:05.421216Z node 39 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:44:05.421247Z node 39 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [7:6] at 9437186 on unit CompleteOperation 2025-12-23T18:44:05.421293Z node 39 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [39:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:44:05.421327Z node 39 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:44:05.421613Z node 39 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6} 2025-12-23T18:44:05.421663Z node 39 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7} 2025-12-23T18:44:05.421717Z node 39 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-12-23T18:44:05.421757Z node 39 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [7:6] at 9437185 on unit CompleteOperation 2025-12-23T18:44:05.421805Z node 39 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [39:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:44:05.421846Z node 39 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-system >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] >> TSchemeShardSubDomainTest::CreateAndWait ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:03.035863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:03.035986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:03.036033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:03.036064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:03.036101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2025-12-23T18:44:03.036129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:03.036182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:03.036252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:03.037119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:03.037424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:03.128284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:03.128337Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:03.133491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:03.133669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:03.133788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:03.136454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:03.136630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:03.137260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:03.137518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:03.139539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:03.139707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:03.140851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:03.140910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:03.141068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:03.141111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:03.141151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:03.141262Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.299757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.302172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.302276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.302351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.302412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.302453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.302511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.302689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.302768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.302806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.302840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.302894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.302954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.303005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.303060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... EMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:06.059484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:06.059525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:06.060595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:44:06.061240Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-12-23T18:44:06.065154Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-12-23T18:44:06.065635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:06.065993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 Forgetting tablet 72075186233409546 2025-12-23T18:44:06.067140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:44:06.069436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:44:06.069861Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-12-23T18:44:06.070823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:44:06.071031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:44:06.071692Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 
TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 2025-12-23T18:44:06.072835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:44:06.073021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 Forgetting tablet 72075186233409549 2025-12-23T18:44:06.075325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:06.075385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:44:06.075450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:44:06.075910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:06.075963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:44:06.076060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:44:06.076504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:44:06.076646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:44:06.078166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:44:06.078219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:44:06.078314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:44:06.078339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-12-23T18:44:06.080663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:44:06.080737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:44:06.080815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:44:06.080845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-12-23T18:44:06.081041Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:06.081109Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:06.081153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:06.081187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:44:06.081263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:06.083670Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-12-23T18:44:06.085725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:44:06.085767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:44:06.086111Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:44:06.086186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.086213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1060:3013] TestWaitNotification: OK eventTxId 105 2025-12-23T18:44:06.086703Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:06.086885Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 204us result status StatusPathDoesNotExist 2025-12-23T18:44:06.087029Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 
18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:44:06.087469Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:06.087601Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 142us result status StatusPathDoesNotExist 2025-12-23T18:44:06.087684Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:05.562653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:05.562728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:05.562787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:05.562819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:05.562853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:05.562896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:05.562967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:05.563047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:05.563917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:05.564229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:05.648291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:05.648355Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:05.654912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:05.655223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:05.655382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:05.659094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:05.659321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:05.660016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:05.660261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:05.662382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:05.662556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:05.663819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:05.663879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:05.664027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:05.664072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:05.664113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:05.664252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.675755Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:05.823788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { 
Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:05.824000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.824221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:05.824268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:05.824533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:05.824613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:05.826730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:05.826980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:05.827201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.827258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:05.827293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:05.827324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:05.829275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.829337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:05.829390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:05.831059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.831102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.831188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:05.831240Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:05.839680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:05.841916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:05.842135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:05.843312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:05.843449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:05.843498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:05.843810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:05.843861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:05.844032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:05.844124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:05.846342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:05.846391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
7594046678944, message: Source { RawX1: 512 RawX2: 4294969761 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:44:06.171650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:44:06.171766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 512 RawX2: 4294969761 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:44:06.171859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:06.171906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.171953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-12-23T18:44:06.172015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:44:06.172530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:44:06.172635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:44:06.172675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:44:06.172711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-12-23T18:44:06.172750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-12-23T18:44:06.172819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:44:06.177485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.177638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:44:06.177747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.178045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.178082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:44:06.178175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:44:06.178244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:44:06.178302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:44:06.178333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:44:06.178374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:44:06.178437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:314:2304] message: TxId: 102 2025-12-23T18:44:06.178475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:44:06.178511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:44:06.178537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:44:06.178669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-12-23T18:44:06.179027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:44:06.180510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.180555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:540:2485] TestWaitNotification: OK eventTxId 102 2025-12-23T18:44:06.180981Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:06.181208Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 249us result status StatusSuccess 2025-12-23T18:44:06.181652Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 
SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:06.182253Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:06.182497Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 238us result status StatusSuccess 2025-12-23T18:44:06.182910Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 
Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:04.198959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:04.199046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:04.199090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:04.199132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:04.199189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:04.199224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:04.199282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:04.199371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:04.200373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:04.200722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:04.290962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:04.291042Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:04.303484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:04.303738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:04.303878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:04.307595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:04.307818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:04.308665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:04.308948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:04.311474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:04.311692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:04.312982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:04.313067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:04.313221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:04.313276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:04.313326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:04.313460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:04.321555Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 
2025-12-23T18:44:04.466244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:04.466496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:04.466804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:04.466878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:04.467266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:04.467342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:04.470258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:04.470567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:04.470829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:04.470893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:04.470938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:04.471000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:04.473543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:04.473630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:04.473677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:04.475767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:04.475819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 
1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:04.475910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:04.475992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:04.480088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:04.483220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:04.483479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:04.484803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:04.484987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:04.485054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:04.485431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:04.485500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:04.485703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:04.485792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:04.488228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:04.488307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:06.302410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:06.302465Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:199: TAlterOlapStore TPropose operationId# 108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2025-12-23T18:44:06.302635Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 128 -> 129 2025-12-23T18:44:06.302859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:06.302934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-12-23T18:44:06.305374Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:06.305429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:06.305591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-12-23T18:44:06.305760Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:06.305809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:340:2316], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-12-23T18:44:06.305866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:340:2316], at schemeshard: 72057594046678944, txId: 108, path id: 5 2025-12-23T18:44:06.306313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.306373Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:305: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:44:06.306440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: alter_store.cpp:332: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2025-12-23T18:44:06.307284Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:44:06.307392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:44:06.307437Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-12-23T18:44:06.307497Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:44:06.307543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:44:06.308566Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:44:06.308638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:44:06.308669Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-12-23T18:44:06.308697Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-12-23T18:44:06.308726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-12-23T18:44:06.308786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-12-23T18:44:06.311123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2025-12-23T18:44:06.311993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-12-23T18:44:06.312981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-12-23T18:44:06.325182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6585: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-12-23T18:44:06.325245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2025-12-23T18:44:06.325391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-12-23T18:44:06.325447Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 129 -> 240 FAKE_COORDINATOR: Erasing txId 108 2025-12-23T18:44:06.327483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.327655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.327714Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2025-12-23T18:44:06.327821Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2025-12-23T18:44:06.327855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-12-23T18:44:06.327911Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2025-12-23T18:44:06.327945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-12-23T18:44:06.327980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-12-23T18:44:06.328043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:496:2445] message: TxId: 108 2025-12-23T18:44:06.328090Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-12-23T18:44:06.328129Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2025-12-23T18:44:06.328160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 108:0 2025-12-23T18:44:06.328294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-12-23T18:44:06.330332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.330389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:948:2859] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2025-12-23T18:44:06.334151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:06.334395Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: alter_store.cpp:465: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.334769Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, at schemeshard: 72057594046678944 2025-12-23T18:44:06.337068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. 
Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:06.337299Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-12-23T18:44:06.337735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-12-23T18:44:06.337860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-12-23T18:44:06.338410Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-12-23T18:44:06.338511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.338550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:983:2894] TestWaitNotification: OK eventTxId 109 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:05.571277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:05.571338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:05.571364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:05.571390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:05.571418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:05.571447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:05.571485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:05.571551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:05.572212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:05.572424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:05.653179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:05.653257Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:05.659389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:05.659703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:05.659825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:05.663063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:05.663270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:05.663988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:05.664233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:05.666327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:05.666490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:05.667706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:05.667779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:05.667969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:05.668017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:05.668059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:05.668188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.818058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.819119Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.819287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.819383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.819455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.819526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.819619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.819692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.819761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.819821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.819923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.820021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.820110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.820201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:05.820304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:06.476190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-12-23T18:44:06.476363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2025-12-23T18:44:06.476402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-12-23T18:44:06.476539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:06.476602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:44:06.476648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:44:06.478190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:06.478240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:06.478362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:06.478443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:06.478473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:44:06.478506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 38 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:44:06.478673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.478700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:44:06.478800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:06.478825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:06.478857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:06.478880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:06.478910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:44:06.478937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:06.478979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:44:06.479006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:44:06.479066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:44:06.479093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-12-23T18:44:06.479123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:44:06.479141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2025-12-23T18:44:06.479854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:06.479937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:06.479994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:06.480025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:44:06.480058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:44:06.480870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:06.480933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:06.480953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:06.480983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 
72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:44:06.481011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:44:06.481062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:44:06.483961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:06.484351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-12-23T18:44:06.486847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:06.487034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.487192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2025-12-23T18:44:06.488950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38, at schemeshard: 72057594046678944 2025-12-23T18:44:06.489121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-12-23T18:44:06.489356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:44:06.489386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-12-23T18:44:06.489473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:44:06.489506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:44:06.489799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:44:06.489890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.489918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:717:2707] 2025-12-23T18:44:06.490157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:44:06.490200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.490216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:717:2707] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:38:58.864605Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:38:58.989918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:38:59.002575Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:38:59.002974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:38:59.003049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:38:59.312730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:38:59.312918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:38:59.395976Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515135757931 != 1766515135757935 2025-12-23T18:38:59.414896Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:38:59.465172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:38:59.554637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:38:59.783558Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002fa9/r3tmp/tmphTbXVW/pdisk_1.dat 2025-12-23T18:39:00.058153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:39:00.269968Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:39:00.270056Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:39:00.270212Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:865:2748] 2025-12-23T18:39:00.446328Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:865:2748] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:39:00.446460Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:865:2748] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:39:00.447252Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:39:00.447376Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:865:2748] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:39:00.447768Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:39:00.447929Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:865:2748] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:39:00.448098Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:865:2748] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:39:00.450086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:00.450669Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:39:00.451639Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:865:2748] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:39:00.451736Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:865:2748] txid# 281474976715657 SEND to# [1:846:2729] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:39:00.491256Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:39:00.492350Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:39:00.492657Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:890:2769] 2025-12-23T18:39:00.492952Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:39:00.537360Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:39:00.538095Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:39:00.538242Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:39:00.539924Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:39:00.540011Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:39:00.540068Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:39:00.540411Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:39:00.540534Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:39:00.540622Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:905:2769] in generation 1 2025-12-23T18:39:00.553665Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:39:00.635696Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:39:00.635956Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:39:00.636157Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:907:2779] 2025-12-23T18:39:00.636206Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:39:00.636269Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:39:00.636316Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:39:00.636582Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:890:2769], Recipient [1:890:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:39:00.636637Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:39:00.637093Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:39:00.637227Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:39:00.637306Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:39:00.637353Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:39:00.637422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:39:00.637468Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:39:00.637510Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:39:00.637586Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:39:00.637671Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:39:00.637815Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2770], Recipient [1:890:2769]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:39:00.637887Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:39:00.637949Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:888:2768], serverId# [1:892:2770], sessionId# [0:0:0] 2025-12-23T18:39:00.638074Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2770] 2025-12-23T18:39:00.638132Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:39:00.638285Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:39:00.638576Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:39:00.638659Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 ... 37: Add [0:7] at 72075186224037889 to execution unit ExecuteRead 2025-12-23T18:44:04.196239Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037889 on unit ExecuteRead 2025-12-23T18:44:04.196369Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-12-23T18:44:04.196621Z node 29 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2503/18446744073709551615 2025-12-23T18:44:04.196682Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[29:1299:3058], 1} after executionsCount# 1 2025-12-23T18:44:04.196733Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[29:1299:3058], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:44:04.196818Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[29:1299:3058], 1} finished in read 2025-12-23T18:44:04.196882Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037889 is Executed 2025-12-23T18:44:04.196915Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037889 executing on unit ExecuteRead 2025-12-23T18:44:04.196948Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037889 to execution unit CompletedOperations 2025-12-23T18:44:04.196982Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037889 on unit CompletedOperations 2025-12-23T18:44:04.197037Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037889 is Executed 2025-12-23T18:44:04.197068Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037889 executing on unit CompletedOperations 2025-12-23T18:44:04.197102Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037889 has finished 2025-12-23T18:44:04.197140Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-12-23T18:44:04.197238Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:16} Tx{33, 
NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:44:04.197301Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:16} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:44:04.197347Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-12-23T18:44:04.197899Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037889] send [29:1148:2946] 2025-12-23T18:44:04.197946Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037889] push event to server [29:1148:2946] 2025-12-23T18:44:04.198090Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [29:1299:3058], Recipient [29:930:2789]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-12-23T18:44:04.198147Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } 2025-12-23T18:44:04.198608Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037890] ::Bootstrap [29:1302:3061] 2025-12-23T18:44:04.198728Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037890] lookup [29:1302:3061] 2025-12-23T18:44:04.199026Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72075186224037890] queue send [29:1302:3061] 2025-12-23T18:44:04.199232Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72075186224037890] forward result local node, try to connect [29:1302:3061] 2025-12-23T18:44:04.199290Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037890]::SendEvent [29:1302:3061] 2025-12-23T18:44:04.199529Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [29:1303:3062], Recipient [29:1255:3030]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:44:04.199573Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:44:04.199620Z node 29 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [29:1302:3061], serverId# [29:1303:3062], sessionId# [0:0:0] 2025-12-23T18:44:04.199672Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037890] connected with status OK role: Leader [29:1302:3061] 2025-12-23T18:44:04.199721Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037890] send queued [29:1302:3061] 2025-12-23T18:44:04.199757Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037890] push event to server [29:1302:3061] 2025-12-23T18:44:04.199994Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [29:1299:3058], Recipient [29:1255:3030]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-12-23T18:44:04.200105Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-12-23T18:44:04.200167Z node 29 :TABLET_EXECUTOR DEBUG: 
Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:44:04.200269Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-12-23T18:44:04.200346Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2025-12-23T18:44:04.200427Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2025-12-23T18:44:04.200461Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2025-12-23T18:44:04.200493Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-12-23T18:44:04.200531Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2025-12-23T18:44:04.200595Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1] at 72075186224037890 2025-12-23T18:44:04.200651Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2025-12-23T18:44:04.200685Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-12-23T18:44:04.200718Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2025-12-23T18:44:04.200748Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-12-23T18:44:04.200873Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-12-23T18:44:04.201130Z node 29 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037890 promoting UnprotectedReadEdge to v2503/18446744073709551615 2025-12-23T18:44:04.201189Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037890 Complete read# {[29:1299:3058], 2} after executionsCount# 1 2025-12-23T18:44:04.201238Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037890 read iterator# {[29:1299:3058], 2} sends rowCount# 1, bytes# 32, quota rows left# 998, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:44:04.201316Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037890 read iterator# {[29:1299:3058], 2} finished in read 2025-12-23T18:44:04.201375Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2025-12-23T18:44:04.201406Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2025-12-23T18:44:04.201438Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2025-12-23T18:44:04.201470Z node 29 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2025-12-23T18:44:04.201526Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2025-12-23T18:44:04.201556Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2025-12-23T18:44:04.201585Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037890 has finished 2025-12-23T18:44:04.201621Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-12-23T18:44:04.201721Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{17, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:44:04.201782Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:44:04.201826Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-12-23T18:44:04.202392Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037890] send [29:1302:3061] 2025-12-23T18:44:04.202438Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037890] push event to server [29:1302:3061] 2025-12-23T18:44:04.202579Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [29:1299:3058], Recipient [29:1255:3030]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2025-12-23T18:44:04.202630Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037890 ReadCancel: { ReadId: 2 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 11 } items { uint32_value: 111 } }, { items { uint32_value: 21 } items { uint32_value: 21 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:06.272670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:06.272760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:06.272803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:06.272843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:06.272884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:06.272916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:06.272979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:06.273079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:06.274026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:06.274343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:06.364717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:06.364778Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:06.370121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:06.370340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:06.370488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:06.373379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:06.373551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:06.374280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:06.374511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:06.376264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:06.376431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:06.377639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:06.377715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:06.377837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:06.377882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:06.377924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2025-12-23T18:44:06.378050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.384615Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:06.526978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:06.527230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.527481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:06.527531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:06.527819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:06.527906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:06.530339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:06.530577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:06.530816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.530875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:06.530915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:06.530967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:06.533015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.533099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:06.533145Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:06.534821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.534876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.534953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:06.535012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:06.538905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:06.540839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:06.541066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:06.542194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:06.542348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:06.542404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:06.542706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:06.542790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:06.542973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:06.543050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:06.545270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-12-23T18:44:06.545326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 23T18:44:06.602348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:06.602373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:06.604192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:44:06.604383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:44:06.604454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.604486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:283:2273] 2025-12-23T18:44:06.605059Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 2025-12-23T18:44:06.605172Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 2025-12-23T18:44:06.605296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-12-23T18:44:06.605709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-12-23T18:44:06.606152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:06.606357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-12-23T18:44:06.606553Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 2025-12-23T18:44:06.606813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:44:06.606972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-12-23T18:44:06.607172Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: 
ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 2025-12-23T18:44:06.607264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-12-23T18:44:06.607404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-12-23T18:44:06.607547Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 2025-12-23T18:44:06.607745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:44:06.607920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:44:06.608103Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 2025-12-23T18:44:06.608438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:44:06.608618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:44:06.609117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:06.609176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:06.609308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:44:06.609976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:06.610037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:06.610104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:06.611716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-12-23T18:44:06.614208Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:44:06.614340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:44:06.614523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-12-23T18:44:06.614951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:44:06.616754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:44:06.616950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:06.617133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-12-23T18:44:06.617712Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:06.617940Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 222us result status StatusPathDoesNotExist 2025-12-23T18:44:06.618129Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:44:06.618581Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:06.618852Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 204us result status StatusSuccess 2025-12-23T18:44:06.619400Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: 
"" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats [GOOD] >> KqpPg::PgCreateTable [GOOD] >> KqpPg::PgUpdate+useSink |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_volatile/unittest >> KqpPg::CreateTempTable [GOOD] >> KqpPg::CreateTempTableSerial >> test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:05.678898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:05.679003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:05.679042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:05.679075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:05.679123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: 
type TxMergeTablePartition, limit 10000 2025-12-23T18:44:05.679171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:05.679239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:05.679313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:05.680202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:05.680508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:05.755382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:05.755452Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:05.761720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:05.761976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:05.762099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:05.765528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:05.765723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:05.766449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:05.766694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:05.768861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:05.769037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:05.770270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:05.770334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:05.770481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:05.770525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:05.770570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:05.770707Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.778008Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:05.887812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:05.888081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.888272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:05.888329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:05.888572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:05.888642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:05.890692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:05.890913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:05.891122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.891188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:05.891230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:05.891260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:05.893111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.893190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:05.893258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 
128 2025-12-23T18:44:05.895103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.895170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:05.895234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:05.895301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:05.899118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:05.901015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:05.901227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:05.902363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:05.902500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:05.902549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:05.902844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:05.902899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:05.903088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:05.903188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:05.905212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:05.905255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 2025-12-23T18:44:06.938230Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2025-12-23T18:44:06.938302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-12-23T18:44:06.938338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-12-23T18:44:06.939223Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:06.939322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:06.939361Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:06.939407Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-12-23T18:44:06.939448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:44:06.940235Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:06.940320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:06.940349Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:06.940374Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-12-23T18:44:06.940419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-12-23T18:44:06.940494Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-12-23T18:44:06.940534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:893:2738] 2025-12-23T18:44:06.944934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:06.945034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:06.945095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.945126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:894:2739] TestWaitNotification: OK eventTxId 101 2025-12-23T18:44:06.945628Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:06.945828Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 246us result status StatusSuccess 2025-12-23T18:44:06.946247Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:06.946812Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-12-23T18:44:06.947032Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 255us result status StatusSuccess 2025-12-23T18:44:06.947520Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:06.948006Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:06.948163Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 165us result status StatusSuccess 2025-12-23T18:44:06.948535Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 
5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardSubDomainTest::CreateDropNbs >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] TEST create schemeshard, 0x00007DABC68ABD00 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:43:59.541987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:43:59.542053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:59.542085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:43:59.542129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:43:59.542160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2025-12-23T18:43:59.542197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:43:59.542255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:43:59.542336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:43:59.543101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:43:59.543378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:43:59.610386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:59.610439Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:59.615907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:43:59.616112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:43:59.616240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:43:59.618896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:43:59.619053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:43:59.619655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:43:59.619890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:43:59.621731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:59.621915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:43:59.622990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:43:59.623049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:43:59.623186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:43:59.623224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:43:59.623269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:43:59.623407Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:43:59.788446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.789470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.789615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.789742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.789824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.789889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.789976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.790050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.790127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.790216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.790285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.790351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.790435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.790529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:43:59.790641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by ... ten 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 Leader for TabletID 72057594046678944 is [2:1019:2979] sender: [2:1078:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 2025-12-23T18:44:07.119398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, 
schemeshard 0x00007DABC6821100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 2025-12-23T18:44:07.172170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 0 2025-12-23T18:44:07.172396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2025-12-23T18:44:07.172472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2025-12-23T18:44:07.172559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:451: OLAP store contains 1 tables. 2025-12-23T18:44:07.172629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:473: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DABC6821100, stats written 1 2025-12-23T18:44:07.173253Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:07.173533Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 309us result status StatusSuccess 2025-12-23T18:44:07.174249Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 166 LastUpdateTime: 166 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.2%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] >> Cdc::InitialScanComplete [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords >> KqpPg::EquiJoin-useSink [GOOD] >> KqpPg::ExplainColumnsReorder >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] |95.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSubDomainTest::CopyRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:50.648921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:50.649016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:50.649084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:50.649144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:50.649182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:50.649213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:50.649292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:50.649368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:50.650220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:50.650518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:50.776608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:50.776701Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:50.777535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:50.792611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:50.792784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:50.792922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:50.805019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:50.805285Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:50.805839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:50.806123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:50.808309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:50.808451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:50.809332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:50.809374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:50.810371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:50.810453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:50.810509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:50.810725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.818944Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-12-23T18:40:50.972310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:50.972637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.972899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:40:50.972969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:40:50.973258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:40:50.973323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:50.978322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:50.978542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:40:50.978813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.978877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:40:50.978940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:40:50.978984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:40:50.983962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.984033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:40:50.984071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:40:50.986103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.986163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.986230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:50.986281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:40:50.990263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:40:50.992497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:40:50.992708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:40:50.993874Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:50.994032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:50.994083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:50.994334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:40:50.994387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:50.994573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:40:50.994661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:50.996611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard ... as 2 2025-12-23T18:44:06.304118Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2025-12-23T18:44:06.304156Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2025-12-23T18:44:06.304189Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2025-12-23T18:44:06.304216Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2025-12-23T18:44:06.304243Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2025-12-23T18:44:06.306168Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:44:06.306298Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:44:06.306348Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:44:06.306390Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 
2025-12-23T18:44:06.306436Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2025-12-23T18:44:06.308100Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:44:06.308228Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:44:06.308276Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:44:06.308314Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2025-12-23T18:44:06.308356Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2025-12-23T18:44:06.309793Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:44:06.309879Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:44:06.309912Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:44:06.309942Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2025-12-23T18:44:06.309973Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2025-12-23T18:44:06.311086Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:44:06.311181Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:44:06.311212Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:44:06.311241Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 
2025-12-23T18:44:06.311272Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 1 2025-12-23T18:44:06.311332Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2025-12-23T18:44:06.314146Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:44:06.314261Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:44:06.316036Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:44:06.316150Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2025-12-23T18:44:06.317464Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2025-12-23T18:44:06.317506Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2025-12-23T18:44:06.318950Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2025-12-23T18:44:06.319041Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.319072Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2692:4680] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2025-12-23T18:44:06.320270Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2025-12-23T18:44:06.320310Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2025-12-23T18:44:06.320381Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2025-12-23T18:44:06.320404Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2025-12-23T18:44:06.320454Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2025-12-23T18:44:06.320475Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2025-12-23T18:44:06.320545Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2025-12-23T18:44:06.320569Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2025-12-23T18:44:06.320619Z node 
32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2025-12-23T18:44:06.320638Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2025-12-23T18:44:06.322212Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2025-12-23T18:44:06.322412Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.322451Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2695:4683] 2025-12-23T18:44:06.322651Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2025-12-23T18:44:06.322846Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.322880Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2695:4683] 2025-12-23T18:44:06.322985Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2025-12-23T18:44:06.323067Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2025-12-23T18:44:06.323131Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.323188Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2695:4683] 2025-12-23T18:44:06.323318Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2025-12-23T18:44:06.323382Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.323409Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2695:4683] 2025-12-23T18:44:06.323549Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2025-12-23T18:44:06.323586Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2695:4683] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::SimultaneousDefine >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:07.613409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:07.613470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:07.613500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:07.613533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:07.613568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:07.613603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:07.613650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:07.613753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:07.614444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:07.614679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:07.686544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:07.686597Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:07.691777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:07.691985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:07.692108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:07.695104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:07.695265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:07.695775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:07.695984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:07.697568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:07.697725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:07.698866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:07.698943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:07.699115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:07.699176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:07.699216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:07.699347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.705884Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:07.844092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:07.844319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.844520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:07.844584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:07.844813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:07.844886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:07.847137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:07.847411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:07.847605Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.847690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:07.847729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:07.847765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:07.850527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.850595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:07.850635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:07.855809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.855884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.855952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:07.856010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:07.860169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:07.862589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:07.862869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:07.864029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:07.864189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:07.864238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:07.864576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:07.864635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:07.864815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:07.864904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:07.866981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:07.867032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... eTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 TabletID: 72075186233409550 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 TabletID: 72075186233409550 2025-12-23T18:44:08.378976Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-12-23T18:44:08.379252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-12-23T18:44:08.379581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409550 2025-12-23T18:44:08.380450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:08.380668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-12-23T18:44:08.381823Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-12-23T18:44:08.382034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 Forgetting tablet 72075186233409546 2025-12-23T18:44:08.383159Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 
72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-12-23T18:44:08.384124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:44:08.384318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-12-23T18:44:08.385234Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409548 2025-12-23T18:44:08.386824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:44:08.387008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-12-23T18:44:08.388144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:44:08.388315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409549 2025-12-23T18:44:08.389728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:08.389794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-12-23T18:44:08.389889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:44:08.390405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-12-23T18:44:08.390592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:08.390654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:08.390789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:44:08.392425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 
2025-12-23T18:44:08.392492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-12-23T18:44:08.392596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:44:08.392626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:44:08.400270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:44:08.400348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-12-23T18:44:08.400469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:44:08.400492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:44:08.400919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:44:08.400976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-12-23T18:44:08.401340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:08.402043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:08.402101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:08.402198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:08.402712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:08.404271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-12-23T18:44:08.404596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-12-23T18:44:08.404643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-12-23T18:44:08.405154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-12-23T18:44:08.405248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 
2025-12-23T18:44:08.405291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:804:2706] TestWaitNotification: OK eventTxId 106 2025-12-23T18:44:08.405970Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:08.406167Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 226us result status StatusSuccess 2025-12-23T18:44:08.406637Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] TEST create schemeshard, 0x00007DF2E709F500 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:04.125354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:04.125435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:04.125477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:04.125508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:04.125545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:04.125572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:04.125632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:04.125738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:04.126556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:04.126880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:04.214123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:04.214182Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:04.220112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:04.220334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:04.220461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:04.223442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:04.223646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:04.224344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:04.224595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:04.226608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:04.226811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:04.228021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:04.228103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-12-23T18:44:04.228249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:04.228294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:04.228334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:04.228467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:04.386609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.387636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.387782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.387881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.387953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.388018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.388106Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.388183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.388251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.388311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.388393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.388505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.388577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.388676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:04.388770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... aitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 Leader for TabletID 72057594046678944 is [1:899:2848] sender: [1:972:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 2025-12-23T18:44:08.357379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 0 2025-12-23T18:44:08.357543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2025-12-23T18:44:08.357618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:08.357742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 2025-12-23T18:44:08.368213Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 2025-12-23T18:44:08.379212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 1 2025-12-23T18:44:08.379337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2025-12-23T18:44:08.379383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:08.379461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DF2E71D0500, stats written 2 2025-12-23T18:44:08.379854Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:08.380080Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 247us result status StatusSuccess 2025-12-23T18:44:08.380493Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } 
ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:07.495047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:07.495131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:07.495187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:07.495224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:07.495262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:07.495296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:07.495349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:07.495429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:07.496358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:07.496671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:07.571224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:07.571296Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:07.576700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:07.576871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:07.576996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:07.579493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:07.579636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:07.580216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:07.580414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:07.582153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:07.582325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:07.583612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:07.583683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:07.583848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:07.583898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:07.583953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2025-12-23T18:44:07.584094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.590232Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:07.711563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:07.711795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.712023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:07.712079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:07.712340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:07.712425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:07.714708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:07.715003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:07.715239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.715322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:07.715368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:07.715407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:07.717446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.717527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:07.717582Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:07.719311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.719365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.719428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:07.719493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:07.723624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:07.725432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:07.725633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:07.726822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:07.726974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:07.727036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:07.727362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:07.727422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:07.727630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:07.727715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:07.729796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-12-23T18:44:07.729849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 8.503645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:44:08.503925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:08.503968Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-12-23T18:44:08.504053Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:08.504087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:08.504124Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:08.504154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:08.504189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:44:08.504221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:08.504257Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:44:08.504287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:44:08.504346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:44:08.504384Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-12-23T18:44:08.504414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-12-23T18:44:08.504445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-12-23T18:44:08.505064Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:08.505140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:08.505172Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:08.505207Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-12-23T18:44:08.505243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:08.505920Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:08.505989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:08.506020Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:08.506046Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-12-23T18:44:08.506074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:44:08.506142Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:44:08.506856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:08.506911Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:08.507005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:44:08.507287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:08.507327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:08.507386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:08.509064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:08.510666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:08.510815Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, 
done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:08.510885Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:44:08.511106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:44:08.511185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:44:08.511576Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:44:08.511667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:44:08.511701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:343:2333] TestWaitNotification: OK eventTxId 101 2025-12-23T18:44:08.512124Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:08.512279Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 189us result status StatusPathDoesNotExist 2025-12-23T18:44:08.512463Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:44:08.512913Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:08.513056Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 161us result status StatusSuccess 2025-12-23T18:44:08.513450Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:07.363728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:07.363833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:07.363874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:07.363913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:07.363952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:07.363985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:07.364041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:07.364120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:07.365062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:07.365388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:07.456955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:07.457010Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:07.463001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:07.463243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:07.463401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:07.467076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:07.467286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:07.468033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:07.468265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:07.470383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:07.470566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:07.471833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:07.471897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:07.472058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:07.472104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:07.472147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:07.472276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.479400Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:07.621986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:07.622235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.622451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:07.622501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:07.622760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:07.622835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:07.625289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:07.625543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:07.625766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.625825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:07.625864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:07.625899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:07.628002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.628080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:07.628121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:07.629925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.629980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.630043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:07.630108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:07.634103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:07.635953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:07.636126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:07.637212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:07.637393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:07.637449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:07.637747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:07.637814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:07.638000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:07.638093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:07.640064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:07.640132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
T_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:08.505307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:08.505333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:08.505839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:44:08.507329Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-12-23T18:44:08.508308Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-12-23T18:44:08.508520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:08.508818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-12-23T18:44:08.509325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:44:08.509552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-12-23T18:44:08.510289Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-12-23T18:44:08.511444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:44:08.511613Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:44:08.512144Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 
2025-12-23T18:44:08.512551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:44:08.512742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409549 2025-12-23T18:44:08.513587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:08.513639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-12-23T18:44:08.513719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:44:08.514155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:44:08.514640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:44:08.514760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:08.514819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:08.514944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:44:08.516356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:44:08.516412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:44:08.519638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:44:08.519685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-12-23T18:44:08.519748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:44:08.519771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:44:08.520030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:44:08.520087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-12-23T18:44:08.520830Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath 
for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:08.521128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:08.521187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:08.521270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:08.521600Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:08.523079Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:44:08.523358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:44:08.523401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:44:08.523884Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:44:08.523991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:44:08.524028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:542:2496] TestWaitNotification: OK eventTxId 102 2025-12-23T18:44:08.539533Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:08.539760Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 285us result status StatusPathDoesNotExist 2025-12-23T18:44:08.539986Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:44:08.540747Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:08.540899Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 176us result status StatusPathDoesNotExist 2025-12-23T18:44:08.541093Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> KqpPg::TableSelect+useSink [GOOD] >> KqpPg::TableSelect-useSink >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] >> TSchemeShardSubDomainTest::RedefineErrors |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclare >> Cdc::ShouldBreakLocksOnConcurrentAddIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAddStream >> BridgeGet::PartRestorationAcrossBridgeOnRange [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:08.773129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:08.773216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:08.773254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:08.773287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 
2025-12-23T18:44:08.773322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:08.773349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:08.773409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:08.773506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:08.774340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:08.774607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:08.859346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:08.859408Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:08.864834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:08.865076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:08.865189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:08.868184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:08.868497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:08.869155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:08.869376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:08.871432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:08.871585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:08.872699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:08.872754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:08.872894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:08.872938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:08.872975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:08.873179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:08.879829Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:09.027435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:09.027645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:09.027849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:09.027899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:09.028135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:09.028220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:09.030459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:09.030687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:09.030918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:09.030977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:09.031019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:09.031052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:09.032960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:09.033047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-12-23T18:44:09.033083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:09.034788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:09.034836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:09.034898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:09.034952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:09.038815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:09.040612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:09.040815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:09.042038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:09.042156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:09.042207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:09.042496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:09.042549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:09.042723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:09.042815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:09.044747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:09.044796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-12-23T18:44:09.374960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:44:09.375177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:44:09.376315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:09.376367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:09.376495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:44:09.377005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-12-23T18:44:09.377054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-12-23T18:44:09.377136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2025-12-23T18:44:09.377158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-12-23T18:44:09.377270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6152: Failed to connect, to tablet: 72075186233409552, at schemeshard: 72057594046678944 2025-12-23T18:44:09.380532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:09.380706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:09.380760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:09.380831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:09.381105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:44:09.381146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:44:09.381258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 
2025-12-23T18:44:09.381287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-12-23T18:44:09.381467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-12-23T18:44:09.381496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-12-23T18:44:09.383559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:44:09.383590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:44:09.383658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:44:09.383689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-12-23T18:44:09.383861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:09.384927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-12-23T18:44:09.385123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:44:09.385168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-12-23T18:44:09.385219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:44:09.385237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:44:09.385566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:44:09.385663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:44:09.385695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:719:2613] 2025-12-23T18:44:09.385746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:44:09.385857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:44:09.385874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:719:2613] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-12-23T18:44:09.386267Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme 
DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:09.386410Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 161us result status StatusPathDoesNotExist 2025-12-23T18:44:09.386566Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:44:09.386899Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:09.387078Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 142us result status StatusPathDoesNotExist 2025-12-23T18:44:09.387184Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:44:09.387501Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:09.387619Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 136us result status StatusSuccess 2025-12-23T18:44:09.387905Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:08.519173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:08.519258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:08.519297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:08.519333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:08.519366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:08.519406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:08.519479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:08.519570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:08.520469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:08.520791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:08.595007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:08.595066Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:08.599885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:08.600092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:08.600200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:08.603046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:08.603259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:08.603976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:08.604227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:08.605911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:08.606086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:08.607220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:08.607279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:08.607401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:08.607448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:08.607488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:08.607626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:08.721512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:44:08.722246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.722982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:09.264177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:09.264328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:09.264369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-12-23T18:44:09.264408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 38 2025-12-23T18:44:09.264608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-12-23T18:44:09.264644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-12-23T18:44:09.264824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:44:09.264859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:09.264891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:44:09.264929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:09.264975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-12-23T18:44:09.265013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:09.265043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2025-12-23T18:44:09.265076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 100:0 2025-12-23T18:44:09.265144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:44:09.265205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-12-23T18:44:09.265236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:44:09.265260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2025-12-23T18:44:09.265981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:09.266054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:09.266085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:44:09.266139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:44:09.266182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:44:09.267159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:09.267232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:09.267256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:44:09.267281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:44:09.267326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:44:09.267402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-12-23T18:44:09.270186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-12-23T18:44:09.270971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-12-23T18:44:09.271223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-12-23T18:44:09.271276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-12-23T18:44:09.271365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:44:09.271390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, 
SendToSchemeshard, txId 101 2025-12-23T18:44:09.271767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-12-23T18:44:09.271897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-12-23T18:44:09.271933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:723:2713] 2025-12-23T18:44:09.272062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:44:09.272131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:44:09.272151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:723:2713] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-12-23T18:44:09.272562Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:09.272741Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 190us result status StatusSuccess 2025-12-23T18:44:09.273192Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:09.273641Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:09.273860Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 200us result status StatusPathDoesNotExist 2025-12-23T18:44:09.274039Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] >> TSchemeShardSubDomainTest::DeclareAndDelete >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter >> test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] >> TSchemeShardSubDomainTest::CopyRejects [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:43:50.232986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:43:50.351526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:43:50.364189Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:43:50.364549Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:43:50.364629Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:43:50.675409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:50.675540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:50.760565Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515427524502 != 1766515427524506 2025-12-23T18:43:50.773712Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:50.820181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:50.901967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004337/r3tmp/tmppAAdi0/pdisk_1.dat 2025-12-23T18:43:51.197689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:51.211987Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:51.316796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:51.616680Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2025-12-23T18:43:51.618419Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:951:2809], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2025-12-23T18:43:51.633688Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:951:2809], subTag: 1} TUpsertActor finished in 0.014942s, errors=0 2025-12-23T18:43:51.633977Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 
TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2025-12-23T18:43:51.634081Z node 1 :DS_LOAD_TEST NOTICE: kqp_select.cpp:322: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2025-12-23T18:43:51.635441Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:367: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-12-23T18:43:51.635598Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:401: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 3} started fullscan actor# [1:963:2821] 2025-12-23T18:43:51.635730Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 1} Bootstrap called, sample# 100 2025-12-23T18:43:51.635771Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 1} Connect to# 72075186224037888 called 2025-12-23T18:43:51.636044Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 1} Handle TEvClientConnected called, Status# OK 2025-12-23T18:43:51.637064Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [1:960:2818], subTag: 1} finished in 0.000891s, sampled# 100, iter finished# 1, oks# 100 2025-12-23T18:43:51.637260Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:417: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 3} received keyCount# 100 2025-12-23T18:43:51.637503Z node 1 :DS_LOAD_TEST NOTICE: kqp_select.cpp:446: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:951:2809], subTag: 3} started# 10 actors each with inflight# 1 2025-12-23T18:43:51.637585Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 2} Bootstrap called 2025-12-23T18:43:51.637636Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-12-23T18:43:51.637687Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 3} Bootstrap called 2025-12-23T18:43:51.637719Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-12-23T18:43:51.637755Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 4} Bootstrap called 2025-12-23T18:43:51.637796Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-12-23T18:43:51.637828Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 5} Bootstrap called 2025-12-23T18:43:51.637848Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-12-23T18:43:51.637875Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 6} Bootstrap called 2025-12-23T18:43:51.637897Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], 
subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-12-23T18:43:51.637934Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 7} Bootstrap called 2025-12-23T18:43:51.637968Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-12-23T18:43:51.637996Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 8} Bootstrap called 2025-12-23T18:43:51.638017Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-12-23T18:43:51.638057Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 9} Bootstrap called 2025-12-23T18:43:51.638088Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-12-23T18:43:51.638116Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 10} Bootstrap called 2025-12-23T18:43:51.638138Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-12-23T18:43:51.638162Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 11} Bootstrap called 2025-12-23T18:43:51.638182Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-12-23T18:43:51.640436Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 2} session: ydb://session/3?node_id=1&id=NWQxNWUyYjktZjVkNDUyMjctMzQ2ZGRjNmYtZjRjNzhiNjE= 2025-12-23T18:43:51.642607Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 3} session: ydb://session/3?node_id=1&id=OGZhNGEyMS05ODliYTE4ZS00OWM2ZDM4ZC00M2I5NjhjMg== 2025-12-23T18:43:51.646121Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 4} session: ydb://session/3?node_id=1&id=Njg2NGViZmItOWQ4NTRlMWUtMTBmN2FiMTAtM2Q3NjNjMzk= 2025-12-23T18:43:51.646242Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 5} session: ydb://session/3?node_id=1&id=NTMyNWFmMTctNDE0MTNkNTAtNWIyMGM1NGItMWE2Mjc5MjI= 2025-12-23T18:43:51.647972Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 6} session: ydb://session/3?node_id=1&id=NTdlY2YzOTctODkwNzVmZTItODc5MGMyMTYtN2NjOGRkNGY= 2025-12-23T18:43:51.649648Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 7} session: ydb://session/3?node_id=1&id=OTQzYWMyNTItNGIyYzEyMWUtN2JkYmU0YTAtNTg5YjdhZg== 2025-12-23T18:43:51.651337Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 8} session: ydb://session/3?node_id=1&id=NDA4ZjczOTEtYzdmMGU3YmYtNTZjYmY2OTctYjY2ZWM4NjM= 2025-12-23T18:43:51.654395Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], 
subTag: 9} session: ydb://session/3?node_id=1&id=YjViYWZkZjctZGY0ZGNiYzEtZDk2YzFhYzctODcyZWYzYjk= 2025-12-23T18:43:51.654484Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 10} session: ydb://session/3?node_id=1&id=Y2UxM2Q0Y2QtOTEyOTcyMjItNDRlZDQ4MWQtZjQ3Yjk2NmM= 2025-12-23T18:43:51.656142Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:960:2818], subTag: 11} session: ydb://session/3?node_id=1&id=OGM4NDZmN2QtM2U4YTBhYjktZTQ5Mzc5ZGItOTZmNTAzMjM= 2025-12-23T18:43:51.661423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1025:2877], DatabaseId: /Root, PoolId: ... manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:44:02.690963Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1075:2914] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:44:02.691835Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1078:2915] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:44:02.692387Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1082:2917] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:44:02.796911Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1037:2889], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:02.797044Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1038:2890], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:02.797150Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1039:2891], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:02.797205Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1040:2892], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:02.797252Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1041:2893], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:02.797309Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1042:2894], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:02.797383Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1043:2895], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:02.797460Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1051:2903], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:02.797526Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1068:2912], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:02.797586Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1081:2916], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:02.832202Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1181:3005] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:03.263761Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:960:2818], subTag: 10} finished in 0.609833s, errors=0 2025-12-23T18:44:03.263937Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:951:2809], subTag: 3} finished: 10 { Tag: 10 DurationMs: 609 OperationsOK: 100 OperationsError: 0 } 2025-12-23T18:44:03.277783Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2210:3427] txid# 281474976715769, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:03.651844Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:960:2818], subTag: 8} finished in 1.002773s, errors=0 2025-12-23T18:44:03.652127Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:951:2809], subTag: 3} finished: 8 { Tag: 8 DurationMs: 1002 OperationsOK: 100 OperationsError: 0 } 2025-12-23T18:44:03.666228Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:3217:3833] txid# 281474976715870, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:04.090506Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:960:2818], subTag: 5} finished in 1.446588s, errors=0 2025-12-23T18:44:04.090757Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:951:2809], subTag: 3} finished: 5 { Tag: 5 DurationMs: 1446 OperationsOK: 100 OperationsError: 0 } 2025-12-23T18:44:04.105400Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4224:4239] txid# 281474976715971, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:04.636679Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:960:2818], subTag: 4} finished in 1.994547s, errors=0 2025-12-23T18:44:04.636941Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:951:2809], subTag: 3} finished: 4 { Tag: 4 DurationMs: 1994 OperationsOK: 100 OperationsError: 0 } 2025-12-23T18:44:04.651895Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:5231:4645] txid# 281474976716072, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:05.297081Z node 2 :DS_LOAD_TEST NOTICE: 
kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:960:2818], subTag: 3} finished in 2.655081s, errors=0 2025-12-23T18:44:05.297421Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:951:2809], subTag: 3} finished: 3 { Tag: 3 DurationMs: 2655 OperationsOK: 100 OperationsError: 0 } 2025-12-23T18:44:05.313972Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:6238:5051] txid# 281474976716173, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:06.000143Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:960:2818], subTag: 7} finished in 3.352824s, errors=0 2025-12-23T18:44:06.000530Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:951:2809], subTag: 3} finished: 7 { Tag: 7 DurationMs: 3352 OperationsOK: 100 OperationsError: 0 } 2025-12-23T18:44:06.018977Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7245:5457] txid# 281474976716274, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:06.750032Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:960:2818], subTag: 2} finished in 4.111782s, errors=0 2025-12-23T18:44:06.750275Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:951:2809], subTag: 3} finished: 2 { Tag: 2 DurationMs: 4111 OperationsOK: 100 OperationsError: 0 } 2025-12-23T18:44:06.769354Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:8252:5863] txid# 281474976716375, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:07.508349Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:960:2818], subTag: 9} finished in 4.857662s, errors=0 2025-12-23T18:44:07.508633Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:951:2809], subTag: 3} finished: 9 { Tag: 9 DurationMs: 4857 OperationsOK: 100 OperationsError: 0 } 2025-12-23T18:44:07.528521Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:9259:6269] txid# 281474976716476, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:08.333607Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:960:2818], subTag: 6} finished in 5.687976s, errors=0 2025-12-23T18:44:08.333904Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:951:2809], subTag: 3} finished: 6 { Tag: 6 DurationMs: 5687 OperationsOK: 100 OperationsError: 0 } 2025-12-23T18:44:08.353575Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:10266:6675] txid# 281474976716577, issues: { message: "Check failed: path: 
\'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:09.251492Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:960:2818], subTag: 11} finished in 6.597485s, errors=0 2025-12-23T18:44:09.251974Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:951:2809], subTag: 3} finished: 11 { Tag: 11 DurationMs: 6597 OperationsOK: 100 OperationsError: 0 } 2025-12-23T18:44:09.252047Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:481: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:951:2809], subTag: 3} finished in 6.616642s, oks# 1000, errors# 0 2025-12-23T18:44:09.252432Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:960:2818] with tag# 3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnRange [GOOD] Test command err: RandomSeed# 1272771569377455690 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 0 *** performing bridge 
range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 
indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 1 *** performing bridge 
range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 2 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 2 mask3# 2 *** performing bridge range ... 
sk2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** 
reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 
mask2# 3 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** 
reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] TEST create schemeshard, 0x00007D1A6EFA8100 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:06.799403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:06.799489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:06.799524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:06.799559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:06.799593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:06.799620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:06.799667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:06.799782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:06.800604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:06.800900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:06.888244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:06.888307Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:06.894128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:06.894475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:06.894637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:06.897557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:06.897703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:06.898340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:06.898548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:06.900275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:06.900413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:06.901332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:06.901394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:06.901481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:06.901513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:06.901572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:06.901674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.031262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.032343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.032472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.032546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.032618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.032708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.032783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.032847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.032912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.032999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.033095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.033186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.033378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.033449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.033513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats 
written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 Leader for TabletID 72057594046678944 is [1:963:2923] sender: [1:1022:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 2025-12-23T18:44:09.625386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 2025-12-23T18:44:09.687505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 0 2025-12-23T18:44:09.687695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2025-12-23T18:44:09.687773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2025-12-23T18:44:09.687829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:477: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2025-12-23T18:44:09.687879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:487: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D1A6F154D00, stats written 1 2025-12-23T18:44:09.688319Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:09.688545Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 254us result status StatusSuccess 2025-12-23T18:44:09.689025Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 147 LastUpdateTime: 147 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics 
{ ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:09.593805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:09.593882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.593917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:09.593951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:09.594009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:09.594037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:09.594091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.594159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:09.594989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:09.595284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:09.681202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:09.681261Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:09.686992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:09.687221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:09.687342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:09.690273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:09.690452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:09.691179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:09.691447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:09.693276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:09.693434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:09.694591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:09.694648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:09.694809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:09.694852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:09.694904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:09.695045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:09.817461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.818394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.818506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.818575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.818638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.818712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.818828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.818902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.818981Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.819040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.819098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.819183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.819252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.819337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.819421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ESHARD INFO: schemeshard__operation_common_subdomain.cpp:84: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-12-23T18:44:10.447065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-12-23T18:44:10.447108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 3 -> 128 2025-12-23T18:44:10.448253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.449856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.450011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.450046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.450089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-12-23T18:44:10.450143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-12-23T18:44:10.450285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:10.451894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-12-23T18:44:10.451982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000039 2025-12-23T18:44:10.452289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:10.452399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:10.452445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-12-23T18:44:10.452676Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2025-12-23T18:44:10.452739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-12-23T18:44:10.452897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:44:10.452968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:44:10.454726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:10.454793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:10.454956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:10.454995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 38 2025-12-23T18:44:10.455370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.455413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:44:10.455501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:10.455530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:10.455568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:10.455614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:10.455662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:44:10.455698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:10.455734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:44:10.455763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:44:10.455938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2025-12-23T18:44:10.455979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2025-12-23T18:44:10.456013Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2025-12-23T18:44:10.456498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:10.456613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:10.456669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:10.456712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2025-12-23T18:44:10.456762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:44:10.456829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-12-23T18:44:10.456867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:722:2712] 2025-12-23T18:44:10.459644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:10.459733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:44:10.459766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:729:2719] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-12-23T18:44:10.460262Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:10.460453Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 191us result status StatusSuccess 2025-12-23T18:44:10.460883Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription 
{ SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.2%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable |95.2%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching |95.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:09.966801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:09.966864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.966889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:09.966913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:09.966961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:09.966986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:09.967024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.967091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:09.967821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:09.968063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:10.038082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:10.038139Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:10.043273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:10.043477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:10.043596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:10.046933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:10.047132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:10.047920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2025-12-23T18:44:10.048180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:10.050055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:10.050226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:10.051337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:10.051406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:10.051554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:10.051609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:10.051664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:10.051823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.182006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.182701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.182828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.182897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.182943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.182993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.183063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.183109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.183174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.183216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.183253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.183302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.183347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.183403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.183460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... oordinatorID: 72057594046316545 2025-12-23T18:44:10.723515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-12-23T18:44:10.723638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000038 2025-12-23T18:44:10.723938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:10.724039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:10.724089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-12-23T18:44:10.724313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 100:0 128 -> 240 2025-12-23T18:44:10.724367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-12-23T18:44:10.724504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 
2025-12-23T18:44:10.724562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:44:10.724613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-12-23T18:44:10.726415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:10.726448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:10.726669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:10.726842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:10.726887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-12-23T18:44:10.726932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 38 2025-12-23T18:44:10.727103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.727163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-12-23T18:44:10.727266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:44:10.727316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:10.727358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:44:10.727387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:10.727419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-12-23T18:44:10.727467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:10.727499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2025-12-23T18:44:10.727524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 100:0 2025-12-23T18:44:10.727583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:44:10.727625Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-12-23T18:44:10.727704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:44:10.727733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2025-12-23T18:44:10.728513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:10.728576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:10.728604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:44:10.728643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:44:10.728687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:44:10.729311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:10.729391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:10.729417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:44:10.729437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:44:10.729458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:44:10.729516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-12-23T18:44:10.729562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:685:2675] 2025-12-23T18:44:10.732186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-12-23T18:44:10.732993Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-12-23T18:44:10.733074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-12-23T18:44:10.733097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:686:2676] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2025-12-23T18:44:10.733592Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:10.733818Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 227us result status StatusSuccess 2025-12-23T18:44:10.734259Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] 
recipient: [1:115:2145] 2025-12-23T18:44:09.863829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:09.863924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.863968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:09.864005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:09.864064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:09.864103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:09.864163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.864254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:09.865194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:09.865496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:09.960383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:09.960453Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:09.966728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:09.967014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:09.967179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:09.970695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:09.970919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:09.971767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:09.972097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:09.974254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:09.974437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:09.975773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:09.975847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:09.975990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:09.976045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:09.976092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:09.976294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.144896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.145950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.146089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.146183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.146261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.146346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.146460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.146543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.146655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.146755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.146832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.146922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.147008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.147108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.147237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... xt state 2025-12-23T18:44:10.968147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 2 -> 3 2025-12-23T18:44:10.970016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.970074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 108:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:10.970132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 3 -> 128 2025-12-23T18:44:10.971845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.971895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.971942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944 2025-12-23T18:44:10.972058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2025-12-23T18:44:10.972191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:10.973683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-12-23T18:44:10.973821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 
5000043 2025-12-23T18:44:10.974165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:10.974274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:10.974321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-12-23T18:44:10.974632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 128 -> 240 2025-12-23T18:44:10.974693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-12-23T18:44:10.974948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:44:10.975027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 108 2025-12-23T18:44:10.976997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:10.977061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:10.977253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:10.977318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 108, path id: 38 2025-12-23T18:44:10.977707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.977760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2025-12-23T18:44:10.977876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2025-12-23T18:44:10.977922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-12-23T18:44:10.978014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2025-12-23T18:44:10.978067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-12-23T18:44:10.978113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2025-12-23T18:44:10.978177Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-12-23T18:44:10.978216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2025-12-23T18:44:10.978264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 108:0 2025-12-23T18:44:10.978326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2025-12-23T18:44:10.978375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2025-12-23T18:44:10.978409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 38], 8 2025-12-23T18:44:10.978893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:44:10.978993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-12-23T18:44:10.979038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2025-12-23T18:44:10.979081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2025-12-23T18:44:10.979127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:44:10.979227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2025-12-23T18:44:10.982100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-12-23T18:44:10.982442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-12-23T18:44:10.982492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-12-23T18:44:10.983027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-12-23T18:44:10.983126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-12-23T18:44:10.983184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:994:2950] TestWaitNotification: OK eventTxId 108 2025-12-23T18:44:10.983905Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:10.984128Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 251us result status StatusSuccess 2025-12-23T18:44:10.984640Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink [GOOD] >> KqpPg::InsertFromSelect_Serial+useSink >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine >> TSchemeShardSubDomainTest::RmDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:09.647322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, 
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:09.647395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.647431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:09.647464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:09.647497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:09.647525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:09.647575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.647664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:09.648524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:09.648797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:09.719950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:09.719995Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:09.724880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:09.725064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:09.725207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:09.728184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:09.728369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:09.729106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:09.729320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:09.731183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:09.731347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:09.732508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-12-23T18:44:09.732585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:09.732718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:09.732760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:09.732805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:09.732914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:09.880526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.881469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.881592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.881669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.881734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.881794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.881897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.881968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.882033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.882090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.882186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.882275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.882346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.882428Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.882524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... schemeshard_impl.cpp:5625: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:11.545093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5641: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:11.545201Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 130 2025-12-23T18:44:11.545323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:11.545384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:44:11.546278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:11.547525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:44:11.548749Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:11.548790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:11.548927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:11.549083Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:11.549120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:44:11.549162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2025-12-23T18:44:11.549396Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:11.549440Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-12-23T18:44:11.549498Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:11.549537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:11.549580Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:11.549614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:11.549656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:44:11.549695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:11.549738Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:44:11.549772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:44:11.549858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:44:11.549905Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-12-23T18:44:11.549944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-12-23T18:44:11.549980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2025-12-23T18:44:11.550625Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:11.550711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:11.550768Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:11.550816Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-12-23T18:44:11.550879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:44:11.551542Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:11.551621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:11.551653Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:11.551683Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2025-12-23T18:44:11.551714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:44:11.551800Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:44:11.552236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:11.552293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:44:11.552395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:44:11.553068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:11.553119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:44:11.553206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:11.555860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:11.556339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:11.556906Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:11.557015Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait 
txId: 101 2025-12-23T18:44:11.557231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:44:11.557274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:44:11.557693Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:44:11.557775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:44:11.557813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:746:2736] TestWaitNotification: OK eventTxId 101 2025-12-23T18:44:11.558281Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:11.558451Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 205us result status StatusPathDoesNotExist 2025-12-23T18:44:11.558612Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-system >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:09.107635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:09.107738Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.107775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:09.107804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:09.107836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:09.107870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:09.107926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.107999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:09.108747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:09.109028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:09.174520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:09.174575Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:09.179410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:09.179582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:09.179685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:09.182763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:09.182931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:09.183462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:09.183633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:09.185265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:09.185410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:09.186408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:09.186459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-12-23T18:44:09.186580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:09.186617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:09.186652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:09.186779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:09.341113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.341953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342269Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.342908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... { TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-12-23T18:44:11.685592Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 106:0 240 -> 240 2025-12-23T18:44:11.687473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:44:11.687533Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2025-12-23T18:44:11.687657Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2025-12-23T18:44:11.687699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:44:11.687747Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2025-12-23T18:44:11.687789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:44:11.687831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-12-23T18:44:11.687904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:638:2561] message: TxId: 106 2025-12-23T18:44:11.687967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:44:11.688020Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2025-12-23T18:44:11.688061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 106:0 2025-12-23T18:44:11.688219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-12-23T18:44:11.688258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-12-23T18:44:11.690100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:44:11.690162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:814:2712] TestWaitNotification: OK eventTxId 106 2025-12-23T18:44:11.690935Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:11.691230Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 329us result status StatusSuccess 2025-12-23T18:44:11.691755Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:11.692500Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:11.692716Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 
241us result status StatusSuccess 2025-12-23T18:44:11.693169Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:11.693848Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:11.694062Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 226us result status StatusSuccess 2025-12-23T18:44:11.694534Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 
5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true >> KqpPg::AlterColumnSetDefaultFromSequence [GOOD] >> KqpPg::CreateTableIfNotExists_GenericQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] Test command err: 2025-12-23T18:43:20.135259Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125873266149328:2082];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:20.137843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:43:20.173596Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b0/r3tmp/tmpSX3CHZ/pdisk_1.dat 2025-12-23T18:43:20.541366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:20.541624Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:20.600655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:20.620565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:20.697297Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27429, node 1 2025-12-23T18:43:20.765565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:20.765603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:20.765617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:20.765740Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:20.918397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:20.952455Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:20.956831Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:20.956874Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:20.957873Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:5819, port: 5819 2025-12-23T18:43:20.958608Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:20.965483Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:21.012015Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****1_MA (E0E79099) () has now valid token of ldapuser@ldap 2025-12-23T18:43:23.595430Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125887556726832:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:23.595493Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b0/r3tmp/tmpQE5nH0/pdisk_1.dat 2025-12-23T18:43:23.614243Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:23.721975Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:23.723457Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:23.723556Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:23.723799Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587125887556726805:2081] 1766515403594119 != 1766515403594122 2025-12-23T18:43:23.749369Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18759, node 2 2025-12-23T18:43:23.800391Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:23.800423Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:23.800427Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:23.800509Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:23.855309Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:23.886020Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:23.888978Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:23.889034Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:23.889695Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24322, port: 24322 2025-12-23T18:43:23.889863Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:23.910933Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:23.960158Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:23.960622Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:24322 return no entries 2025-12-23T18:43:23.961094Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****6vjA (5873987A) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:24322 return no entries)' 2025-12-23T18:43:26.935702Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587125897246085491:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:26.935767Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b0/r3tmp/tmpue9D3B/pdisk_1.dat 2025-12-23T18:43:26.950934Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:27.006366Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:27.007671Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587125897246085466:2081] 1766515406934882 != 1766515406934885 2025-12-23T18:43:27.019006Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:27.019082Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:27.042360Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62878, node 3 2025-12-23T18:43:27.096198Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:27.096227Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:27.096235Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:27.096344Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:27.238073Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:27.397016Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:43:27.399975Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:43:27.400013Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:43:27.400683Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:4782, port: 4782 2025-12-23T18:43:27.400746Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:27.419001Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:27.467349Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:27.515168Z node 3 
:LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:43:27.515939Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:43:27.516002Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:27.563323Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:27.607183Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: sear ... net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:43:49.752195Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****oAdA (39753176) () has now valid token of ldapuser@ldap 2025-12-23T18:43:50.252327Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:54.247367Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587125998942616101:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:54.247508Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:43:54.251494Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****oAdA (39753176) 2025-12-23T18:43:54.251662Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:20416, port: 20416 2025-12-23T18:43:54.251765Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:43:54.263694Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:43:54.307378Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:43:54.307930Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:20416 return no entries 2025-12-23T18:43:54.308428Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****oAdA (39753176) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:20416 return no entries)' 2025-12-23T18:43:58.253263Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****oAdA (39753176) 2025-12-23T18:44:00.408574Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587126045893761907:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:00.408685Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0046b0/r3tmp/tmpBTdHuS/pdisk_1.dat 2025-12-23T18:44:00.421359Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:00.500432Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7587126045893761880:2081] 1766515440407722 != 1766515440407725 2025-12-23T18:44:00.511621Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:00.542658Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:00.542882Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:00.552849Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18962, node 6 2025-12-23T18:44:00.605806Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:44:00.620199Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:00.620228Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:00.620236Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:00.620331Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:44:00.799780Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:44:00.802947Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:44:00.802983Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:44:00.803753Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:64404, port: 64404 2025-12-23T18:44:00.803848Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:44:00.815595Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:44:00.859450Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:44:00.860102Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:64404. Server is busy 2025-12-23T18:44:00.860650Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****RLGw (6DDD86A4) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:64404. Server is busy)' 2025-12-23T18:44:00.861036Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:44:00.861078Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:44:00.862100Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:64404, port: 64404 2025-12-23T18:44:00.862203Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:44:00.871300Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:44:00.915297Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:44:00.915841Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:64404. Server is busy 2025-12-23T18:44:00.916329Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****RLGw (6DDD86A4) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:64404. Server is busy)' 2025-12-23T18:44:01.449944Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:03.450114Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****RLGw (6DDD86A4) 2025-12-23T18:44:03.450547Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:44:03.450582Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:44:03.452082Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:64404, port: 64404 2025-12-23T18:44:03.452160Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:44:03.461012Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:44:03.503258Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:44:03.503772Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:64404. Server is busy 2025-12-23T18:44:03.504258Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****RLGw (6DDD86A4) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:64404. 
Server is busy)' 2025-12-23T18:44:05.408971Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7587126045893761907:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:05.409061Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:44:07.453374Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****RLGw (6DDD86A4) 2025-12-23T18:44:07.453597Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:44:07.453616Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:44:07.454182Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:64404, port: 64404 2025-12-23T18:44:07.454263Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2025-12-23T18:44:07.464380Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-12-23T18:44:07.511418Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-12-23T18:44:07.559060Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-12-23T18:44:07.559658Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2025-12-23T18:44:07.559717Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:44:07.603104Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:44:07.647134Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-12-23T18:44:07.648197Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****RLGw (6DDD86A4) () has now valid token of ldapuser@ldap |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> KqpPg::CreateTempTableSerial [GOOD] >> KqpPg::DropSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: 
[1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:11.667286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:11.667377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:11.667414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:11.667449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:11.667486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:11.667532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:11.667606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:11.667703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:11.668602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:11.668911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:11.754932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:11.754973Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:11.759481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:11.759663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:11.759752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:11.762141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:11.762276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:11.762804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:11.763002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:11.764503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:11.764644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:11.765522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:11.765567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:11.765728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:11.765772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:11.765812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:11.765949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:11.771231Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:11.863811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:11.864008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:11.864226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:11.864275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:11.864534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:11.864613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:11.866722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:11.866963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:11.867154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-12-23T18:44:11.867208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:11.867246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:11.867295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:11.869094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:11.869160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:11.869196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:11.870771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:11.870814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:11.870888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:11.870940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:11.874567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:11.876136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:11.876353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:11.877395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:11.877516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:11.877564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:11.877862Z node 
1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:11.877916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:11.878078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:11.878170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:11.880088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:11.880142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... ted: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2025-12-23T18:44:12.301537Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409548 2025-12-23T18:44:12.302695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:44:12.303040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409551 2025-12-23T18:44:12.303335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-12-23T18:44:12.303455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-12-23T18:44:12.304166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-12-23T18:44:12.304891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:44:12.305017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:44:12.305228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:44:12.305462Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:44:12.305572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:44:12.306005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:44:12.306790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:12.306834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:12.306929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:44:12.308888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-12-23T18:44:12.308932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-12-23T18:44:12.309108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2025-12-23T18:44:12.309146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-12-23T18:44:12.309262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:12.309384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:12.309416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:12.309473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:12.311888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:44:12.311920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:44:12.311973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:44:12.311995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-12-23T18:44:12.312046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-12-23T18:44:12.312059Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-12-23T18:44:12.312155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:44:12.312190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:44:12.312272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:44:12.312361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-12-23T18:44:12.313764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:12.313834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:44:12.314074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:44:12.314150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:44:12.314612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:44:12.314720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:44:12.314779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:793:2683] TestWaitNotification: OK eventTxId 103 2025-12-23T18:44:12.315235Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:12.315408Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 181us result status StatusPathDoesNotExist 2025-12-23T18:44:12.315528Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, 
at schemeshard: 72057594046678944 2025-12-23T18:44:12.315832Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:12.315981Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 153us result status StatusSuccess 2025-12-23T18:44:12.316272Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:11.808972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:11.809039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:11.809089Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:11.809113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:11.809140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:11.809163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:11.809225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:11.809279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:11.809979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:11.810217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:11.869156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:11.869215Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:11.874701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:11.874931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:11.875039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:11.877962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:11.878125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:11.878782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:11.879011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:11.880853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:11.881021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:11.882242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:11.882303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:11.882442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:11.882484Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:11.882524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:11.882636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:11.889246Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:12.016613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:12.016804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.017008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:12.017084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:12.017301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:12.017378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:12.019494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:12.019715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:12.019888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.019940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:12.019976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:12.020008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:12.021883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.021947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:12.021985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:12.023592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.023636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.023695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:12.023755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:12.027546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:12.029204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:12.029408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:12.030489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:12.030609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:12.030665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:12.031002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:12.031061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:12.031249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:12.031333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:12.033291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:12.033341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... gnore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 150 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1447 } } CommitVersion { Step: 150 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2025-12-23T18:44:12.392404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 627 RawX2: 4294969837 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:44:12.392438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-12-23T18:44:12.392549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 627 RawX2: 4294969837 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:44:12.392610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:44:12.392688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 627 RawX2: 4294969837 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:44:12.392769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:12.392808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.392842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-12-23T18:44:12.392890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2025-12-23T18:44:12.396175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:12.396237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:12.396283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.396353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.396564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.396604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:44:12.396679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:12.396703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:12.396740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:12.396766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:12.396794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-12-23T18:44:12.396840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:281:2271] message: TxId: 101 2025-12-23T18:44:12.396874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:12.396900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:44:12.396929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:44:12.397030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-12-23T18:44:12.398237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:44:12.398273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:282:2272] TestWaitNotification: OK eventTxId 101 2025-12-23T18:44:12.398653Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:12.398847Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 205us result status StatusSuccess 2025-12-23T18:44:12.399286Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: 
EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:12.399732Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:12.399877Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 138us result status StatusSuccess 2025-12-23T18:44:12.400212Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:09.668455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:09.668543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.668597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:09.668640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:09.668679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing 
config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:09.668708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:09.668761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.668853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:09.669702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:09.670003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:09.757619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:09.757698Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:09.763768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:09.763990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:09.764114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:09.767536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:09.767731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:09.768496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:09.768747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:09.770705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:09.770944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:09.772235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:09.772300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:09.772496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:09.772560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:09.772608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:09.772754Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:09.927307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.928395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.928513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.928598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.928701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.928775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.928862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.928937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.929029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.929093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.929161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.929246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.929344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.929438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.929542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... athId: 38] was 11 2025-12-23T18:44:12.324058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 46] was 3 2025-12-23T18:44:12.326221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 46, at schemeshard: 72057594046678944 2025-12-23T18:44:12.326473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-12-23T18:44:12.326756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:12.326809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:12.327067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 46] 2025-12-23T18:44:12.327168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:12.327211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1396:3259], at schemeshard: 72057594046678944, txId: 137, path id: 38 2025-12-23T18:44:12.327283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1396:3259], at schemeshard: 72057594046678944, txId: 137, path id: 46 2025-12-23T18:44:12.327915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.327988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944 2025-12-23T18:44:12.328212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2025-12-23T18:44:12.329024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 137 2025-12-23T18:44:12.329143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 137 2025-12-23T18:44:12.329194Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-12-23T18:44:12.329239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 15 2025-12-23T18:44:12.329282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 12 2025-12-23T18:44:12.330585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 46 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-12-23T18:44:12.330662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 46 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-12-23T18:44:12.330705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-12-23T18:44:12.330762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 46], version: 1 2025-12-23T18:44:12.330797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 46] was 4 2025-12-23T18:44:12.330873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2025-12-23T18:44:12.333814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2025-12-23T18:44:12.333996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2025-12-23T18:44:12.334044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-12-23T18:44:12.334853Z node 1 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2025-12-23T18:44:12.335064Z node 1 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 
72075186233409555 2025-12-23T18:44:12.335244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6306: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-12-23T18:44:12.335299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-12-23T18:44:12.335430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-12-23T18:44:12.335486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-12-23T18:44:12.335557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-12-23T18:44:12.335656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 137:0 2 -> 3 2025-12-23T18:44:12.337440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-12-23T18:44:12.337836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-12-23T18:44:12.339071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.339311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.339377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:219: TCreateTable TConfigureParts operationId# 137:0 ProgressState at tabletId# 72057594046678944 2025-12-23T18:44:12.339466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:239: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2025-12-23T18:44:12.339832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:255: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 1343 RawX2: 4294970513 } TxBody: "\n\236\004\n\007Table11\020.\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 
\020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020.:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 38 2025-12-23T18:44:12.343956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2025-12-23T18:44:12.344120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] |95.2%| [TA] $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools >> KqpPg::PgUpdate+useSink [GOOD] >> KqpPg::PgUpdate-useSink >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] >> TSchemeShardSubDomainTest::RmDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 25932, MsgBus: 29221 2025-12-23T18:43:10.165753Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125831683391395:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:10.168157Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005119/r3tmp/tmpQZ5qy1/pdisk_1.dat 2025-12-23T18:43:10.394564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:10.394663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:10.440986Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:10.441342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:10.463041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25932, node 1 2025-12-23T18:43:10.540270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:10.540300Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:10.540308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:10.540407Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:10.646242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29221 TClient is connected to server localhost:29221 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515390507 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:43:11.036444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:11.173598Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:13.482667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125844568294104:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.482668Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125844568294131:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.482848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.483311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125844568294134:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.483374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.487480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:13.504310Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125844568294133:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:43:13.599878Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125844568294187:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:13.668749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 12348, MsgBus: 2860 2025-12-23T18:43:14.841594Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125847023285831:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:14.841755Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005119/r3tmp/tmpdPlOAf/pdisk_1.dat 2025-12-23T18:43:14.855490Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:14.921406Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587125847023285788:2081] 1766515394840728 != 1766515394840731 2025-12-23T18:43:14.927955Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:14.958016Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:14.958084Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12348, node 2 2025-12-23T18:43:14.959991Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:15.005695Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:15.005716Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:15.005723Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:15.005832Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:15.156980Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2860 TClient is connected to server localhost:2860 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515395008 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:15.422883Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:15.431374Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:43:15.848209Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:18.104594Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125864203155862:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:18.104597Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125864203155851:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool in ...
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:44:04.391710Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7587126063692384250:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:04.431329Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:04.531471Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:04.618696Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:7587126063692384486:2354], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-12-23T18:44:04.619361Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=10&id=ZGZlYjliZi01ODI2YjhhMC1mZTYyYzJkMi1mYWFmYmIyYw==, ActorId: [10:7587126063692384484:2353], ActorState: ExecuteState, TraceId: 01kd68d2v2cpvwr4haxn4189bq, ReplyQueryCompileError, status: BAD_REQUEST, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Missing not null column in input: c. All not null columns should be initialized" end_position { row: 1 column: 1 } issue_code: 2032 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:44:04.702348Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7587126042217546852:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:04.702468Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18911, MsgBus: 3817 2025-12-23T18:44:05.888280Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7587126066592488234:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:05.888361Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005119/r3tmp/tmpMSjTTk/pdisk_1.dat 2025-12-23T18:44:05.904966Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:05.988305Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:06.063728Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:06.063859Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:06.087388Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:06.096586Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18911, node 11 2025-12-23T18:44:06.154034Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:06.154068Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:06.154081Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:06.154198Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected 
to server localhost:3817 2025-12-23T18:44:06.579140Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3817 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515446150 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:06.840293Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:06.895783Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:10.877125Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126088067325570:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:10.877132Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126088067325578:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:10.877266Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:10.877589Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126088067325585:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:10.877687Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:10.883346Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:10.888748Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7587126066592488234:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:10.888812Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:44:10.898786Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7587126088067325584:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:10.973174Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7587126088067325640:2541] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:11.013134Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:11.105023Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:11.180920Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [11:7587126092362293171:2355], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-12-23T18:44:11.181349Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=11&id=NzVhODFiYjktYjM0NTg0NDItNTk4ZjIwYzktMzNkZjcyODM=, ActorId: [11:7587126092362293169:2354], ActorState: ExecuteState, TraceId: 01kd68d98d9gxrc7h6krjc5yyw, ReplyQueryCompileError, status: BAD_REQUEST, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Missing not null column in input: c. All not null columns should be initialized" end_position { row: 1 column: 1 } issue_code: 2032 severity: 1 } } }, remove tx with tx_id: |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> TSchemeShardSubDomainTest::Restart >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:12.544959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:12.545033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:12.545068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:12.545094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:12.545140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:12.545164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:12.545215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:12.545271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:12.545926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 
2025-12-23T18:44:12.546159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:12.612787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:12.612850Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:12.617804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:12.617983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:12.618109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:12.620988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:12.621177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:12.621937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:12.622186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:12.624053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:12.624226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:12.625262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:12.625322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:12.625426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:12.625467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:12.625506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:12.625602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.760495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.761488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { 
Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.761610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.761680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.761775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.761861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.761956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.762030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.762179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.762256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.762327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.762403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.762473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.762556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.762646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-12-23T18:44:13.381225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 100:0 128 -> 240 2025-12-23T18:44:13.381296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-12-23T18:44:13.381460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:13.381522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:44:13.381579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-12-23T18:44:13.383398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:13.383452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:13.383589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:13.383694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:13.383739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-12-23T18:44:13.383798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 38 2025-12-23T18:44:13.383984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.384036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-12-23T18:44:13.384147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:44:13.384182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:13.384276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:44:13.384333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:13.384383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready 
parts: 1/1, is published: false 2025-12-23T18:44:13.384424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:13.384461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2025-12-23T18:44:13.384497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 100:0 2025-12-23T18:44:13.384581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:44:13.384621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-12-23T18:44:13.384669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:44:13.384712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2025-12-23T18:44:13.385513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:13.385598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:13.385653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:44:13.385697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:44:13.385740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:44:13.386696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:13.387109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:13.387165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:44:13.387198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:44:13.387229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 38] was 2 2025-12-23T18:44:13.387299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-12-23T18:44:13.390424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-12-23T18:44:13.391356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-12-23T18:44:13.391610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-12-23T18:44:13.391676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-12-23T18:44:13.391765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:44:13.391790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:44:13.392245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-12-23T18:44:13.392407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-12-23T18:44:13.392456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:723:2713] 2025-12-23T18:44:13.392619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:44:13.392713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:44:13.392741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:723:2713] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-12-23T18:44:13.393173Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:13.393456Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 273us result status StatusSuccess 2025-12-23T18:44:13.393941Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-true >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:08.659250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:08.659349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:08.659397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:08.659434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:08.659471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:08.659504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:08.659558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:08.659641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:08.660536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:08.660795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:08.728518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:08.728578Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:08.733141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:08.733346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:08.733440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:08.735937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:08.736099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:08.736648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:08.736859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:08.738348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:08.738483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:08.739401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:08.739470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:08.739571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:08.739606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:08.739649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:08.739761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:08.847710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.851094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.851268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.851348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.851442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.851504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.851568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.851641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.851697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.851756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.851828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.851954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.852024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.852082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:08.852147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-12-23T18:44:13.379586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-12-23T18:44:13.379640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-12-23T18:44:13.379847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2025-12-23T18:44:13.380047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-12-23T18:44:13.380090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:853:2807], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-12-23T18:44:13.380148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:853:2807], at schemeshard: 72075186233409546, txId: 104, path id: 32 2025-12-23T18:44:13.380589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-12-23T18:44:13.380650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-12-23T18:44:13.380749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-12-23T18:44:13.380811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-12-23T18:44:13.380866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2025-12-23T18:44:13.381767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2025-12-23T18:44:13.381876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2025-12-23T18:44:13.381929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-12-23T18:44:13.381981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 12 2025-12-23T18:44:13.382026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2025-12-23T18:44:13.383013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at 
schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-12-23T18:44:13.383104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-12-23T18:44:13.383145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-12-23T18:44:13.383180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2025-12-23T18:44:13.383210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2025-12-23T18:44:13.383278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-12-23T18:44:13.385893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-12-23T18:44:13.385955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-12-23T18:44:13.386301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2025-12-23T18:44:13.386507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:44:13.386550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:44:13.386595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:44:13.386632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:44:13.386667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-12-23T18:44:13.386731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1002:2943] message: TxId: 104 2025-12-23T18:44:13.386801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:44:13.386847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:44:13.386879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:44:13.387002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2025-12-23T18:44:13.387636Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-12-23T18:44:13.387675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-12-23T18:44:13.389131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-12-23T18:44:13.389238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-12-23T18:44:13.390715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-12-23T18:44:13.390786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:853:2807], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-12-23T18:44:13.390867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:44:13.390906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1353:3271] 2025-12-23T18:44:13.391627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-12-23T18:44:13.392613Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-12-23T18:44:13.392820Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 227us result status StatusSuccess 2025-12-23T18:44:13.393315Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } 
StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:12.622347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:12.622460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:12.622542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:12.622587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:12.622627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:12.622660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:12.622711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:12.622814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:12.623951Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:12.624258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:12.711085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:12.711159Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:12.716546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:12.716770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:12.716890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:12.719599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:12.719788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:12.720491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:12.720737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:12.722511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:12.722690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:12.723887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:12.723948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:12.724113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:12.724177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:12.724220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:12.724382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.872070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.873031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.873147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.873226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.873316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.873391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.873477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.873547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.873631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.873705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.873788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.873874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.873973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.874056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.874158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
25-12-23T18:44:13.623392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2025-12-23T18:44:13.623441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-12-23T18:44:13.625120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:13.625172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:13.625351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:13.625459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:13.625502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-12-23T18:44:13.625543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 100, path id: 38 2025-12-23T18:44:13.625842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.625908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-12-23T18:44:13.626028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:44:13.626078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:13.626124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:44:13.626160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:13.626198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-12-23T18:44:13.626241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:44:13.626284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2025-12-23T18:44:13.626318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 100:0 2025-12-23T18:44:13.626563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2025-12-23T18:44:13.626612Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-12-23T18:44:13.626670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:44:13.626703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2025-12-23T18:44:13.627474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:13.627573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:13.627617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:44:13.627656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:44:13.627702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:44:13.628377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:13.628465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:44:13.628505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:44:13.628538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:44:13.628579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2025-12-23T18:44:13.628645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-12-23T18:44:13.628685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:944:2861] 2025-12-23T18:44:13.631556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-12-23T18:44:13.631645Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-12-23T18:44:13.631703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-12-23T18:44:13.631743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:945:2862] TestWaitNotification: OK eventTxId 100 2025-12-23T18:44:13.632237Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:13.632489Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 224us result status StatusSuccess 2025-12-23T18:44:13.633016Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:44:13.636180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:13.636341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/USER_0, 
pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.636462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:44:13.638504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:13.638828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] TEST create schemeshard, 0x00007D99121B0D00 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:09.552021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:09.552095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.552133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:09.552165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:09.552201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:09.552229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:09.552331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:09.552404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:09.553227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:09.553509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:09.637924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:09.637978Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:09.647801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:09.648025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:09.648160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:09.651215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:09.651394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:09.652128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:09.652367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:09.654118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:09.654278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:09.655472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:09.655527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:09.655660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:09.655703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:09.655743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:09.655867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:09.802489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:44:09.803473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.803583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.803670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.803750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.803809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.803885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.803951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.804037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.804100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.804159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.804241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.804310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.804396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:09.804473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 Leader for TabletID 72057594046678944 is [1:894:2843] sender: [1:967:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 2025-12-23T18:44:13.649841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 2025-12-23T18:44:13.660725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 
72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 0 2025-12-23T18:44:13.660848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2025-12-23T18:44:13.660924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:13.661124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2025-12-23T18:44:13.661182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2025-12-23T18:44:13.661213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:13.661287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D991221B100, stats written 2 2025-12-23T18:44:13.661828Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:13.663769Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 205us result status StatusSuccess 2025-12-23T18:44:13.664215Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:13.300884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:13.300987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:13.301029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:13.301063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:13.301096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:13.301116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:13.301154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:13.301208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:13.301905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:13.302217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:13.383463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:13.383513Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:13.395680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:13.395942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:13.396118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:13.399889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:13.400097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:13.400917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:13.401174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:13.403185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:13.403382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:13.404522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:13.404573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:13.404682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:13.404726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:13.404760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:13.404861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.415856Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:13.524668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:13.524921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.525110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:13.525145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:13.525318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:13.525400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:13.527461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:13.527627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:13.527784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.527827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:13.527857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:13.527884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:13.529416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.529472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:13.529516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:13.530826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.530864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.530917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-12-23T18:44:13.530952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:13.538669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:13.540388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:13.540571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:13.541369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:13.541473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:13.541512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:13.541785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:13.541830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:13.541989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:13.542046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:13.543730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:13.543767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
4 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-12-23T18:44:13.728513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-12-23T18:44:13.728847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:13.728977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:13.729028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-12-23T18:44:13.729420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2025-12-23T18:44:13.729497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-12-23T18:44:13.729691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:13.729763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-12-23T18:44:13.729810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:44:13.731669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:13.731719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:13.731940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:44:13.732060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:13.732130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:44:13.732199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-12-23T18:44:13.732272Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.732325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:44:13.732427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:13.732462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:13.732503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:13.732541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:13.732581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:44:13.732621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:13.732661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:44:13.732724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:44:13.732986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-12-23T18:44:13.733031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2025-12-23T18:44:13.733086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-12-23T18:44:13.733127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-12-23T18:44:13.734080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:13.734238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:13.734289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:13.734327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-12-23T18:44:13.734374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:13.735363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:13.735451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:13.735497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:13.735530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-12-23T18:44:13.735560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-12-23T18:44:13.735624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-12-23T18:44:13.735692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:556:2473] 2025-12-23T18:44:13.738414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:13.739388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:13.739480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:44:13.739511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:557:2474] TestWaitNotification: OK eventTxId 101 2025-12-23T18:44:13.739975Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:13.740156Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 199us result status StatusSuccess 2025-12-23T18:44:13.740741Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 
CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] >> TSchemeShardSubDomainTest::CreateWithNoEqualName >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:40:51.235024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:51.235138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:51.235187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:51.235231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:51.235273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:51.235306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:51.235371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:51.235471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:51.236510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:51.236835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:51.395948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:51.396024Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:51.396989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:51.415239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:51.415460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:51.415638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:51.424348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:51.424639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:51.425491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:51.425835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:51.429060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:51.429278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:51.430629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:51.430697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:51.430848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:51.430907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:51.430994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:51.431194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:51.439126Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-12-23T18:40:51.608844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:51.609078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:51.609309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:40:51.609363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:40:51.609663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:40:51.609729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:51.614082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:51.614387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:40:51.614643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:51.614708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:40:51.614797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:40:51.614836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:40:51.621544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:51.621620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:40:51.621660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:40:51.631509Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:51.631575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:51.631639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:51.631717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:40:51.643952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:40:51.651632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:40:51.651937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:40:51.653282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:51.653439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:51.653487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:51.654010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:40:51.654089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:51.654381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:40:51.654463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:51.656757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard ... 
as 3 2025-12-23T18:44:11.800419Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 190, publications: 4, subscribers: 0 2025-12-23T18:44:11.800463Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 137], 6 2025-12-23T18:44:11.800502Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 138], 6 2025-12-23T18:44:11.800533Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 139], 5 2025-12-23T18:44:11.800565Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 140], 2 2025-12-23T18:44:11.803626Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:44:11.803758Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:44:11.803809Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 190 2025-12-23T18:44:11.803855Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 137], version: 6 2025-12-23T18:44:11.803906Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 137] was 2 2025-12-23T18:44:11.805181Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:44:11.805301Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:44:11.805344Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 190 2025-12-23T18:44:11.805386Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 138], version: 6 2025-12-23T18:44:11.805434Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2025-12-23T18:44:11.806835Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 
72057594046678944, cookie: 190 2025-12-23T18:44:11.806942Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:44:11.806979Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 190 2025-12-23T18:44:11.807016Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2025-12-23T18:44:11.807057Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2025-12-23T18:44:11.808350Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:44:11.808465Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:44:11.808505Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 190 2025-12-23T18:44:11.808543Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 2 2025-12-23T18:44:11.808582Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 2 2025-12-23T18:44:11.808664Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 190, subscribers: 0 2025-12-23T18:44:11.813384Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2025-12-23T18:44:11.813568Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2025-12-23T18:44:11.813690Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2025-12-23T18:44:11.813809Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 TestModificationResult got TxId: 190, wait until txId: 190 TestWaitNotification wait txId: 190 2025-12-23T18:44:11.815561Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 190: send EvNotifyTxCompletion 2025-12-23T18:44:11.815619Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 190 2025-12-23T18:44:11.817295Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 190, at schemeshard: 72057594046678944 2025-12-23T18:44:11.817415Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2025-12-23T18:44:11.817459Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [32:2921:4909] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2025-12-23T18:44:11.818870Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2025-12-23T18:44:11.818921Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2025-12-23T18:44:11.819017Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2025-12-23T18:44:11.819050Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2025-12-23T18:44:11.819159Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2025-12-23T18:44:11.819193Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2025-12-23T18:44:11.819264Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2025-12-23T18:44:11.819294Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2025-12-23T18:44:11.819361Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2025-12-23T18:44:11.819391Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2025-12-23T18:44:11.821377Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2025-12-23T18:44:11.821564Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2025-12-23T18:44:11.821616Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [32:2924:4912] 2025-12-23T18:44:11.822012Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2025-12-23T18:44:11.822379Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2025-12-23T18:44:11.822428Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [32:2924:4912] 2025-12-23T18:44:11.822528Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 2025-12-23T18:44:11.822788Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 
2025-12-23T18:44:11.822836Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [32:2924:4912] 2025-12-23T18:44:11.822948Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 72057594046678944 2025-12-23T18:44:11.823130Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2025-12-23T18:44:11.823212Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2025-12-23T18:44:11.823249Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [32:2924:4912] 2025-12-23T18:44:11.823429Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2025-12-23T18:44:11.823464Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [32:2924:4912] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] TEST create schemeshard, 0x00007DF5E50A5900 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:03.661933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:03.662006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:03.662035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:03.662062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:03.662099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:03.662118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:03.662170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:03.662226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:03.662914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:03.663142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:03.735125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:03.735180Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:03.739965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:03.740104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:03.740190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:03.742915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:03.743057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:03.743622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:03.743833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:03.745539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:03.745669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:03.746545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:03.746587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:03.746695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:03.746726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:03.746784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:03.746875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:03.867274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:03.868779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
ten 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 Leader for TabletID 72057594046678944 is [2:1019:2979] sender: [2:1078:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 2025-12-23T18:44:13.691412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 
TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 2025-12-23T18:44:13.743333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 0 2025-12-23T18:44:13.743529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2025-12-23T18:44:13.743602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2025-12-23T18:44:13.743661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:451: OLAP store contains 1 tables. 2025-12-23T18:44:13.743715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:473: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DF5E5242100, stats written 1 2025-12-23T18:44:13.744125Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:13.744333Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 236us result status StatusSuccess 2025-12-23T18:44:13.744958Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 166 LastUpdateTime: 166 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas >> TSchemeShardSubDomainTest::Restart [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:13.118278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:13.118370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:13.118413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:13.118451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:13.118492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:13.118525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-12-23T18:44:13.118584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:13.118689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:13.119702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:13.120041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:13.209871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:13.209936Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:13.216814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:13.217020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:13.217148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:13.220213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:13.220388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:13.221119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:13.221337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:13.223252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:13.223420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:13.224490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:13.224566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:13.224718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:13.224763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:13.224802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:13.224916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.344206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.344926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.345646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
plete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:14.202572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:14.203214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:14.203248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:14.203367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:14.203447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:14.203475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:44:14.203502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 38 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:44:14.203647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.203686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:44:14.203805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:14.203863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:14.203910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:44:14.203939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:14.203979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:44:14.204020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:44:14.204050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:44:14.204078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:44:14.204149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:44:14.204181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-12-23T18:44:14.204207Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-12-23T18:44:14.204228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2025-12-23T18:44:14.204783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:14.204862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:14.204896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:14.204928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:44:14.204980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:44:14.205432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:14.205509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:44:14.205558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:44:14.205580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:44:14.205603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:44:14.205657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:44:14.208604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:44:14.208670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-12-23T18:44:14.211387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:14.231609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2025-12-23T18:44:14.231701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2025-12-23T18:44:14.231896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-12-23T18:44:14.231951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-12-23T18:44:14.234551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:14.234800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-12-23T18:44:14.235159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:44:14.235211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-12-23T18:44:14.235291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:44:14.235314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:44:14.235795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:44:14.235931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:44:14.236004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:44:14.236040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:719:2709] 2025-12-23T18:44:14.236193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:44:14.236220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:719:2709] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> KqpPg::ExplainColumnsReorder [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TNodeBrokerTest::NodesMigrationRemovedChanged >> Cdc::Alter [GOOD] >> Cdc::DescribeStream >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] |95.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:14.213961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:14.214052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:14.214142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:14.214199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:14.214236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:14.214283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:14.214347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:14.214433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:14.215269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:14.215596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:14.300192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:14.300248Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:14.305822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:14.306049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:14.306186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:14.309629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:14.309812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:14.310545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2025-12-23T18:44:14.310853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:14.312824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:14.312994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:14.314260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:14.314323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:14.314496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:14.314550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:14.314593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:14.314735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.321966Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:14.445352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:14.445561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.445763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:14.445807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:14.446018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:14.446081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:14.448197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:14.448416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:14.448575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.448630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:14.448667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:14.448699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:14.450407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.450468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:14.450508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:14.452071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.452116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.452188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:14.452240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:14.456499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:14.458283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:14.458477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:14.459522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:14.459638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:14.459681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:14.459977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:14.460026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:14.460195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:14.460277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:14.462197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:14.462238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... G: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:44:14.628683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1492: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.628750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1518: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.629005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-12-23T18:44:14.629317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1820: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.629404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2025-12-23T18:44:14.629708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.629845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.629950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-12-23T18:44:14.629990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:44:14.630021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:44:14.630045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:44:14.630151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.630245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.630441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-12-23T18:44:14.630810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.630924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.631371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.631489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.631718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.631823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.631866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.631945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.632151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.632253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.632436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.632680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.632781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.632834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.632968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.633040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.633102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 
2025-12-23T18:44:14.638507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:14.641167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:14.641234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:14.641519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:14.641573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:14.641622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:14.641902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:466:2419] sender: [1:527:2058] recipient: [1:15:2062] 2025-12-23T18:44:14.716406Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:14.716659Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 258us result status StatusSuccess 2025-12-23T18:44:14.717107Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 
ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:14.717850Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:14.718014Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 165us result status StatusSuccess 2025-12-23T18:44:14.718434Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:01.654174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:01.654262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:01.654303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:01.654342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:01.654385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:01.654418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:01.654467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:01.654544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:01.655543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:01.655878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:01.738250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:01.738325Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:01.744437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:01.744679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:01.744815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:01.748132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:01.748333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:01.749129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2025-12-23T18:44:01.749389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:01.751573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:01.751766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:01.753001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:01.753074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:01.753252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:01.753303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:01.753350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:01.753478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:01.896495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.897565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.897713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.897794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.897871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.897934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.898041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.898123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.898191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.898278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.898364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.898472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.898548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.898631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:01.898760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 14.537123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.537394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:14.537444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:14.537623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:44:14.537799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:14.537841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 38 2025-12-23T18:44:14.537885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 39 2025-12-23T18:44:14.537971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.538023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:44:14.538099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema 
changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.538140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-12-23T18:44:14.538185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2025-12-23T18:44:14.539695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:14.539802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:14.539846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:44:14.539888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 10 2025-12-23T18:44:14.539934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:44:14.540646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:14.540727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:14.540759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:44:14.540791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2025-12-23T18:44:14.540823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:44:14.540886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-12-23T18:44:14.543836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.543902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:14.544280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] 
was 3 2025-12-23T18:44:14.544471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:44:14.544510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:44:14.544545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:44:14.544578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:44:14.544615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-12-23T18:44:14.544673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:816:2783] message: TxId: 103 2025-12-23T18:44:14.544720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:44:14.544759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:44:14.544794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:44:14.544893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:44:14.545401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:14.545445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:14.546225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:44:14.547876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:44:14.548862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:14.548917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 0, path id: 38 2025-12-23T18:44:14.549225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:44:14.549276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1746:3671] 2025-12-23T18:44:14.549912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-12-23T18:44:14.553962Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:14.554183Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 252us result status StatusSuccess 2025-12-23T18:44:14.554700Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:06.113514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, 
Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:06.113576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:06.113613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:06.113655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:06.113682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:06.113703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:06.113763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:06.113861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:06.114538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:06.114786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:06.196413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:06.196471Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:06.201636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:06.201824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:06.201937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:06.204724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:06.204901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:06.205550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:06.205744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:06.207466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:06.207630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:06.208599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-12-23T18:44:06.208645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:06.208739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:06.208812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:06.208848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:06.208979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.214381Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:06.335602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:06.335814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.336029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:06.336071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:06.336318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:06.336386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:06.338684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:06.338918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:06.339108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.339184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046678944 2025-12-23T18:44:06.339233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:06.339266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:06.341183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.341247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:06.341291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:06.342875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.342935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.342990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:06.343048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:06.351672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:06.353609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:06.353780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:06.354848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:06.354980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:06.355031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:06.355331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:06.355380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:06.355633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:06.355704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:06.357625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:06.357669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 Leader for TabletID 72057594046678944 is [2:1099:3020] sender: [2:1174:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 2025-12-23T18:44:14.507115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0.097 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 2025-12-23T18:44:14.537990Z node 
2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 0 2025-12-23T18:44:14.538246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2025-12-23T18:44:14.538312Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table1, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:14.538415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 2025-12-23T18:44:14.548845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 2025-12-23T18:44:14.570043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST 
waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 2025-12-23T18:44:14.580905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 1 2025-12-23T18:44:14.581062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 584 row count 1 2025-12-23T18:44:14.581114Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:14.581189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2025-12-23T18:44:14.581220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:3 data size 584 row count 1 2025-12-23T18:44:14.581237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:14.581281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DDCCA1EC900, stats written 3 2025-12-23T18:44:14.581790Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:14.581979Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 231us result status StatusSuccess 2025-12-23T18:44:14.582491Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-false |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:14.284521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:14.284610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:14.284677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:14.284722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:14.284763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:14.284794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:14.284856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:14.284950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:14.285853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:14.286103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:14.373763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:14.373824Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:14.379610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:14.379851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:14.379994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:14.383044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:14.383265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:14.384019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:14.384265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:14.386158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:14.386345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:14.387627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:14.387691Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:14.387878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:14.387933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:14.387981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:14.388112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.395070Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:44:14.543030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:14.543285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.543512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:44:14.543572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:44:14.543845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:44:14.543930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:14.546099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:14.546330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:44:14.546515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.546575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:44:14.546617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:44:14.546664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:44:14.548616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.548681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:14.548726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:44:14.550351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.550399Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.550458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:14.550503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:44:14.554581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:44:14.556307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:44:14.556492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:44:14.557543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:14.557685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:14.557732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:44:14.558032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:44:14.558083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-12-23T18:44:14.558274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:44:14.558356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:44:14.560287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:14.560331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... lt> execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409551 TxId: 104 Status: OK 2025-12-23T18:44:15.064887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:654: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409551 TxId: 104 Status: OK 2025-12-23T18:44:15.064906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-12-23T18:44:15.064925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-12-23T18:44:15.077543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.077662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.118101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-12-23T18:44:15.118249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-12-23T18:44:15.118325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-12-23T18:44:15.118375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.118416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-12-23T18:44:15.118450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 104:0 can't persist state: 
ShardsInProgress is not empty, remain: 1 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-12-23T18:44:15.118899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-12-23T18:44:15.119048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-12-23T18:44:15.119119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-12-23T18:44:15.119177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.119226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:44:15.119400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2025-12-23T18:44:15.119567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:15.119631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 FAKE_COORDINATOR: Erasing txId 104 2025-12-23T18:44:15.123335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.123623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.123828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:15.123860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:15.124011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-12-23T18:44:15.124218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:15.124254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:343:2319], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-12-23T18:44:15.124289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: 
[1:343:2319], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-12-23T18:44:15.124924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.124978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:44:15.125074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:44:15.125104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:44:15.125139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:44:15.125165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:44:15.125196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-12-23T18:44:15.125246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:44:15.125293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:44:15.125333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:44:15.125523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-12-23T18:44:15.125561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-12-23T18:44:15.125604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-12-23T18:44:15.125643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-12-23T18:44:15.126256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:44:15.126340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:44:15.126384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:44:15.126419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-12-23T18:44:15.126458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 
2025-12-23T18:44:15.126841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:44:15.126896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:44:15.126918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:44:15.126941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-12-23T18:44:15.126963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-12-23T18:44:15.127017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:44:15.131347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:44:15.134100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:11.876849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:11.876929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:11.876967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:11.876999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:11.877033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:11.877063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:11.877140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:11.877227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:11.878118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:11.878394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:11.942152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:11.942218Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:11.947330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:11.947515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:11.947648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:11.950560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:11.950767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:11.951509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:11.951747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:11.953464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:11.953590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:11.954482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:11.954529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:11.954618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:11.954649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:11.954678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:11.954781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2025-12-23T18:44:12.076490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.077427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.077541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.077605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.077665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.077720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.077806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.077894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.077958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.078034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.078109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.078184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.078251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.078320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.078406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:15.252551Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 181us result status StatusSuccess 2025-12-23T18:44:15.252898Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 250 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:15.253336Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:15.253523Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 216us result status 
StatusSuccess 2025-12-23T18:44:15.253952Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:15.254472Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:15.254625Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 173us result status StatusSuccess 2025-12-23T18:44:15.255013Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 
CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 250 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:15.255513Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:15.255695Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 206us result status StatusSuccess 2025-12-23T18:44:15.256094Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 250 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TEnumerationTest::TestPublish [GOOD] >> TLocalTests::TestAddTenant >> TNodeBrokerTest::NodeNameExpiration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:10.899288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:10.899399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:10.899446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:10.899488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:10.899528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:10.899560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:10.899611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:10.899683Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:10.900628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:10.900957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:10.981438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:10.981487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:10.987655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:10.987848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:10.987954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:10.990916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:10.991129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:10.991944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:10.992164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:10.993842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:10.994026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:10.995114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:10.995190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:10.995302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:10.995337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:10.995372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:10.995493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:11.151305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.152507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.152671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.152753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.152827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.152893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.152991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.153070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.153138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.153235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.153323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.153423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.153496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.153572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.153660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
-23T18:44:15.315605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-12-23T18:44:15.315664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-12-23T18:44:15.315857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2025-12-23T18:44:15.316023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-12-23T18:44:15.316062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:853:2807], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-12-23T18:44:15.316104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:853:2807], at schemeshard: 72075186233409546, txId: 104, path id: 32 2025-12-23T18:44:15.316422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-12-23T18:44:15.316464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-12-23T18:44:15.316521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-12-23T18:44:15.316559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-12-23T18:44:15.316600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2025-12-23T18:44:15.317285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2025-12-23T18:44:15.317364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2025-12-23T18:44:15.317397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-12-23T18:44:15.317427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 12 2025-12-23T18:44:15.317459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2025-12-23T18:44:15.318156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 
LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-12-23T18:44:15.318205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-12-23T18:44:15.318222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-12-23T18:44:15.318240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2025-12-23T18:44:15.318260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2025-12-23T18:44:15.318314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-12-23T18:44:15.320679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-12-23T18:44:15.320743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-12-23T18:44:15.321102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2025-12-23T18:44:15.321309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:44:15.321352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:44:15.321389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:44:15.321425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:44:15.321461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-12-23T18:44:15.321519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1002:2943] message: TxId: 104 2025-12-23T18:44:15.321580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:44:15.321619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:44:15.321650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:44:15.321747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2025-12-23T18:44:15.322214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72075186233409546 2025-12-23T18:44:15.322254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-12-23T18:44:15.323002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-12-23T18:44:15.323119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-12-23T18:44:15.324621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-12-23T18:44:15.324699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:853:2807], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-12-23T18:44:15.324787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:44:15.324829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1423:3342] 2025-12-23T18:44:15.325621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-12-23T18:44:15.327277Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-12-23T18:44:15.327486Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 237us result status StatusSuccess 2025-12-23T18:44:15.328022Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" 
} PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] >> TSchemeShardSubDomainTest::DeclareDefineAndDelete >> TDynamicNameserverTest::TestCacheUsage >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:13.661580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:13.661657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:13.661694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:13.661729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:13.661769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:13.661802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:13.661858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:13.661960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:13.662905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:13.663215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:13.751445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:13.751512Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:13.756514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:13.756741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:13.756860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:13.759541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:13.759677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:13.760179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:13.760363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:13.762004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:13.762143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:13.763146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:13.763244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:13.763384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:13.763426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:13.763463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:13.763599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.887827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.888673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.888821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.888929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.889003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.889066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.889164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.889256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.889321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.889357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.889417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.889497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.889569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.889658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.889748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ecute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:44:15.781298Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.781424Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:44:15.783473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 108 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 106, at schemeshard: 72057594046678944 2025-12-23T18:44:15.783707Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2025-12-23T18:44:15.784283Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:15.784476Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 213us result status StatusSuccess 2025-12-23T18:44:15.784910Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:15.785525Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:15.785710Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 207us result status StatusSuccess 2025-12-23T18:44:15.786151Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: 
"!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:15.786911Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:15.787067Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 176us result status StatusSuccess 2025-12-23T18:44:15.787403Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:44:15.788064Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:15.788222Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 182us result status StatusSuccess 2025-12-23T18:44:15.788611Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_3" PathDescription { Self { Name: "USER_3" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 41 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 41 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID >> TNodeBrokerTest::Test1001NodesSubscribers >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:11.474919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:11.475030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:11.475073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:11.475111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:11.475164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:11.475195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:11.475267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:11.475346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:11.476332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:11.476660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:11.549180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:11.549242Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:11.554804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:11.555043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:11.555199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:11.558180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:11.558361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:11.559122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:11.559362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:11.561264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:11.561426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:11.562583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:11.562644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:11.562822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:11.562868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:11.562909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:11.563056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:11.717175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:44:11.718480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:11.718869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
lete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.841296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.841524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:15.841574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:15.841760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:44:15.841940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:15.842032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 38 2025-12-23T18:44:15.842115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 39 2025-12-23T18:44:15.842601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.842660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:44:15.842769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.842813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-12-23T18:44:15.842864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2025-12-23T18:44:15.843715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:15.843813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:15.843874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:44:15.843912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2025-12-23T18:44:15.843960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for 
pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:44:15.845286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:15.845366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:15.845406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:44:15.845435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2025-12-23T18:44:15.845465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:44:15.845533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-12-23T18:44:15.848340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.848400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:15.848857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:44:15.849064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:44:15.849112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:44:15.849167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:44:15.849199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:44:15.849238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-12-23T18:44:15.849311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:816:2783] message: TxId: 103 2025-12-23T18:44:15.849351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:44:15.849387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:44:15.849418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:44:15.849508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove 
txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:44:15.849909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:15.849943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:15.851125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:44:15.852571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:44:15.853454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:15.853505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 0, path id: 38 2025-12-23T18:44:15.853598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:44:15.853638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1068:3001] 2025-12-23T18:44:15.854428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-12-23T18:44:15.855430Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:15.855710Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 295us result status StatusSuccess 2025-12-23T18:44:15.856351Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: 
"quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 13265, MsgBus: 10233 2025-12-23T18:43:07.630862Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125818741283350:2118];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:07.631842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005127/r3tmp/tmprd0Ul8/pdisk_1.dat 2025-12-23T18:43:07.814611Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:07.843327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:07.843473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:07.895157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:07.904661Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13265, node 1 2025-12-23T18:43:07.994362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:07.994414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:07.994422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:07.994544Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:08.045074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10233 TClient is connected to server localhost:10233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515387959 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:08.579462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:08.591487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:43:08.638134Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:10.998386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125831626186050:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:10.998417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125831626186038:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:10.998566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:10.999366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125831626186053:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:10.999494Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:11.003207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:11.017470Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125831626186052:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:43:11.104175Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125835921153401:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 32164, MsgBus: 21305 2025-12-23T18:43:11.888887Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125836366826265:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:11.889135Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005127/r3tmp/tmpVMeoyt/pdisk_1.dat 2025-12-23T18:43:11.918012Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:11.967238Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:12.005229Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:12.005332Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:12.011956Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32164, node 2 2025-12-23T18:43:12.075466Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:12.075493Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:12.075502Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:12.075587Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:12.172293Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21305 TClient is connected to server localhost:21305 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515392061 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:12.448702Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:12.460113Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:43:12.897468Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:15.030377Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125853546696277:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:15.030377Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125853546696269:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:15.030460Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access perm ... 25: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126064622969773:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:05.969071Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:05.969476Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126064622969782:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:05.969582Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:05.998317Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:06.046259Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:06.111644Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7587126047443099731:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:06.111726Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:44:06.112346Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126068917937245:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:06.112481Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:06.112632Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126068917937250:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:06.112715Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126068917937252:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:06.112773Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:06.120215Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:06.141569Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7587126068917937254:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:44:06.206454Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7587126068917937308:2645] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2025-12-23T18:44:13.370533Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:44:13.426616Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:44:13.429599Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:44:13.430057Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:44:13.430173Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:44:13.741929Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:13.742114Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:13.776907Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:34:2081] 1766515448838894 != 1766515448838897 2025-12-23T18:44:13.781676Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:13.831469Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:44:13.904101Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005127/r3tmp/tmph0hRYL/pdisk_1.dat 2025-12-23T18:44:14.233581Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:44:14.249102Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:14.366835Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:863:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:14.366979Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:874:2751], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:14.367101Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:14.368352Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:879:2756], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:14.368630Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:14.373705Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:14.480515Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:877:2754], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-12-23T18:44:14.508508Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:934:2792] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } PreparedQuery: "247944b0-694ee8e9-72a5116-6e49d9d5" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'219) \'(\'\"_id\" \'\"a428713e-45d619e3-dcdbbbc6-d4a655fb\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-system >> TNodeBrokerTest::ShiftIdRangeRemoveNew >> TLocalTests::TestAddTenant [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:07.748904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:07.748971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:07.749001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 
0.010000s 2025-12-23T18:44:07.749028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:07.749059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:07.749084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:07.749123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:07.749207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:07.749920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:07.750152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:07.815689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:07.815738Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:07.821123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:07.821377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:07.821521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:07.824586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:07.824769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:07.825468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:07.825682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:07.827594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:07.827770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:07.828799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:07.828860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:07.828985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:07.829026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:07.829069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:07.829163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:07.980409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.981498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.981639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.981719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.981791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.981854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.981946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 
281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.982031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.982125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.982211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.982294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.982389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.982463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.982544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:07.982643Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 46678944, LocalPathId: 39] 2025-12-23T18:44:15.804095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:15.804118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 38 2025-12-23T18:44:15.804145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 38 2025-12-23T18:44:15.804163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 39 2025-12-23T18:44:15.804616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.804650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:44:15.804739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:44:15.804767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:44:15.804797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:44:15.804829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:44:15.804871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-12-23T18:44:15.804910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:44:15.804941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:44:15.804980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:44:15.805096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:44:15.805129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-12-23T18:44:15.805153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 9 2025-12-23T18:44:15.805178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 
2025-12-23T18:44:15.805851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:15.805920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:15.805951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:44:15.805980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2025-12-23T18:44:15.806009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:44:15.806260Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 2025-12-23T18:44:15.806364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:15.806394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:44:15.806445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:44:15.806674Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 2025-12-23T18:44:15.806825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:44:15.807274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:15.807342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:44:15.807365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:44:15.807387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at 
schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 9 2025-12-23T18:44:15.807410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:44:15.807475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-12-23T18:44:15.807843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:44:15.810717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:44:15.810866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:15.811151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:44:15.811276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:44:15.811953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:44:15.812302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:44:15.812342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:44:15.812654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:44:15.812721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:44:15.812747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1153:3058] TestWaitNotification: OK eventTxId 103 2025-12-23T18:44:16.197567Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:16.197832Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 317us result status StatusSuccess 2025-12-23T18:44:16.198341Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted >> TNodeBrokerTest::NodesMigrationSetLocation >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2025-12-23T18:44:16.877430Z node 1 :LOCAL ERROR: local.cpp:1299: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2025-12-23T18:44:16.877675Z node 1 :LOCAL ERROR: local.cpp:1549: Unknown domain dc-3 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted >> TLocalTests::TestAlterTenant >> TDynamicNameserverTest::TestCacheUsage [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::MinDynamicNodeIdShifted >> KqpPg::InsertFromSelect_Serial+useSink [GOOD] >> KqpPg::InsertFromSelect_Serial-useSink >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:15.306157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: 
Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:15.306257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:15.306327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:15.306378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:15.306422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:15.306458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:15.306546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:15.306652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:15.307694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:15.308055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:15.393335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:15.393383Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:15.397781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:15.397925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:15.398031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:15.400402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:15.400554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:15.401130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:15.401311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:15.402782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:15.402924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:15.403846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:15.403893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:15.403996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:15.404046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:15.404079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:15.404180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.518790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.519681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.519803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.519860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.519930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.519983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.520075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.520125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.520170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.520210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.520252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.520310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.520379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:44:15.520448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.520511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 342729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2025-12-23T18:44:17.342779Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:44:17.342804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-12-23T18:44:17.342828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2025-12-23T18:44:17.343529Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:44:17.343594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:44:17.343621Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:44:17.343652Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-12-23T18:44:17.343703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:44:17.344443Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:44:17.344513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:44:17.344535Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:44:17.344624Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2025-12-23T18:44:17.344648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:44:17.344701Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:44:17.346729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at schemeshard 72057594046678944 2025-12-23T18:44:17.346867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:17.346898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:17.346917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:44:17.347364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:44:17.348187Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-12-23T18:44:17.348542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:17.348856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:44:17.350117Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2025-12-23T18:44:17.350272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:44:17.350457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:44:17.350732Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 
FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-12-23T18:44:17.351148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:44:17.351285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 Forgetting tablet 72075186233409547 2025-12-23T18:44:17.352025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:44:17.352832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:17.352885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:44:17.353002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:44:17.354050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:44:17.354086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:44:17.354141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:44:17.354516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:44:17.354551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:44:17.356025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:44:17.356054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-12-23T18:44:17.356268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:44:17.356300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:44:17.356546Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:44:17.357150Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 
TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:44:17.357356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:44:17.357384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:44:17.357665Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:44:17.357725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:44:17.357749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:927:2883] TestWaitNotification: OK eventTxId 102 2025-12-23T18:44:17.358110Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:17.358242Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 156us result status StatusPathDoesNotExist 2025-12-23T18:44:17.358363Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TLocalTests::TestAlterTenant [GOOD] >> TLocalTests::TestAddTenantWhileResolving >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2025-12-23T18:44:16.754634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:16.754685Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2025-12-23T18:44:17.882087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:17.882133Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false >> TNodeBrokerTest::NodesV2BackMigration >> Cdc::ShouldBreakLocksOnConcurrentAddStream [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterStream >> KqpPg::DropSequence [GOOD] >> KqpPg::DeleteWithQueryService+useSink >> TLocalTests::TestAddTenantWhileResolving [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-true [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire [GOOD] Test command err: 2025-12-23T18:44:16.514636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:16.514697Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-false [GOOD] >> GracefulShutdown::TTxGracefulShutdown >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration [GOOD] >> TNodeBrokerTest::NodesMigrationSetLocation [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch [GOOD] Test command err: 2025-12-23T18:44:16.842671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:16.842775Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> Cdc::DescribeStream [GOOD] >> Cdc::DecimalKey >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false [GOOD] >> KqpPg::PgUpdate-useSink [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2025-12-23T18:44:18.408888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:18.408949Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE cookie 0 ... unblocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE 2025-12-23T18:44:18.947646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:18.947704Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] Test command err: 2025-12-23T18:44:15.796266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:15.796349Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] >> TNodeBrokerTest::UpdateNodesLog >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> TNodeBrokerTest::ShiftIdRangeRemoveNew [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration [GOOD] Test command err: 2025-12-23T18:44:17.145832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:17.145880Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationSetLocation [GOOD] Test command err: 2025-12-23T18:44:17.818237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:17.818306Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2025-12-23T18:44:18.792320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:18.792387Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2025-12-23T18:44:19.523486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:19.523547Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2025-12-23T18:44:18.902108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:18.902166Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from to NODE_BROKER_ACTOR 2025-12-23T18:44:19.637675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:19.637758Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... 
blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 >> TNodeBrokerTest::ListNodesEpochDeltasPersistance >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID [GOOD] >> TNodeBrokerTest::NodeNameExpiration [GOOD] >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveNew [GOOD] Test command err: 2025-12-23T18:44:17.403538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:17.403619Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:18.408431Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:06.399692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:06.399800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:06.399850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:06.399887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:06.399932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:06.399964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:06.400042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:06.400132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-12-23T18:44:06.401035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:06.401323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:06.487307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:06.487365Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:06.492788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:06.492964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:06.493066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:06.495614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:06.495763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:06.496354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:06.496563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:06.498324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:06.498505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:06.499806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:06.499909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:06.500052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:06.500100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:06.500168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:06.500313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:06.638950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:06.640817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
-23T18:44:19.683686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-12-23T18:44:19.683738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-12-23T18:44:19.683921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2025-12-23T18:44:19.684089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-12-23T18:44:19.684134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:853:2807], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-12-23T18:44:19.684192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:853:2807], at schemeshard: 72075186233409546, txId: 104, path id: 32 2025-12-23T18:44:19.684680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-12-23T18:44:19.684737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-12-23T18:44:19.684824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-12-23T18:44:19.684867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-12-23T18:44:19.684909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2025-12-23T18:44:19.685811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72075186233409546, cookie: 104 2025-12-23T18:44:19.685904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72075186233409546, cookie: 104 2025-12-23T18:44:19.685941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-12-23T18:44:19.685979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 14 2025-12-23T18:44:19.686024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2025-12-23T18:44:19.687890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 
LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-12-23T18:44:19.687981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-12-23T18:44:19.688011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-12-23T18:44:19.688042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2025-12-23T18:44:19.688073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2025-12-23T18:44:19.688147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-12-23T18:44:19.689824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-12-23T18:44:19.689876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-12-23T18:44:19.690209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2025-12-23T18:44:19.690397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:44:19.690438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:44:19.690478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:44:19.690515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:44:19.690565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-12-23T18:44:19.690623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1002:2943] message: TxId: 104 2025-12-23T18:44:19.690665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:44:19.690700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:44:19.690733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:44:19.690849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2025-12-23T18:44:19.691522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72075186233409546 2025-12-23T18:44:19.691559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-12-23T18:44:19.692087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-12-23T18:44:19.693204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-12-23T18:44:19.694395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-12-23T18:44:19.694452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:853:2807], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-12-23T18:44:19.694871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:44:19.694918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2034:3944] 2025-12-23T18:44:19.695423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-12-23T18:44:19.699239Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-12-23T18:44:19.699458Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 242us result status StatusSuccess 2025-12-23T18:44:19.699976Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" 
} PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] TEST create schemeshard, 0x00007D32376AD100 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:10.232887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:10.232965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:10.233002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:10.233034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:10.233069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:10.233104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:10.233211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:10.233305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:10.234158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:10.234427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:10.324786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:10.324832Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:10.330415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:10.330607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:10.330734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:10.333995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:10.334169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:10.334956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:10.335199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:10.336892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:10.337055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:10.338524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:10.338579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:10.338706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:10.338784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:10.338829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:10.339001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:10.469786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.470701Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.470842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.470910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.470977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.471065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.471133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.471223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.471285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.471354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.471439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.471504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.471575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.471673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:10.471760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 
00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 2025-12-23T18:44:19.836600Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 0 2025-12-23T18:44:19.836753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2025-12-23T18:44:19.836802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2025-12-23T18:44:19.836873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:451: OLAP store contains 1 tables. 2025-12-23T18:44:19.836925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:473: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D323773B900, stats written 1 2025-12-23T18:44:19.837335Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:19.837537Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 221us result status StatusSuccess 2025-12-23T18:44:19.838043Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 165 LastUpdateTime: 165 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 162304 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 
10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous >> GracefulShutdown::TTxGracefulShutdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2025-12-23T18:44:16.871505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:16.871590Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:16.958331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] Test command err: 2025-12-23T18:44:16.926511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:16.926589Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2025-12-23T18:44:17.019859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseID ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID [GOOD] Test command err: 2025-12-23T18:44:17.105558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:17.105646Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:19.017404Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TLocalTests::TestRemoveTenantWhileResolving ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2025-12-23T18:44:18.300205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:18.300270Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNodeName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD] Test command err: 2025-12-23T18:44:16.211869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:16.211940Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:16.326072Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Dirty] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2025-12-23T18:44:16.338175Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Committed] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2025-12-23T18:44:19.447896Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:19.447945Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) >> TNodeBrokerTest::NodesMigrationExpireActive |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable [GOOD] >> TNodeBrokerTest::ExtendLeaseBumpVersion >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] >> TNodeBrokerTest::BasicFunctionality >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted [GOOD] >> TNodeBrokerTest::Test1000NodesSubscribers >> TNodeBrokerTest::NodesMigrationNewActiveNode >> TNodeBrokerTest::NodesMigration2000Nodes >> TNodeBrokerTest::TestListNodes >> TNodeBrokerTest::ShiftIdRangeRemoveExpired >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] TEST create schemeshard, 0x00007D2EABDAA900 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:14.771419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:14.771478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:14.771517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:14.771542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:14.771567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:14.771588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:14.771622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:14.771694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:14.772325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:14.772539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:14.834464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:14.834505Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:14.839085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:14.839305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:14.839427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:14.841977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:14.842142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:14.842814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:14.842993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:14.844535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:14.844670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:14.845641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:14.845703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:14.845842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:14.845902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:14.845943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:14.846082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:14.950291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.951349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.951477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.951554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.951628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.951717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.951778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.951830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.951913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.951976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:44:14.952057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.952145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.952211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.952284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:14.952367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2EABD44100, stats written 0 Leader for TabletID 72057594046678944 is [2:963:2923] sender: [2:1020:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D2EABD44100, stats written 0 2025-12-23T18:44:21.586703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2EABD44100,
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D2EABD44100, stats written 0 2025-12-23T18:44:21.628340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D2EABD44100, stats written 0 2025-12-23T18:44:21.628544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2025-12-23T18:44:21.628628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2025-12-23T18:44:21.628673Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:477: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2025-12-23T18:44:21.628748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:487: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D2EABD44100, stats written 1 2025-12-23T18:44:21.629334Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:21.629599Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 302us result status StatusSuccess 2025-12-23T18:44:21.630137Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 147 LastUpdateTime: 147 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics 
{ ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted [GOOD] Test command err: 2025-12-23T18:44:17.991035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:17.991100Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for first batch is committed ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 2 ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 1 ... waiting for first batch is committed (done) >> TNodeBrokerTest::NodesV2BackMigration [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig >> TSlotIndexesPoolTest::Init [GOOD] >> TNodeBrokerTest::SubscribeToNodes >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove [GOOD] >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::TestRandomActions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigration [GOOD] Test command err: 2025-12-23T18:44:19.265639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:19.265692Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] >> TNodeBrokerTest::ConfigPipelining |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] TEST create schemeshard, 0x00007DBDC2BA5900 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:13.167273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:13.167346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:13.167378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:13.167412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:13.167445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:13.167489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:13.167546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:13.167657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:13.168422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:13.168689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:13.250213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:13.250262Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:13.255739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:13.255963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:13.256077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:13.259200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:13.259371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:13.260019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:13.260225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:13.261995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:13.262149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:13.263353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:13.263410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:13.263518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:13.263562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:13.263599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:13.263727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:13.414465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:13.415943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 
eshard 0x00007DBDC2C03500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DBDC2C03500, stats written 0 2025-12-23T18:44:22.426994Z node 2
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DBDC2C03500, stats written 0 2025-12-23T18:44:22.427151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2025-12-23T18:44:22.427198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2025-12-23T18:44:22.427248Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:477: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2025-12-23T18:44:22.427296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:487: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DBDC2C03500, stats written 1 2025-12-23T18:44:22.427718Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:22.427917Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 218us result status StatusSuccess 2025-12-23T18:44:22.428284Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 149 LastUpdateTime: 149 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 162304 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 
IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove [GOOD] Test command err: 2025-12-23T18:44:21.284345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:21.284411Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> KqpPg::TableArrayInsert-useSink [GOOD] >> KqpPg::Returning+useSink >> TNodeBrokerTest::NodesMigrationReuseID [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange [GOOD] Test command err: 2025-12-23T18:44:19.581184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:19.581251Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:21.479355Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1142: [DB] Removing node with wrong ID 1025 not in range (1023, 1024] ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNodeName [GOOD] >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] >> TNodeBrokerTest::ListNodesEpochDeltasPersistance [GOOD] >> KqpPg::InsertFromSelect_Serial-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink >> TNodeBrokerTest::NodesMigrationExpireActive [GOOD] >> TNodeBrokerTest::NoEffectBeforeCommit >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted [GOOD] >> TNodeBrokerTest::UpdateNodesLog [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] >> TNodeBrokerTest::NodesMigrationNewActiveNode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseID [GOOD] Test command err: 2025-12-23T18:44:21.723079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:21.723162Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2025-12-23T18:44:22.826225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:22.826274Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:23.406507Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:23.406563Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2025-12-23T18:44:23.353276Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:44:23.353636Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/0049f8/r3tmp/tmpgKFHDg/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:44:23.354122Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/0049f8/r3tmp/tmpgKFHDg/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/0049f8/r3tmp/tmpgKFHDg/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13939610437118318430 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:44:23.358437Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:44:23.358773Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/0049f8/r3tmp/tmpgKFHDg/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:44:23.358932Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/0049f8/r3tmp/tmpgKFHDg/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/0049f8/r3tmp/tmpgKFHDg/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12621546807522997408 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNodeName [GOOD] Test command err: 2025-12-23T18:44:22.118482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:22.118559Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TNodeBrokerTest::ExtendLeasePipelining >> KqpPg::DeleteWithQueryService+useSink [GOOD] >> KqpPg::DeleteWithQueryService-useSink |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ListNodesEpochDeltasPersistance [GOOD] Test command err: 2025-12-23T18:44:20.895875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:20.895980Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TNodeBrokerTest::ExtendLeaseBumpVersion [GOOD] >> TNodeBrokerTest::EpochCacheUpdate |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireActive [GOOD] Test command err: 2025-12-23T18:44:22.226183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:22.226250Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNewActiveNode [GOOD] Test command err: 2025-12-23T18:44:22.592143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:22.592221Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> Cdc::DecimalKey [GOOD] >> Cdc::AddColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateNodesLog [GOOD] Test command err: 2025-12-23T18:44:20.408400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:20.408463Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2025-12-23T18:44:22.680027Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1024] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::PgAggregate+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted [GOOD] Test command err: 2025-12-23T18:44:17.750945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:17.750995Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for first batch is committed ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 2 ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 1 ... 
waiting for first batch is committed (done) >> TNodeBrokerTest::NodesMigration1000Nodes >> TNodeBrokerTest::NodeNameReuseRestart >> test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks-default.txt] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ConfigPipelining [GOOD] >> TNodeBrokerTest::NodesMigrationExpiredChanged >> TSlotIndexesPoolTest::Basic [GOOD] >> TNodeBrokerTest::NodeNameWithDifferentTenants >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] >> KqpPg::CheckPgAutoParams+useSink |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveExpired [GOOD] >> TNodeBrokerTest::SyncNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ConfigPipelining [GOOD] Test command err: 2025-12-23T18:44:18.938285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:18.938345Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:23.652413Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:23.652473Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:23.727936Z node 9 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:1001: ERROR_TEMP: No free node IDs ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveActive >> TNodeBrokerTest::NodesMigrationNewExpiredNode >> TNodeBrokerTest::NodesMigrationReuseExpiredID >> TNodeBrokerTest::BasicFunctionality [GOOD] >> TNodeBrokerTest::ExtendLeaseRestartRace >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveExpired [GOOD] Test command err: 2025-12-23T18:44:22.911643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:22.911694Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2025-12-23T18:44:24.309928Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesSubscriberDisconnect >> TNodeBrokerTest::NodesMigrationReuseRemovedID >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] >> TTenantPoolTests::TestStateStatic >> TSlotIndexesPoolTest::Ranges [GOOD] >> TNodeBrokerTest::SubscribeToNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2025-12-23T18:44:22.277021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:22.277085Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:23.786359Z node 2 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:1001: WRONG_REQUEST: Another location is registered for host1:1001, expected = DC=1/M=2/R=3/U=4/, got = DC=1/M=2/R=3/U=5/ 2025-12-23T18:44:23.800544Z node 2 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-12-23T18:44:23.801037Z node 2 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:44:23.801506Z node 2 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe [GOOD] Test command err: 2025-12-23T18:44:24.115477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:24.115542Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for updates are sent ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME cookie 0 ... waiting for updates are sent (done) ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] Test command err: 2025-12-23T18:44:24.727534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:24.727611Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] TEST create schemeshard, 0x00007D37E70A5900 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:11.892392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:11.892479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:11.892516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:11.892549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:11.892582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:11.892627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:11.892679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:11.892744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:11.893550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:11.893844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:11.978549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:11.978603Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:11.984087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:11.984290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:11.984413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:11.987798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-12-23T18:44:11.987981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:11.988634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:11.988842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:11.990615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:11.990780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:11.991883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:11.991941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:11.992084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:11.992126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:11.992163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:11.992273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:12.128062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.129031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.129176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.129254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.129341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.129409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.129475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.129539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.129616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.129728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.129826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:44:12.129894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.129981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.130086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:12.130179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 
schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 2025-12-23T18:44:26.259003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 2025-12-23T18:44:26.280417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 1 2025-12-23T18:44:26.280578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 584 row count 1 2025-12-23T18:44:26.280635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:26.280811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want 
to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 2025-12-23T18:44:26.293645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 2025-12-23T18:44:26.304748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 2 2025-12-23T18:44:26.304918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:3 data size 584 row count 1 2025-12-23T18:44:26.304977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2025-12-23T18:44:26.305169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D37E7103500, stats written 3 2025-12-23T18:44:26.305934Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:26.308535Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 288us result status StatusSuccess 2025-12-23T18:44:26.309208Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TTenantPoolTests::TestStateStatic [GOOD] >> Cdc::InitialScanRacyProgressAndDrop [GOOD] >> Cdc::EnqueueRequestProcessSend >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SubscribeToNodes [GOOD] Test command err: 2025-12-23T18:44:23.349777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:23.349839Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:25.275804Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1025 not in range (1023, 1024] |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] >> TNodeBrokerTest::Test999NodesSubscribers >> TNodeBrokerTest::EpochCacheUpdate [GOOD] >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] Test command err: 2025-12-23T18:44:25.676328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:25.676398Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2025-12-23T18:44:25.768001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] >> TNodeBrokerTest::NodesMigration2000Nodes [GOOD] |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SyncNodes [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] Test command err: 2025-12-23T18:44:24.975133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:24.975213Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> Cdc::ShouldBreakLocksOnConcurrentAlterStream [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> TNodeBrokerTest::NodesMigration999Nodes >> TNodeBrokerTest::NodesMigrationNewExpiredNode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::EpochCacheUpdate [GOOD] Test command err: 2025-12-23T18:44:22.318297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:22.318347Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:25.095407Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:25.095481Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2025-12-23T18:44:25.959256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:25.959302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2025-12-23T18:44:26.032084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:44:26.052350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:44:15.260195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:44:15.260278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:15.260316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:44:15.260353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:44:15.260393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:44:15.260423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:44:15.260474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:44:15.260568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:44:15.261442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:44:15.261727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:44:15.353967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:15.354023Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:15.359401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:44:15.359615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:44:15.359787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:44:15.362587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:44:15.362789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:44:15.363508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:44:15.363731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:44:15.365484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:15.365642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:44:15.366724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:15.366813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:15.366953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:44:15.366997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:44:15.367040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:44:15.367184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:44:15.537628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.538706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.538834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.538909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.538983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.539046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.539146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.539225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.539295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.539391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.539462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.539555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.539627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.539700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:44:15.539795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
nId: 107:0, at schemeshard: 72057594046678944 2025-12-23T18:44:27.599470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-12-23T18:44:27.600574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:27.600617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:27.600745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2025-12-23T18:44:27.600859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:27.600902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 107, path id: 38 2025-12-23T18:44:27.600933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 107, path id: 40 2025-12-23T18:44:27.601255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-12-23T18:44:27.601291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:44:27.601366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-12-23T18:44:27.601406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-12-23T18:44:27.601441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 129 -> 240 2025-12-23T18:44:27.602147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-12-23T18:44:27.602222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-12-23T18:44:27.602259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-12-23T18:44:27.602295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 14 2025-12-23T18:44:27.602332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:44:27.602912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-12-23T18:44:27.602972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-12-23T18:44:27.602993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-12-23T18:44:27.603013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2025-12-23T18:44:27.603035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2025-12-23T18:44:27.603107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-12-23T18:44:27.605025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-12-23T18:44:27.605065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:44:27.605343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2025-12-23T18:44:27.605449Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2025-12-23T18:44:27.605495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-12-23T18:44:27.605539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2025-12-23T18:44:27.605562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-12-23T18:44:27.605591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-12-23T18:44:27.605615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-12-23T18:44:27.605644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2025-12-23T18:44:27.605672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 107:0 2025-12-23T18:44:27.605745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:44:27.606199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:44:27.606227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:44:27.606855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:44:27.607072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:44:27.608121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:44:27.608176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 0, path id: 38 2025-12-23T18:44:27.608764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2025-12-23T18:44:27.609207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-12-23T18:44:27.609239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-12-23T18:44:27.609620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-12-23T18:44:27.609685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-12-23T18:44:27.609722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1400:3325] TestWaitNotification: OK eventTxId 107 2025-12-23T18:44:27.610383Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:44:27.610568Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 167us result status StatusSuccess 2025-12-23T18:44:27.610946Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TNodeBrokerTest::NodesMigration1001Nodes >> TNodeBrokerTest::TestListNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration2000Nodes [GOOD] Test command err: 2025-12-23T18:44:22.661234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:22.661287Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SyncNodes [GOOD] Test command err: 2025-12-23T18:44:26.448508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:26.448586Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpiredChanged [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNewExpiredNode [GOOD] Test command err: 2025-12-23T18:44:26.651859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:26.651938Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesAlreadyMigrated >> TNodeBrokerTest::TestListNodesEpochDeltas >> TNodeBrokerTest::ShiftIdRangeRemoveActive [GOOD] >> TNodeBrokerTest::UpdateEpochPipelining >> TNodeBrokerTest::NodesMigrationExtendLease >> TNodeBrokerTest::NodesMigration1000Nodes [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveExpired >> TNodeBrokerTest::NodesSubscriberDisconnect [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] Test command err: 2025-12-23T18:44:22.839018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:22.839078Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpiredChanged [GOOD] Test command err: 2025-12-23T18:44:25.773710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:25.773788Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveActive [GOOD] Test command err: 2025-12-23T18:44:26.450437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:26.450491Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:27.392136Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] >> TNodeBrokerTest::NodesMigrationReuseExpiredID [GOOD] |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds >> TNodeBrokerTest::NodesMigrationRemoveActive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1000Nodes [GOOD] Test command err: 2025-12-23T18:44:25.546078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:25.546155Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesSubscriberDisconnect [GOOD] Test command err: 2025-12-23T18:44:27.015188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:27.015248Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining >> TNodeBrokerTest::ResolveScopeIdForServerless >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease [GOOD] >> TNodeBrokerTest::RegistrationPipeliningNodeName >> TTenantPoolTests::TestSensorsConfigForStaticSlot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseExpiredID [GOOD] Test command err: 2025-12-23T18:44:26.627268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:26.627315Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseRemovedID [GOOD] >> TNodeBrokerTest::FixedNodeId >> TNodeBrokerTest::LoadStateMoveEpoch >> KqpPg::DeleteWithQueryService-useSink [GOOD] >> TNodeBrokerTest::NodesMigrationExpireRemoved ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease [GOOD] Test command err: 2025-12-23T18:44:28.182040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:28.182149Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-false >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseRemovedID [GOOD] Test command err: 2025-12-23T18:44:27.094282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:27.094338Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> KqpPg::PgAggregate+useSink [GOOD] >> KqpPg::PgAggregate-useSink >> KqpPg::Returning+useSink [GOOD] >> TSlotIndexesPoolTest::Expansion [GOOD] >> KqpPg::Returning-useSink >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] >> TNodeBrokerTest::NodesMigrationExtendLease [GOOD] >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] Test command err: 2025-12-23T18:44:26.892386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:26.892451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... rebooting node broker ... OnActivateExecutor tabletId# 72057594037936129 ... captured cache request ... sending extend lease request ... captured cache request ... captured cache request ... waiting for response ... 
waiting for epoch update |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] Test command err: 2025-12-23T18:44:30.454350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:30.454402Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:30.549643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:44:30.680963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2025-12-23T18:44:30.007022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:30.007120Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2025-12-23T18:44:30.258712Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host3:1001: ERROR_TEMP: No free node IDs 2025-12-23T18:44:30.285733Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-12-23T18:44:30.300587Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node ID is banned 2025-12-23T18:44:31.138802Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2025-12-23T18:44:31.170921Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLease [GOOD] Test command err: 2025-12-23T18:44:29.539853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:29.539920Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::DeleteWithQueryService-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 28990, MsgBus: 21941 2025-12-23T18:43:10.570680Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125829626037262:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:10.571499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005117/r3tmp/tmpEnkfjP/pdisk_1.dat 2025-12-23T18:43:10.838311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:10.838449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:10.839022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:10.848866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:10.913918Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28990, node 1 2025-12-23T18:43:11.017833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:11.017862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:11.017877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:11.018015Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:11.023163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21941 TClient is connected to server localhost:21941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515390969 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:43:11.478971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:43:11.582951Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:13.926527Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125842510939994:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.926684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.927076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125842510940004:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.927188Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.980891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:14.124231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125846805907425:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:14.124313Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:14.124510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125846805907431:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:14.124519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125846805907430:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:14.124597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:14.127868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:14.137047Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125846805907434:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:43:14.203691Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125846805907485:2617] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 1 1 1 Trying to start YDB, gRPC: 64669, MsgBus: 13731 2025-12-23T18:43:15.511709Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125851131309452:2218];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:15.512110Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:43:15.537487Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005117/r3tmp/tmpn2inBA/pdisk_1.dat 2025-12-23T18:43:15.615758Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:15.617302Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587125851131309271:2081] 1766515395496570 != 1766515395496573 2025-12-23T18:43:15.624686Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:15.655571Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:15.655660Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:15.658969Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64669, node 2 2025-12-23T18:43:15.715140Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:15.715167Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:15.715175Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:15.715277Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:15.830044Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13731 TClient is connected to server localhost:13731 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515395701 ParentPathId: 1 PathState: EPathStateCreat ... _WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:23.248409Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:23.313755Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126144803065136:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:23.313871Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126144803065141:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:23.313887Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:23.314124Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126144803065143:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:23.314199Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:23.320888Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:23.332635Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7587126144803065144:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:44:23.396223Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7587126144803065196:2592] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23059, MsgBus: 5211 2025-12-23T18:44:24.737440Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7587126148648814415:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:24.737538Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005117/r3tmp/tmpYA7oR5/pdisk_1.dat 2025-12-23T18:44:24.754791Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:24.864316Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:24.880254Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:24.880369Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:24.882937Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23059, node 12 2025-12-23T18:44:24.961061Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:44:24.962935Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:24.962958Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:24.962968Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:24.963080Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5211 TClient is connected to server localhost:5211 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515464959 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:25.567126Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:25.744195Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:29.122247Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587126170123651751:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:29.122437Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:29.122887Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587126170123651760:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:29.123024Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:29.195055Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:29.257590Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587126170123651857:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:29.257768Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:29.257803Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587126170123651862:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:29.258035Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587126170123651864:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:29.258090Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:29.264403Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:29.279826Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7587126170123651865:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:44:29.349964Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7587126170123651917:2596] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:29.737777Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7587126148648814415:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:29.737897Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveActive [GOOD] >> TNodeBrokerTest::RegistrationPipelining [GOOD] >> Cdc::AddColumn [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2025-12-23T18:44:30.628281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:30.628336Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:30.805385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] >> TNodeBrokerTest::NodesMigration999Nodes [GOOD] >> KqpSnapshotIsolation::ConflictWrite+IsOlap-FillTables [GOOD] >> KqpSnapshotIsolation::ConflictWrite-IsOlap+FillTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveActive [GOOD] Test command err: 2025-12-23T18:44:30.258465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:30.258545Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveExpired [GOOD] >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining [GOOD] Test command err: 2025-12-23T18:44:30.448914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:30.449014Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1001Nodes [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2025-12-23T18:44:31.331100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:31.331168Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) 2025-12-23T18:44:31.910368Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:31.910427Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] Test command err: 2025-12-23T18:44:30.778908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:30.778972Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2025-12-23T18:44:30.856563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:44:30.883508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup >> TNodeBrokerTest::NodesAlreadyMigrated [GOOD] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] Test command err: 2025-12-23T18:44:29.455732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:29.455804Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:30.757211Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1001: ERROR_TEMP: No free node IDs ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] Test command err: 2025-12-23T18:39:45.492451Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124948824555929:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:45.492497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003610/r3tmp/tmphnmQyD/pdisk_1.dat 2025-12-23T18:39:46.310894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:46.568074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:46.582906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:46.583025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:46.585118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:46.591968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:39:46.593611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:46.694902Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:11637 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:39:47.035700Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587124948824556144:2118] Handle TEvNavigate describe path dc-1 2025-12-23T18:39:47.035750Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587124957414491227:2449] HANDLE EvNavigateScheme dc-1 2025-12-23T18:39:47.035849Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124948824556167:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:47.035941Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587124953119523701:2284][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587124948824556167:2131], cookie# 1 2025-12-23T18:39:47.037410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124953119523711:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124953119523703:2284], cookie# 1 2025-12-23T18:39:47.037442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124953119523712:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124953119523704:2284], cookie# 1 2025-12-23T18:39:47.037461Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587124953119523713:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124953119523705:2284], cookie# 1 2025-12-23T18:39:47.037492Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124948824555833:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124953119523711:2284], cookie# 1 2025-12-23T18:39:47.037540Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124948824555836:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124953119523712:2284], cookie# 1 2025-12-23T18:39:47.037560Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587124948824555839:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587124953119523713:2284], cookie# 1 2025-12-23T18:39:47.037608Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124953119523711:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124948824555833:2050], cookie# 1 2025-12-23T18:39:47.037629Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124953119523712:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124948824555836:2053], cookie# 1 2025-12-23T18:39:47.037645Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587124953119523713:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124948824555839:2056], cookie# 1 2025-12-23T18:39:47.037683Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124953119523701:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124953119523703:2284], cookie# 1 2025-12-23T18:39:47.037708Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587124953119523701:2284][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:39:47.037727Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124953119523701:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124953119523704:2284], cookie# 1 2025-12-23T18:39:47.037753Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587124953119523701:2284][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:39:47.037776Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587124953119523701:2284][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587124953119523705:2284], cookie# 1 2025-12-23T18:39:47.037790Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587124953119523701:2284][/dc-1] Sync cookie mismatch: sender# [1:7587124953119523705:2284], cookie# 1, current cookie# 0 2025-12-23T18:39:47.037857Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587124948824556167:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:39:47.072777Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587124948824556167:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587124953119523701:2284] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:39:47.072928Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587124948824556167:2131], cacheItem# { Subscriber: { Subscriber: [1:7587124953119523701:2284] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:39:47.076041Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587124948824556167:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ 
Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:39:47.076109Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7587124948824556167:2131], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480 2025-12-23T18:39:47.076166Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7587124948824556167:2131], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480 2025-12-23T18:39:47.076187Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7587124948824556167:2131], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480 2025-12-23T18:39:47.076336Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7587124957414491229:2451][/dc-1/.metadata/script_executions] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2025-12-23T18:39:47.076741Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587124948824555833:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7587124957414491235:2451] 2025-12-23T18:39:47.076754Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587124948824555833:2050] Upsert description: path# /dc-1/.metadata/script_executions 2025-12-23T18:39:47.076816Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587124948824555833:2050] Subscribe: subscriber# [1:7587124957414491235:2451], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:39:47.076849Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587124948824555836:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7587124957414491236:2451] 2025-12-23T18:39:47.076855Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587124948824555836:2053] Upsert description: path# /dc-1/.metadata/script_executions 2025-12-23T18:39:47.076874Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587124948824555836:2053] Subscribe: subscriber# [1:7587124957414491236:2451], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:39:47.076895Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587124948824555839:2056] Handle NKikimrSchemeBoard.TEvSubscribe { P ... 
{ Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:44:29.957003Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7587126168743743596:3183], recipient# [6:7587126168743743594:2598], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:44:29.957050Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7587126168743743597:3184], recipient# [6:7587126168743743595:2599], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:44:29.957646Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7587125630754403962:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:44:29.957736Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7587125630754403962:2109], cacheItem# { Subscriber: { Subscriber: [7:7587125635049371595:2307] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:44:29.957791Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587126167625316870:2568], recipient# [7:7587126167625316869:2579], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } IsActive: /dc-1/USER_0 -- 1 -- 1 2025-12-23T18:44:30.615136Z node 5 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 6 2025-12-23T18:44:30.616015Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:44:30.616372Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [5:7587125630072574172:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [6:7587125631872829986:2103] 2025-12-23T18:44:30.616406Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: 
[5:7587125630072574172:2051] Unsubscribe: subscriber# [6:7587125631872829986:2103], path# /dc-1/USER_0 2025-12-23T18:44:30.616410Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [5:7587125630072574175:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [6:7587125631872829987:2103] 2025-12-23T18:44:30.616425Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [5:7587125630072574175:2054] Unsubscribe: subscriber# [6:7587125631872829987:2103], path# /dc-1/USER_0 2025-12-23T18:44:30.616445Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [5:7587125630072574178:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [6:7587125631872829990:2103] 2025-12-23T18:44:30.616459Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [5:7587125630072574178:2057] Unsubscribe: subscriber# [6:7587125631872829990:2103], path# /dc-1/USER_0 2025-12-23T18:44:30.804653Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7587125630754403962:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:44:30.804768Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7587125630754403962:2109], cacheItem# { Subscriber: { Subscriber: [7:7587125647934273501:2311] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:44:30.804896Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587126171920284168:2569], recipient# [7:7587126171920284167:2580], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:44:30.957733Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7587125631872830036:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:44:30.957847Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7587125631872830036:2109], cacheItem# { Subscriber: { Subscriber: [6:7587125649052699676:2384] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:44:30.957922Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7587125631872830036:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:44:30.957967Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7587125631872830036:2109], cacheItem# { Subscriber: { Subscriber: [6:7587125636167797729:2350] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:44:30.957996Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7587126173038710913:3188], recipient# [6:7587126173038710911:2600], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:44:30.958015Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7587126173038710914:3189], recipient# [6:7587126173038710912:2601], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:44:30.958202Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7587125630754403962:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:44:30.958326Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# 
[7:7587125630754403962:2109], cacheItem# { Subscriber: { Subscriber: [7:7587125635049371595:2307] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:44:30.958407Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7587126171920284170:2570], recipient# [7:7587126171920284169:2581], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> BSCRestartPDisk::RestartNotAllowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2025-12-23T18:44:30.990194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:30.990264Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration999Nodes [GOOD] Test command err: 2025-12-23T18:44:28.840437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:28.840515Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveExpired [GOOD] Test command err: 2025-12-23T18:44:29.827230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:29.827294Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] Test command err: 2025-12-23T18:44:29.475191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:29.475269Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> KqpPg::V1CreateTable [GOOD] >> KqpPg::ValuesInsert+useSink >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1001Nodes [GOOD] Test command err: 2025-12-23T18:44:29.216629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:29.216713Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesAlreadyMigrated [GOOD] Test command err: 2025-12-23T18:44:29.450637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:29.450706Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] |95.4%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] Test command err: 2025-12-23T18:44:30.976304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:30.976361Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] Test command err: RandomSeed# 13148387954585708111 2025-12-23T18:44:33.719446Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.719527Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.719553Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.719576Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.719614Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.719648Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.719678Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.719711Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.720647Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.720710Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.720747Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.720780Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.720831Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.720864Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some 
error reason Marker# BSVSF03 2025-12-23T18:44:33.720897Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.720928Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.720987Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.721029Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.721053Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.721074Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.721098Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.721129Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.721158Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.721179Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.722687Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.722791Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.722836Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.722883Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.722918Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason 
Marker# BSVSF03 2025-12-23T18:44:33.722950Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.722982Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.723013Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |95.4%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> TNodeBrokerTest::NodesMigrationExpireRemoved [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] Test command err: RandomSeed# 776655469649402492 2025-12-23T18:44:33.773830Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.773900Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.773924Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.773946Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.773974Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.774015Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.774050Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.774913Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.774972Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) 
SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.775005Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.775034Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.775065Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.775110Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.775141Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.775191Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.775226Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.775290Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.775350Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.775386Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.775416Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.775437Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-12-23T18:44:33.776679Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.776736Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.776762Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got 
TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.776813Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.776843Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.776872Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-12-23T18:44:33.776900Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false >> KqpPg::TableSelect-useSink [GOOD] >> KqpPg::TableInsert+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireRemoved [GOOD] Test command err: 2025-12-23T18:44:31.180016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:31.180111Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:114:2144] 2025-12-23T18:40:49.702841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:40:49.702973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:49.703029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:40:49.703065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:40:49.703103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:40:49.703150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:40:49.703245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:40:49.703326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:40:49.704248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:49.704561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:40:49.844210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:40:49.844302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:49.845156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:40:49.863855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:40:49.864223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:40:49.864454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:40:49.879125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:40:49.879444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:40:49.880376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:49.881169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:40:49.886054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:49.886271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:40:49.887688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:40:49.887783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:40:49.887937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:40:49.888037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:40:49.888085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:40:49.888310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:40:49.900453Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:246:2058] recipient: [1:15:2062] 2025-12-23T18:40:50.051369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:40:50.051641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.051890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:40:50.051947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:40:50.052218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:40:50.052293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:50.057147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:50.057371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:40:50.057612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.057674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:40:50.057750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:40:50.057811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:40:50.063105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.063191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-12-23T18:40:50.063265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:40:50.066623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.066691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:40:50.066793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:50.066855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:40:50.071550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:40:50.074552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:40:50.074811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:40:50.076093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:40:50.076251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:40:50.076298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:50.076631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:40:50.076705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:40:50.076904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:40:50.077000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:40:50.079510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:44:32.035036Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:44:32.035058Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 190 2025-12-23T18:44:32.035081Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2025-12-23T18:44:32.035125Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 3 2025-12-23T18:44:32.035563Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 190:2 from tablet: 72057594046678944 to tablet: 72075186233409584 cookie: 72057594046678944:39 msg type: 275382275 2025-12-23T18:44:32.036691Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 4 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:44:32.036788Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 4 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:44:32.036822Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 190 2025-12-23T18:44:32.036857Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 4 2025-12-23T18:44:32.036894Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 4 2025-12-23T18:44:32.036968Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 190, ready parts: 2/3, is published: true 2025-12-23T18:44:32.040532Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2025-12-23T18:44:32.040676Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2025-12-23T18:44:32.042895Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2025-12-23T18:44:32.043077Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2025-12-23T18:44:32.057094Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6585: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 
72075186233409584 TxId: 190 2025-12-23T18:44:32.057171Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 190, tablet: 72075186233409584, partId: 2 2025-12-23T18:44:32.057301Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 190:2, at schemeshard: 72057594046678944, message: Origin: 72075186233409584 TxId: 190 FAKE_COORDINATOR: Erasing txId 190 2025-12-23T18:44:32.059693Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 190:2, at schemeshard: 72057594046678944 2025-12-23T18:44:32.059881Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 190:2, at schemeshard: 72057594046678944 2025-12-23T18:44:32.059922Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 190:2 ProgressState 2025-12-23T18:44:32.060017Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#190:2 progress is 3/3 2025-12-23T18:44:32.060062Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2025-12-23T18:44:32.060099Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#190:2 progress is 3/3 2025-12-23T18:44:32.060126Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2025-12-23T18:44:32.060160Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 190, ready parts: 3/3, is published: true 2025-12-23T18:44:32.060227Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [31:309:2298] message: TxId: 190 2025-12-23T18:44:32.060267Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2025-12-23T18:44:32.060316Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:0 2025-12-23T18:44:32.060351Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 190:0 2025-12-23T18:44:32.060434Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2025-12-23T18:44:32.060466Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:1 2025-12-23T18:44:32.060485Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 190:1 2025-12-23T18:44:32.060515Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2025-12-23T18:44:32.060537Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:2 2025-12-23T18:44:32.060558Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 190:2 2025-12-23T18:44:32.060634Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 140] was 3 2025-12-23T18:44:32.062857Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2025-12-23T18:44:32.062903Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [31:5663:7168] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2025-12-23T18:44:32.064270Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2025-12-23T18:44:32.064320Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2025-12-23T18:44:32.064391Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2025-12-23T18:44:32.064412Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2025-12-23T18:44:32.064455Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2025-12-23T18:44:32.064477Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2025-12-23T18:44:32.064521Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2025-12-23T18:44:32.064542Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2025-12-23T18:44:32.064597Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2025-12-23T18:44:32.064617Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2025-12-23T18:44:32.066249Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2025-12-23T18:44:32.066495Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2025-12-23T18:44:32.066575Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2025-12-23T18:44:32.066615Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [31:5704:7209] 2025-12-23T18:44:32.066867Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 2025-12-23T18:44:32.066940Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2025-12-23T18:44:32.066964Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [31:5704:7209] 2025-12-23T18:44:32.067139Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 72057594046678944 2025-12-23T18:44:32.067233Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- 
TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2025-12-23T18:44:32.067257Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [31:5704:7209] 2025-12-23T18:44:32.067336Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2025-12-23T18:44:32.067450Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2025-12-23T18:44:32.067477Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [31:5704:7209] 2025-12-23T18:44:32.067624Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2025-12-23T18:44:32.067648Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [31:5704:7209] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2025-12-23T18:44:31.998834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:31.998904Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2025-12-23T18:44:32.084060Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Dirty] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2025-12-23T18:44:32.096298Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Committed] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2025-12-23T18:44:35.423129Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:35.423234Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... 
waiting for cache miss (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpPg::PgAggregate-useSink [GOOD] >> KqpPg::MkqlTerminate |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> Cdc::EnqueueRequestProcessSend [GOOD] >> Cdc::InitialScanAndResolvedTimestamps |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpPg::CheckPgAutoParams+useSink [GOOD] >> KqpPg::CheckPgAutoParams-useSink |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> 
Cdc::AddColumn_TopicAutoPartitioning [GOOD] >> Cdc::AddIndex >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] [FAIL] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter >> BSCRestartPDisk::RestartNotAllowed [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] >> test_sql_streaming.py::test[watermarks-watermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] >> KqpPg::Returning-useSink [GOOD] >> KqpPg::SelectIndex+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] Test command err: RandomSeed# 18427826336377366634 |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::NoSelectFullScan >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] 
|95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpPg::SelectIndex+useSink [GOOD] >> KqpPg::SelectIndex-useSink |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> Cdc::InitialScanAndResolvedTimestamps [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> Cdc::AddIndex [GOOD] >> Cdc::AddStream >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] Test command err: 2025-12-23T18:40:37.680513Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125175031218536:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:37.681093Z node 1 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002f65/r3tmp/tmpXqyW6g/pdisk_1.dat 2025-12-23T18:40:37.916119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:37.916260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:37.970480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:37.972410Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:37.972906Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125175031218504:2081] 1766515237675521 != 1766515237675524 2025-12-23T18:40:37.977241Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62629, node 1 2025-12-23T18:40:38.084405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:40:38.084423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:40:38.084430Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:40:38.084511Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:40:38.130008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:38.138170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:38.160982Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7587125179326186613:2295] 2025-12-23T18:40:38.161279Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:38.173303Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:38.173393Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:38.175496Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:38.175578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:38.175612Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:38.175986Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:38.176028Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:38.176077Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7587125179326186628:2295] in generation 1 2025-12-23T18:40:38.177399Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:38.214001Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:38.214140Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:38.214182Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7587125179326186630:2296] 2025-12-23T18:40:38.214194Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:38.214202Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:38.214212Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:38.214354Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:38.214434Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:38.214455Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:38.214482Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:38.214504Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:38.214518Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:38.217229Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7587125179326186611:2495], serverId# [1:7587125179326186615:2496], sessionId# [0:0:0] 2025-12-23T18:40:38.217338Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:38.217694Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:40:38.217762Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-12-23T18:40:38.218256Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:38.219780Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:38.219931Z node 1 :TX_DATASHARD 
DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:38.220003Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:40:38.221592Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7587125179326186666:2516], serverId# [1:7587125179326186667:2517], sessionId# [0:0:0] 2025-12-23T18:40:38.226287Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1766515238264 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1766515238264 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:40:38.226320Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:38.226435Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:38.226521Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:38.226540Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:40:38.226581Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1766515238264:281474976710657] in PlanQueue unit at 72075186224037888 2025-12-23T18:40:38.226898Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1766515238264:281474976710657 keys extracted: 0 2025-12-23T18:40:38.227040Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:40:38.227192Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:38.227218Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:40:38.229244Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:40:38.230379Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:38.231196Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1766515238264} 2025-12-23T18:40:38.231227Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:38.231252Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1766515238263 2025-12-23T18:40:38.231267Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:38.231290Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 
72057594046316545 last step 0 next step 1766515238271 2025-12-23T18:40:38.231746Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:38.231758Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:38.231773Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:40:38.231813Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1766515238264 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7587125175031218862:2150], exec latency: 2 ms, propose latency: 4 ms 2025-12-23T18:40:38.231839Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-12-23T18:40:38.232169Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:38.2 ... artition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:44:50.087364Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:44:50.087395Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:44:50.115476Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:44:50.115544Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:44:50.115582Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:44:50.115616Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:44:50.115649Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:44:50.116482Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 9000 at tablet 72075186224037888 2025-12-23T18:44:50.116593Z node 30 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:44:50.116752Z node 30 :TX_DATASHARD INFO: cdc_stream_heartbeat.cpp:42: [CdcStreamHeartbeat] Emit change records: edge# v9000/18446744073709551615, at tablet# 72075186224037888 2025-12-23T18:44:50.117052Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-12-23T18:44:50.129062Z node 30 :TX_DATASHARD INFO: cdc_stream_heartbeat.cpp:78: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2025-12-23T18:44:50.129148Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 TableId: [OwnerId: 
72057594046644480, LocalPathId: 38] SchemaVersion: 0 } 2025-12-23T18:44:50.129197Z node 30 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:44:50.129247Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:3842: Waiting for PlanStep# 12000 from mediator time cast 2025-12-23T18:44:50.129353Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:71: [ChangeSender][72075186224037888:1][30:902:2774] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 }] } 2025-12-23T18:44:50.129498Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:628: [CdcChangeSenderMain][72075186224037888:1][30:1160:2949] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 }] } 2025-12-23T18:44:50.129705Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:215: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-12-23T18:44:50.129905Z node 30 :TX_DATASHARD DEBUG: datashard_change_sending.cpp:235: Send 1 change records: to# [30:1160:2949], at tablet# 72075186224037888 2025-12-23T18:44:50.129961Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:260: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-12-23T18:44:50.130101Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:633: [CdcChangeSenderMain][72075186224037888:1][30:1160:2949] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-12-23T18:44:50.130233Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:111: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][30:1242:2949] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-12-23T18:44:50.130472Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-12-23T18:44:50.130524Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-12-23T18:44:50.130676Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 4 requestId: cookie: 3 2025-12-23T18:44:50.130879Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-12-23T18:44:50.130922Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-12-23T18:44:50.130978Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2023: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 5 partNo : 0 messageNo: 5 size 26 offset: -1 2025-12-23T18:44:50.131047Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037889][Partition][0][StateIdle] Received TPartition::TEvWrite 
2025-12-23T18:44:50.131116Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:44:50.131149Z node 30 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:44:50.131195Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:44:50.131227Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:44:50.131264Z node 30 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:44:50.131395Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1263: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2025-12-23T18:44:50.131449Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:44:50.131481Z node 30 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037889][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:44:50.131518Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:44:50.131566Z node 30 :PERSQUEUE INFO: partition_write.cpp:1739: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2025-12-23T18:44:50.131668Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-12-23T18:44:50.132591Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 4 PartNo 0 PackedSize 107 count 1 nextOffset 5 batches 1 2025-12-23T18:44:50.133048Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1704: [72075186224037889][Partition][0][StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 4,1 HeadOffset 4 endOffset 4 curOffset 5 d0000000000_00000000000000000004_00000_0000000001_00000? size 93 WTime 8950 2025-12-23T18:44:50.133331Z node 30 :PERSQUEUE DEBUG: read.h:275: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:44:50.133456Z node 30 :PERSQUEUE DEBUG: read.h:313: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 4 partNo 0 count 1 size 93 2025-12-23T18:44:50.134487Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 4 count 1 size 93 actorID [30:1123:2902] 2025-12-23T18:44:50.134604Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. 
Tablet '72075186224037889' partition 0 offset 4 partno 0 count 1 parts 0 suffix '63' size 93 2025-12-23T18:44:50.134654Z node 30 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037889][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-12-23T18:44:50.135651Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-12-23T18:44:50.135705Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-12-23T18:44:50.135854Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 5 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2025-12-23T18:44:50.136794Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 5 added 2 blobs, size 452 count 4 last offset 3, current partition end offset: 4 2025-12-23T18:44:50.136912Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2025-12-23T18:44:50.137094Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 3 parts_count 0 source 1 size 359 accessed 1 times before, last time 1970-01-01T00:00:07.000000Z 2025-12-23T18:44:50.137180Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 0 count 1 parts_count 0 source 1 size 93 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2025-12-23T18:44:50.137321Z node 30 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 5. All 2 blobs are from cache. 2025-12-23T18:44:50.137531Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' 2025-12-23T18:44:50.137619Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:44:50.137723Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2025-12-23T18:44:50.138052Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 339 from pos 0 cbcount 3 2025-12-23T18:44:50.138207Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2025-12-23T18:44:50.140996Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] Test command err: 2025-12-23T18:40:37.610614Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125172255422941:2145];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:37.611243Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002f51/r3tmp/tmpx4XRxC/pdisk_1.dat 2025-12-23T18:40:37.817009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:40:37.844386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:37.844592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:37.864953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:37.931289Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:37.932169Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125172255422825:2081] 1766515237603944 != 1766515237603947 TServer::EnableGrpc on GrpcPort 4122, node 1 2025-12-23T18:40:38.002933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:40:38.002960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:40:38.002969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:40:38.003076Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:40:38.034106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:38.040820Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:38.058135Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7587125176550390937:2295] 2025-12-23T18:40:38.058384Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:38.077948Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:38.078031Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:38.079759Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:38.079815Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:38.079848Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:38.080206Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:38.080273Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:38.080304Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7587125176550390974:2295] in generation 1 2025-12-23T18:40:38.080462Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:40:38.081929Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:38.118221Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:38.118373Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:38.118409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7587125176550390976:2296] 2025-12-23T18:40:38.118421Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:38.118432Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:38.118441Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:38.118569Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:38.118632Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:38.118645Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:38.118663Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 
planned 0 2025-12-23T18:40:38.118679Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:38.118695Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:38.118781Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7587125176550390934:2497], serverId# [1:7587125176550390939:2499], sessionId# [0:0:0] 2025-12-23T18:40:38.120643Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:38.120860Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:40:38.120942Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-12-23T18:40:38.122396Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:38.126827Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:38.126932Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:40:38.129500Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7587125176550390990:2519], serverId# [1:7587125176550390991:2520], sessionId# [0:0:0] 2025-12-23T18:40:38.134645Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1766515238173 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1766515238173 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:40:38.134677Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:38.134822Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:38.134910Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:38.134933Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:40:38.134951Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1766515238173:281474976710657] in PlanQueue unit at 72075186224037888 2025-12-23T18:40:38.135240Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1766515238173:281474976710657 keys extracted: 0 2025-12-23T18:40:38.135356Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:40:38.135523Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:38.135562Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 
72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:40:38.137790Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:40:38.138275Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:38.139393Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1766515238173} 2025-12-23T18:40:38.139436Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:38.139961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1766515238172 2025-12-23T18:40:38.139980Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:38.140010Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1766515238180 2025-12-23T18:40:38.140777Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:38.140803Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:38.140840Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:40:38.140902Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1766515238173 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7587125172255423181:2148], exec latency: 2 ms, propose latency: 5 ms 2025-12-23T18:40:38.140932Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-12-23T18:40:38.141037Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:38.14 ... 
topic 'Table/Stream/streamImpl' partition 0 2025-12-23T18:44:50.545124Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2023: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1 2025-12-23T18:44:50.545303Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037889][Partition][0][StateIdle] Received TPartition::TEvWrite 2025-12-23T18:44:50.545432Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:44:50.545534Z node 30 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:44:50.545643Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:44:50.545745Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:44:50.545849Z node 30 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:44:50.546009Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1263: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2025-12-23T18:44:50.546155Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:44:50.546262Z node 30 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037889][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:44:50.546369Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:44:50.546544Z node 30 :PERSQUEUE INFO: partition_write.cpp:1739: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2025-12-23T18:44:50.546882Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-12-23T18:44:50.548099Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1 2025-12-23T18:44:50.549487Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1704: [72075186224037889][Partition][0][StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 5 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000? size 93 WTime 8979 2025-12-23T18:44:50.550166Z node 30 :PERSQUEUE DEBUG: read.h:275: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:44:50.550581Z node 30 :PERSQUEUE DEBUG: read.h:313: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 5 partNo 0 count 1 size 93 2025-12-23T18:44:50.552292Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. 
Partition 0 offset 5 count 1 size 93 actorID [30:1010:2834] 2025-12-23T18:44:50.552592Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 suffix '63' size 93 2025-12-23T18:44:50.552743Z node 30 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037889][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:44:50.564509Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:492: [72075186224037889][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2025-12-23T18:44:50.564716Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037889][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:44:50.564907Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037889][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2025-12-23T18:44:50.565090Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037889][Partition][0][StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-12-23T18:44:50.565639Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:44:50.565750Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:44:50.565847Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:44:50.565958Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:44:50.566061Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:44:50.566180Z node 30 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037889][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:44:50.566362Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6 2025-12-23T18:44:50.566794Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:160: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][30:1110:2877] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } } 2025-12-23T18:44:50.567029Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:643: [CdcChangeSenderMain][72075186224037888:1][30:1045:2877] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-12-23T18:44:50.567328Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-12-23T18:44:50.567420Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 6, at tablet: 72075186224037888 2025-12-23T18:44:50.568196Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 
72075186224037888 ... checking the update is logged before the new resolved timestamp >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-12-23T18:44:50.683777Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-12-23T18:44:50.683967Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-12-23T18:44:50.684344Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 10 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0 2025-12-23T18:44:50.686281Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 10 added 6 blobs, size 763 count 6 last offset 5, current partition end offset: 6 2025-12-23T18:44:50.686407Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 10. Send blob request. 2025-12-23T18:44:50.686617Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 93 accessed 5 times before, last time 1970-01-01T00:00:06.000000Z 2025-12-23T18:44:50.686698Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 1 partno 0 count 1 parts_count 0 source 1 size 174 accessed 2 times before, last time 1970-01-01T00:00:06.000000Z 2025-12-23T18:44:50.686759Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 2 partno 0 count 1 parts_count 0 source 1 size 93 accessed 1 times before, last time 1970-01-01T00:00:06.000000Z 2025-12-23T18:44:50.686800Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 0 count 1 parts_count 0 source 1 size 155 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2025-12-23T18:44:50.686908Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 4 partno 0 count 1 parts_count 0 source 1 size 155 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2025-12-23T18:44:50.686964Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 5 partno 0 count 1 parts_count 0 source 1 size 93 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2025-12-23T18:44:50.687111Z node 30 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 10. All 6 blobs are from cache. 2025-12-23T18:44:50.687322Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:44:50.687410Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 1 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:44:50.687455Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:44:50.687498Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:44:50.687545Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 4 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:44:50.687586Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:44:50.687770Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 6 blobs 2025-12-23T18:44:50.688267Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2025-12-23T18:44:50.688420Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 1 totakecount 1 count 1 size 154 from pos 0 cbcount 1 2025-12-23T18:44:50.688506Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 2 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2025-12-23T18:44:50.688579Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 135 from pos 0 cbcount 1 2025-12-23T18:44:50.688647Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 1 count 1 size 135 from pos 0 cbcount 1 2025-12-23T18:44:50.688712Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2025-12-23T18:44:50.688995Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest >> KqpExplain::ExplainStream >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] >> KqpPg::NoSelectFullScan [GOOD] >> KqpPg::LongDomainName |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> KqpQuery::QueryTimeout |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpStats::JoinNoStatsYql >> KqpExplain::SortStage >> KqpExplain::Explain |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpQueryDiscard::DiscardSelectSupport >> KqpQuery::DecimalOutOfPrecisionBulk+EnableParameterizedDecimal >> KqpExplain::LimitOffset >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] >> KqpLimits::TooBigQuery+useSink >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] >> KqpLimits::BigParameter >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] >> 
test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] >> KqpPg::SelectIndex-useSink [GOOD] >> KqpPg::TableDeleteAllData+useSink >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] >> KqpStats::SysViewClientLost >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> KqpPg::CheckPgAutoParams-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter-useSink |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpExplain::ExplainStream [GOOD] >> KqpExplain::ExplainScanQueryWithParams >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] >> KqpQuery::DecimalOutOfPrecisionBulk+EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecisionBulk-EnableParameterizedDecimal >> KqpQuery::QueryTimeout [GOOD] >> KqpQuery::RandomNumber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 19025, MsgBus: 12724 2025-12-23T18:43:07.718525Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125817266595536:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:07.718623Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005126/r3tmp/tmpvVtCfv/pdisk_1.dat 2025-12-23T18:43:07.917318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:07.952262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:07.952397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:08.005902Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:08.008344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:08.010915Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125817266595501:2081] 1766515387717081 != 1766515387717084 TServer::EnableGrpc on GrpcPort 19025, node 1 2025-12-23T18:43:08.125316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:08.125351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:08.125384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-12-23T18:43:08.125511Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:08.176850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12724 TClient is connected to server localhost:12724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515388057 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:08.642266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:43:08.651457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:43:08.730893Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:10.881762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:11.016271Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-12-23T18:43:11.067136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:11.115608Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-12-23T18:43:11.184814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:11.229812Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-12-23T18:43:11.269514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:11.313527Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} 
{"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-12-23T18:43:11.357721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:11.396725Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill f f t t 2025-12-23T18:43:11.442080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:11.494781Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill f f t t 2025-12-23T18:43:11.538994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:11.590628Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-12-23T18:43:11.637749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:11.684175Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:43:11.685736Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:245: Prepare transaction failed. 
txid 281474976710688 at tablet 72075186224037895 errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710688] at 72075186224037895 while waiting for stream clearance) | 2025-12-23T18:43:11.686492Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710688 at tablet 72075186224037895 status: ERROR errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710688] at 72075186224037895 while waiting for stream clearance) | {f,f} {f,f} {t,t} {t,t} 2025-12-23T18:43:11.726776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:11.767482Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-12-23T18:43:11.806547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-12-23T18:43:11.887408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard_ ... oot, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:54.884412Z node 14 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [14:7587126276377067132:2544] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:54.928027Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:55.300486Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:55.993411Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:504: Get parsing result with error, self: [14:7587126280672034770:2391], owner: [14:7587126276377067025:2309], statement id: 0 2025-12-23T18:44:55.993956Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=14&id=OTY1NzgyY2UtN2U0N2E0MjAtOTI0MWJmY2EtYWMxZmE3N2M=, ActorId: [14:7587126280672034768:2390], ActorState: ExecuteState, TraceId: 01kd68en1jbtx915hkfnz5v1s3, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Error while parsing query." severity: 1 issues { position { row: 1 column: 1 } message: "alternative is not implemented yet : 34" end_position { row: 1 column: 1 } severity: 1 } }, remove tx with tx_id: 2025-12-23T18:44:56.347821Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7587126284967002094:2402], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple
: Error: At tuple
:1:1: Error: At function: PgWhere, At lambda
:2:56: Error: At function: PgOp
:2:56: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-12-23T18:44:56.348389Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=14&id=ZTlkMTdkMjMtY2U1MTZhYzktOGE3NDExNGYtMmUxOTAxYjU=, ActorId: [14:7587126284967002091:2400], ActorState: ExecuteState, TraceId: 01kd68ena1520wjbk93tb5t3g6, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple" end_position { row: 1 column: 1 } severity: 1 issues { message: "At tuple" severity: 1 issues { position { row: 1 column: 1 } message: "At function: PgWhere, At lambda" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 2 column: 56 } message: "At function: PgOp" end_position { row: 2 column: 56 } severity: 1 issues { position { row: 2 column: 56 } message: "Unable to find an overload for operator = with given argument type(s): (text,int4)" end_position { row: 2 column: 56 } severity: 1 } } } } } }, remove tx with tx_id: 2025-12-23T18:44:56.398581Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7587126284967002106:2408], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple
: Error: At tuple
:1:1: Error: At function: PgWhere, At lambda
:2:58: Error: At function: PgAnd
:2:68: Error: At function: PgOp
:2:68: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-12-23T18:44:56.399408Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=14&id=NWE5MGIzODAtZjQ3ODlhNTktZmUwZGM0MzQtMWI2ODM5YTg=, ActorId: [14:7587126284967002103:2406], ActorState: ExecuteState, TraceId: 01kd68end8f98489h52v6h6g8m, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple" end_position { row: 1 column: 1 } severity: 1 issues { message: "At tuple" severity: 1 issues { position { row: 1 column: 1 } message: "At function: PgWhere, At lambda" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 2 column: 58 } message: "At function: PgAnd" end_position { row: 2 column: 58 } severity: 1 issues { position { row: 2 column: 68 } message: "At function: PgOp" end_position { row: 2 column: 68 } severity: 1 issues { position { row: 2 column: 68 } message: "Unable to find an overload for operator = with given argument type(s): (text,int4)" end_position { row: 2 column: 68 } severity: 1 } } } } } } }, remove tx with tx_id: 2025-12-23T18:44:56.419049Z node 14 :KQP_EXECUTER CRIT: kqp_literal_executer.cpp:112: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kd68enes9k85cek7qf85p621, Database: /Root, SessionId: ydb://session/3?node_id=14&id=Y2VlM2U0YmQtYmNjYWZiZDQtMmU3ZGQwZGYtNzVjMTEyNmI=, PoolId: default, IsStreamingQuery: 0}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-12-23T18:44:56.419588Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=14&id=Y2VlM2U0YmQtYmNjYWZiZDQtMmU3ZGQwZGYtNzVjMTEyNmI=, ActorId: [14:7587126284967002115:2412], ActorState: ExecuteState, TraceId: 01kd68enes9k85cek7qf85p621, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "(NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: \"a\"\n\n" issue_code: 2029 severity: 1 } 2025-12-23T18:44:56.472091Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:56.583003Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:56.671253Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7587126284967002283:2437], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2025-12-23T18:44:56.671765Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=14&id=MjQyZjZkMTUtNjY4YTc1MGItM2E1Y2RlYWUtOWM2NDAyOWI=, ActorId: [14:7587126284967002280:2435], ActorState: ExecuteState, TraceId: 01kd68ennvfskdraqx1pqn2btg, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "values have 3 columns, INSERT INTO expects: 2" end_position { row: 1 column: 1 } severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:44:56.713675Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7587126284967002295:2443], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/PgTable2] 2025-12-23T18:44:56.714365Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=14&id=ZGM5YWJlYzEtOWUzYzVhYTEtYjA1MmY1YjAtNmZiZDYzMjM=, ActorId: [14:7587126284967002292:2441], ActorState: ExecuteState, TraceId: 01kd68enq92r90yt0mkmkq52xp, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to convert type: List> to List>" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to convert \'id\': pgunknown to Optional" end_position { row: 1 column: 1 } severity: 1 } } issues { position { row: 1 column: 1 } message: "Row type mismatch for table: db.[/Root/PgTable2]" end_position { row: 1 column: 1 } severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:44:57.245319Z node 14 :KQP_EXECUTER CRIT: kqp_literal_executer.cpp:112: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01kd68enrq4gtshnp91pgyk2nj, Database: /Root, SessionId: ydb://session/3?node_id=14&id=N2Y4YTNmYzktYzdjNWI0ZWQtZmEwYmY2ZGQtNjE5MjRmMg==, PoolId: default, IsStreamingQuery: 0}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-12-23T18:44:57.246079Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=14&id=N2Y4YTNmYzktYzdjNWI0ZWQtZmEwYmY2ZGQtNjE5MjRmMg==, ActorId: [14:7587126284967002304:2447], ActorState: ExecuteState, TraceId: 01kd68enrq4gtshnp91pgyk2nj, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "(NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: \"a\"\n\n" issue_code: 2029 severity: 1 } 2025-12-23T18:44:57.298462Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:57.953462Z node 14 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037892 not found 2025-12-23T18:44:57.994125Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] >> Cdc::AddStream [GOOD] >> Cdc::DisableStream >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpQuery::YqlSyntaxV0 >> KqpExplain::SortStage [GOOD] >> 
KqpExplain::SelfJoin3xSameLabels >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] >> KqpStats::JoinNoStatsYql [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::MultiUsedStage >> KqpExplain::Explain [GOOD] >> KqpExplain::ExplainDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 18659, MsgBus: 2158 2025-12-23T18:43:08.679799Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125822705354119:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:08.679857Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005123/r3tmp/tmpasZjMI/pdisk_1.dat 2025-12-23T18:43:08.982852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:08.992953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:08.993090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:09.054716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:09.146449Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:09.154331Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125822705354078:2081] 1766515388677675 != 1766515388677678 TServer::EnableGrpc on GrpcPort 18659, node 1 2025-12-23T18:43:09.208646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:09.231334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:09.231383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:09.231393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:09.231581Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2158 TClient is connected to server localhost:2158 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515389156 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:09.689862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:09.724211Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:11.840522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125835590256854:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:11.840563Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125835590256866:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:11.840636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:11.840883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125835590256869:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:11.840946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:11.843929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:11.854935Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125835590256868:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:43:11.944696Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125835590256921:2535] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 31402, MsgBus: 12434 2025-12-23T18:43:12.920379Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125839430959518:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:12.920621Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:43:12.947450Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005123/r3tmp/tmpugIVxq/pdisk_1.dat 2025-12-23T18:43:13.032880Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:13.039746Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:13.039820Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:13.056714Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31402, node 2 2025-12-23T18:43:13.155227Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:13.155283Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:13.155294Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:13.155375Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:13.222342Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12434 TClient is connected to server localhost:12434 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515393111 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:13.590800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:13.599687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:43:13.931492Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:16.198886Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125856610829548:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:16.198892Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587125856610829543:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:16.198962Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have ... _WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:50.528397Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:50.550028Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7587126260647801721:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:44:50.651936Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7587126260647801774:2544] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:50.704051Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["aid (null, 3)","aid [7, 7]","abalance"],"E-Cost":"0","ReadRangesExpectedSize":"2"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["aid (null, 3)","aid [7, 7]","abalance"],"E-Cost":"0","ReadRangesExpectedSize":"2"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, 
gRPC: 10153, MsgBus: 17975 2025-12-23T18:44:54.491738Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7587126278604624480:2180];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:54.491897Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:44:54.522237Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005123/r3tmp/tmpiIE2z4/pdisk_1.dat 2025-12-23T18:44:54.622013Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:54.622127Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:54.623585Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:54.657175Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10153, node 11 2025-12-23T18:44:54.731423Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:54.731449Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:54.731464Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:54.731567Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:44:54.747382Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions TClient is connected to server localhost:17975 TClient is connected to server localhost:17975 WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'... 
TClient::Ls request: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515494723 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true ... (TRUNCATED) WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' success. 2025-12-23T18:44:55.455947Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:55.466262Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:59.490909Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7587126278604624480:2180];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:59.491014Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:44:59.714255Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126300079461692:2327], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.714329Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126300079461675:2324], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.714772Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.719391Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:59.730328Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7587126300079461702:2328], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:44:59.823562Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7587126300079461764:2542] txid# 281474976715659, issues: { message: "Check failed: path: \'/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:59.858092Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink >> KqpParams::MissingParameter >> KqpLimits::BigParameter [GOOD] >> KqpLimits::AffectedShardsLimit >> KqpQuery::DecimalOutOfPrecisionBulk-EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision-UseOltpSink-EnableParameterizedDecimal >> KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink >> test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> KqpExplain::FewEffects+UseSink >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::CancelAfterRwTx+useSink >> KqpPg::InsertNoTargetColumns_Alter-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial+useSink >> KqpQuery::RandomNumber [GOOD] >> KqpQuery::RandomUuid |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] >> KqpLimits::DatashardProgramSize+useSink >> KqpQueryDiscard::DiscardSelectSupport [GOOD] >> KqpQueryDiscard::DiscardSelectEnsureExecuted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma [GOOD] Test command err: Trying to start YDB, gRPC: 2075, MsgBus: 17187 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004115/r3tmp/tmp36dkoB/pdisk_1.dat 2025-12-23T18:39:15.495055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:15.495172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:15.592985Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:15.593310Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:15.595664Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:15.616198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:15.617139Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124822559916312:2081] 1766515155030802 != 1766515155030805 TServer::EnableGrpc on GrpcPort 2075, node 1 2025-12-23T18:39:15.775275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:15.775304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:15.775313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:15.775395Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:15.796281Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17187 2025-12-23T18:39:16.166338Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:17187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515155664 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:39:16.794530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:16.803114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:39:19.480101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124839739786385:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.480268Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.486918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124839739786396:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.487040Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124839739786398:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.487115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:19.493049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:19.523626Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587124839739786401:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:39:19.636810Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587124839739786454:2543] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:20.129701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:20.345084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:21.559115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 12279, MsgBus: 18800 ... waiting for SysViewsRoster update finished 2025-12-23T18:39:30.321781Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:30.337443Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:39:30.345871Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:39:30.345989Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:39:30.826207Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:30.826349Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:30.856010Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766515166081071 != 1766515166081074 2025-12-23T18:39:30.863919Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:30.921253Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:39:31.053446Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004115/r3tmp/tmp6UNGii/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12279, node 2 2025-12-23T18:39:31.637736Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:31.637799Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:31.637837Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:31.638316Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:39:31.655422Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18800 TClient is connected to server localhost:18800 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@sys ... 
nternal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.062671Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.062783Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.062823Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.073288Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.073384Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.073429Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.084557Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.084659Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.084700Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.095120Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.095216Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.095256Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.105977Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.106084Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.106141Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.116502Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.116599Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.116640Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.126974Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.127083Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.127124Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.137119Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.137217Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.137258Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.147963Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.148059Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.148098Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.158610Z node 10 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.158696Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.158731Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.169026Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.169119Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.169160Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.179474Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.179572Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.179611Z node 10 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:45:02.297480Z node 10 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd68e1znepqzadfjw4t4kpbz", SessionId: ydb://session/3?node_id=10&id=NTk0NDc2NTctZDBhYjBjY2UtYjRiYzY5ZGItZmU3MmMxZDY=, Slow query, duration: 25.823221s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2025-12-23T18:45:02.384319Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:8718:8045], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:50: Error: Bad "DefaultTxMode" setting for "$all" cluster: (yexception) Unknown DefaultTxMode, available: [SerializableRW, SnapshotRW, SnapshotRO, StaleRO] 2025-12-23T18:45:02.388157Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=10&id=ZjAyY2I4ODYtZjBjN2M3NWItODZkY2IyNzItNDkyOTc5OWE=, ActorId: [10:8709:8036], ActorState: ExecuteState, TraceId: 01kd68ev8rdkvf9jamgj2b8taw, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Pre type annotation" issue_code: 1020 severity: 1 issues { position { row: 2 column: 50 } message: "Bad \"DefaultTxMode\" setting for \"$all\" cluster: (yexception) Unknown DefaultTxMode, available: [SerializableRW, SnapshotRW, SnapshotRO, StaleRO]" end_position { row: 2 column: 50 } severity: 1 } }, remove tx with tx_id: |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpQuery::YqlSyntaxV0 [GOOD] >> KqpQuery::YqlTableSample >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::SqlIn >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpQuery::DecimalOutOfPrecision-UseOltpSink-EnableParameterizedDecimal [GOOD] >> KqpQuery::DeleteWhereInSubquery >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::MergeConnection >> KqpQuery::Now >> KqpAnalyze::AnalyzeTable+ColumnStore >> KqpParams::MissingParameter [GOOD] >> KqpParams::MissingOptionalParameter+UseSink >> Cdc::DisableStream [GOOD] >> Cdc::AwsRegion |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpExplain::ExplainDataQuery [GOOD] >> KqpExplain::ExplainDataQueryWithParams |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpParams::ImplicitParameterTypes >> KqpLimits::AffectedShardsLimit [GOOD] >> KqpLimits::CancelAfterRoTx >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink >> KqpPg::TableInsert+useSink [GOOD] >> KqpPg::TableInsert-useSink >> KqpExplain::FewEffects+UseSink [GOOD] >> KqpExplain::FewEffects-UseSink >> KqpTypes::UnsafeTimestampCastV0 >> KqpLimits::WaitCAsStateOnAbort >> KqpQuery::RandomUuid [GOOD] >> KqpQuery::ReadOverloaded+StreamLookup >> KqpPg::ValuesInsert+useSink [GOOD] >> KqpPg::ValuesInsert-useSink >> KqpLimits::DatashardProgramSize+useSink [GOOD] >> KqpLimits::DatashardProgramSize-useSink >> KqpQueryDiscard::DiscardSelectEnsureExecuted [GOOD] >> KqpQueryDiscard::NoChannelDataEventsWhenDiscard >> KqpQuery::YqlTableSample [GOOD] >> KqpQuery::UpdateWhereInSubquery >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/sqs/with_quotas/py3test >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::IdxFullscan >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin >> KqpExplain::SqlIn [GOOD] >> KqpExplain::SsaProgramInJsonPlan >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicScan >> KqpParams::MissingOptionalParameter+UseSink [GOOD] >> KqpParams::MissingOptionalParameter-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 6605, MsgBus: 9485 2025-12-23T18:43:09.046472Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125825625174707:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:09.051401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005121/r3tmp/tmptBHIIZ/pdisk_1.dat 2025-12-23T18:43:09.331441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:09.331591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:09.399162Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:09.400335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:09.400415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:09.402349Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125825625174671:2081] 1766515389041317 != 1766515389041320 TServer::EnableGrpc on GrpcPort 6605, node 1 2025-12-23T18:43:09.483463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:09.483504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:09.483513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:09.483621Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9485 2025-12-23T18:43:09.658582Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9485 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515389457 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:09.902808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 16 2025-12-23T18:43:10.059109Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:12.448290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:12.588221Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:43:12.596143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:12.642842Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:43:12.644829Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:245: Prepare transaction failed. txid 281474976710662 at tablet 72075186224037889 errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710662] at 72075186224037889 while waiting for scan finish) | 2025-12-23T18:43:12.647305Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710662 at tablet 72075186224037889 status: ERROR errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710662] at 72075186224037889 while waiting for scan finish) | 2025-12-23T18:43:12.666897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125838510077639:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:12.666897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125838510077644:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:12.667021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:12.667294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125838510077654:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:12.667391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:12.670488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:12.680360Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125838510077653:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-12-23T18:43:12.753957Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125838510077706:2661] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } f f t t 18 2025-12-23T18:43:13.177616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:13.238152Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:43:13.246584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:13.294116Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2025-12-23T18:43:13.769260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:13.850602Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:43:13.858720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:13.950809Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:43:14.046503Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125825625174707:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:14.046572Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2025-12-23T18:43:14.303311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:14.349197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:14.388060Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 20 2025-12-23T18:43:14.770125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but ... oot@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515500008 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:00.748639Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:00.778603Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:04.504413Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126319435207405:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:04.504520Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:04.504547Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126319435207413:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:04.504942Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126319435207419:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:04.504991Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:04.509574Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:04.524465Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7587126319435207420:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:45:04.614346Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7587126319435207472:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:04.719716Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:04.772664Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7587126297960370075:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:04.772761Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:45:05.382217Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Trying to start YDB, gRPC: 3594, MsgBus: 3314 2025-12-23T18:45:07.200842Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7587126333429830652:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:07.200939Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005121/r3tmp/tmp1RNDOI/pdisk_1.dat 2025-12-23T18:45:07.219122Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:07.312645Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:07.328226Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:07.328320Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:07.352656Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3594, node 12 2025-12-23T18:45:07.435481Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:07.435520Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:07.435533Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:07.435649Z node 12 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:07.501471Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3314 TClient is connected to server localhost:3314 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515507421 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:08.160544Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:08.210893Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:12.201324Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7587126333429830652:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:12.201451Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:45:12.916738Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587126354904667966:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:12.916836Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587126354904667977:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:12.916958Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:12.922473Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:12.940482Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7587126354904667996:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:45:13.042942Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7587126359199635355:2543] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:13.084306Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> KqpQuery::Now [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> BSCRestartPDisk::RestartOneByOne [GOOD] >> KqpQuery::RewriteIfPresentToMap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne [GOOD] Test command err: RandomSeed# 7620125608605268928 |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> KqpParams::ImplicitParameterTypes [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> KqpExplain::CreateTableAs+Stats |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpQuery::ExtendedTimeOutOfBounds+BulkUpsert >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] >> test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink [GOOD] >> KqpLimits::QueryReplySize >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 >> Cdc::AwsRegion [GOOD] >> KqpStats::DataQueryWithEffects+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] Test command err: RandomSeed# 4805111959685864697 |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> KqpLimits::DatashardProgramSize-useSink [GOOD] >> KqpLimits::ComputeNodeMemoryLimit |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |95.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] |95.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpExplain::FewEffects-UseSink [GOOD] >> KqpExplain::FullOuterJoin |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] >> KqpQuery::UpdateWhereInSubquery [GOOD] >> KqpQuery::UpdateThenDelete-UseSink |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpQuery::UdfTerminate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:40:40.710490Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:40:40.831150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:40:40.842711Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:40:40.843106Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:40:40.843222Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:40:41.159251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:40:41.159441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:40:41.268687Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515237683300 != 1766515237683304 2025-12-23T18:40:41.292343Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:41.339776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:40:41.441503Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002f4d/r3tmp/tmpgxbAtd/pdisk_1.dat 2025-12-23T18:40:41.774421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:41.789981Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:40:41.911310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:41.956230Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:40:41.956585Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:40:42.009126Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:40:42.009309Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:40:42.011425Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:40:42.011579Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:40:42.011647Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:40:42.012129Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:40:42.012300Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:40:42.012431Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:40:42.023396Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:40:42.054454Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:40:42.054718Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:40:42.054888Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:40:42.054946Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:42.055012Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:40:42.055053Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:42.055585Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:40:42.055719Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:40:42.056271Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:42.056331Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:42.056382Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:40:42.056431Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:42.056511Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:40:42.056714Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:40:42.056987Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:40:42.057103Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:40:42.059142Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:42.072375Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:40:42.072521Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T18:40:42.199651Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T18:40:42.205405Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T18:40:42.205520Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:42.206690Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:42.215110Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:40:42.215279Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T18:40:42.215688Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T18:40:42.215871Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:40:42.216354Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:40:42.216435Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T18:40:42.218688Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T18:40:42.223980Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:40:42.226202Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T18:40:42.226269Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:42.227372Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T18:40:42.227457Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:42.229207Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:40:42.229291Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:40:42.229359Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T18:40:42.229449Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T18:40:42.229509Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T18:40:42.229590Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:40:42.232656Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:902:2774][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-12-23T18:40:42.238486Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T18:40:42.240543Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 7 ... tion][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.427981Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:45:18.466039Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:45:18.466119Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.466161Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:45:18.466204Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.466242Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:45:18.466322Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:45:18.466354Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.466386Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:45:18.466421Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.466450Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:45:18.497713Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:45:18.497798Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.497839Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:45:18.497885Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.497922Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:45:18.498008Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:45:18.498042Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.498076Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:45:18.498108Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.498137Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:45:18.518935Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:45:18.519010Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.519054Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:45:18.519087Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.519115Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:45:18.519183Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:45:18.519207Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.519231Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:45:18.519254Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.519277Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:45:18.540017Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:45:18.540110Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.540156Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:45:18.540201Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.540233Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:45:18.540303Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:45:18.540335Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.540366Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:45:18.540411Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.540440Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:45:18.561503Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:45:18.561577Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.561622Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:45:18.561667Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.561705Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:45:18.561874Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:45:18.561910Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.561942Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:45:18.561976Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:45:18.562006Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2025-12-23T18:45:18.583759Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-12-23T18:45:18.583823Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-12-23T18:45:18.584040Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 5 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-12-23T18:45:18.584558Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 427 count 1 last offset 0, current partition end offset: 1 2025-12-23T18:45:18.584654Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2025-12-23T18:45:18.584834Z node 24 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 427 accessed 1 times before, last time 1970-01-01T00:00:03.000000Z 2025-12-23T18:45:18.584979Z node 24 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 2025-12-23T18:45:18.585162Z node 24 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:45:18.585341Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:45:18.585616Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 407 from pos 0 cbcount 1 2025-12-23T18:45:18.586279Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >>>>> GetRecords path=/Root/Table/Stream2 partitionId=0 2025-12-23T18:45:18.587794Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'streamImpl' requestId: 2025-12-23T18:45:18.587905Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2025-12-23T18:45:18.588722Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037891][Partition][0][StateIdle] read cookie 3 Topic 'Table/Stream2/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-12-23T18:45:18.589217Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037891][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 426 count 1 last offset 0, current partition end offset: 1 2025-12-23T18:45:18.589308Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037891][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2025-12-23T18:45:18.589430Z node 24 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 426 accessed 0 times before, last time 1970-01-01T00:00:03.000000Z 2025-12-23T18:45:18.589595Z node 24 :PERSQUEUE DEBUG: read.h:126: [72075186224037891][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2025-12-23T18:45:18.589761Z node 24 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037891' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:45:18.589865Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:45:18.590114Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 406 from pos 0 cbcount 1 2025-12-23T18:45:18.590783Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpQuery::QueryClientTimeout |95.5%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |95.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |95.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpExplain::PrecomputeRange >> KqpExplain::UpdateSecondaryConditional+UseSink >> KqpQuery::CreateTableAs_PragmaAndParamsAndNamedExprs >> KqpQuery::DictJoin [GOOD] >> KqpParams::MissingOptionalParameter-UseSink [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck >> KqpQuery::ExtendedTimeOutOfBounds+BulkUpsert [GOOD] >> KqpQuery::ExtendedTimeOutOfBounds-BulkUpsert >> KqpExplain::CreateTableAs+Stats [GOOD] >> KqpExplain::CreateTableAs-Stats >> KqpStats::MultiTxStatsFullExpYql >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] >> KqpQuery::NoEvaluate >> KqpExplain::UpdateConditional-UseSink >> KqpExplain::IdxFullscan [GOOD] >> KqpExplain::MultiJoinCteLinks >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::RowsLimit >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> KqpExplain::UpdateSecondaryConditional-UseSink >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: Trying to start YDB, gRPC: 63707, MsgBus: 31886 2025-12-23T18:44:56.940491Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126283240542993:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:56.940585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033d9/r3tmp/tmpzxXhtU/pdisk_1.dat 2025-12-23T18:44:57.182876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:57.189864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:57.189983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:57.245104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:44:57.273451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:57.293110Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126283240542967:2081] 1766515496939305 != 1766515496939308 TServer::EnableGrpc on GrpcPort 63707, node 1 2025-12-23T18:44:57.394371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:57.394403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:57.394411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:57.394539Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:44:57.445107Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:31886 TClient is connected to server localhost:31886 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515497306 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:57.866508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:57.875510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:44:57.948151Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:59.879476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126296125445726:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.879480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126296125445739:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.879643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.879891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126296125445749:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.880010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.882820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:59.892309Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126296125445748:2325], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:44:59.955489Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126296125445803:2531] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:00.195805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 30117, MsgBus: 9948 2025-12-23T18:45:00.992719Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587126303142546110:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:00.992785Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033d9/r3tmp/tmp211kWB/pdisk_1.dat 2025-12-23T18:45:01.005196Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:01.074342Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:01.075745Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587126303142546084:2081] 1766515500991774 != 1766515500991777 2025-12-23T18:45:01.141912Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:01.142024Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 30117, node 2 2025-12-23T18:45:01.143645Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:01.171299Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:01.171320Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:01.171326Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:01.171425Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:01.295000Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9948 TClient is connected to server localhost:9948 WaitRootIsUp 'Root'... 
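Editor's note: a few entries above, the workload service first reports NOT_FOUND for the default resource pool (TPoolFetcherActor), the TPoolCreatorActor then schedules a retry with "Transaction ... completed, doublechecking", and TX_PROXY finally answers "path exist, request accepts it". That is a create-if-missing race that resolves by re-checking after the create. The following is a minimal sketch of that general pattern only, with hypothetical names rather than YDB's actual actor API:

    #include <chrono>
    #include <functional>
    #include <thread>

    enum class Status { Ok, NotFound, AlreadyExists, Error };

    // Check -> create -> re-check ("doublecheck"), treating a concurrent
    // "already exists" as success, with a bounded number of attempts.
    bool EnsureExists(const std::function<Status()>& Check,
                      const std::function<Status()>& Create,
                      int MaxAttempts = 5)
    {
        for (int attempt = 0; attempt < MaxAttempts; ++attempt) {
            if (Check() == Status::Ok) {
                return true;                      // another actor may have created it already
            }
            const Status created = Create();
            if (created == Status::Ok || created == Status::AlreadyExists) {
                // mirror the "completed, doublechecking" step: re-read before reporting success
                if (Check() == Status::Ok) {
                    return true;
                }
            }
            // simple linear backoff between attempts
            std::this_thread::sleep_for(std::chrono::milliseconds(100 * (attempt + 1)));
        }
        return false;
    }

The second check appears to be why the initial NOT_FOUND lines stay at WARN level here and the test still finishes [GOOD].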
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515501191 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:01.521588Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:01.998581Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:03.883735Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126316027448850:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:03 ... ons 2025-12-23T18:45:15.787013Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:15.806248Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:15.806327Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:15.825649Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20949, node 5 2025-12-23T18:45:15.866002Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:15.866030Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:15.866040Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:15.866135Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:15.920698Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29256 TClient is connected to server localhost:29256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515515877 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:16.314957Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:16.333530Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
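Editor's note: the HIVE lines above walk the node through VolatileState: Unknown -> Disconnected -> Connecting -> Connected. Purely as an illustration of that chain (hypothetical types, not the Hive implementation), the transitions can be modelled as a tiny state machine:

    #include <cstdint>

    // The node states named in the HIVE log lines above (illustrative only).
    enum class EVolatileState : uint8_t {
        Unknown,
        Disconnected,
        Connecting,
        Connected,
    };

    // Advance one step along the Unknown -> Disconnected -> Connecting -> Connected
    // chain seen in the log; Connected is terminal in this sketch.
    EVolatileState NextState(EVolatileState current)
    {
        switch (current) {
            case EVolatileState::Unknown:      return EVolatileState::Disconnected;
            case EVolatileState::Disconnected: return EVolatileState::Connecting;
            case EVolatileState::Connecting:   return EVolatileState::Connected;
            case EVolatileState::Connected:    return EVolatileState::Connected;
        }
        return current; // unreachable; keeps the compiler satisfied
    }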
2025-12-23T18:45:16.388143Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:16.518893Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:16.576432Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:16.743721Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:19.279510Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126384085321095:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.279639Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.279965Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126384085321105:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.280034Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.381666Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:19.414080Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:19.444965Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:19.479834Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:19.517865Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:19.551329Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:19.586007Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:19.634287Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:19.725022Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126384085321974:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.725110Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.725165Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126384085321979:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.725288Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126384085321981:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.725340Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.729148Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:19.740689Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126384085321983:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:45:19.807050Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126384085322036:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:20.691127Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126366905450054:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:20.691206Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpLimits::OutOfSpaceBulkUpsertFail >> test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64-IsColumn |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> KqpStats::JoinStatsBasicScan [GOOD] >> KqpStats::DataQueryWithEffects+UseSink [GOOD] >> KqpStats::DataQueryMulti >> KqpQueryDiscard::NoChannelDataEventsWhenDiscard [GOOD] >> KqpQueryDiscard::DiscardSelectMultiLine >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpExplain::UpdateConditional+UseSink >> KqpParams::DefaultParameterValue |95.6%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |95.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |95.6%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 22167, MsgBus: 20325 2025-12-23T18:44:55.772199Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126280183600128:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:55.773976Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033dd/r3tmp/tmpbMmsMG/pdisk_1.dat 2025-12-23T18:44:56.001691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:56.001810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:56.004329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:56.010158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:44:56.121225Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:56.122896Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126280183600098:2081] 1766515495769684 != 1766515495769687 TServer::EnableGrpc on GrpcPort 22167, node 1 2025-12-23T18:44:56.210434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:56.210459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:56.210464Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:56.210539Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:44:56.232337Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20325 TClient is connected to server localhost:20325 WaitRootIsUp 'Root'... 
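Editor's note: the test-client lines above ("TClient is connected to server ...", "WaitRootIsUp 'Root'...", an Ls request/response, then "WaitRootIsUp 'Root' success.") amount to polling the scheme until the root entry is listed. A minimal sketch of such a wait loop, with hypothetical helper names rather than the real test-client interface:

    #include <chrono>
    #include <functional>
    #include <optional>
    #include <string>
    #include <thread>

    // Hypothetical shape of an Ls answer: whether the call succeeded and the name returned.
    struct TLsResult {
        bool Success = false;
        std::string Name;
    };

    // Poll Ls until the expected root entry shows up or the deadline passes.
    bool WaitRootIsUp(const std::function<std::optional<TLsResult>(const std::string&)>& Ls,
                      const std::string& Root = "Root",
                      std::chrono::seconds Timeout = std::chrono::seconds(30))
    {
        const auto deadline = std::chrono::steady_clock::now() + Timeout;
        while (std::chrono::steady_clock::now() < deadline) {
            if (const auto result = Ls(Root); result && result->Success && result->Name == Root) {
                return true;                          // corresponds to "WaitRootIsUp 'Root' success."
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(200));
        }
        return false;
    }

Polling with a deadline, rather than a single check, is what lets the test tolerate the server still starting up when the client first connects.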
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515496123 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:56.676066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:56.689237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:56.778902Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:56.785213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:56.926697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:56.983454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.963969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126293068503853:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:58.964115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:58.964462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126293068503863:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:58.964566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.209838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.235785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.262391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.285820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.313438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.345111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.402461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.447235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.514939Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297363472028:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.515006Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.515079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297363472033:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.515332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297363472035:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.515379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.518512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:59.528593Z node 1 ... , (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:16.588320Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:16.617598Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9135, node 4 2025-12-23T18:45:16.674057Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:16.674083Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:16.674090Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:16.674173Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:16.818320Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26355 TClient is connected to server localhost:26355 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515516675 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:17.186991Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:45:17.195864Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:17.249893Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:17.388735Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:17.458271Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:17.464618Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:19.896216Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126385692344736:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.896310Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.896658Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126385692344746:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.896709Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.943520Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:19.976812Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.009643Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.046109Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.085077Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.129406Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.165814Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.215963Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.290668Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126389987312911:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.290751Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.290828Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126389987312916:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.290874Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126389987312918:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.290905Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.294528Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:20.306531Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587126389987312920:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:45:20.401180Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126389987312971:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:21.459254Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126372807441002:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:21.459319Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:45:25.273685Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515522737, txId: 281474976710673] shutting down |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinNoStatsScan >> KqpQuery::ReadOverloaded+StreamLookup [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup >> KqpQuery::ExtendedTimeOutOfBounds-BulkUpsert [GOOD] >> KqpQuery::ExecuteDataQueryCollectMeta >> KqpQuery::CreateTableAs_PragmaAndParamsAndNamedExprs [GOOD] >> KqpQuery::CreateAsSelect_DisableDataShard >> KqpStats::OneShardLocalExec+UseSink >> KqpExplain::CreateTableAs-Stats [GOOD] >> KqpQuery::UpdateThenDelete-UseSink [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup >> KqpExplain::FullOuterJoin [GOOD] >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::PureExpr >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::QueryExecTimeoutCancel >> KqpTypes::Time64Columns-EnableTableDatetime64-IsColumn [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CreateTableAs-Stats [GOOD] Test command err: Trying to start YDB, gRPC: 23283, MsgBus: 26791 2025-12-23T18:44:56.109415Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126284126573151:2084];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:56.110774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:44:56.131537Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033db/r3tmp/tmpe7r0jn/pdisk_1.dat 2025-12-23T18:44:56.380469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:56.380569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:56.404554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-12-23T18:44:56.463921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:56.467000Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23283, node 1 2025-12-23T18:44:56.515265Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:56.515285Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:56.515291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:56.515404Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:44:56.637578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26791 TClient is connected to server localhost:26791 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515496522 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:56.974466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:56.981152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:44:56.986906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
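Editor's note: the NET_CLASSIFIER warnings above describe a fallback chain: the distributable config is "empty, broken or outdated", the classifier then tries a local file, that also fails, and the final ERROR reports "got bad distributable configuration". A sketch of that fallback order under the same assumptions (hypothetical loader signatures, not the actual net_classifier code):

    #include <functional>
    #include <optional>
    #include <string>

    // Hypothetical configuration payload for the sketch.
    struct TNetClassifierConfig {
        std::string PackedNetData;
    };

    using TLoader = std::function<std::optional<TNetClassifierConfig>()>;

    // Try the distributable config first, then the local file; an empty result
    // mirrors the final "got bad distributable configuration" error.
    std::optional<TNetClassifierConfig> InitNetClassifierConfig(const TLoader& fromDistributable,
                                                                const TLoader& fromFile)
    {
        if (auto config = fromDistributable()) {
            return config;                  // normal path: config distributed by the cluster
        }
        // "distributable config is empty, broken or outdated, will use file"
        if (auto config = fromFile()) {
            return config;
        }
        // "failed to initialize from file" -> caller logs the ERROR line seen above
        return std::nullopt;
    }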
2025-12-23T18:44:57.105939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:57.200331Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:57.275538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:44:57.346990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.302331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297011476854:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.302460Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.302772Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297011476864:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.302835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.552284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.581143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.609577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.637486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.665855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.697155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.756015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.793426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.854510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297011477737:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.854582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.854794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297011477742:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.854814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297011477743:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.854864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.858214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, ... ode Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 21443, MsgBus: 23376 2025-12-23T18:45:23.217655Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587126399850924825:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:23.217711Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033db/r3tmp/tmp3nILeL/pdisk_1.dat 2025-12-23T18:45:23.242825Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:23.329924Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:23.364797Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:23.364898Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:23.371986Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21443, node 5 2025-12-23T18:45:23.423373Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:23.423400Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:23.423412Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:23.423507Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:23.540734Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23376 TClient is connected to server localhost:23376 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515523423 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:23.983841Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:23.991548Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:24.224112Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:26.876508Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126412735827559:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.876616Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.876977Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126412735827572:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.877017Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126412735827571:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.877057Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.882076Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:26.893976Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126412735827575:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:45:26.966467Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126412735827626:2531] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:27.039701Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) PLAN::{"Plan":{"Plans":[{"Tables":["Destination"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Destination","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]},{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"Tables":["test\/Destination2"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/Destination2","Name":"FillTable","Table":"test\/Destination2","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/Destination2","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination2","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 
PLAN::{"Plan":{"Plans":[{"Tables":["test\/test2\/Destination3"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::RowsLimitServiceOverride >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateThenDelete-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16862, MsgBus: 30199 2025-12-23T18:45:02.171558Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126309852752706:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:02.171632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033d3/r3tmp/tmpG2JDCV/pdisk_1.dat 2025-12-23T18:45:02.385949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:02.386102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:02.429609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:02.436458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:02.496308Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:02.497781Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126309852752681:2081] 1766515502170778 != 1766515502170781 TServer::EnableGrpc on GrpcPort 16862, node 1 2025-12-23T18:45:02.546873Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:02.546898Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: 
will try to initialize from file: (empty maybe) 2025-12-23T18:45:02.546906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:02.547039Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:02.686175Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30199 TClient is connected to server localhost:30199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515502521 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:02.998044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:03.015933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:03.144096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:03.255081Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:03.301172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:03.368377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:05.141698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126322737656444:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:05.141843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:05.142198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126322737656454:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:05.142251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:05.487437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:05.519177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:05.547571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:05.574995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:05.602448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:05.634482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:05.692141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:05.737514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:05.808582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126322737657324:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:05.808652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:05.808771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126322737657329:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:05.808820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126322737657331:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:05.808860Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:05.812661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:05.823172Z node 1 ... 7968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:20.953706Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:20.957706Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32713, node 4 2025-12-23T18:45:21.065674Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:21.065699Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:21.065709Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:21.065811Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:21.112258Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16200 TClient is connected to server localhost:16200 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515521071 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:21.479231Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:45:21.484698Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:21.489245Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:21.544561Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:21.685449Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:21.755594Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:21.877579Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:24.529378Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126405596629417:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.529472Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.529784Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126405596629427:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.529831Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.654463Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.693693Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.729829Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.769827Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.809817Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.855389Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.899135Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.950646Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.046267Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126409891597592:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.046386Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.046764Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126409891597598:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.046820Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126409891597597:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.046943Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.050679Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:25.062686Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587126409891597601:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:45:25.124490Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126409891597652:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:25.835372Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126388416758400:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:25.835442Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateConditional-UseSink [GOOD] >> KqpExplain::UpdateConditionalKey+UseSink >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::InvalidJson >> KqpQuery::NoEvaluate [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::FullOuterJoin [GOOD] Test command err: Trying to start YDB, gRPC: 10482, MsgBus: 21473 2025-12-23T18:44:53.919517Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126273454804223:2202];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:53.919828Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003458/r3tmp/tmpys7Qsw/pdisk_1.dat 2025-12-23T18:44:54.239997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:54.240125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:54.264468Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:54.267478Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:54.268406Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126273454804050:2081] 1766515493906373 != 1766515493906376 2025-12-23T18:44:54.282144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10482, node 1 2025-12-23T18:44:54.368955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:54.368983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:54.368993Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:54.369083Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:44:54.522841Z 
node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21473 TClient is connected to server localhost:21473 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515494331 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:54.871332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:54.893796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:54.939759Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:55.040125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:55.202572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:55.268389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:44:57.130700Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126290634675112:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.131058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.131532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126290634675122:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.131590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.458501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:57.493160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:57.522436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:57.553542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:57.584924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:57.619768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:57.653420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:57.705278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:57.779302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126290634675990:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.779422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.779457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126290634675995:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.779644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126290634675997:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.779692Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.783121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:57.792402Z node 1 ... me status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:64038 TClient is connected to server localhost:64038 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515520847 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:21.306038Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:21.314336Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:21.326072Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:21.395520Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:21.545956Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:21.611857Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:21.745408Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:24.900724Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126403496105677:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.900834Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.901114Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126403496105687:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.901161Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.994602Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.033092Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.070392Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.102441Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.140024Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.181413Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.219365Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.276777Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.378595Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126407791073856:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.378675Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.378817Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126407791073861:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.378978Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126407791073863:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.379014Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.382631Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:25.393520Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126407791073865:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:45:25.469398Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126407791073916:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:25.649489Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126386316234644:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:25.649560Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:45:27.367096Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:27.692980Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.733741Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpParams::EmptyListForListParameterExecuteDataQuery >> KqpExplain::UpdateSecondaryConditional+UseSink [GOOD] >> KqpExplain::UpdateOnSecondary+UseSink >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled >> KqpQuery::DecimalOutOfPrecision+UseOltpSink-EnableParameterizedDecimal >> KqpExplain::MultiJoinCteLinks [GOOD] >> KqpQuery::QuerySkipHasNoColumns [GOOD] >> KqpQuery::QueryResultsTruncated >> KqpStats::DataQueryMulti [GOOD] >> KqpStats::CreateTableAsStats+IsOlap >> KqpExplain::UpdateSecondaryConditional-UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink >> KqpQuery::PreparedQueryInvalidate >> KqpParams::DefaultParameterValue [GOOD] >> KqpParams::Decimal-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 2179, MsgBus: 18755 2025-12-23T18:44:57.047323Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126290063478708:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:57.047388Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # 
/home/runner/.ya/build/build_root/8ptz/0033d8/r3tmp/tmpeFRHBq/pdisk_1.dat 2025-12-23T18:44:57.278055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:57.294377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:57.294479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:57.299218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:44:57.383849Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:57.387827Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126290063478679:2081] 1766515497045591 != 1766515497045594 TServer::EnableGrpc on GrpcPort 2179, node 1 2025-12-23T18:44:57.440763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:57.440789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:57.440812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:57.440887Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:44:57.529042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18755 TClient is connected to server localhost:18755 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515497418 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:57.896724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:44:57.910594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.035448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.063965Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:58.186985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.246294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:59.899868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126298653415141:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.899986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.900254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126298653415151:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.900333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.137421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.162965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.189545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.215354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.242846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.271945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.318330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.355688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.417930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126302948383317:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.418013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.418130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126302948383322:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.418354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126302948383324:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.418410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.421813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:00.432115Z node 1 : ... x/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:24.839050Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:24.924850Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:25.003271Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:25.080145Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:28.242926Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126423899715016:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.243069Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.243465Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126423899715026:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.243539Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.331630Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.378177Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.420307Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.452872Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.488264Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.566234Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.603565Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.669951Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.750719Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126423899715902:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.750856Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.751037Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126423899715907:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.751062Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126423899715909:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.751107Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.756265Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:28.769735Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126423899715911:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:45:28.851585Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126402424876714:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:28.851716Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:45:28.863899Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126423899715962:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"E-Size":"0","PlanNodeId":6,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/EightShard","Columns":["Data","Key","Text"],"E-Rows":"0","Table":"EightShard","Plans":[{"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"0"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":6}],"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":8}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":3,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key (-∞, +∞)","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"0","Columns":["Data","Key","Text"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"EightShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node 
Type":"Filter"},{"PlanNodeId":11,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key (-∞, +∞)","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateConditional+UseSink [GOOD] >> KqpQuery::ExecuteDataQueryCollectMeta [GOOD] >> KqpQuery::GenericQueryNoRowsLimit >> KqpStats::OneShardLocalExec+UseSink [GOOD] >> KqpStats::OneShardLocalExec-UseSink >> KqpExplain::PureExpr [GOOD] >> KqpExplain::ReadTableRangesFullScan >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap >> KqpLimits::WaitCAsTimeout [GOOD] >> KqpParams::BadParameterType >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::MultiTxStatsFullYql >> KqpStats::RequestUnitForBadRequestExecute >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::DeferredEffects+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateConditional+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10275, MsgBus: 14410 2025-12-23T18:44:55.987193Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126280280908505:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:55.988867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033dc/r3tmp/tmpR57BVL/pdisk_1.dat 2025-12-23T18:44:56.221474Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:56.227106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:56.227227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:56.279744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:44:56.320860Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:56.323529Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126280280908476:2081] 1766515495984506 != 1766515495984509 TServer::EnableGrpc on GrpcPort 10275, node 1 2025-12-23T18:44:56.393545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:56.393568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:56.393579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize 
from file: (empty maybe) 2025-12-23T18:44:56.393698Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:44:56.459995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:14410 TClient is connected to server localhost:14410 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515496347 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:56.861457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:56.880643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:56.999381Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:57.038057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:57.213409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:57.283058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:44:59.244152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297460779535:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.244281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.244592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297460779545:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.244706Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.601665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.628307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.655201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.682146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.706583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.738335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.784819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.827540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:59.894347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297460780417:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.894422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.894603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297460780422:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.894614Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297460780423:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.894678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.898393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:59.908739Z node 1 ... Code: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515526923 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:27.568061Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:27.576611Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:27.583976Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:27.651820Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:27.793418Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:27.839624Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:45:27.931350Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.003778Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126430186589328:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.003889Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.004295Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126434481556634:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.004347Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.090912Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.131562Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.171768Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.212774Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.249126Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.294988Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.353079Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.442510Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.545156Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126434481557504:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.545244Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126434481557509:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.545431Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.545869Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126434481557512:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.545936Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.549371Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:31.562245Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126434481557511:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:45:31.658516Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126434481557564:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:31.708643Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126413006718376:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:31.708730Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Operators":[{"Inputs":[],"Path":"\/Root\/EightShard","Name":"Upsert","SinkType":"KqpTableSink","Table":"EightShard"}],"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Operators":[{"E-Rows":"0","Inputs":[{"ExternalPlanNodeId":1}],"Predicate":"item.Data \u003E 0","E-Cost":"0","E-Size":"0","Name":"Filter"}],"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"EightShard","ReadColumns":["Key (-∞, +∞)","Data"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"Upsert","SinkType":"KqpTableSink","Table":"EightShard"}],"Node Type":"Upsert"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateConditionalKey+UseSink [GOOD] >> KqpExplain::UpdateConditionalKey-UseSink >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectCountAsteriskFromVar >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn [GOOD] >> KqpQuery::DecimalOutOfPrecision+UseOltpSink-EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision-UseOltpSink+EnableParameterizedDecimal >> test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn >> KqpQueryDiscard::DiscardSelectMultiLine [GOOD] >> KqpParams::InvalidJson [GOOD] >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [FAIL] >> KqpParams::CheckQueryCacheForPreparedQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 11735, MsgBus: 1620 2025-12-23T18:45:13.227491Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126360368150806:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:13.229037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033cb/r3tmp/tmpNZHo04/pdisk_1.dat 2025-12-23T18:45:13.421182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:13.421301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:13.470257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:13.478564Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:13.493362Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:13.494398Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126360368150773:2081] 1766515513222368 != 1766515513222371 TServer::EnableGrpc on GrpcPort 11735, node 1 2025-12-23T18:45:13.562146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:13.562179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:13.562186Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:13.562310Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1620 2025-12-23T18:45:13.773585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1620 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515513553 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:45:14.052083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:14.069813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:14.201547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:14.249961Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:45:14.364197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:14.431162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:16.317949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126373253054534:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:16.318058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:16.318354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126373253054544:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:16.318408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:16.661510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:16.690951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:16.718652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:16.747785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:16.775076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:16.808540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:16.853884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:16.892796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:16.960449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126373253055415:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:16.960531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126373253055420:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:16.960547Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:16.960766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126373253055423:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:16.960811Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:16.963870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:16.972995Z node 1 :K ... 2-23T18:45:36.108287Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.108289Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.117166Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.117227Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.117244Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.117850Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.117928Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.117952Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.126520Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.126582Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.126598Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.126810Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.126857Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.126873Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.136339Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.136401Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.136418Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.136543Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.136588Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.136604Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.146216Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.146293Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.146309Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.146843Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.146902Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.146915Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.155121Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.155205Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.155222Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.155359Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.155416Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.155437Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.164783Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.164840Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.164853Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.164868Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.164888Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.164903Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.172510Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.172556Z node 4 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.172566Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2025-12-23T18:45:36.200976Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126455549019319:2789], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.201058Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.201256Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126455549019321:2790], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.201298Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.217816Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126455549019330:3872] txid# 281474976710660, issues: { message: "Type \'Datetime64\' specified for column \'Datetime\', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off)" severity: 1 } |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpParams::EmptyListForListParameterExecuteDataQuery [GOOD] >> KqpParams::EmptyListForListParameterExecuteQuery >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] >> KqpQuery::MixedCreateAsSelect >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryStats+UseSink >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpQuery::OlapTemporary >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [FAIL] |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 10252, MsgBus: 28371 2025-12-23T18:45:04.360523Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126319266406405:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:04.360583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033d1/r3tmp/tmpohU7zv/pdisk_1.dat 2025-12-23T18:45:04.602562Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:04.608372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:04.608486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:04.673086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:04.681101Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:04.682526Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126319266406376:2081] 1766515504358765 != 1766515504358768 TServer::EnableGrpc on GrpcPort 10252, node 1 2025-12-23T18:45:04.764813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:04.764837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:04.764849Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:04.764958Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:04.894731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28371 TClient is connected to server localhost:28371 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515504726 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:05.277640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:05.284432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:05.290123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:05.373586Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:05.439939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:05.584615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:05.642371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:07.489675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126332151310139:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:07.489808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:07.490151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126332151310149:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:07.490238Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:07.755602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:07.785011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:07.811124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:07.842464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:07.871256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:07.904390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:07.973830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:08.016043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:08.090327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126336446278315:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.090411Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.090475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126336446278320:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.090619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126336446278322:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.090665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.094730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... 18710Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:31.018756Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:31.018766Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:31.018864Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9914 TClient is connected to server localhost:9914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515530983 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:31.553867Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:31.573200Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:31.660000Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:31.800350Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:31.869938Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:31.953747Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:35.048089Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126451741236304:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:35.048193Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:35.048673Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126451741236314:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:35.048718Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:35.189550Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:35.222790Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:35.264458Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:35.307137Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:35.347053Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:35.423879Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:35.466873Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:35.526710Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:35.631378Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126451741237195:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:35.631504Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:35.631801Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126451741237200:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:35.631849Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126451741237201:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:35.631900Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:35.635780Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:35.648028Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126451741237204:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:45:35.730955Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126451741237255:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:35.780248Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126430266397982:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:35.780319Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:45:37.808304Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:37.963946Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=ODE1NjNlMTMtOWFiNDI4MDItOWEzNTQ5ZjgtMmU0ZjVmZDQ=, ActorId: [5:7587126460331172180:2532], ActorState: ExecuteState, TraceId: 01kd68fxyacd7qck17pgzp4g6f, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1256: Invalid Json value, status: BAD_REQUEST
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1256: Invalid Json value ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQueryDiscard::DiscardSelectMultiLine [GOOD] Test command err: Trying to start YDB, gRPC: 12226, MsgBus: 28847 2025-12-23T18:44:56.861155Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126284529294094:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:56.861332Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033da/r3tmp/tmpHxH7uy/pdisk_1.dat 2025-12-23T18:44:57.065120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:57.065235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:57.068726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:44:57.169904Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126284529294056:2081] 1766515496858916 != 1766515496858919 2025-12-23T18:44:57.169949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:57.170139Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12226, node 1 2025-12-23T18:44:57.235422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:57.235445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:57.235452Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:57.235549Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:44:57.364679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28847 TClient is connected to server localhost:28847 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515497215 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:57.721082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:57.727615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:44:57.743729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:57.868180Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:57.871087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.010986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.071978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:59.805001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297414197814:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.805121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.805395Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126297414197824:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:59.805436Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.148798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.176715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.202735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.231518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.261890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.293937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.333739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.372788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.436336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126301709165991:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.436404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.436498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126301709165996:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.436586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126301709165998:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.436619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.439793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... k script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15111 TClient is connected to server localhost:15111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515526916 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:45:27.552500Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:45:27.572284Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:27.647067Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:27.763591Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:27.817754Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:27.954532Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:30.904181Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126431618547149:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:30.904298Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:30.904532Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126431618547158:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:30.904596Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:30.997279Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.034882Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.069906Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.101672Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.138027Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.186465Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.230279Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.336930Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.424069Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126435913515324:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.424171Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.424258Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126435913515329:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.424465Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126435913515331:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.424518Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.428519Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:31.441937Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587126435913515332:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:45:31.521091Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126435913515384:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:31.695019Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126414438676197:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:31.695078Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:45:37.741960Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7587126461683319878:2657], status: GENERIC_ERROR, issues:
:1:0: Error: mismatched input 'sub' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, TRUNCATE, UPDATE, UPSERT, USE, VALUES} 2025-12-23T18:45:37.742361Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=4&id=NGI0YThmZTAtM2RlYzExNGItMWUyYjQzM2YtY2MwMDk1MTA=, ActorId: [4:7587126461683319876:2656], ActorState: ExecuteState, TraceId: 01kd68fxsxbq4md6qmyxa033t0, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 1 } message: "mismatched input \'sub\' expecting {\';\', \'(\', \'$\', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, TRUNCATE, UPDATE, UPSERT, USE, VALUES}" end_position { row: 1 } severity: 1 }, remove tx with tx_id: |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] >> KqpExplain::UpdateOnSecondary+UseSink [GOOD] >> KqpExplain::UpdateOnSecondary-UseSink >> KqpQuery::CreateAsSelect_DisableDataShard [GOOD] >> KqpQuery::CreateAsSelect_BadCases >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryCancelWrite >> KqpQuery::QueryCache >> KqpStats::OneShardLocalExec-UseSink [GOOD] >> KqpStats::OneShardNonLocalExec+UseSink >> KqpQuery::DecimalOutOfPrecision-UseOltpSink+EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision+UseOltpSink+EnableParameterizedDecimal >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::ExecuteWriteQuery >> KqpParams::Decimal-QueryService-UseSink [GOOD] >> KqpParams::Decimal+QueryService-UseSink >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal-IsColumn >> KqpExplain::ReadTableRangesFullScan [GOOD] >> KqpExplain::ReadTableRanges >> KqpQuery::SelectWhereInSubquery >> KqpTypes::QuerySpecialTypes >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] >> KqpParams::BadParameterType [GOOD] >> KqpParams::CheckCacheByAst >> KqpAnalyze::AnalyzeTable+ColumnStore [FAIL] >> KqpAnalyze::AnalyzeTable-ColumnStore >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpStats::MultiTxStatsFullScan >> KqpStats::RequestUnitForBadRequestExecute [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> KqpExplain::UpdateConditionalKey-UseSink [GOOD] >> KqpExplain::UpdateOn+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 61892, MsgBus: 16953 2025-12-23T18:44:55.129834Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126280887573213:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:55.132954Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2025-12-23T18:44:55.161571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033e1/r3tmp/tmp3leq0Y/pdisk_1.dat 2025-12-23T18:44:55.405776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:55.405930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:55.422174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61892, node 1 2025-12-23T18:44:55.523936Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126280887573184:2081] 1766515495127671 != 1766515495127674 2025-12-23T18:44:55.524202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:44:55.525476Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:55.545807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:55.545845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:55.545858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:55.545989Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16953 TClient is connected to server localhost:16953 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515495535 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:56.007929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:44:56.028411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:56.146383Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:56.186867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:56.321724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:56.387690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.315738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126293772476952:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:58.315846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:58.316176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126293772476962:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:58.316231Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:58.608572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:58.637244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:58.664649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:58.689961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:58.717617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:58.748232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:58.798072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:58.833795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:44:58.897659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126293772477832:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:58.897720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:58.897748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126293772477837:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:58.897888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126293772477839:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:58.897946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:58.901423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:58.910691Z node 1 :KQP_WORKLOAD_SERVIC ... ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:33.029454Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:33.115486Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:33.378735Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:33.759318Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:34.063283Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:34.661554Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1904:3511], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:34.661945Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:34.663034Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1978:3530], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:34.663156Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:34.694968Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:34.855035Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:35.147834Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:35.405296Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:35.662957Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:35.970396Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:36.255989Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:36.570999Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:36.975570Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2791:4171], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.975709Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.976142Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2795:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.976217Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.976370Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2798:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.983651Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:37.144754Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:2800:4180], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:45:37.207975Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:2860:4221] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:39.349831Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:39.611986Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:39.960190Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:42.021674Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2061: SelfId: [5:3574:4764], TxId: 281474976715676, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68g0ct5s24hmq64cdfrhkv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZjMxZDc0NWYtNzUzODI2NGUtZjI2YTBkNy0xMzEyZjEzNQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. } 2025-12-23T18:45:42.021822Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [5:3574:4764], TxId: 281474976715676, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68g0ct5s24hmq64cdfrhkv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZjMxZDc0NWYtNzUzODI2NGUtZjI2YTBkNy0xMzEyZjEzNQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. }. 2025-12-23T18:45:42.023039Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [5:3575:4765], TxId: 281474976715676, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68g0ct5s24hmq64cdfrhkv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZjMxZDc0NWYtNzUzODI2NGUtZjI2YTBkNy0xMzEyZjEzNQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:3568:4454], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-12-23T18:45:42.024025Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=ZjMxZDc0NWYtNzUzODI2NGUtZjI2YTBkNy0xMzEyZjEzNQ==, ActorId: [5:3155:4454], ActorState: ExecuteState, TraceId: 01kd68g0ct5s24hmq64cdfrhkv, Create QueryResponse for error on request, msg: , status: OVERLOADED, issues: { message: "Table \'/Root/SecondaryKeys\' retry limit exceeded." severity: 1 } >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost [GOOD] >> KqpStats::SysViewCancelled >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpQuery::UpdateThenDelete+UseSink >> KqpQuery::SelectCountAsteriskFromVar [GOOD] >> KqpStats::CreateTableAsStats+IsOlap [GOOD] >> KqpStats::CreateTableAsStats-IsOlap >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery >> KqpParams::RowsList >> KqpQuery::QueryStats+UseSink [GOOD] >> KqpQuery::QueryStats-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD] Test command err: Trying to start YDB, gRPC: 30138, MsgBus: 15399 2025-12-23T18:45:17.154778Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126374953410373:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:17.154859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033c9/r3tmp/tmpI0PkVr/pdisk_1.dat 2025-12-23T18:45:17.374615Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:17.401162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:17.401285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:17.428895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:17.475768Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:17.476380Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126374953410346:2081] 1766515517152973 != 1766515517152976 TServer::EnableGrpc on GrpcPort 30138, node 1 2025-12-23T18:45:17.526254Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:17.526295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:17.526308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:17.526433Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:17.544399Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15399 TClient is connected to server localhost:15399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515517508 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:17.994950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:18.001574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:18.017043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:18.142497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:18.180739Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:18.274088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:18.336930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:20.303763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126387838314103:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.303896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.304252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126387838314113:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.304353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.597747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.621965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.648538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.676715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.707770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.741708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.776301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.826911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.899002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126387838314984:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.899127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.900093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126387838314990:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.900120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126387838314989:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.900163Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:20.903925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sc ... ere not loaded 2025-12-23T18:45:37.547214Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587126462330942285:2081] 1766515537417369 != 1766515537417372 2025-12-23T18:45:37.583144Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64266, node 4 2025-12-23T18:45:37.711381Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:37.711405Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:37.711412Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:37.711499Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:37.768765Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16110 TClient is connected to server localhost:16110 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515537647 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:38.251193Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:38.263546Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:38.276287Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:38.354947Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:38.483586Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:38.534186Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:38.610181Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:41.650229Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126479510813346:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:41.650326Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:41.650776Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126479510813356:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:41.650827Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:41.732220Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:41.775183Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:41.813788Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:41.850488Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:41.885728Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:41.922916Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:41.967375Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:42.023961Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:42.120512Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126483805781520:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:42.120624Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:42.120720Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126483805781525:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:42.121184Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126483805781527:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:42.121252Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:42.124646Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:42.136094Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587126483805781528:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:45:42.212790Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126483805781580:3779] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:42.422794Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126462330942419:2164];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:42.422864Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::DecimalOutOfPrecision+UseOltpSink+EnableParameterizedDecimal [GOOD] >> KqpQuery::CurrentUtcTimestamp >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal-IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal-IsColumn >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] >> KqpParams::EmptyListForListParameterExecuteQuery [GOOD] >> KqpQuery::OlapTemporary [GOOD] >> KqpQuery::OlapCreateAsSelect_Simple >> KqpQuery::QueryCancelWrite [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap >> KqpQuery::QueryCache [GOOD] >> KqpQuery::QueryCacheTtl >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit >> KqpLimits::StreamWrite+Allowed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::EmptyListForListParameterExecuteQuery [GOOD] Test command err: Trying to start YDB, gRPC: 5396, MsgBus: 27717 2025-12-23T18:45:11.328406Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126351443276878:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:11.328475Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033cc/r3tmp/tmp5e0UrO/pdisk_1.dat 2025-12-23T18:45:11.565996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:11.598861Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:11.603242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:11.603342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:11.607091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5396, node 1 2025-12-23T18:45:11.675232Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:11.675263Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:11.675271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:11.675355Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:11.828916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27717 TClient is connected to server localhost:27717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515511656 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:12.128442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:12.145539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:12.261615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:12.361206Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:12.403659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:12.467492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:14.454897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126364328180600:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:14.455015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:14.455364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126364328180610:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:14.455438Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:14.821676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:14.852150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:14.879332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:14.908661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:14.935964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:14.973440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:15.039411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:15.085401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:15.157399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126368623148774:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:15.157541Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:15.157884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126368623148779:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:15.157893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126368623148780:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:15.157941Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:15.161511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:15.171947Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126368623148783:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Trans ... te: Disconnected -> Connecting 2025-12-23T18:45:39.885710Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13305, node 5 2025-12-23T18:45:39.959442Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:39.959469Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:39.959478Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:39.959559Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:40.004867Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7047 TClient is connected to server localhost:7047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515539957 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:40.525414Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:40.539364Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:40.556065Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:40.628936Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:40.760321Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:40.811581Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:40.941092Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:43.792178Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126485105983954:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:43.792259Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:43.792492Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126485105983964:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:43.792528Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:43.893425Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:43.945031Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:43.984207Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:44.027900Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:44.080247Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:44.138839Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:44.180753Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:44.244056Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:44.402384Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126489400952129:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:44.402500Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:44.403430Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126489400952134:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:44.403494Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126489400952135:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:44.403833Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:44.408448Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:44.422720Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2025-12-23T18:45:44.426917Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126489400952138:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:45:44.518302Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126489400952189:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:44.744425Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126467926112943:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:44.744510Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2025-12-23T18:43:02.469154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:43:02.469217Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:43:02.472770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:43:02.503596Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:43:02.503940Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:43:02.504231Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:43:02.563255Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:43:02.569771Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:43:02.569853Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:43:02.571457Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:43:02.571578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:43:02.571634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:43:02.571952Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:43:02.572007Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:43:02.572075Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:43:02.679199Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:43:02.720743Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:43:02.720941Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 
2025-12-23T18:43:02.721054Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:43:02.721091Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:43:02.721125Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:43:02.721164Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:02.721319Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.721398Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.722142Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:43:02.722256Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:43:02.722396Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:02.722455Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:43:02.722495Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:43:02.722538Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:43:02.722575Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:43:02.722626Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:43:02.722673Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:43:02.722785Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.722837Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.722881Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:43:02.729932Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:43:02.730010Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:43:02.730123Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:43:02.730348Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: 
Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:43:02.730402Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:43:02.730456Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:43:02.730515Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:43:02.730560Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:43:02.730596Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:43:02.730644Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:02.731153Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:43:02.731214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:43:02.731272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:43:02.731323Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:02.731374Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:43:02.731407Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:43:02.731445Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:43:02.731481Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:02.731507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:43:02.747547Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:43:02.747637Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:43:02.747675Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:43:02.747739Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:43:02.747860Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:43:02.748486Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:228:2224], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.748545Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:43:02.748608Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# 
[1:227:2223], serverId# [1:228:2224], sessionId# [0:0:0] 2025-12-23T18:43:02.748789Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:43:02.748829Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:43:02.748977Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:43:02.749032Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:43:02.749116Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:43:02.749167Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:43:02.762395Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:43:02.762497Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:43:02.766848Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.766971Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:43:02.767058Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:43:02.767108Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:43:02.767147Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:43:02.767195Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:43:02.767256Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
execution plan for [0:10] at 9437184 executing on unit ExecuteDataTx 2025-12-23T18:45:38.720382Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:10] at 9437184 to execution unit FinishPropose 2025-12-23T18:45:38.720427Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:10] at 9437184 on unit FinishPropose 2025-12-23T18:45:38.720487Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 10 at tablet 9437184 send to client, exec latency: 8 ms, propose latency: 8 ms, status: COMPLETE 2025-12-23T18:45:38.720566Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:10] at 9437184 is DelayComplete 2025-12-23T18:45:38.720598Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2025-12-23T18:45:38.720634Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:10] at 9437184 to execution unit CompletedOperations 2025-12-23T18:45:38.720673Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2025-12-23T18:45:38.720733Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:10] at 9437184 is Executed 2025-12-23T18:45:38.720764Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2025-12-23T18:45:38.720794Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:10] at 9437184 has finished 2025-12-23T18:45:38.724506Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:45:38.724586Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:10] at 9437184 on unit FinishPropose 2025-12-23T18:45:38.724657Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:45:41.728026Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2025-12-23T18:45:41.728100Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2025-12-23T18:45:41.728468Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:496:2470], Recipient [3:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:45:41.728510Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:45:41.728554Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:495:2469], serverId# [3:496:2470], sessionId# [0:0:0] 2025-12-23T18:45:41.728774Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:240:2232]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 
AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? \005?\034?x\000\006 2025-12-23T18:45:41.728814Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:45:41.728903Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:45:41.729673Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2025-12-23T18:45:41.756586Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2025-12-23T18:45:41.756687Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2025-12-23T18:45:41.756732Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2025-12-23T18:45:41.756774Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2025-12-23T18:45:41.756840Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-12-23T18:45:41.756915Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:11] at 9437184 2025-12-23T18:45:41.756958Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2025-12-23T18:45:41.756986Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2025-12-23T18:45:41.757016Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit BlockFailPoint 2025-12-23T18:45:41.757045Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit BlockFailPoint 2025-12-23T18:45:41.757078Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2025-12-23T18:45:41.757110Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit BlockFailPoint 2025-12-23T18:45:41.757138Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2025-12-23T18:45:41.757165Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-12-23T18:45:41.789766Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2025-12-23T18:45:41.790163Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 11 released its data 2025-12-23T18:45:41.790235Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2025-12-23T18:45:41.817566Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:45:41.817641Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-12-23T18:45:41.818457Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 11 at 9437184 restored its data 2025-12-23T18:45:42.020229Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:195: Tablet 9437184 is not ready for [0:11] execution 2025-12-23T18:45:42.021941Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 11 released its data 2025-12-23T18:45:42.022032Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2025-12-23T18:45:42.355738Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:45:42.355814Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-12-23T18:45:42.356615Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 11 at 9437184 restored its data 2025-12-23T18:45:42.592238Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2025-12-23T18:45:42.594203Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 11 released its data 2025-12-23T18:45:42.594277Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2025-12-23T18:45:42.601371Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:45:42.601427Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-12-23T18:45:42.602061Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 11 at 9437184 restored its data 2025-12-23T18:45:43.960814Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2025-12-23T18:45:43.960948Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:45:43.961044Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2025-12-23T18:45:43.961092Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2025-12-23T18:45:43.961133Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit FinishPropose 2025-12-23T18:45:43.961174Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit FinishPropose 2025-12-23T18:45:43.961233Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 6 ms, propose latency: 6 ms, status: COMPLETE 2025-12-23T18:45:43.961316Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: 
Execution status for [0:11] at 9437184 is DelayComplete 2025-12-23T18:45:43.961347Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2025-12-23T18:45:43.961379Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit CompletedOperations 2025-12-23T18:45:43.961412Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2025-12-23T18:45:43.961467Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2025-12-23T18:45:43.961495Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2025-12-23T18:45:43.961525Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:11] at 9437184 has finished 2025-12-23T18:45:43.966276Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:45:43.966368Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:11] at 9437184 on unit FinishPropose 2025-12-23T18:45:43.966437Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForSuccessExplicitPrepare >> KqpQuery::SelectWhereInSubquery [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink |95.6%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpExplain::ReadTableRanges [GOOD] >> KqpExplain::Predicates |95.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpStats::MultiTxStatsFullScan [GOOD] >> KqpExplain::UpdateOnSecondary-UseSink [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink >> KqpParams::Decimal+QueryService-UseSink [GOOD] >> KqpParams::Decimal-QueryService+UseSink >> TKeyValueTracingTest::ReadHuge >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal-IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal+IsColumn >> KqpLimits::KqpMkqlMemoryLimitException >> KqpStats::CreateTableAsStats-IsOlap [GOOD] >> KqpExplain::UpdateOn+UseSink [GOOD] >> KqpExplain::UpdateOn-UseSink >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink >> KqpPg::TableDeleteAllData+useSink [GOOD] >> KqpPg::TableDeleteAllData-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::MultiTxStatsFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 25559, MsgBus: 12790 2025-12-23T18:45:23.300594Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126399501348194:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:23.300706Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033ba/r3tmp/tmpUeYl6c/pdisk_1.dat 2025-12-23T18:45:23.615777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:23.615896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:23.687441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:23.700867Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126399501348144:2081] 1766515523282153 != 1766515523282156 2025-12-23T18:45:23.703254Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:23.707319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25559, node 1 2025-12-23T18:45:23.855793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:23.855825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:23.855833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:23.855903Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:23.872383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12790 TClient is connected to server localhost:12790 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:45:24.311528Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515523766 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:24.341965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:24.347812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:24.358006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:45:24.483212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.645233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:24.705893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:26.593937Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126412386251903:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.594105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.594592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126412386251913:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.594662Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.949826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.989145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.018928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.052805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.084269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.119346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.153793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.202614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.298506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126416681220080:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.298585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.298867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126416681220085:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.298929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126416681220086:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.299064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.305966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... 94046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:48.317270Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:48.354360Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:48.394439Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:48.431871Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:48.476167Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:48.533165Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:48.625278Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126510359357528:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:48.625386Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:48.625736Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126510359357534:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:48.625755Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126510359357533:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:48.625792Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:48.630189Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:48.644369Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587126510359357537:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:45:48.707505Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126510359357588:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:48.919622Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126488884518435:2156];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:48.919688Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:45:50.813297Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515550821, txId: 281474976710673] shutting down {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Limit":"4","Name":"Limit"}],"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Limit":"4","Name":"TopSort","TopSortBy":"row.Data"}],"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"TopSort","Stats":{"UseLlvm":"undefined","Table":[{"Path":"\/Root\/EightShard","ReadBytes":{"Count":1,"Max":54,"Sum":54,"Min":54},"ReadRows":{"Count":1,"Max":3,"Sum":3,"Min":3}}],"OutputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"PhysicalStageId":0,"FinishedTasks":1,"Introspections":["1 tasks for a single\/sequential source 
scan"],"IngressRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"DurationUs":{"Count":1,"Max":1000,"Sum":1000,"Min":1000},"Mkql":{},"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"History":[1,1048576,4,1048576],"Min":1048576},"BaseTimeMs":1766515550781,"Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":36,"Sum":36,"History":[4,36],"Min":36},"FirstMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3}},"Name":"4","Push":{"LastMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":3,"Sum":3,"Min":3},"ResumeMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"FirstMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"PauseMessageMs":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":2658,"Sum":2658,"History":[4,2658],"Min":2658},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":1}}}],"CpuTimeUs":{"Count":1,"Max":1081,"Sum":1081,"History":[1,934,4,1081],"Min":1081},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":144,"Sum":144,"History":[4,144],"Min":144},"FirstMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3}},"External":{},"Ingress":{},"Name":"KqpReadRangesSource","Push":{"LastMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"FirstMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":144,"Sum":144,"History":[4,144],"Min":144},"WaitTimeUs":{"Count":1,"Max":2571,"Sum":2571,"History":[4,2571],"Min":2571},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1}}}],"StageDurationUs":1000,"OutputBytes":{"Count":1,"Max":36,"Sum":36,"Min":36},"Tasks":1,"UpdateTimeMs":4}}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"PhysicalStageId":1,"FinishedTasks":1,"InputBytes":{"Count":1,"Max":36,"Sum":36,"Min":36},"Introspections":["1 minimum tasks for 
compute"],"DurationUs":{"Count":1,"Max":2000,"Sum":2000,"Min":2000},"Mkql":{},"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"History":[2,1048576,5,1048576],"Min":1048576},"BaseTimeMs":1766515550781,"Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":36,"Sum":36,"History":[5,36],"Min":36},"FirstMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":3,"Sum":3,"Min":3},"ResumeMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"FirstMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"PauseMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"WaitTimeUs":{"Count":1,"Max":1911,"Sum":1911,"History":[5,1911],"Min":1911},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":2}}}],"CpuTimeUs":{"Count":1,"Max":612,"Sum":612,"History":[2,490,5,612],"Min":612},"StageDurationUs":2000,"ResultRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"ResultBytes":{"Count":1,"Max":36,"Sum":36,"Min":36},"OutputBytes":{"Count":1,"Max":36,"Sum":36,"Min":36},"Input":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":36,"Sum":36,"History":[5,36],"Min":36},"FirstMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"FirstMessageMs":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":36,"Sum":36,"History":[5,36],"Min":36},"PauseMessageMs":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":1846,"Sum":1846,"History":[5,1846],"Min":1846},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":1}}}],"InputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Tasks":1,"UpdateTimeMs":5}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"TotalDurationUs":209389,"ProcessCpuTimeUs":275,"Compilation":{"FromCache":false,"CpuTimeUs":176380,"DurationUs":182041}}},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-Rows":3,"A-SelfCpu":0.612,"A-Size":36,"A-Cpu":1.693,"Limit":"4","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"A-Rows":3,"A-SelfCpu":1.081,"A-Size":36,"A-Cpu":1.081,"Limit":"4","Name":"TopSort","TopSortBy":"row.Data"}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"1","ReadRange":["Key [150, 266]"],"E-Cost":"0","ReadColumns":["Data","Key","Text"],"Name":"TableRangeScan","E-Size":"0","Table":"EightShard"}],"Node Type":"TableRangeScan"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries >> KqpQuery::UpdateThenDelete+UseSink [GOOD] >> TKeyValueTracingTest::ReadHuge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::CreateTableAsStats-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 28597, 
MsgBus: 20115 2025-12-23T18:45:19.842872Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126385070673267:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:19.843059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033c6/r3tmp/tmpxySUPE/pdisk_1.dat 2025-12-23T18:45:20.084393Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:20.088033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:20.088139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:20.134873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:20.184177Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:20.185429Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126385070673232:2081] 1766515519841490 != 1766515519841493 TServer::EnableGrpc on GrpcPort 28597, node 1 2025-12-23T18:45:20.231581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:20.231607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:20.231614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:20.231693Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:20.358205Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20115 TClient is connected to server localhost:20115 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515520210 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:45:20.735397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:20.752515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:20.852904Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:20.875539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:21.032511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:21.095952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:22.842372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126397955576990:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:22.842515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:22.842848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126397955577000:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:22.842899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:23.168114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:23.211184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:23.242492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:23.284522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:23.324591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:23.362811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:23.437042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:23.489630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:23.580756Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126402250545169:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:23.580856Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:23.581111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126402250545174:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:23.581141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126402250545175:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:23.581273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:23.585334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:23.597268Z node 1 ... sCpuTimeUs":1047,"TotalDurationUs":333928,"ResourcePoolId":"default","QueuedTimeUs":0},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","PlanNodeType":"Query"}} 2025-12-23T18:45:50.876132Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:45:50.885954Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 query_phases { duration_us: 12466 table_access { name: "/Root/.tmp/sessions/5b7c4d55-4a9e-170c-f5b3-eba8b81a96a8/Root/Destination_8bc6c3b8-4ee6-75f1-b14b-42b8f03330b8" updates { rows: 2 bytes: 24 } partitions_count: 1 } table_access { name: "/Root/Source" reads { rows: 2 bytes: 24 } partitions_count: 1 } cpu_time_us: 5015 affected_shards: 1 } compilation { duration_us: 14120 cpu_time_us: 8593 } process_cpu_time_us: 1047 query_plan: "{\"Plan\":{\"Plans\":[{\"Tables\":[\"Destination\"],\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Source\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Source\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"Source\",\"ReadColumns\":[\"Col1 (-\342\210\236, +\342\210\236)\",\"Col2\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Table\":[{\"Path\":\"\\/Root\\/Source\",\"ReadRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ReadBytes\":{\"Count\":1,\"Sum\":24,\"Max\":24,\"Min\":24}}],\"OutputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"PhysicalStageId\":0,\"FinishedTasks\":1,\"Introspections\":[\"1 tasks for a single\\/sequential source 
scan\"],\"IngressRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"DurationUs\":{\"Count\":1,\"Sum\":1000,\"Max\":1000,\"Min\":1000},\"Mkql\":{},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[3,1048576]},\"BaseTimeMs\":1766515550740,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7,\"History\":[3,7]}},\"Name\":\"4\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":1231,\"Max\":1231,\"Min\":1231,\"History\":[3,1231]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"CpuTimeUs\":{\"Count\":1,\"Sum\":681,\"Max\":681,\"Min\":681,\"History\":[2,681]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64,\"History\":[3,64]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FirstMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64,\"History\":[3,64]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1272,\"Max\":1272,\"Min\":1272,\"History\":[3,1272]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"StageDurationUs\":1000,\"OutputBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"UpdateTimeMs\":2,\"Tasks\":1}}],\"Node Type\":\"Map\",\"PlanNodeType\":\"Connection\"}],\"Node 
Type\":\"Stage\",\"Stats\":{\"Egress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28,\"History\":[9,28]}},\"Name\":\"KqpTableSink\",\"Egress\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Splits\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"ActiveMessageMs\":{\"Count\":1,\"Max\":8,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":6000,\"Max\":6000,\"Min\":6000}},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28,\"History\":[9,28]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1145,\"Max\":1145,\"Min\":1145,\"History\":[9,1145]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":2,\"Min\":1}}}],\"UseLlvm\":\"undefined\",\"Table\":[{\"Path\":\"\\/Root\\/.tmp\\/sessions\\/5b7c4d55-4a9e-170c-f5b3-eba8b81a96a8\\/Root\\/Destination_8bc6c3b8-4ee6-75f1-b14b-42b8f03330b8\"}],\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"Introspections\":[\"1 tasks same as previous stage\"],\"EgressBytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"DurationUs\":{\"Count\":1,\"Sum\":6000,\"Max\":6000,\"Min\":6000},\"Mkql\":{},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[9,1048576]},\"BaseTimeMs\":1766515550740,\"CpuTimeUs\":{\"Count\":1,\"Sum\":603,\"Max\":603,\"Min\":603,\"History\":[9,603]},\"EgressRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"StageDurationUs\":6000,\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7,\"History\":[9,7]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7,\"History\":[9,7]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1497,\"Max\":1497,\"Min\":1497,\"History\":[9,1497]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":9,\"InputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1}}],\"Operators\":[{\"Inputs\":[],\"Path\":\"\\/Root\\/Destination\",\"Name\":\"FillTable\",\"Table\":\"Destination\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Sink\"}],\"Node 
Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":14120,\"CpuTimeUs\":8593},\"ProcessCpuTimeUs\":1047,\"TotalDurationUs\":333928,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":0},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"FillTable\",\"Table\":\"Destination\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"FillTable\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Source\" \'\"72057594046644480:42\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Col1\" \'\"Col2\") \'() (Void) \'()))\n(let $3 \'(\'(\'\"_logical_id\" \'474) \'(\'\"_id\" \'\"41f59d60-a9ff8ad9-f2450233-a50ecc60\") \'(\'\"_partition_mode\" \'\"single\") \'(\'\"_wide_channels\" (StructType \'(\'\"Col1\" (DataType \'Uint64)) \'(\'\"Col2\" (OptionalType (DataType \'Int32)))))))\n(let $4 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($10) (block \'(\n (let $11 (lambda \'($12) (Member $12 \'\"Col1\") (Member $12 \'\"Col2\")))\n (return (FromFlow (ExpandMap (ToFlow $10) $11)))\n))) $3))\n(let $5 (DqCnMap (TDqOutput $4 \'\"0\")))\n(let $6 \'\"/Root/.tmp/sessions/5b7c4d55-4a9e-170c-f5b3-eba8b81a96a8/Root/Destination_8bc6c3b8-4ee6-75f1-b14b-42b8f03330b8\")\n(let $7 (KqpTable $6 \'\"\" \'\"\" \'\"\"))\n(let $8 (KqpTableSinkSettings $7 \'\"true\" \'\"fill_table\" \'\"0\" \'\"true\" \'\"false\" \'\"false\" \'(\'(\'\"OriginalPath\" \'\"/Root/Destination\"))))\n(let $9 (DqPhyStage \'($5) (lambda \'($13) (FromFlow (NarrowMap (ToFlow $13) (lambda \'($14 $15) (AsStruct \'(\'\"Col1\" $14) \'(\'\"Col2\" $15)))))) \'(\'(\'\"_logical_id\" \'539) \'(\'\"_id\" \'\"a93830ff-d4561db9-5446a898-1677796\")) \'((DqSink \'\"0\" (DataSink \'\"KqpTableSink\" \'\"db\") $8))))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($4 $9) \'() \'() \'(\'(\'\"type\" \'\"generic\") \'(\'\"with_effects\")))) \'() \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 333928 total_cpu_time_us: 14655 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/.tmp/sessions/5b7c4d55-4a9e-170c-f5b3-eba8b81a96a8/Root/Destination_8bc6c3b8-4ee6-75f1-b14b-42b8f03330b8\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":47},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Col1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":true,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Col2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Col1\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1766515550\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"2b39648b-f77a5413-38533d25-b1e5618e\",\"version\":\"1.0\"}" 2025-12-23T18:45:51.156120Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126499235192880:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:51.156193Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpParams::RowsList [GOOD] >> KqpParams::ParameterTypes >> KqpQuery::QueryStats-UseSink [GOOD] >> KqpQuery::QueryFromSqs >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpQuery::DdlInDataQuery >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> KqpQuery::CreateAsSelectView |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateThenDelete+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 30232, MsgBus: 26608 2025-12-23T18:45:21.064081Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126393340324965:2248];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:21.064302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033c4/r3tmp/tmpSX3GkD/pdisk_1.dat 2025-12-23T18:45:21.291064Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:21.306024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-12-23T18:45:21.306136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:21.353718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:21.388268Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:21.390350Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126393340324756:2081] 1766515521052475 != 1766515521052478 TServer::EnableGrpc on GrpcPort 30232, node 1 2025-12-23T18:45:21.471546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:21.471573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:21.471581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:21.471662Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:21.521579Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26608 TClient is connected to server localhost:26608 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515521456 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:21.945849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:21.965610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:22.058712Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:22.069608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:22.206599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:22.274940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:24.177302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126406225228520:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.177402Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.177726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126406225228530:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.177793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.463177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.497152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.533069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.568578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.607424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.643106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.712418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.751314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:24.831408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126406225229403:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.831519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.831752Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126406225229409:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.831786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126406225229408:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.831789Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:24.835902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:24.850468Z node 1 ... Connecting -> Connected TServer::EnableGrpc on GrpcPort 15795, node 4 2025-12-23T18:45:45.870959Z node 4 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:45:45.870981Z node 4 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:45:45.897509Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:45.935772Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:45.935798Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:45.935805Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:45.935893Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:46.049707Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5524 TClient is connected to server localhost:5524 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515545977 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-12-23T18:45:46.467949Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:45:46.477571Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:46.486462Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:46.560608Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:46.714947Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:46.754955Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:46.822487Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:49.759053Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126513746131986:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:49.759137Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:49.759416Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126513746131998:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:49.759466Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:49.810425Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:49.845595Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:49.886961Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:49.918477Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:49.957137Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:49.997107Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:50.036579Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:50.099722Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:50.222217Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126518041100162:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:50.222322Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:50.222884Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126518041100167:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:50.222899Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126518041100168:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:50.222969Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:50.227053Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:50.242821Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587126518041100171:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:45:50.306560Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126518041100222:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:50.698949Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126496566260974:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:50.699086Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::MixedCreateAsSelect [GOOD] >> TKeyValueTracingTest::WriteHuge >> KqpQuery::QueryCancelWriteImmediate [GOOD] >> KqpQuery::QueryExplain >> TKeyValueTracingTest::ReadSmall >> KqpTypes::SelectNull [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64-IsColumn >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [FAIL] >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink >> KqpExplain::AggGroupLimit [GOOD] >> KqpExplain::ComplexJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::MixedCreateAsSelect [GOOD] Test command err: Trying to start YDB, gRPC: 28992, MsgBus: 18092 2025-12-23T18:45:10.066594Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126346291454971:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:10.066682Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033ce/r3tmp/tmpZ5pmAb/pdisk_1.dat 2025-12-23T18:45:10.283898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:10.284018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:10.313986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:10.333085Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:10.334884Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126346291454930:2081] 1766515510065289 != 1766515510065292 2025-12-23T18:45:10.342761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28992, node 1 2025-12-23T18:45:10.411951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:10.411976Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:10.411987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:10.412148Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:10.566960Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18092 TClient is connected to server localhost:18092 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515510389 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:10.868734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:10.880687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:10.994497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:11.090308Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:11.133693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:11.197727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:13.030525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126359176358689:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:13.030650Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:13.031002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126359176358699:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:13.031083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:13.321841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:13.354488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:13.386349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:13.416701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:13.447561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:13.479412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:13.528983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:13.569627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:13.648656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126359176359571:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:13.648746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:13.649001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126359176359576:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:13.649079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126359176359577:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:13.649122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:13.652844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:13.663530Z node 1 ... Result TxId: 281474976710669 2025-12-23T18:45:51.773210Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037989 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773254Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037983 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773297Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037985 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773331Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037981 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773364Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037979 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773402Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037975 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773438Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037977 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773474Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038006 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773510Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038002 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773545Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038004 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773577Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038000 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773610Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037998 unhandled event type: 
NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773644Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037996 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773681Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773713Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773748Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038015 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773791Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038013 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773827Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038009 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773863Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038011 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773896Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037956 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773925Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773954Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.773983Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774014Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774045Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774078Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: 
TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774111Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774145Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774177Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774214Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774248Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774291Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774297Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774333Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774367Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774373Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774398Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.774422Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2025-12-23T18:45:51.779221Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:45:51.788431Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:45:51.896708Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126522656350671:6049] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:51.911109Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:53.025569Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:45:53.035882Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute >> TKeyValueTracingTest::WriteHuge [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap [GOOD] >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [FAIL] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> TKeyValueTracingTest::ReadSmall [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal+IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn |95.6%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQuery::QueryCacheTtl [GOOD] >> KqpQuery::QueryCacheInvalidate >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] [FAIL] >> KqpExplain::Predicates [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpected |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> KqpParams::ParameterTypes [GOOD] >> KqpQuery::CreateAsSelectBadTypes+IsOlap |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] [FAIL] >> KqpParams::Decimal-QueryService+UseSink [GOOD] >> KqpParams::Decimal+QueryService+UseSink |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> KqpExplain::UpdateOn-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::Predicates [GOOD] Test command err: Trying to start YDB, gRPC: 25516, MsgBus: 13833 2025-12-23T18:45:22.760228Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126398460443131:2238];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:22.760278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:45:22.785393Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033c2/r3tmp/tmp7AKPL3/pdisk_1.dat 2025-12-23T18:45:23.025444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:23.025556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:23.033526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:23.137810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:23.138481Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:23.139724Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126398460442918:2081] 1766515522737257 != 1766515522737260 TServer::EnableGrpc on GrpcPort 25516, node 1 2025-12-23T18:45:23.257813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:23.257837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:23.257849Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:23.257971Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:23.349442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13833 TClient is connected to server localhost:13833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:45:23.770832Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515523150 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:23.829690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:23.835544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:23.847928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:24.007966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:24.185455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:24.272971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:26.150734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126415640313972:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.150849Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.151227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126415640313982:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.151305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.409128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.441643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.476907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.511232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.547718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.592768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.637286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.715059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.810462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126415640314855:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.810585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.810665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126415640314860:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.812321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126415640314863:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.812388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 20 ... nableGrpc on GrpcPort 8884, node 5 2025-12-23T18:45:51.844673Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:45:51.877905Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:51.877931Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:51.877938Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:51.878041Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17036 TClient is connected to server localhost:17036 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515551864 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:52.438117Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:52.443720Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:52.455512Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:52.526513Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:52.633732Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:52.689462Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:52.767319Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:55.691078Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126538983848355:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:55.691180Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:55.691958Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126538983848367:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:55.692038Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:55.757136Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:55.798407Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:55.833133Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:55.873162Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:55.911978Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:55.952626Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:55.994847Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:56.043943Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:56.147298Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126543278816532:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:56.147382Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:56.147557Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126543278816537:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:56.147671Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126543278816539:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:56.147723Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:56.151556Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:56.164512Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126543278816541:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:45:56.262512Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126543278816592:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:56.621580Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126521803977331:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:56.621650Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:45:58.204474Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_restarts.py::test_basic >> KqpTypes::Time64Columns+EnableTableDatetime64-IsColumn [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink >> KqpQuery::DdlInDataQuery [GOOD] >> KqpQuery::CreateAsSelectView [GOOD] >> KqpQuery::CreateTableAs_MkDir >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable >> KqpQuery::QueryFromSqs [GOOD] >> KqpStats::OneShardNonLocalExec+UseSink [GOOD] >> KqpStats::OneShardNonLocalExec-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOn-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8732, MsgBus: 10874 2025-12-23T18:45:23.711103Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126400614290643:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:23.711155Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033b9/r3tmp/tmp7WoK9A/pdisk_1.dat 2025-12-23T18:45:23.982463Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:24.009804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:24.009934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:24.058707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:24.118935Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 8732, node 1 2025-12-23T18:45:24.183236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:24.183253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:24.183259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:24.183332Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:24.189946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10874 TClient is connected to server localhost:10874 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515524137 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:24.675351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:24.686912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:24.708433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:24.719828Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:24.837580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:45:25.002947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.074160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:26.902177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126413499194351:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.902253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.902542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126413499194361:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.902594Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.214694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.253560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.295992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.338608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.374554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.414592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.458242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.539894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.620116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126417794162531:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.620214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.620547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126417794162537:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.620585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126417794162536:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.620629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.625361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:27.637644Z node 1 :KQP_WORKLOAD_S ... "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:53.934777Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:53.961071Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:54.024182Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:54.149761Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:54.245090Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:54.325334Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:57.502812Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126549029151064:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:57.502960Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:57.504454Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126549029151074:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:57.504558Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:57.505351Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126549029151076:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:57.505438Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:57.565109Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:57.606075Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:57.644345Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:57.684303Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:57.716689Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:57.761561Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:57.805432Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:57.859722Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:57.948685Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126549029151946:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:57.948798Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:57.949035Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126549029151951:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:57.949082Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126549029151952:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:57.949151Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:57.956670Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:57.968839Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126549029151955:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:45:58.036039Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126553324119302:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:58.116317Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126531849280044:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:58.117111Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"Tables":["EightShard"],"PlanNodeId":8,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"Effect"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":2,"Path":"\/Root\/EightShard","Columns":["Key"],"E-Rows":"2","Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"precompute_0_1","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_1"}],"Table":"EightShard","PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Key"],"type":"Lookup"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":8,"Operators":[{"E-Rows":"2","Columns":["Key"],"Name":"TableLookup","E-Cost":"0","E-Size":"0","LookupKeyColumns":["Key"],"Table":"EightShard"}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> KqpQuery::QueryExplain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DdlInDataQuery [GOOD] Test command err: Trying to start YDB, gRPC: 10095, MsgBus: 8486 2025-12-23T18:45:32.902989Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126440504107286:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:32.903083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033a5/r3tmp/tmppVWb3E/pdisk_1.dat 2025-12-23T18:45:33.156560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:33.156659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:33.201114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:33.258415Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:33.260950Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:33.262851Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126440504107262:2081] 1766515532902040 != 1766515532902043 TServer::EnableGrpc on GrpcPort 10095, node 1 2025-12-23T18:45:33.336147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:33.336175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:33.336185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:33.336318Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:33.469846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8486 TClient is connected to server localhost:8486 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515533265 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:45:33.858285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:33.912943Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:36.046949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126457683977329:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.047137Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.047623Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126457683977341:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.047677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126457683977342:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.047972Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.052427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:36.062752Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126457683977345:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:45:36.153401Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126457683977396:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:36.404094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:36.672166Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=NDAxNzU2YjAtZjI0MTk5ZDUtYWI1NzRjODQtYzcwZGY1ODc=, ActorId: [1:7587126457683977506:2339], ActorState: ExecuteState, TraceId: 01kd68fwkn1m9pj1xran8sp9e5, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1256: Invalid Decimal value for precision: , status: BAD_REQUEST Trying to start YDB, gRPC: 63640, MsgBus: 61773 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033a5/r3tmp/tmpelledN/pdisk_1.dat 2025-12-23T18:45:37.651342Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:45:37.651439Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:37.730283Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:37.733191Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587126463280504818:2081] 1766515537576126 != 1766515537576129 2025-12-23T18:45:37.763667Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:37.763790Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:37.777254Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63640, node 2 2025-12-23T18:45:37.883637Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:37.883663Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:37.883670Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:37.883765Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:37.885644Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61773 TClient 
is connected to server localhost:61773 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515537822 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:38.343334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:38.645433Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:41.036378Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126480460374865:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access pe ... RD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:45:55.859353Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:56.018648Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:56.025623Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:56.092210Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:58.976501Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126552461306835:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:58.976622Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:58.977058Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126552461306845:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:58.977120Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.051122Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:59.096977Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:59.130223Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:59.161359Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:59.201375Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:59.272893Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:59.317612Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:59.375172Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:59.463238Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126556756275012:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.463365Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.463442Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126556756275017:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.463620Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126556756275019:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.463668Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.467848Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:59.479386Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126556756275020:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:45:59.572825Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126556756275072:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:00.009578Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126539576403125:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:00.010322Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:01.617420Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7587126565346210006:2537], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2025-12-23T18:46:01.617805Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=5&id=MzYwZTQwZWMtMWJjMTk0N2QtNjliNGYyM2YtYTI1NDg1OWE=, ActorId: [5:7587126565346209997:2532], ActorState: ExecuteState, TraceId: 01kd68gn3vc8zwb3jx4xfv0ny0, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 5 column: 30 } message: "Operation \'CreateTable\' can\'t be performed in data query" end_position { row: 5 column: 30 } issue_code: 2008 severity: 1 } }, remove tx with tx_id:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2025-12-23T18:46:01.658634Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7587126565346210019:2540], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2025-12-23T18:46:01.659892Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=5&id=MzYwZTQwZWMtMWJjMTk0N2QtNjliNGYyM2YtYTI1NDg1OWE=, ActorId: [5:7587126565346209997:2532], ActorState: ExecuteState, TraceId: 01kd68gn5851w9f7rjenq0zwnp, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 24 } message: "Operation \'DropTable\' can\'t be performed in data query" end_position { row: 2 column: 24 } issue_code: 2008 severity: 1 } }, remove tx with tx_id:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2025-12-23T18:46:01.690528Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7587126565346210028:2544], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2025-12-23T18:46:01.690881Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=5&id=MzYwZTQwZWMtMWJjMTk0N2QtNjliNGYyM2YtYTI1NDg1OWE=, ActorId: [5:7587126565346209997:2532], ActorState: ExecuteState, TraceId: 01kd68gn6827w7j0hynq8n84wg, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 54 } message: "Operation \'AlterTable\' can\'t be performed in data query" end_position { row: 2 column: 54 } issue_code: 2008 severity: 1 } }, remove tx with tx_id:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] >> KqpQuery::TableSinkWithSubquery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 19932, MsgBus: 62712 2025-12-23T18:45:32.885448Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126441557044283:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:32.885521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033a6/r3tmp/tmp78lLmA/pdisk_1.dat 2025-12-23T18:45:33.173384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:33.173492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:33.203612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:33.240596Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:33.240724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:33.241744Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126441557044254:2081] 1766515532883860 != 1766515532883863 TServer::EnableGrpc on GrpcPort 19932, node 1 2025-12-23T18:45:33.296682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:33.296706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:33.296717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:33.296789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62712 2025-12-23T18:45:33.491498Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:62712 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515533272 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:33.757649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:33.764350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:33.770357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:33.893850Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:33.915448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:34.077868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:34.158038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:36.087996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126458736915323:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.088124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.088777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126458736915333:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.088874Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.439732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:36.475334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:36.506277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:36.538434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:36.571741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:36.609169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:36.646588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:36.705620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:36.781442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126458736916201:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.781499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.781550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126458736916206:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.781583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126458736916208:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.781628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.785361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sc ... om file: (empty maybe) 2025-12-23T18:45:54.991564Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:55.100755Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20548 TClient is connected to server localhost:20548 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515554993 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:55.496937Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:55.514711Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:55.578850Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:55.722583Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:55.786793Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:55.792954Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:58.616348Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126552030598820:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:58.616709Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:58.618014Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126552030598830:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:58.618089Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:58.618265Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126552030598832:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:58.618300Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:58.671842Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:58.713228Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:58.752570Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:58.791667Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:58.835262Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:58.880850Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:58.929983Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:58.994678Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:59.100309Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126556325566997:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.100390Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.100520Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126556325567002:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.100562Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126556325567004:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.100612Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.104322Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:59.115273Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587126556325567006:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:45:59.185438Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126556325567057:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:59.773067Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126534850727799:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:59.773158Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:01.108503Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [FAIL] >> KqpStats::RequestUnitForExecute [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 28553, MsgBus: 1251 2025-12-23T18:45:30.422401Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126432812345741:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:30.422550Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033a9/r3tmp/tmpp5ErgR/pdisk_1.dat 2025-12-23T18:45:30.648474Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:30.725014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:30.725144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:30.734597Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:30.736305Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126432812345702:2081] 1766515530416168 != 1766515530416171 2025-12-23T18:45:30.758629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28553, node 1 2025-12-23T18:45:30.833045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:45:30.869820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:30.869841Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:30.869847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:30.869937Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1251 TClient is connected to server localhost:1251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515530808 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:31.385721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:31.392200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:31.436620Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:33.472570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126445697248487:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:33.473055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126445697248463:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:33.473152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:33.474857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126445697248492:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:33.474952Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:33.477411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:33.491594Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126445697248490:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:45:33.595336Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126445697248543:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:33.890291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:34.594693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:34.852975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:45:34.869685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715667, at schemeshard: 72057594046644480 2025-12-23T18:45:34.870879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 18184, MsgBus: 61241 2025-12-23T18:45:35.840078Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587126452068829296:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:35.840198Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033a9/r3tmp/tmpHwthV1/pdisk_1.dat 2025-12-23T18:45:35.872915Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:35.943706Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:35.945340Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587126452068829259:2081] 1766515535838446 != 1766515535838449 2025-12-23T18:45:36.005241Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:36.005313Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-12-23T18:45:36.008058Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18184, node 2 2025-12-23T18:45:36.075489Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:36.075510Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:36.075517Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:36.075589Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:36.112682Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61241 TClient is connected to server localhost:61241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 ... 44 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2025-12-23T18:45:56.800468Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037898 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2025-12-23T18:45:56.800530Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037942 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2025-12-23T18:45:56.800562Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037900 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2025-12-23T18:45:56.800586Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037889 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2025-12-23T18:45:56.800613Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037894 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2025-12-23T18:45:56.804716Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:45:56.811848Z node 4 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 64521, MsgBus: 23171 2025-12-23T18:45:58.075113Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587126551464405777:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:58.076353Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033a9/r3tmp/tmpaBHUPh/pdisk_1.dat 2025-12-23T18:45:58.087853Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:58.179299Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:58.211638Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:58.211743Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:58.214706Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64521, node 5 2025-12-23T18:45:58.260012Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:58.260035Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:58.260044Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:58.260141Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:58.345786Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23171 TClient is connected to server localhost:23171 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515558262 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:58.791584Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:58.797960Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:58.804414Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:45:58.849490Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:45:59.080578Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:46:02.156778Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126568644275888:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:02.156816Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126568644275896:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:02.156902Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:02.157342Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126568644275903:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:02.157417Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:02.161167Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:02.172107Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126568644275902:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-12-23T18:46:02.272867Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126568644275955:2600] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:02.314480Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:02.617636Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:02.923040Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:02.930789Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:02.945768Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:03.073197Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126551464405777:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:03.073285Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryExplain [GOOD] Test command err: Trying to start YDB, gRPC: 10768, MsgBus: 8431 2025-12-23T18:45:22.174873Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126396741853073:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:22.174964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033c3/r3tmp/tmpRQ2luS/pdisk_1.dat 2025-12-23T18:45:22.395581Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:22.416128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:22.416203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:22.462293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:22.489625Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:22.492122Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126396741853045:2081] 1766515522173440 != 1766515522173443 TServer::EnableGrpc on GrpcPort 10768, node 1 2025-12-23T18:45:22.558442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:22.558467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:22.558475Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:22.558567Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:22.606217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8431 TClient is connected to server localhost:8431 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515522527 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-12-23T18:45:23.022075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:45:23.028609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:23.035237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:23.169636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:23.182034Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:45:23.362911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:23.436968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:25.508694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126409626756804:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.508818Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.509130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126409626756814:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.509178Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:25.857013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.884700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.912212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.940196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:25.972187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.003174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.040539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.085783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.156769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126413921724979:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.156868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.157132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126413921724985:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.157154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126413921724984:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.157216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.161441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schem ... 4976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:57.326865Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:57.514250Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:57.607901Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:57.638569Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:00.590721Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126560887235148:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:00.591021Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:00.591360Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126560887235158:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:00.591437Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:00.680312Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:00.720310Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:00.757216Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:00.794173Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:00.838947Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:00.884479Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:00.936634Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.011658Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.115365Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126565182203328:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.115471Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.115897Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126565182203333:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.115940Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126565182203334:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.115979Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.119948Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:01.129286Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126565182203337:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:46:01.223766Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126565182203388:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:01.556544Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126543707364124:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:01.556629Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:45" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 '('('"ItemsLimit" $3) '('"Sequential" '1) '('"PointPrefixLen" '1))) (let $5 (Uint32 '1)) (let $6 (KqpRowsSourceSettings $1 $2 $4 '((KqlKeyExc $5 (String '"Name")) (KqlKeyInc $5)))) (let $7 (OptionalType (DataType 'String))) (let $8 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $7) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $7))) (let $9 '('('"_logical_id" '715) '('"_id" '"a0acc77f-9774d5a1-69a7cd69-e508122b") '('"_partition_mode" '"single") '('"_wide_channels" $8))) (let $10 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $6)) (lambda '($14) (block '( (let $15 (lambda '($16) (Member $16 '"Amount") (Member $16 '"Comment") (Member $16 '"Group") (Member $16 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $14) $3) $15))) ))) $9)) (let $11 (DqCnUnionAll (TDqOutput $10 '"0"))) (let $12 (DqPhyStage '($11) (lambda '($17) (FromFlow (NarrowMap (Take (ToFlow $17) $3) (lambda '($18 $19 $20 $21) (AsStruct '('"Amount" $18) '('"Comment" $19) '('"Group" $20) '('"Name" $21)))))) '('('"_logical_id" '728) '('"_id" '"4999a2b1-e4fceefc-252e394d-1780bb8d")))) (let $13 (DqCnResult (TDqOutput $12 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($10 $12) '($13) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $8) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"0","ReadLimit":"1001","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Test","E-Rows":"1","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, 
+∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"0","ReadLimit":"1001","Name":"TableRangeScan","E-Rows":"1","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> KqpQuery::CreateAsSelectBadTypes+IsOlap [GOOD] >> KqpQuery::CreateAsSelectBadTypes-IsOlap |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [FAIL] |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> KqpParams::CheckCacheByAst [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: Trying to start YDB, gRPC: 21154, MsgBus: 17786 2025-12-23T18:45:37.020601Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126459735696309:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:37.020637Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003391/r3tmp/tmpiCT7Lt/pdisk_1.dat 2025-12-23T18:45:37.230924Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:37.248146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:37.248284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:37.316315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:37.360867Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21154, node 1 2025-12-23T18:45:37.481221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:45:37.505211Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:37.505231Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:37.505237Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:37.505314Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17786 TClient is connected to server localhost:17786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:45:38.030146Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515537388 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:38.077595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:38.098273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:38.225201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:38.402901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:38.466056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:40.371733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126472620600026:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:40.371914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:40.372349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126472620600036:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:40.372413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:40.783860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:40.834415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:40.870486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:40.906813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:40.939517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:40.993899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:41.064182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:41.108208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:41.192025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126476915568206:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:41.192128Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:41.192338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126476915568212:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:41.192339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126476915568211:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:41.192381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:41.196605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:41.207134Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126476915568215:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Tra ... :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:57.872104Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:57.872199Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:57.924494Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25787, node 4 2025-12-23T18:45:57.982848Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:45:57.982897Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:57.982907Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:57.982915Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:57.983037Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16399 TClient is connected to server localhost:16399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515558003 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:58.534732Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:58.554382Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:58.626668Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:58.755938Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:58.807113Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:58.881754Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:01.451819Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126565133498365:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.451923Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.452399Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126565133498375:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.452453Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.529634Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.578655Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.616800Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.653522Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.686696Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.761116Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.799521Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.856128Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.960533Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126565133499247:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.960677Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.963096Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126565133499252:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.963167Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126565133499253:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.963315Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.967827Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:01.980159Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587126565133499256:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:46:02.058233Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126569428466603:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:02.746868Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126547953627368:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:02.746966Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Consumed units: 312 Consumed units: 6 |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> KqpExplain::ComplexJoin [GOOD] >> KqpExplain::CompoundKeyRange >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpQuery::QueryCachePermissionsLoss |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |95.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |95.6%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpSnapshotIsolation::ConflictWrite-IsOlap+FillTables [GOOD] |95.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpQuery::CreateTableAs_MkDir [GOOD] |95.6%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] >> KqpLimits::StreamWrite+Allowed [GOOD] >> KqpLimits::StreamWrite-Allowed >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::DatashardReplySize >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] >> KqpQuery::TableSinkWithSubquery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: Trying to start YDB, gRPC: 26781, MsgBus: 9444 2025-12-23T18:45:33.593933Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126444771605760:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:33.594003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003392/r3tmp/tmpGsUD0s/pdisk_1.dat 2025-12-23T18:45:33.834842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:33.862261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:33.862369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:33.925546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:33.936426Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:33.937455Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126444771605732:2081] 1766515533592753 != 1766515533592756 TServer::EnableGrpc on GrpcPort 26781, node 1 2025-12-23T18:45:34.046315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:45:34.064060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:34.064087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:34.064098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:34.064202Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server 
localhost:9444 TClient is connected to server localhost:9444 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515534014 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:34.555373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:34.561804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:45:34.575290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:34.619857Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:34.697308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:34.856061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:34.924687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:36.999666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126457656509494:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:36.999868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:37.000233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126457656509504:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:37.000285Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:37.318998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:37.353861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:37.381661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:37.409747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:37.438081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:37.473828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:37.515891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:37.578507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:37.656967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126461951477670:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:37.657067Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:37.657512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126461951477675:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:37.657562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126461951477676:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:37.657606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:37.661929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schem ... : Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.271587Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.273611Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126564021379931:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.273691Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.275861Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:01.290676Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587126564021379930:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:46:01.374553Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126564021379985:2621] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:01.410452Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.690133Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.918880Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:01.925340Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:02.345875Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126546841509870:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:02.345980Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29780, MsgBus: 12640 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003392/r3tmp/tmpYt1VWF/pdisk_1.dat 2025-12-23T18:46:03.170952Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:46:03.172509Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:46:03.291606Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:46:03.293300Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7587126572549797020:2081] 1766515563136024 != 1766515563136027 2025-12-23T18:46:03.313054Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:46:03.313162Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-12-23T18:46:03.322452Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29780, node 5 2025-12-23T18:46:03.423285Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:46:03.438374Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:46:03.438399Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:46:03.438408Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:46:03.438501Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12640 TClient is connected to server localhost:12640 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515563365 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:46:04.015162Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:46:04.024131Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:46:04.162913Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:46:07.716587Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126589729667102:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.716674Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126589729667091:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.716774Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.717055Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126589729667106:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.717110Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.721063Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:07.737653Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126589729667105:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:46:07.828092Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126589729667158:2538] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:07.862513Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.075885Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7587126594024634595:2349], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2025-12-23T18:46:08.076310Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=5&id=MTk2M2YwZmYtYzU2NGI0OWUtZTMxOWIxNTgtN2ZhNzBjMmI=, ActorId: [5:7587126594024634593:2348], ActorState: ExecuteState, TraceId: 01kd68gvdf3ybqjt6xegjsddcn, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Pre type annotation" issue_code: 1020 severity: 1 issues { position { row: 5 column: 49 } message: "Creating table with data is not supported." end_position { row: 5 column: 49 } severity: 1 } }, remove tx with tx_id: |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateTableAs_MkDir [GOOD] Test command err: Trying to start YDB, gRPC: 12338, MsgBus: 2359 2025-12-23T18:45:22.898022Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126397971579385:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:22.898104Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033bb/r3tmp/tmp7l6wXI/pdisk_1.dat 2025-12-23T18:45:23.168828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:23.168911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:23.222396Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:23.249844Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126397971579360:2081] 1766515522894777 != 1766515522894780 2025-12-23T18:45:23.256679Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:23.261035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12338, node 1 2025-12-23T18:45:23.431464Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:23.431485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:23.431491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:23.431556Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:23.474484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2359 TClient is connected to server localhost:2359 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-12-23T18:45:23.935105Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515523311 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:24.078016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:24.092575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:26.147163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126415151449431:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.147173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126415151449436:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.147289Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.147960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126415151449447:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.148023Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.151819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:26.170041Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126415151449446:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:45:26.244700Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126415151449499:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:26.529560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.021991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:27.249838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:45:27.257108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:45:27.268776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 4960, MsgBus: 6013 2025-12-23T18:45:28.311680Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587126422047182832:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:28.311735Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033bb/r3tmp/tmpvC12ld/pdisk_1.dat 2025-12-23T18:45:28.333598Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:28.392885Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:28.394776Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587126422047182804:2081] 1766515528310847 != 1766515528310850 2025-12-23T18:45:28.428922Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-12-23T18:45:28.429000Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:28.432830Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4960, node 2 2025-12-23T18:45:28.469593Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:28.469620Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:28.469627Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:28.469696Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:28.510937Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6013 TClient is connected to server localhost:6013 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVers ... hard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:00.014337Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126538707407220:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:00.014423Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:00.096736Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:00.312088Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:00.324047Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:00.645983Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126560182245298:2938] txid# 281474976710673, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:00.662385Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:01.573309Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:01.579199Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 9209, MsgBus: 17532 2025-12-23T18:46:02.976673Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587126567467944156:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:02.976775Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033bb/r3tmp/tmpNMVnej/pdisk_1.dat 2025-12-23T18:46:03.014813Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:46:03.094720Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:46:03.113439Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:46:03.113557Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:46:03.147284Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9209, node 5 2025-12-23T18:46:03.217679Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:46:03.240506Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:46:03.240532Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:46:03.240543Z node 5 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:46:03.240644Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17532 TClient is connected to server localhost:17532 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515563197 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:46:03.869195Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:46:03.875937Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:46:03.989896Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:46:07.386641Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126588942781492:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.386774Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.387368Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126588942781502:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.387457Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.387669Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126588942781506:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.392563Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:07.406954Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126588942781508:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:46:07.465287Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126588942781559:2535] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:07.580357Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:07.758211Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:07.771981Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:07.784397Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:07.978919Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126567467944156:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:07.979036Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] >> TNodeBrokerTest::TestRandomActions [GOOD] >> KqpQuery::CreateAsSelectBadTypes-IsOlap [GOOD] >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::ConflictWrite-IsOlap+FillTables [GOOD] Test command err: Trying to start YDB, gRPC: 9543, MsgBus: 19856 2025-12-23T18:39:50.423156Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124972461913927:2256];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:50.423207Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00410c/r3tmp/tmphbrOsi/pdisk_1.dat 2025-12-23T18:39:50.489397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:39:51.135883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:39:51.135976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:39:51.187096Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124972461913698:2081] 1766515190393002 != 1766515190393005 2025-12-23T18:39:51.187181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:39:51.217622Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:39:51.274839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9543, node 1 2025-12-23T18:39:51.495289Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:39:51.526975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:39:51.627177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:39:51.627199Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:39:51.627206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:39:51.627280Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19856 TClient is connected to server localhost:19856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515191322 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:39:53.099032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:39:55.419116Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124972461913927:2256];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:39:55.419172Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:39:57.071722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125002526685660:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:57.071817Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125002526685672:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:57.071934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:57.075134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125002526685684:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:57.075224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:39:57.076970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:39:57.100463Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125002526685683:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:39:57.174903Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125002526685736:2545] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:39:57.590197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:57.775259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:39:59.986210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:40:02.006404Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Read conflict with concurrent transaction.;tx_id=3; 2025-12-23T18:40:02.018818Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Read conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2025-12-23T18:40:02.018983Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Read conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2025-12-23T18:40:02.019199Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [1:7587125024001530356:2955], Table: `/Root/Test` ([72057594046644480:42:1]), SessionActorId: [1:7587125019706562789:2955]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7587125024001530356:2955].{
: Error: Read conflict with concurrent transaction., code: 2001 } 2025-12-23T18:40:02.019762Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587125024001530349:2955], SessionActorId: [1:7587125019706562789:2955], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Read conflict with concurrent transaction., code: 2001 . sessionActorId=[1:7587125019706562789:2955]. 2025-12-23T18:40:02.020481Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=ZjQ4YzYzODctNWRmNzkwNTItNjE4MDVkNWItNzgzNmRiNGQ=, ActorId: [1:7587125019706562789:2955], ActorState: ExecuteState, TraceId: 01kd685nw64gacfb4gkvketmtw, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7587125024001530350:2955] from: [1:7587125024001530349:2955] 2025-12-23T18:40:02.020588Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587125024001530350:2955] TxId: 281474976715666. Ctx: { TraceId: 01kd685nw64gacfb4gkvketmtw, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQ4YzYzODctNWRmNzkwNTItNjE4MDVkNWItNzgzNmRiNGQ=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Read conflict with concurrent transaction., code: 2001 } } 2025-12-23T18:40:02.020934Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=ZjQ4YzYzODctNWRmNzkwNTItNjE4MDVkNWItNzgzNmRiNGQ=, ActorId: [1:7587125019706562789:2955], ActorState: ExecuteState, TraceId: 01kd685nw64gacfb4gkvketmtw, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Read conflict with concurrent transaction." issue_code: 2001 severity: 1 } } Trying to start YDB, gRPC: 16440, MsgBus: 4878 2025-12-23T18:40:04.755317Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587125033247489424:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:40:04.755440Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base. ... on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } } Trying to start YDB, gRPC: 9226, MsgBus: 15929 2025-12-23T18:45:55.673869Z node 24 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[24:7587126539999610772:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:55.673973Z node 24 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00410c/r3tmp/tmpbmDVsO/pdisk_1.dat 2025-12-23T18:45:55.703116Z node 24 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:55.832936Z node 24 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:55.835080Z node 24 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [24:7587126539999610745:2081] 1766515555672630 != 1766515555672633 2025-12-23T18:45:55.863164Z node 24 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(24, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:55.863303Z node 24 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(24, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:55.897939Z node 24 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:55.907321Z node 24 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(24, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9226, node 24 2025-12-23T18:45:56.002824Z node 24 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:56.002859Z node 24 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:56.002872Z node 24 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2025-12-23T18:45:56.002988Z node 24 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15929 2025-12-23T18:45:56.306534Z node 24 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15929 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515555959 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:56.566477Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:56.681095Z node 24 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:46:00.674254Z node 24 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[24:7587126539999610772:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:00.674363Z node 24 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:02.102505Z node 24 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [24:7587126570064382712:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:02.102519Z node 24 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [24:7587126570064382721:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:02.102620Z node 24 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:02.102992Z node 24 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [24:7587126570064382727:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:02.103099Z node 24 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:02.106778Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:02.120741Z node 24 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [24:7587126570064382726:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:46:02.184628Z node 24 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [24:7587126570064382779:2544] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:02.267601Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:02.321653Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:04.072466Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:06.559896Z node 24 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Write conflict with concurrent transaction.;tx_id=281474976710668; 2025-12-23T18:46:06.560100Z node 24 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 281474976710668 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2025-12-23T18:46:06.560262Z node 24 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 281474976710668 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2025-12-23T18:46:06.560483Z node 24 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [24:7587126587244260284:2965], Table: `/Root/Test` ([72057594046644480:42:1]), SessionActorId: [24:7587126587244260023:2965]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[24:7587126587244260284:2965].{
: Error: Write conflict with concurrent transaction., code: 2001 } 2025-12-23T18:46:06.560602Z node 24 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [24:7587126587244260277:2965], SessionActorId: [24:7587126587244260023:2965], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Write conflict with concurrent transaction., code: 2001 . sessionActorId=[24:7587126587244260023:2965]. 2025-12-23T18:46:06.560866Z node 24 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=24&id=NTI0ZjcwOTctYzg1MGI4MTUtZmM5NzFkNmQtM2Y5MzViNzA=, ActorId: [24:7587126587244260023:2965], ActorState: ExecuteState, TraceId: 01kd68gssm4vtneqan308b16tj, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [24:7587126587244260278:2965] from: [24:7587126587244260277:2965] 2025-12-23T18:46:06.560965Z node 24 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [24:7587126587244260278:2965] TxId: 281474976710667. Ctx: { TraceId: 01kd68gssm4vtneqan308b16tj, Database: /Root, SessionId: ydb://session/3?node_id=24&id=NTI0ZjcwOTctYzg1MGI4MTUtZmM5NzFkNmQtM2Y5MzViNzA=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Write conflict with concurrent transaction., code: 2001 } } 2025-12-23T18:46:06.561334Z node 24 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=24&id=NTI0ZjcwOTctYzg1MGI4MTUtZmM5NzFkNmQtM2Y5MzViNzA=, ActorId: [24:7587126587244260023:2965], ActorState: ExecuteState, TraceId: 01kd68gssm4vtneqan308b16tj, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } } |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::QueryExecTimeout >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] >> KqpParams::Decimal+QueryService+UseSink [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSinkWithSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 2048, MsgBus: 3928 2025-12-23T18:45:43.208445Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126486030048530:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:43.209596Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003384/r3tmp/tmp6LEF5y/pdisk_1.dat 2025-12-23T18:45:43.496269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:43.498519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:43.498616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:43.567134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:43.642966Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2048, node 1 2025-12-23T18:45:43.725734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:45:43.756823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:43.756854Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:43.756862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:43.756945Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration TClient is connected to server localhost:3928 2025-12-23T18:45:44.211225Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3928 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515543660 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:44.389857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:44.411084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:45:44.631319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:44.817812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:45:44.914602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:46.836203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126498914952263:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.836318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.836622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126498914952273:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.836662Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.187722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.228161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.260586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.294133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.328982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.362665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.398870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.445686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.539522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126503209920445:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.539647Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.539715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126503209920450:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.539920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126503209920452:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.539956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.543319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:47.556628Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126503209920453:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transact ... ARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587126564807060620:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.510786Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587126564807060621:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.510916Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:01.514060Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:01.527399Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587126564807060624:2439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:46:01.611019Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587126564807060675:3410] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:02.427267Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587126547627189229:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:02.427343Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13224, MsgBus: 32543 2025-12-23T18:46:04.864778Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587126577934103819:2163];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:04.864865Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003384/r3tmp/tmprPGUZf/pdisk_1.dat 2025-12-23T18:46:04.966874Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:46:05.011831Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:46:05.014182Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587126577934103681:2081] 1766515564847957 != 1766515564847960 2025-12-23T18:46:05.029261Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:46:05.029366Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:46:05.063160Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13224, node 4 2025-12-23T18:46:05.139393Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:46:05.139420Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:46:05.139428Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:46:05.139517Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:46:05.265470Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:32543 TClient is connected to server localhost:32543 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515565115 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:46:05.640658Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:46:05.648772Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:46:05.872221Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:46:08.790052Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126595113973748:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.790142Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.790481Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126595113973758:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.790556Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.822138Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.863465Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.949260Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126595113973923:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.949356Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.949754Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126595113973928:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.949805Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126595113973929:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.949850Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.954291Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:08.967356Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587126595113973932:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-12-23T18:46:09.065403Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126599408941279:2640] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:09.604710Z node 4 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037889 Cancelled read: {[4:7587126599408941371:2370], 0} 2025-12-23T18:46:09.864032Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126577934103819:2163];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:09.864132Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckQueryLimitsWorksAsExpected [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD] Test command err: 2025-12-23T18:44:23.606136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:23.606194Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2025-12-23T18:44:23.802362Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-12-23T18:44:23.802859Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-12-23T18:44:23.803168Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-12-23T18:44:23.816163Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-12-23T18:44:23.829158Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-12-23T18:44:23.829480Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-12-23T18:44:24.562140Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-12-23T18:44:24.575029Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-12-23T18:44:24.616207Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-12-23T18:44:26.029415Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2025-12-23T18:44:26.029915Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.030267Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.030635Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.031054Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.031453Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.032095Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2025-12-23T18:44:26.033143Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.071019Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.102176Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.117175Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2025-12-23T18:44:26.156474Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.157069Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2025-12-23T18:44:26.157365Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node 
#1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.157628Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.157872Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.158276Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host8:7: ERROR_TEMP: No free node IDs 2025-12-23T18:44:26.158820Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.159188Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2025-12-23T18:44:26.159523Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.191187Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2025-12-23T18:44:26.195478Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-12-23T18:44:26.196105Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2025-12-23T18:44:26.196411Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.196812Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2025-12-23T18:44:26.197664Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2025-12-23T18:44:26.198789Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.199062Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:44:26.773608Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-12-23T18:44:27.227854Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-12-23T18:44:27.277759Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-12-23T18:44:27.281623Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-12-23T18:44:27.295217Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-12-23T18:44:27.296112Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-12-23T18:44:27.812109Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-12-23T18:44:27.839966Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-12-23T18:44:27.867410Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node 
has expired 2025-12-23T18:44:27.868664Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2025-12-23T18:44:28.573188Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-12-23T18:44:29.075646Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-12-23T18:44:29.116776Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-12-23T18:44:29.117359Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-12-23T18:44:29.117799Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-12-23T18:44:29.119120Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-12-23T18:44:29.224512Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-12-23T18:44:29.225080Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-12-23T18:44:29.698961Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-12-23T18:44:29.714627Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-12-23T18:44:29.769442Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-12-23T18:44:30.176916Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-12-23T18:44:30.177406Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-12-23T18:44:30.191940Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-12-23T18:44:30.358930Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-12-23T18:44:30.459748Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-12-23T18:44:30.465712Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-12-23T18:44:30.467246Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-12-23T18:44:30.467739Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-12-23T18:44:30.469253Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-12-23T18:44:30.483042Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:44:30.496043Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 
2025-12-23T18:44:30.496398Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:44:30.497071Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:44:30.497744Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2025-12-23T18:44:30.577599Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:44:30.588582Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2025-12-23T18:44:30.590391Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:44:30.624835Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-12 ... UEST: Unknown node 2025-12-23T18:45:59.981688Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2025-12-23T18:46:00.820233Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-12-23T18:46:00.841716Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-12-23T18:46:00.863815Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:46:00.868848Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:46:00.871346Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:46:00.873449Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:46:01.020329Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-12-23T18:46:01.026930Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2025-12-23T18:46:01.028987Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2025-12-23T18:46:01.031269Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:46:01.057878Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2025-12-23T18:46:01.060130Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:46:01.061951Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2025-12-23T18:46:01.066239Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:46:02.049985Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown 
node 2025-12-23T18:46:02.053203Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-12-23T18:46:02.055697Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-12-23T18:46:02.431194Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-12-23T18:46:02.433614Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-12-23T18:46:02.435899Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-12-23T18:46:02.438074Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-12-23T18:46:02.461801Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-12-23T18:46:02.464272Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-12-23T18:46:02.763771Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-12-23T18:46:02.836609Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-12-23T18:46:02.864193Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2025-12-23T18:46:02.869293Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2025-12-23T18:46:02.872365Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:46:02.881983Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:46:02.884738Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:46:02.886059Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-12-23T18:46:03.506494Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-12-23T18:46:03.515716Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-12-23T18:46:04.138138Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:46:04.225548Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2025-12-23T18:46:04.253042Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:46:04.255715Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:46:04.262087Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 
2025-12-23T18:46:04.266341Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:46:04.268291Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-12-23T18:46:04.305531Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:46:04.379552Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:46:04.391325Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:46:04.398253Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2025-12-23T18:46:04.408909Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-12-23T18:46:04.411445Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:46:04.413557Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:46:04.491614Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2025-12-23T18:46:04.517085Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:46:04.614155Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-12-23T18:46:05.482667Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-12-23T18:46:05.923294Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-12-23T18:46:05.926515Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-12-23T18:46:06.048140Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-12-23T18:46:06.145511Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-12-23T18:46:06.154127Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-12-23T18:46:06.156974Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-12-23T18:46:06.808539Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-12-23T18:46:06.880718Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-12-23T18:46:06.901838Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-12-23T18:46:07.837177Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 
2025-12-23T18:46:07.970254Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-12-23T18:46:07.978653Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-12-23T18:46:08.486237Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-12-23T18:46:08.488870Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-12-23T18:46:08.606807Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-12-23T18:46:08.609507Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-12-23T18:46:08.611838Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-12-23T18:46:08.614185Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-12-23T18:46:08.633015Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-12-23T18:46:08.635450Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-12-23T18:46:08.637674Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-12-23T18:46:08.642367Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-12-23T18:46:08.806721Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-12-23T18:46:08.809296Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-12-23T18:46:09.373253Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-12-23T18:46:09.740180Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 65042, MsgBus: 25068 2025-12-23T18:45:27.297126Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126419011201524:2171];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:27.297411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033b5/r3tmp/tmpEDA9k0/pdisk_1.dat 2025-12-23T18:45:27.520525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:27.520618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-12-23T18:45:27.559802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:27.574307Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:27.578898Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126419011201375:2081] 1766515527283044 != 1766515527283047 2025-12-23T18:45:27.590733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65042, node 1 2025-12-23T18:45:27.691367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:27.691397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:27.691404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:27.691466Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:27.793571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25068 TClient is connected to server localhost:25068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515527637 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:28.191562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:28.205250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:45:28.215319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.293240Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:28.340017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:28.497225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:45:28.559537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:30.412118Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126431896105135:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:30.412254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:30.414513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126431896105145:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:30.414601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:30.805152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:30.843462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:30.873619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:30.904383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:30.939325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:30.975655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.016914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.067731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.149411Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126436191073312:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.149503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.149842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126436191073318:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.149893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126436191073317:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.149931Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.156578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... t: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:06.376386Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:06.419756Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:06.453775Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:06.489721Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:06.532966Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:06.590596Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:06.680335Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126584894927131:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:06.680438Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126584894927136:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:06.680455Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:06.680712Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126584894927138:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:06.680756Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:06.684413Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:06.734721Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126563420087940:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:06.734838Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:06.736238Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126584894927139:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:46:06.837110Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126584894927194:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:08.716772Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:09.963796Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7587126597779829625:2583], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2025-12-23T18:46:09.965785Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=5&id=MTM4NWZmMTQtNWI1ZTBkYmUtMmRlY2VhYzktZjA2M2I2MGY=, ActorId: [5:7587126597779829623:2582], ActorState: ExecuteState, TraceId: 01kd68gx8p8dmx320y461vyahq, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 17 } message: "At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda" end_position { row: 4 column: 17 } severity: 1 issues { position { row: 3 column: 25 } message: "At function: Parameter, At function: DataType" end_position { row: 3 column: 25 } severity: 1 issues { position { row: 3 column: 25 } message: "Invalid decimal precision: 99" end_position { row: 3 column: 25 } severity: 1 } } } }, remove tx with tx_id: 2025-12-23T18:46:10.100352Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=ZDUxNzJhZjUtZWZhMTkwNjYtNjExMGUwNjItY2ViMGNlZjI=, ActorId: [5:7587126597779829630:2585], ActorState: ExecuteState, TraceId: 01kd68gx9zfzy7fycfcw3svgdj, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1516: ydb/core/kqp/query_data/kqp_query_data.cpp:266: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 , status: BAD_REQUEST 2025-12-23T18:46:10.145990Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7587126602074796943:2592], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-12-23T18:46:10.147436Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=5&id=NWFkOTgxMTYtMzZlZTk2NmQtNDE1OTRiM2EtZDI5NTg0N2Q=, ActorId: [5:7587126602074796939:2590], ActorState: ExecuteState, TraceId: 01kd68gxe757kfac3dmdvn34qq, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 7 column: 29 } message: "At function: KiWriteTable!" end_position { row: 7 column: 29 } severity: 1 issues { position { row: 7 column: 50 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value22\':Decimal(35,10),\'Value35\':Decimal(35,10)> to Struct<\'Key\':Int32?,\'Value22\':Decimal(22,9)?,\'Value35\':Decimal(35,10)?>" end_position { row: 7 column: 50 } severity: 1 issues { position { row: 4 column: 25 } message: "Implicit decimal cast would lose precision" end_position { row: 4 column: 25 } severity: 1 } issues { position { row: 7 column: 50 } message: "Failed to convert \'Value22\': Decimal(35,10) to Optional" end_position { row: 7 column: 50 } severity: 1 } } issues { position { row: 7 column: 50 } message: "Failed to convert input columns types to scheme types" end_position { row: 7 column: 50 } issue_code: 2031 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:46:10.178321Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7587126602074796954:2597], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:0:14: Error: Implicit decimal cast would lose precision
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-12-23T18:46:10.178904Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=5&id=ZDY1MGJiM2YtYjIyM2JhYjEtODczMjc0NTAtNDM0MmZjZTI=, ActorId: [5:7587126602074796952:2596], ActorState: ExecuteState, TraceId: 01kd68gxfd8a5tf80qyyy3qtpf, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 29 } message: "At function: KiWriteTable!" end_position { row: 3 column: 29 } severity: 1 issues { position { row: 3 column: 50 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value22\':Decimal(35,10),\'Value35\':Decimal(35,10)> to Struct<\'Key\':Int32?,\'Value22\':Decimal(22,9)?,\'Value35\':Decimal(35,10)?>" end_position { row: 3 column: 50 } severity: 1 issues { position { column: 14 } message: "Implicit decimal cast would lose precision" end_position { column: 14 } severity: 1 } issues { position { row: 3 column: 50 } message: "Failed to convert \'Value22\': Decimal(35,10) to Optional" end_position { row: 3 column: 50 } severity: 1 } } issues { position { row: 3 column: 50 } message: "Failed to convert input columns types to scheme types" end_position { row: 3 column: 50 } issue_code: 2031 severity: 1 } } }, remove tx with tx_id: |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 17868, MsgBus: 24588 2025-12-23T18:45:43.066823Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126485844911357:2173];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:43.066879Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003385/r3tmp/tmpOwRweR/pdisk_1.dat 2025-12-23T18:45:43.362785Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:43.385171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:43.385281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:43.490023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:43.495126Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17868, node 1 2025-12-23T18:45:43.676832Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:45:43.774606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:43.774639Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:43.774649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:43.774774Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24588 2025-12-23T18:45:44.072865Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24588 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515543569 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:44.439820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:44.455243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:46.724740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126498729813977:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.724885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.727285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126498729813987:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.727361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.990393Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126498729814001:2507] txid# 281474976710658, issues: { message: "Type \'Decimal(15,0)\' specified for column \'Decimal_15_0PK\', but support for parametrized decimal is disabled (EnableParameterizedDecimal feature flag is off)" severity: 1 } 2025-12-23T18:45:47.015075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126503024781305:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.015139Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.015386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126503024781307:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.015417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.038025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.166069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126503024781404:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.166132Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.166418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126503024781407:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.166490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.183901Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126503024781418:2579] txid# 281474976710660, issues: { message: "Type \'Decimal(15,0)\' specified for column \'Decimal_15_0\', but support for parametrized decimal is disabled (EnableParameterizedDecimal feature flag is off)" severity: 1 } Trying to start YDB, gRPC: 29361, MsgBus: 30193 2025-12-23T18:45:47.976529Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587126503547133497:2208];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:47.979070Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:45:47.979466Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003385/r3tmp/tmpAmyRJH/pdisk_1.dat 2025-12-23T18:45:48.065594Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:48.066641Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587126503547133313:2081] 1766515547958487 != 1766515547958490 2025-12-23T18:45:48.083806Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:48.101701Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:48.101796Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:48.111677Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29361, node 2 2025-12-23T18:45:48.203194Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:48.203209Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:48.203214Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:48.203272Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:48.246826Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30193 TClient is connected to server localhost:30193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true ... 
} DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.313504Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.313631Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.314549Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037963;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.314623Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037971;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.315515Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037973;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.315766Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037965;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.316348Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 
4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.316760Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037967;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.317165Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.317704Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037969;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.317931Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.318672Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.318916Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.319810Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.319845Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.320709Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037989;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.320832Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037991;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.321712Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.321854Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.322867Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } 
NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:10.323072Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn [GOOD] >> KqpExplain::CompoundKeyRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20675, MsgBus: 29944 2025-12-23T18:45:22.934065Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126396689771673:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:22.934118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033bc/r3tmp/tmpEyFi6D/pdisk_1.dat 2025-12-23T18:45:23.186504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:23.271999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:23.272131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:23.287254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:23.310939Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126396689771649:2081] 1766515522930725 != 1766515522930728 2025-12-23T18:45:23.339701Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20675, node 1 2025-12-23T18:45:23.446114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:23.446134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:23.446141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:23.446242Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:23.464913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29944 TClient is connected to server localhost:29944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515523395 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:23.953717Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:23.956335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:23.972068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:24.101591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:24.255167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:24.317691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:26.228779Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126413869642711:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.228868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.229139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126413869642721:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.229184Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.540568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.570037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.595414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.634191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.668854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.709158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.756957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.807584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:26.949415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126413869643591:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.949503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.949755Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126413869643596:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.949805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126413869643597:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.949839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:26.954915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:26.970092Z node 1 ... 93Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:03.506901Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:46:03.555316Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:03.642244Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:06.863585Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126585473522413:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:06.863724Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:06.864410Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126585473522423:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:06.864487Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:06.957239Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:07.000900Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:07.039872Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:07.083207Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:07.131680Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:07.211100Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:07.259807Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:07.363749Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:07.412826Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126568293651498:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:07.425012Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:07.487521Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126589768490596:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.487649Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.488240Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126589768490601:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.488306Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126589768490602:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.488679Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.494047Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:07.517846Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126589768490605:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:46:07.588577Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126589768490656:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:10.019716Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:10.067265Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:10.107438Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":14,"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":13,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys","Name":"Upsert","Table":"SecondaryKeys"},{"Inputs":[],"Iterator":"precompute_2_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_2_0"}],"Node Type":"Effect"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"Filter","Name":"Iterator"},{"E-Rows":"2","Inputs":[],"Predicate":"Contains","E-Cost":"0","E-Size":"10","Name":"Filter"}],"Node Type":"ConstantExpr-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_2_0","Node Type":"Precompute_2","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":2,"Path":"\/Root\/SecondaryKeys","Columns":["Key"],"E-Rows":"2","Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"precompute_0_1","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_1"}],"Table":"SecondaryKeys","PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node 
Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","reads":[{"lookup_by":["Key"],"columns":["Key"],"type":"Lookup"}],"writes":[{"columns":["Key","Value"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":8,"Operators":[{"E-Rows":"2","Predicate":"Contains","E-Cost":"0","E-Size":"10","Name":"Filter"}],"Node Type":"Filter"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2025-12-23T18:40:25.910475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:25.910544Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:26.168887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:27.363488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:40:27.702248Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:27.702959Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:27.703641Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9952910534086386156 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:27.769581Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:27.770078Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:27.770283Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15742039310988883650 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:27.778258Z node 3 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 
1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-12-23T18:40:27.887189Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:27.887717Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:27.887970Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10465466287581471438 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:40:27.984989Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:40:27.985486Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:40:27.985707Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/004f00/r3tmp/tmpvozxEq/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2147637138996512966 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceI ... 2025-12-23T18:45:03.916261Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:03.916606Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:09.901736Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:09.902090Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:15.971107Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:15.971409Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool 
/dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:16.071733Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:16.072188Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:21.789658Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:21.790144Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:28.040141Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:28.040521Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:34.359686Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:34.360063Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:40.664936Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:40.665301Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:47.066070Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:47.066363Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:53.550107Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:53.550351Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:59.750041Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create 
pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:45:59.750431Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:46:06.278561Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:46:06.278856Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:46:12.896678Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2025-12-23T18:46:12.897075Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 2122, MsgBus: 4257 2025-12-23T18:45:43.317338Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126488884473420:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:43.317384Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003383/r3tmp/tmpJ2oLri/pdisk_1.dat 2025-12-23T18:45:43.560168Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:43.580161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:43.580270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:43.612691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:43.763803Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:43.766961Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126488884473383:2081] 1766515543315936 != 1766515543315939 TServer::EnableGrpc on GrpcPort 2122, node 1 2025-12-23T18:45:43.789185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:45:43.842694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:43.842718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:43.842730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:43.842839Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4257 TClient is connected to server localhost:4257 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-12-23T18:45:44.366922Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515543765 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:44.486068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:45:44.495927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:44.506619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:44.645760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:44.840295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:44.982800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:46.752954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126501769377137:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.753069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.755214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126501769377147:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.755280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.153701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.188694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.224252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.252237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.296450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.344579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.389650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.433587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.523072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126506064345312:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.523205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.523478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126506064345317:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.523509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126506064345318:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.523532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.527383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemes ... } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.094453Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.094955Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037952;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.095848Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.096133Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037954;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.096688Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037982;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.096908Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037956;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.097573Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037984;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 
UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.097731Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.098393Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037986;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.098481Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037960;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.099481Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037962;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.100189Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037988;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.100307Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037964;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.101036Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037990;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 
2025-12-23T18:46:13.101744Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.101869Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037992;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.102608Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037994;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.102627Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.103852Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037972;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.104898Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037998;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.105794Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.106783Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.107139Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037996;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2025-12-23T18:46:13.107694Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] >> KqpQuery::QueryCachePermissionsLoss [GOOD] >> KqpQuery::Pure >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13677, MsgBus: 22920 2025-12-23T18:45:24.429657Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126406762735415:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:24.429697Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:45:24.516004Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033b8/r3tmp/tmpXwEhyL/pdisk_1.dat 2025-12-23T18:45:24.799648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:24.799732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:24.864620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:24.911616Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:24.912592Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:24.912843Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126406762735376:2081] 1766515524427492 != 1766515524427495 TServer::EnableGrpc on GrpcPort 13677, node 1 2025-12-23T18:45:24.971206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:24.971232Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:24.971240Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:24.971335Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22920 2025-12-23T18:45:25.197974Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22920 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:45:25.439161Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515524928 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:25.473799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:25.485497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:25.623134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:25.771317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:25.838303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:27.839165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126419647639143:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.839304Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.839688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126419647639153:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:27.839725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.240888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.277452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.316007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.351035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.382416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.420061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.466488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.523622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.611168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126423942607320:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.611289Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.611416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126423942607325:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.611540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126423942607327:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.611589Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.615559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... ons } 2025-12-23T18:46:08.479133Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.538976Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.575552Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.614264Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.655496Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.695434Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.742288Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.787248Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.841938Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.972354Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126593820149210:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.972448Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.972632Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126593820149216:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.972688Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.972710Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126593820149215:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.976866Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:08.988835Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126593820149219:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:46:09.054360Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126576640277270:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:09.054465Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:09.079877Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126598115116566:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:11.383123Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:11.532360Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:11.594061Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":18,"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":17,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys","Name":"Upsert","Table":"SecondaryKeys"},{"Inputs":[],"Iterator":"precompute_1_2","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_2"}],"Node Type":"Effect"},{"PlanNodeId":16,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":15,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","Name":"Delete","Table":"SecondaryKeys\/Index\/indexImplTable"},{"Inputs":[],"Iterator":"precompute_1_1","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","CTE Name":"precompute_1_1"}],"Node Type":"Effect"},{"PlanNodeId":14,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":13,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","Name":"Upsert","Table":"SecondaryKeys\/Index\/indexImplTable"},{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"Effect"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","ReadRangesPointPrefixLen":"1","E-Rows":"0","IndexSelectionInfo":"index:Index: 
(0,0,1,0,1,0,0)","ReadRangesKeys":["Fk"],"Table":"SecondaryKeys\/Index\/indexImplTable","ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Subplan Name":"CTE Stage_5","Node Type":"Stage","Parent Relationship":"InitPlan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Node Type":"UnionAll","CTE Name":"Stage_5","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_1","Node Type":"Precompute_1_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"UnionAll","CTE Name":"Stage_5","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_2","Node Type":"Precompute_1_2","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","writes":[{"columns":["Key","Fk"],"type":"MultiUpsert"}]},{"name":"\/Root\/SecondaryKeys\/Index\/indexImplTable","reads":[{"columns":["Fk","Key"],"scan_by":["Fk [1, 4)"],"type":"Scan"}],"writes":[{"columns":["Key","Fk"],"type":"MultiUpsert"},{"type":"MultiErase"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":7,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"},{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Operators":[{"Name":"Delete","Table":"indexImplTable"}],"Plans":[{"PlanNodeId":14,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Delete"}],"Node Type":"Effect"},{"PlanNodeId":15,"Plans":[{"PlanNodeId":16,"Operators":[{"Name":"Upsert","Table":"indexImplTable"}],"Plans":[{"PlanNodeId":22,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] Test command err: Trying to start YDB, gRPC: 10563, MsgBus: 26277 ... 
waiting for SysViewsRoster update finished 2025-12-23T18:45:16.145094Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:45:16.230830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:45:16.241922Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:45:16.242192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:45:16.242248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:45:16.482598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:16.482753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:16.550760Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515513393586 != 1766515513393590 2025-12-23T18:45:16.563862Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:16.610779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:16.770055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033ca/r3tmp/tmp8Bltw7/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10563, node 1 2025-12-23T18:45:17.119585Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:17.119658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:17.119733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:17.120171Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:17.131800Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:26277 TClient is connected to server localhost:26277 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:45:17.471184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:17.567768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:17.846605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:18.204187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:18.469582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:19.194863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1908:3515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.195072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.196169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1982:3534], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.196266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:19.227574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:19.437458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:19.714426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:19.960436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.200951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.470059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:20.761118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:21.056578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:21.419067Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:21.419211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:21.419710Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2798:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:21.419809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:21.419903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2800:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't hav ... lterResource ok# false data# peer# 2025-12-23T18:46:16.244135Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418bd5b80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DropResource ok# false data# peer# 2025-12-23T18:46:16.244352Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418c93980] received request Name# Ydb.RateLimiter.V1.RateLimiterService/ListResources ok# false data# peer# 2025-12-23T18:46:16.244417Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d94187f5080] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DescribeResource ok# false data# peer# 2025-12-23T18:46:16.244641Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418a94980] received request Name# Ydb.RateLimiter.V1.RateLimiterService/AcquireResource ok# false data# peer# 2025-12-23T18:46:16.244708Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418a1fc80] received request Name# Ydb.DataStreams.V1.DataStreamsService/CreateStream ok# false data# peer# 2025-12-23T18:46:16.244908Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418946c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreams ok# false data# peer# 2025-12-23T18:46:16.244988Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418a1cb80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeleteStream ok# false data# peer# 2025-12-23T18:46:16.245175Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418a23480] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStream ok# false data# peer# 2025-12-23T18:46:16.245276Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418947380] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListShards ok# false data# peer# 2025-12-23T18:46:16.245435Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418bbb780] received request Name# Ydb.DataStreams.V1.DataStreamsService/SetWriteQuota ok# false data# peer# 2025-12-23T18:46:16.245708Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418927480] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStream ok# false data# peer# 2025-12-23T18:46:16.245822Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418945080] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecord ok# false data# peer# 2025-12-23T18:46:16.245986Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418945780] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecords ok# false data# peer# 2025-12-23T18:46:16.246140Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418c12180] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetRecords ok# false data# peer# 2025-12-23T18:46:16.246249Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418a86980] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetShardIterator ok# false data# peer# 2025-12-23T18:46:16.246453Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418919b80] received request Name# Ydb.DataStreams.V1.DataStreamsService/SubscribeToShard ok# false data# peer# 2025-12-23T18:46:16.246512Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418850780] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeLimits ok# false data# peer# 2025-12-23T18:46:16.246789Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418b26480] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamSummary ok# false data# peer# 2025-12-23T18:46:16.247076Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418c77280] received request Name# 
Ydb.DataStreams.V1.DataStreamsService/IncreaseStreamRetentionPeriod ok# false data# peer# 2025-12-23T18:46:16.247374Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418a24980] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateShardCount ok# false data# peer# 2025-12-23T18:46:16.247654Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418bee380] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStreamMode ok# false data# peer# 2025-12-23T18:46:16.247916Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418c74880] received request Name# Ydb.DataStreams.V1.DataStreamsService/RegisterStreamConsumer ok# false data# peer# 2025-12-23T18:46:16.247948Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418bf0d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DecreaseStreamRetentionPeriod ok# false data# peer# 2025-12-23T18:46:16.248207Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418a26580] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeregisterStreamConsumer ok# false data# peer# 2025-12-23T18:46:16.248259Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418c3ac80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamConsumer ok# false data# peer# 2025-12-23T18:46:16.248488Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d94187e9a80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreamConsumers ok# false data# peer# 2025-12-23T18:46:16.248531Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d94189c6180] received request Name# Ydb.DataStreams.V1.DataStreamsService/AddTagsToStream ok# false data# peer# 2025-12-23T18:46:16.248815Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418bad080] received request Name# Ydb.DataStreams.V1.DataStreamsService/DisableEnhancedMonitoring ok# false data# peer# 2025-12-23T18:46:16.248834Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418a91880] received request Name# Ydb.DataStreams.V1.DataStreamsService/EnableEnhancedMonitoring ok# false data# peer# 2025-12-23T18:46:16.249107Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d94189e1380] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListTagsForStream ok# false data# peer# 2025-12-23T18:46:16.249117Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418bda880] received request Name# Ydb.DataStreams.V1.DataStreamsService/MergeShards ok# false data# peer# 2025-12-23T18:46:16.249376Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418925180] received request Name# Ydb.DataStreams.V1.DataStreamsService/RemoveTagsFromStream ok# false data# peer# 2025-12-23T18:46:16.249385Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418b35280] received request Name# Ydb.DataStreams.V1.DataStreamsService/SplitShard ok# false data# peer# 2025-12-23T18:46:16.249642Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418bb7180] received request Name# Ydb.DataStreams.V1.DataStreamsService/StartStreamEncryption ok# false data# peer# 2025-12-23T18:46:16.249657Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418851c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/StopStreamEncryption ok# false data# peer# 2025-12-23T18:46:16.249896Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d94189ebb80] received request Name# Ydb.Monitoring.V1.MonitoringService/SelfCheck ok# false data# peer# 2025-12-23T18:46:16.249933Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418a1c480] received request Name# Ydb.Monitoring.V1.MonitoringService/NodeCheck ok# false data# peer# 2025-12-23T18:46:16.250170Z node 7 :GRPC_SERVER DEBUG: 
logger.cpp:36: [0x7d9418a20380] received request Name# Ydb.Monitoring.V1.MonitoringService/ClusterState ok# false data# peer# 2025-12-23T18:46:16.250217Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418bc4380] received request Name# Ydb.Query.V1.QueryService/CreateSession ok# false data# peer# 2025-12-23T18:46:16.250440Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418ba2880] received request Name# Ydb.Query.V1.QueryService/DeleteSession ok# false data# peer# 2025-12-23T18:46:16.250500Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d94188ab780] received request Name# Ydb.Query.V1.QueryService/AttachSession ok# false data# peer# 2025-12-23T18:46:16.251378Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d941895b580] received request Name# Ydb.Query.V1.QueryService/CommitTransaction ok# false data# peer# 2025-12-23T18:46:16.251661Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d941895ae80] received request Name# Ydb.Query.V1.QueryService/RollbackTransaction ok# false data# peer# 2025-12-23T18:46:16.251935Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418a1d280] received request Name# Ydb.Query.V1.QueryService/ExecuteQuery ok# false data# peer# 2025-12-23T18:46:16.252186Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418930e80] received request Name# Ydb.Query.V1.QueryService/ExecuteScript ok# false data# peer# 2025-12-23T18:46:16.252436Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418930780] received request Name# Ydb.Query.V1.QueryService/FetchScriptResults ok# false data# peer# 2025-12-23T18:46:16.252694Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d941884a580] received request Name# Ydb.Tablet.V1.TabletService/ExecuteTabletMiniKQL ok# false data# peer# 2025-12-23T18:46:16.252972Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d941884ac80] received request Name# Ydb.Tablet.V1.TabletService/ChangeTabletSchema ok# false data# peer# 2025-12-23T18:46:16.253225Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d941884b380] received request Name# Ydb.Tablet.V1.TabletService/RestartTablet ok# false data# peer# 2025-12-23T18:46:16.253477Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d941884ba80] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogStore ok# false data# peer# 2025-12-23T18:46:16.253773Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d941884c180] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogStore ok# false data# peer# 2025-12-23T18:46:16.254020Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d941884c880] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogStore ok# false data# peer# 2025-12-23T18:46:16.254258Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d941884cf80] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogStore ok# false data# peer# 2025-12-23T18:46:16.254499Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d941884d680] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogTable ok# false data# peer# 2025-12-23T18:46:16.255139Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d941884dd80] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogTable ok# false data# peer# 2025-12-23T18:46:16.255423Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418d20080] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogTable ok# false data# peer# 2025-12-23T18:46:16.255696Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418d22380] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogTable ok# false data# peer# 2025-12-23T18:46:16.255961Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: 
[0x7d9418d23880] received request Name# Ydb.Auth.V1.AuthService/Login ok# false data# peer# 2025-12-23T18:46:16.256046Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418849e80] received request Name# Ydb.Query.V1.QueryService/BeginTransaction ok# false data# peer# 2025-12-23T18:46:16.256217Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418d23180] received request Name# Ydb.Replication.V1.ReplicationService/DescribeReplication ok# false data# peer# 2025-12-23T18:46:16.256472Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418d24680] received request Name# Ydb.Replication.V1.ReplicationService/DescribeTransfer ok# false data# peer# 2025-12-23T18:46:16.256481Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d9418d20e80] received request Name# Ydb.View.V1.ViewService/DescribeView ok# false data# peer# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 25525, MsgBus: 3087 2025-12-23T18:45:47.374858Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126502305169288:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:47.375472Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00337b/r3tmp/tmpNdPC11/pdisk_1.dat 2025-12-23T18:45:47.653633Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:47.662090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:47.662204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:47.722511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:47.757632Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:47.759500Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126502305169262:2081] 1766515547371826 != 1766515547371829 TServer::EnableGrpc on GrpcPort 25525, node 1 2025-12-23T18:45:47.869042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:47.869067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:47.869074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:47.869171Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:47.895099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions TClient is connected to server localhost:3087 TClient is connected to server localhost:3087 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515547783 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:45:48.391198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:45:48.399577Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:48.407134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:48.550085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:48.717761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:48.788150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:50.776321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126515190073020:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:50.776397Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:50.778286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126515190073030:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:50.778322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:51.094728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:51.128980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:51.160841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:51.189022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:51.259819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:51.300936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:51.374521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:51.436239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:51.537369Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126519485041204:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:51.537470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:51.537920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126519485041210:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:51.537983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126519485041209:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:51.537989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:51.542202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:51.555528Z node 1 :K ... sue_code: 1030 severity: 1 issues { position { row: 6 column: 45 } message: "At function: KiCreateTable!" end_position { row: 6 column: 45 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:46:10.673085Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7587126604346085229:2349], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:43: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2025-12-23T18:46:10.673492Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=4&id=ZjMxMzc1MS01MjI2OGY3MS02NDljZjI1ZS0xNDVhODRiMA==, ActorId: [4:7587126604346085223:2346], ActorState: ExecuteState, TraceId: 01kd68gxxwaef3xv3rs3ff46dr, ReplyQueryCompileError, status: BAD_REQUEST, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 43 } message: "At function: KiCreateTable!" end_position { row: 6 column: 43 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } }, remove tx with tx_id: Trying to start YDB, gRPC: 11063, MsgBus: 31702 2025-12-23T18:46:11.747268Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587126607637832344:2145];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:11.748054Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:46:11.748101Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00337b/r3tmp/tmpaDCSol/pdisk_1.dat 2025-12-23T18:46:11.896525Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:46:11.897574Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:46:11.902812Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7587126607637832234:2081] 1766515571664519 != 1766515571664522 2025-12-23T18:46:11.931923Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:46:11.932010Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:46:11.946175Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11063, node 5 2025-12-23T18:46:12.039819Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:46:12.039843Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:46:12.039851Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:46:12.039937Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:46:12.092458Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
TClient is connected to server localhost:31702 TClient is connected to server localhost:31702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515571996 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:46:12.524966Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:46:12.533981Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:46:12.544425Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:46:12.603911Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:46:12.711191Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:46:16.124062Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126629112669678:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:16.124197Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:16.124287Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126629112669690:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:16.124587Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126629112669692:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:16.124642Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:16.129932Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:16.144119Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126629112669693:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-12-23T18:46:16.219203Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126629112669745:2601] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:16.262605Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:16.601687Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-12-23T18:46:16.626578Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:16.702925Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126607637832344:2145];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:16.703007Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:16.975280Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:16.986720Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:16.999557Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpPg::ValuesInsert-useSink [GOOD] >> PgCatalog::PgType >> KqpQuery::ExecuteWriteQuery [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ExecuteWriteQuery [GOOD] Test command 
err: Trying to start YDB, gRPC: 32633, MsgBus: 6364 2025-12-23T18:45:18.773518Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126380971018387:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:18.773597Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033c7/r3tmp/tmpshoILB/pdisk_1.dat 2025-12-23T18:45:18.961031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:18.968856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:18.968994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:19.023773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:19.029958Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:19.031442Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126380971018358:2081] 1766515518771915 != 1766515518771918 TServer::EnableGrpc on GrpcPort 32633, node 1 2025-12-23T18:45:19.098336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:19.098377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:19.098398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:19.098503Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:19.219730Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6364 TClient is connected to server localhost:6364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515519083 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:45:19.518067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:19.523675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:19.778648Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:21.610243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126393855921132:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:21.610249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126393855921141:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:21.610343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:21.610753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126393855921147:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:21.610866Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:21.614551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:21.630391Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126393855921146:2325], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:45:21.715607Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126393855921199:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:21.964718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 6041, MsgBus: 2851 2025-12-23T18:45:23.150897Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587126399773217862:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:23.153691Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033c7/r3tmp/tmpL50fpA/pdisk_1.dat 2025-12-23T18:45:23.174883Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:23.241818Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:23.246952Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587126399773217825:2081] 1766515523147909 != 1766515523147912 2025-12-23T18:45:23.279793Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:23.279896Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6041, node 2 2025-12-23T18:45:23.286224Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:23.370528Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:23.370558Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:23.370564Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:23.370634Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:23.385369Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2851 TClient is connected to server localhost:2851 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515523332 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:45:23.782238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:45:23.788038Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:24.154342Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:26.442638Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126412658120608:2325], Datab ... 8:45:46.760551Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126500457721521:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.760654Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.760941Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126500457721531:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.760988Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.857862Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:46.895588Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:46.933590Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:46.974517Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.008684Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.045858Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.083487Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.148458Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:47.240088Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126504752689692:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.240173Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.240238Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126504752689697:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.240375Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126504752689699:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.240432Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:47.243926Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:47.254364Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126504752689701:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:45:47.344053Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126504752689752:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:47.675025Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126483277850498:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:47.675107Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:45:49.421819Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:57.750222Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:45:57.750258Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:59.307193Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2025-12-23T18:45:59.345160Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2025-12-23T18:46:00.448385Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2025-12-23T18:46:01.288726Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2025-12-23T18:46:04.641638Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd68g92hebhrk8cnhmqs1vje", SessionId: ydb://session/3?node_id=5&id=NzA3NzAyMi1kNjAzMDg3Mi0zMjY0N2NlYy0zMTQ4YWJiNQ==, Slow query, duration: 15.372504s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE test_table (\n PRIMARY KEY (id)\n ) AS SELECT\n ROW_NUMBER() OVER w AS id, data\n FROM\n AS_TABLE(ListReplicate(<|data: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'|>, 500000))\n WINDOW\n w AS (ORDER BY data)", parameters: 0b 2025-12-23T18:46:04.971273Z node 5 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 68.8942 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037927 table: [/Root/.tmp/sessions/914b3d9f-44a9-6a48-661a-848b8dfd8fdf/Root/test_table_50be9823-402e-5397-97df-f390cda52dd5] 2025-12-23T18:46:07.061817Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: 
TraceId: "01kd68g92hebhrk8cnhmqs1vje", SessionId: ydb://session/3?node_id=5&id=NzA3NzAyMi1kNjAzMDg3Mi0zMjY0N2NlYy0zMTQ4YWJiNQ==, Slow query, duration: 17.792643s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE test_table (\n PRIMARY KEY (id)\n ) AS SELECT\n ROW_NUMBER() OVER w AS id, data\n FROM\n AS_TABLE(ListReplicate(<|data: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'|>, 500000))\n WINDOW\n w AS (ORDER BY data)", parameters: 0b 2025-12-23T18:46:07.067976Z --------------- Start update --------------- 2025-12-23T18:46:07.067438Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:07.077404Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:46:19.399693Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd68gtf69cyhe8ey7zr3ez86", SessionId: ydb://session/3?node_id=5&id=ZTA1YjYxNzctMjU5ZjMzN2YtZDFiMWE3NjMtMTQ3MDg0ZDY=, Slow query, duration: 12.318197s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n UPDATE test_table SET data = \"a\"\n ", parameters: 0b |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService [GOOD] Test command err: Trying to start YDB, gRPC: 28810, MsgBus: 30148 2025-12-23T18:45:39.381371Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126469047840342:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:39.381461Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00338d/r3tmp/tmpBPgszg/pdisk_1.dat 2025-12-23T18:45:39.625520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:39.625618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:39.674100Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:39.681607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:39.691086Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:39.704528Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126469047840319:2081] 1766515539379716 != 1766515539379719 TServer::EnableGrpc on GrpcPort 28810, node 1 2025-12-23T18:45:39.780756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:39.780781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:39.780807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:39.780892Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:39.971915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30148 TClient is connected to server localhost:30148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515539754 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:40.229576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:45:40.258213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:40.402223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:40.402232Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 
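For readability: the YQL behind the three KQP_SLOW_LOG warnings in the KqpQuery::ExecuteWriteQuery output above is embedded there as an escaped string. Unescaped (with the long run of 'a' characters in the data literal abbreviated here), the first two warnings report the same CTAS statement:

    CREATE TABLE test_table (
        PRIMARY KEY (id)
    ) AS SELECT
        ROW_NUMBER() OVER w AS id, data
    FROM
        AS_TABLE(ListReplicate(<|data: 'aaa...aaa'|>, 500000))
    WINDOW
        w AS (ORDER BY data)

and the third warning (duration 12.318197s) reports the follow-up statement:

    UPDATE test_table SET data = "a"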
2025-12-23T18:45:40.569368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:40.665866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:42.608524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126481932744091:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:42.608619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:42.609041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126481932744101:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:42.609109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:42.951420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:42.990416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:43.051707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:43.086293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:43.124197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:43.189518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:43.229938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:43.278810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:43.390194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126486227712265:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:43.390284Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:43.390462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126486227712270:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:43.390508Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126486227712271:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:43.390635Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:43.400338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:43.417437Z node 1 ... lterResource ok# false data# peer# 2025-12-23T18:46:22.953974Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88b45680] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DropResource ok# false data# peer# 2025-12-23T18:46:22.954187Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88a4ea80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/ListResources ok# false data# peer# 2025-12-23T18:46:22.954376Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88b44f80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DescribeResource ok# false data# peer# 2025-12-23T18:46:22.954593Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88b43a80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/AcquireResource ok# false data# peer# 2025-12-23T18:46:22.958897Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c4c580] received request Name# Ydb.DataStreams.V1.DataStreamsService/CreateStream ok# false data# peer# 2025-12-23T18:46:22.959227Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88be3580] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreams ok# false data# peer# 2025-12-23T18:46:22.959470Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c4a980] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeleteStream ok# false data# peer# 2025-12-23T18:46:22.959695Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88b42c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStream ok# false data# peer# 2025-12-23T18:46:22.959942Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88be2780] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListShards ok# false data# peer# 2025-12-23T18:46:22.960176Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88b15480] received request Name# Ydb.DataStreams.V1.DataStreamsService/SetWriteQuota ok# false data# peer# 2025-12-23T18:46:22.960411Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88dad680] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStream ok# false data# peer# 2025-12-23T18:46:22.960655Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c7c780] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecord ok# false data# peer# 2025-12-23T18:46:22.960873Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c6b680] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecords ok# false data# peer# 2025-12-23T18:46:22.961106Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c64680] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetRecords ok# false data# peer# 2025-12-23T18:46:22.961241Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c66280] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetShardIterator ok# false data# peer# 2025-12-23T18:46:22.961324Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c63f80] received request Name# Ydb.DataStreams.V1.DataStreamsService/SubscribeToShard ok# false data# peer# 2025-12-23T18:46:22.961467Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: 
[0x7d1f88c2b180] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeLimits ok# false data# peer# 2025-12-23T18:46:22.961530Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c68c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamSummary ok# false data# peer# 2025-12-23T18:46:22.961682Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c2aa80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DecreaseStreamRetentionPeriod ok# false data# peer# 2025-12-23T18:46:22.961750Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88e1cf80] received request Name# Ydb.DataStreams.V1.DataStreamsService/IncreaseStreamRetentionPeriod ok# false data# peer# 2025-12-23T18:46:22.961903Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88e1c180] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateShardCount ok# false data# peer# 2025-12-23T18:46:22.961959Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88be1980] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStreamMode ok# false data# peer# 2025-12-23T18:46:22.962129Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c49b80] received request Name# Ydb.DataStreams.V1.DataStreamsService/RegisterStreamConsumer ok# false data# peer# 2025-12-23T18:46:22.962161Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c4be80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeregisterStreamConsumer ok# false data# peer# 2025-12-23T18:46:22.962332Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c47f80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamConsumer ok# false data# peer# 2025-12-23T18:46:22.962365Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c5dd80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreamConsumers ok# false data# peer# 2025-12-23T18:46:22.962545Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c60e80] received request Name# Ydb.DataStreams.V1.DataStreamsService/AddTagsToStream ok# false data# peer# 2025-12-23T18:46:22.962560Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88be0b80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DisableEnhancedMonitoring ok# false data# peer# 2025-12-23T18:46:22.962734Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c5e480] received request Name# Ydb.DataStreams.V1.DataStreamsService/EnableEnhancedMonitoring ok# false data# peer# 2025-12-23T18:46:22.962983Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c5d680] received request Name# Ydb.DataStreams.V1.DataStreamsService/MergeShards ok# false data# peer# 2025-12-23T18:46:22.963193Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c67e80] received request Name# Ydb.DataStreams.V1.DataStreamsService/RemoveTagsFromStream ok# false data# peer# 2025-12-23T18:46:22.963383Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c67780] received request Name# Ydb.DataStreams.V1.DataStreamsService/SplitShard ok# false data# peer# 2025-12-23T18:46:22.963514Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c60080] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListTagsForStream ok# false data# peer# 2025-12-23T18:46:22.963591Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c6a880] received request Name# Ydb.DataStreams.V1.DataStreamsService/StartStreamEncryption ok# false data# peer# 2025-12-23T18:46:22.963736Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c6bd80] received request Name# Ydb.DataStreams.V1.DataStreamsService/StopStreamEncryption ok# false data# peer# 
2025-12-23T18:46:22.963775Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88b15b80] received request Name# Ydb.Monitoring.V1.MonitoringService/SelfCheck ok# false data# peer# 2025-12-23T18:46:22.963929Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88cf4c80] received request Name# Ydb.Monitoring.V1.MonitoringService/NodeCheck ok# false data# peer# 2025-12-23T18:46:22.963970Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88b16980] received request Name# Ydb.Monitoring.V1.MonitoringService/ClusterState ok# false data# peer# 2025-12-23T18:46:22.964130Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c3c280] received request Name# Ydb.Query.V1.QueryService/CreateSession ok# false data# peer# 2025-12-23T18:46:22.964168Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c3a680] received request Name# Ydb.Query.V1.QueryService/DeleteSession ok# false data# peer# 2025-12-23T18:46:22.964333Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c3de80] received request Name# Ydb.Query.V1.QueryService/AttachSession ok# false data# peer# 2025-12-23T18:46:22.964364Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c35280] received request Name# Ydb.Query.V1.QueryService/BeginTransaction ok# false data# peer# 2025-12-23T18:46:22.964514Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c36080] received request Name# Ydb.Query.V1.QueryService/CommitTransaction ok# false data# peer# 2025-12-23T18:46:22.964560Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c0e380] received request Name# Ydb.Query.V1.QueryService/RollbackTransaction ok# false data# peer# 2025-12-23T18:46:22.964710Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88e7da80] received request Name# Ydb.Query.V1.QueryService/ExecuteQuery ok# false data# peer# 2025-12-23T18:46:22.964755Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c6a180] received request Name# Ydb.Query.V1.QueryService/ExecuteScript ok# false data# peer# 2025-12-23T18:46:22.964800Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c3b480] received request Name# Ydb.Query.V1.QueryService/FetchScriptResults ok# false data# peer# 2025-12-23T18:46:22.964962Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c39180] received request Name# Ydb.Tablet.V1.TabletService/ExecuteTabletMiniKQL ok# false data# peer# 2025-12-23T18:46:22.965022Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c39f80] received request Name# Ydb.Tablet.V1.TabletService/ChangeTabletSchema ok# false data# peer# 2025-12-23T18:46:22.965164Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88d27f80] received request Name# Ydb.Tablet.V1.TabletService/RestartTablet ok# false data# peer# 2025-12-23T18:46:22.965209Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88d2b080] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogStore ok# false data# peer# 2025-12-23T18:46:22.965338Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88d25580] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogStore ok# false data# peer# 2025-12-23T18:46:22.965398Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88af5c80] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogStore ok# false data# peer# 2025-12-23T18:46:22.965534Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88df6780] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogStore ok# false data# peer# 2025-12-23T18:46:22.965584Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88df7c80] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogTable ok# false data# peer# 
2025-12-23T18:46:22.965707Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88df6e80] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogTable ok# false data# peer# 2025-12-23T18:46:22.965753Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c58280] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogTable ok# false data# peer# 2025-12-23T18:46:22.965887Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c57480] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogTable ok# false data# peer# 2025-12-23T18:46:22.965938Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88d45480] received request Name# Ydb.Auth.V1.AuthService/Login ok# false data# peer# 2025-12-23T18:46:22.966081Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c4fd80] received request Name# Ydb.Replication.V1.ReplicationService/DescribeReplication ok# false data# peer# 2025-12-23T18:46:22.966120Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c52780] received request Name# Ydb.Replication.V1.ReplicationService/DescribeTransfer ok# false data# peer# 2025-12-23T18:46:22.966263Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d1f88c53580] received request Name# Ydb.View.V1.ViewService/DescribeView ok# false data# peer# >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpPg::TableInsert-useSink [GOOD] >> KqpPg::TempTablesSessionsIsolation ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> Channels20::IcChannelEarlyFinish 2025-12-23 18:46:20,409 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-12-23 18:46:20,619 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 1218328 58.6M 57.8M 32.5M test_tool run_ut @/home/runner/.ya/build/build_root/8ptz/00314b/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args 1219448 1.7G 1.5G 1.3G └─ ydb-library-yql-dq-runtime-ut --trace-path-append /home/runner/.ya/build/build_root/8ptz/00314b/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/ch Test command err: Trying to start YDB, gRPC: 13076, MsgBus: 5606 2025-12-23T18:36:23.320012Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124080769293946:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:23.320059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:36:23.534789Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.011714s 2025-12-23T18:36:23.624891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:36:23.611335Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587124083880968887:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:23.611386Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00314b/r3tmp/tmpkJD3FS/pdisk_1.dat 2025-12-23T18:36:23.674129Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:36:24.210990Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:36:24.252141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:36:24.254337Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:36:24.383362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:36:24.383463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:36:24.407257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:36:24.407373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:36:24.499809Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:36:24.504661Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:36:24.551257Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:36:24.557973Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:24.581636Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:36:24.595725Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:36:24.614168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:36:24.663420Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TServer::EnableGrpc on GrpcPort 13076, node 1 2025-12-23T18:36:25.072571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:36:25.072747Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:36:25.072762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:36:25.072774Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:36:25.072837Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5606 TClient is connected to server localhost:5606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514984731 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:36:26.114638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:36:26.188671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:26.668935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:27.167283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:27.343305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:28.350245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587124080769293946:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:28.354775Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:36:28.614340Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587124083880968887:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:28.614437Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:36:30.167927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124110834067433:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:30.168104Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:30.169060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124110834067443:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:30.169147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:30.598151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:30.700419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:30.795492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:30.862304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:30.929798Z node 1 :FLA ... g... 2025-12-23T18:36:46.611920Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:46.803665Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:46.973324Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:49.656221Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587124192050027271:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:49.656314Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:49.657048Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587124192050027281:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:49.657136Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:49.745645Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:49.824973Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:49.976372Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:50.053763Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:50.133456Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:50.248017Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:50.271973Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587124174870155682:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:50.272069Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:36:50.360924Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:50.486327Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:50.637160Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587124196344995764:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:50.637276Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:50.637527Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587124196344995770:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:50.637582Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:50.637637Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587124196344995769:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:50.647766Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:36:50.703196Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587124196344995773:2434], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:36:50.770690Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587124196344995859:4903] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:36:52.641744Z node 5 :KQP_CHANNELS WARN: dq_channel_service.cpp:948: DATA UNDELIVERED, UNKNOWN ActorId to NodeId=6, NodeActorId=[5:7587124204934930761:2478], PeerActorId=[6:7587124206367053090:2368] 2025-12-23T18:36:52.641788Z node 5 :KQP_CHANNELS WARN: dq_channel_service.cpp:427: Descriptor.GenMajor=1, expected GenMajor=2 by Reconciliation 2025-12-23T18:36:52.742067Z node 5 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x2, to NodeId=6, NodeActorId=[5:7587124204934930761:2478], Gen=2.1, Next Delay=0.100000s 2025-12-23T18:36:52.942316Z node 5 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x3, to NodeId=6, NodeActorId=[5:7587124204934930761:2478], Gen=2.1, Next Delay=0.200000s 2025-12-23T18:36:53.342676Z node 5 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x4, to NodeId=6, NodeActorId=[5:7587124204934930761:2478], Gen=2.1, Next Delay=0.400000s 2025-12-23T18:36:54.142891Z node 5 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x5, to NodeId=6, NodeActorId=[5:7587124204934930761:2478], Gen=2.1, Next Delay=0.800000s 2025-12-23T18:36:55.743448Z node 5 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x6, to NodeId=6, NodeActorId=[5:7587124204934930761:2478], Gen=2.1, Next Delay=1.600000s 2025-12-23T18:36:58.943977Z node 5 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x7, to NodeId=6, NodeActorId=[5:7587124204934930761:2478], Gen=2.1, Next Delay=3.200000s 2025-12-23T18:37:00.460519Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:37:00.460567Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:05.350879Z node 5 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x8, to NodeId=6, NodeActorId=[5:7587124204934930761:2478], Gen=2.1, Next Delay=6.400000s 2025-12-23T18:37:18.154148Z node 5 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x9, to NodeId=6, NodeActorId=[5:7587124204934930761:2478], Gen=3.1, Next Delay=12.800000s Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/00314b/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 
398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/00314b/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/chunk0/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |95.7%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7337, MsgBus: 26662 2025-12-23T18:45:28.655265Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126422649783273:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:28.655357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033aa/r3tmp/tmpMQOaS2/pdisk_1.dat 2025-12-23T18:45:28.911810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:28.930085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:28.930185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:29.017545Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126422649783238:2081] 1766515528651361 != 1766515528651364 2025-12-23T18:45:29.020723Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:29.027316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7337, node 1 2025-12-23T18:45:29.118936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:45:29.131537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:29.131557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:29.131566Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:29.131674Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26662 TClient is connected to server localhost:26662 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515529079 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:29.566835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:29.573705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:45:29.591476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:29.668531Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:29.724838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:29.872468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:29.931553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:32.039066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126439829654298:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:32.039217Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:32.040034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126439829654308:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:32.040102Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:32.334512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:32.365905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:32.395731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:32.427711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:32.462989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:32.500633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:32.554397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:32.598632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:32.665893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126439829655177:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:32.666014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:32.666017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126439829655182:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:32.666143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126439829655184:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:32.666190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:32.669903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sche ... net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:46:04.109089Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:46:04.109232Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28636 2025-12-23T18:46:04.511782Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515564065 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:46:04.604566Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:46:04.652368Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:04.736225Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:46:04.768601Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:46:04.776015Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:46:04.934122Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:05.037114Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:07.653612Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126588750767709:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.653735Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.654055Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126588750767719:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.654116Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:07.743603Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:07.813582Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:07.894016Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:07.964511Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.036677Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.131571Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.210180Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.307669Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:08.478006Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126593045736215:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.478091Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126593045736220:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.478103Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.478313Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126593045736222:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.478356Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:08.482923Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:08.504537Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126593045736223:2432], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:46:08.588199Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126593045736294:4937] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:08.681848Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126571570896179:2159];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:08.681931Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:08.691559Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7587126572671241199:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:08.691648Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:18.902402Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:46:18.902432Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] >> TNodeBrokerTest::Test1001NodesSubscribers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> Channels20::IcChannelTrivial 2025-12-23 18:46:24,116 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-12-23 18:46:24,413 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 1220337 58.6M 58.1M 32.5M test_tool run_ut @/home/runner/.ya/build/build_root/8ptz/003149/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args 1220902 1.8G 1.5G 1.3G └─ ydb-library-yql-dq-runtime-ut --trace-path-append /home/runner/.ya/build/build_root/8ptz/003149/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/ch Test command err: Trying to start YDB, gRPC: 7883, MsgBus: 6077 2025-12-23T18:36:26.272626Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587124094488231115:2130];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:26.272670Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003149/r3tmp/tmpdw4QuV/pdisk_1.dat 2025-12-23T18:36:26.761165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:36:26.833502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:36:26.833643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:36:26.846946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:36:26.851913Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:36:26.856327Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587124094488231023:2081] 1766514986253389 != 1766514986253392 TServer::EnableGrpc on GrpcPort 7883, node 1 2025-12-23T18:36:26.995009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:36:26.995034Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:36:26.995041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:36:26.995106Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:36:27.042846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6077 2025-12-23T18:36:27.311092Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6077 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766514986894 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:36:27.627209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:36:27.643470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:36:27.658912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:27.852802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:28.091138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:28.198990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:30.287581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124111668102086:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:30.287686Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:30.291166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124111668102096:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:30.291254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:30.625951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:30.687121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:30.729645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:30.759993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:30.822730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:30.901447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:30.937803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:31.011429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:31.117660Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124115963070263:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:31.117719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:31.117980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124115963070268:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:31.117980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587124115963070269:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:31.118014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:31.121700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemes ... f is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:43.921675Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:44.040051Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:36:46.545584Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124181719111901:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:46.545707Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:46.546056Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124181719111911:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:46.546115Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:46.660420Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:46.743138Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:46.820244Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:46.928999Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:47.019885Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:47.133575Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:47.232608Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:47.344301Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:36:47.395712Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587124164539240233:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:36:47.425938Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:36:47.564406Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124186014080408:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:47.564513Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:47.565258Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124186014080414:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:47.565390Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587124186014080413:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:47.565498Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:36:47.572535Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:36:47.603890Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587124186014080417:2437], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:36:47.708616Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587124186014080502:4979] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:36:49.776237Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:948: DATA UNDELIVERED, UNKNOWN ActorId to NodeId=4, NodeActorId=[3:7587124194604015427:2482], PeerActorId=[4:7587124194592798179:2364] 2025-12-23T18:36:49.776272Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:427: Descriptor.GenMajor=1, expected GenMajor=2 by Reconciliation 2025-12-23T18:36:49.876452Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x2, to NodeId=4, NodeActorId=[3:7587124194604015427:2482], Gen=2.1, Next Delay=0.100000s 2025-12-23T18:36:50.076580Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x3, to NodeId=4, NodeActorId=[3:7587124194604015427:2482], Gen=2.1, Next Delay=0.200000s 2025-12-23T18:36:50.480749Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x4, to NodeId=4, NodeActorId=[3:7587124194604015427:2482], Gen=2.1, Next Delay=0.400000s 2025-12-23T18:36:51.282676Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x5, to NodeId=4, NodeActorId=[3:7587124194604015427:2482], Gen=2.1, Next Delay=0.800000s 2025-12-23T18:36:52.884581Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x6, to NodeId=4, NodeActorId=[3:7587124194604015427:2482], Gen=2.1, Next Delay=1.600000s 2025-12-23T18:36:56.084891Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x7, to NodeId=4, NodeActorId=[3:7587124194604015427:2482], Gen=2.1, Next Delay=3.200000s 2025-12-23T18:36:57.694981Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:36:57.695031Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:37:02.485488Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x8, to NodeId=4, NodeActorId=[3:7587124194604015427:2482], Gen=2.1, Next Delay=6.400000s 2025-12-23T18:37:15.290340Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:1483: NODE RECONCILIATION x9, to NodeId=4, NodeActorId=[3:7587124194604015427:2482], Gen=3.1, Next Delay=12.800000s 2025-12-23T18:37:15.290413Z node 3 :KQP_CHANNELS WARN: dq_channel_service.cpp:427: Descriptor.GenMajor=2, expected GenMajor=3 by Reconciliation Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/003149/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main 
res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/003149/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |95.7%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> PgCatalog::PgType [GOOD] >> PgCatalog::InformationSchema |95.7%| [TA] $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpStats::SysViewCancelled [GOOD] >> KqpLimits::DatashardReplySize [GOOD] >> KqpLimits::ManyPartitions >> KqpTypes::DyNumberCompare |95.7%| [TA] {RESULT} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 15973, MsgBus: 4522 2025-12-23T18:45:41.782442Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126478075077974:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:41.782542Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00338b/r3tmp/tmp4vMG6L/pdisk_1.dat 2025-12-23T18:45:42.056464Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:42.063793Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:42.063918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:42.127493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:42.178796Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15973, node 1 2025-12-23T18:45:42.259288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:42.259311Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:42.259318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:42.259394Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:42.259617Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions TClient is connected to server localhost:4522 TClient is connected to server localhost:4522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515542197 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:42.793905Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:42.795890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:42.802440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:42.812433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:42.988887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:43.179560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:43.268305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
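Earlier in this log the KQP_CHANNELS reconciliation warnings (dq_channel_service.cpp:1483) back off with a doubling "Next Delay": 0.1s, 0.2s, 0.4s, ... up to 12.8s across attempts x2..x9. A minimal sketch of that doubling delay schedule follows; the function name, starting delay, and cap are illustrative assumptions and are not taken from the YDB sources.

    import itertools

    def backoff_delays(initial=0.1, factor=2.0, cap=12.8):
        """Yield retry delays that double on every attempt, capped at `cap` seconds.

        Mirrors the "Next Delay" sequence seen in the reconciliation warnings
        (0.1, 0.2, 0.4, ... 12.8); initial/factor/cap here are assumptions, not
        values read from dq_channel_service.cpp.
        """
        delay = initial
        while True:
            yield min(delay, cap)
            delay = min(delay * factor, cap)

    # First eight delays: 0.1, 0.2, 0.4, 0.8, 1.6, 3.2, 6.4, 12.8
    print(list(itertools.islice(backoff_delays(), 8)))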
2025-12-23T18:45:45.504072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126495254948975:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:45.504213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:45.504823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126495254948985:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:45.504890Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:45.874616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:45.907521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:45.953371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:45.985907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:46.032543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:46.075604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:46.108571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:46.150524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:46.222798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126499549917160:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.222919Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.223076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126499549917165:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.223546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126499549917167:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.223613Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:46.226626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:46.237812Z node 1 :KQP_WORKLOAD_SE ... 4037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:46:18.452249Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:46:18.478832Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16752, node 5 2025-12-23T18:46:18.559443Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:46:18.559469Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:46:18.559478Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:46:18.559575Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:46:18.734863Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15894 TClient is connected to server localhost:15894 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515578534 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:46:19.264247Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
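The WorkloadService warnings around this point repeat one create-if-missing flow: fetching the `default` resource pool returns NOT_FOUND, a creator actor proposes the create operation, a concurrent attempt gets "path exist, request accepts it", and a scheduled retry double-checks the result. A rough sketch of that idempotent ensure-exists pattern is below, using a hypothetical in-memory client; the `fetch_pool`/`create_pool` names and the AlreadyExists handling are illustrative, not the actual KQP workload service API.

    import time

    class NotFound(Exception): pass
    class AlreadyExists(Exception): pass

    class FakeSchemeClient:
        """In-memory stand-in for a scheme/pool client; illustrative only."""
        def __init__(self):
            self._pools = set()
        def fetch_pool(self, name):
            if name not in self._pools:
                raise NotFound(name)
            return name
        def create_pool(self, name):
            if name in self._pools:
                # Corresponds to the log's "path exist, request accepts it" outcome.
                raise AlreadyExists(name)
            self._pools.add(name)

    def ensure_pool(client, name="default", retries=3, delay=0.05):
        """Fetch the pool; on NOT_FOUND create it, treat AlreadyExists as success,
        then re-fetch to double-check (as the scheduled retry in the log does)."""
        for _ in range(retries):
            try:
                return client.fetch_pool(name)
            except NotFound:
                try:
                    client.create_pool(name)
                except AlreadyExists:
                    pass  # created concurrently by someone else; that is fine
                time.sleep(delay)  # scheduled retry before the double-check
        return client.fetch_pool(name)

    client = FakeSchemeClient()
    print(ensure_pool(client))  # -> "default"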
2025-12-23T18:46:19.270230Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:46:19.271398Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:46:19.285922Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:19.379178Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:19.614614Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:19.722612Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:23.274868Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126635436864434:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:23.274981Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:23.380892Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126656911702550:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:23.380983Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:23.381417Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126656911702559:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:23.381463Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:23.534261Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:23.598024Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:23.654530Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:23.730882Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:23.779516Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:23.837177Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:23.899953Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:23.995279Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:24.123547Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126661206670728:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:24.123658Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:24.124025Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126661206670733:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:24.124062Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126661206670734:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:24.124096Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:24.129585Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:24.147928Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126661206670737:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:46:24.220214Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126661206670788:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test1001NodesSubscribers [GOOD] Test command err: 2025-12-23T18:44:17.144640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:17.144714Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> KqpLimits::StreamWrite-Allowed [GOOD] >> KqpLimits::TooBigColumn+useSink >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] >> KqpPg::TempTablesSessionsIsolation [FAIL] >> KqpPg::TempTablesDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 31062, MsgBus: 2768 2025-12-23T18:45:03.367176Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126315999867644:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:03.367986Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033d2/r3tmp/tmpzNDZRg/pdisk_1.dat 2025-12-23T18:45:03.577600Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:03.584354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:03.584463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:03.645600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:03.689801Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126315999867539:2081] 1766515503350829 != 1766515503350832 2025-12-23T18:45:03.692260Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31062, node 1 2025-12-23T18:45:03.771013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-12-23T18:45:03.771044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:03.771070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:03.771175Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:03.816270Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2768 TClient is connected to server localhost:2768 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515503704 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:04.215104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:04.231295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:04.391227Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:06.610518Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126328884770729:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:06.610575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126328884770721:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:06.610680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:06.614082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:06.619014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126328884770769:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:06.619129Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:06.624403Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126328884770739:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:45:06.721188Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126328884770802:2817] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:07.091556Z node 1 :KQP_COMPUTE WARN: log.cpp:841: fline=kqp_compute_actor_factory.cpp:34;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=1;memory=1048576; 2025-12-23T18:45:07.091585Z node 1 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976710661, task: 1. [Mem] memory 1048576 NOT granted 2025-12-23T18:45:07.099191Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [1:7587126333179738129:2353], TxId: 281474976710661, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68eyex7jyya9mztegka08n. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTUxOTM5ODctZDlmMWRlMzYtNWE4NTA2OTMtYWIwMzY3ZTc=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-ou4bsyjxme, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2025-12-23T18:45:07.065958Z }, code: 2029 }. 2025-12-23T18:45:07.115578Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037894 Cancelled read: {[1:7587126333179738131:2353], 6} 2025-12-23T18:45:07.115705Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037891 Cancelled read: {[1:7587126333179738131:2353], 3} 2025-12-23T18:45:07.115791Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037895 Cancelled read: {[1:7587126333179738131:2353], 7} 2025-12-23T18:45:07.115867Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037890 Cancelled read: {[1:7587126333179738131:2353], 2} 2025-12-23T18:45:07.115977Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037892 Cancelled read: {[1:7587126333179738131:2353], 4} 2025-12-23T18:45:07.116050Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037888 Cancelled read: {[1:7587126333179738131:2353], 0} 2025-12-23T18:45:07.116125Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037893 Cancelled read: {[1:7587126333179738131:2353], 5} 2025-12-23T18:45:07.116185Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037889 Cancelled read: {[1:7587126333179738131:2353], 1} 2025-12-23T18:45:07.130509Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=MTUxOTM5ODctZDlmMWRlMzYtNWE4NTA2OTMtYWIwMzY3ZTc=, ActorId: [1:7587126328884770714:2353], ActorState: ExecuteState, TraceId: 01kd68eyex7jyya9mztegka08n, Create QueryResponse for error on request, msg: , status: OVERLOADED, issues: { message: "Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-ou4bsyjxme, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2025-12-23T18:45:07.065958Z }\n" issue_code: 2029 severity: 1 }
: Error: Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-ou4bsyjxme, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2025-12-23T18:45:07.065958Z } , code: 2029 query_phases { duration_us: 64727 table_access { name: "/Root/LargeTable" partitions_count: 8 } cpu_time_us: 13517 affected_shards: 8 } compilation { duration_us: 340810 cpu_time_us: 334259 } process_cpu_time_us: 644 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"LargeTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\",\"Data\",\"DataText\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\ ... 3T18:46:13.087547Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:46:13.149921Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13372 2025-12-23T18:46:13.716610Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13372 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515572983 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:46:13.881713Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
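The OVERLOADED failure earlier in this block reports "Mkql memory limit exceeded, allocated by task 1: 1048586" together with "tx initially granted memory: 10B" and a failed 1MiB allocation: 10 + 1048576 = 1048586 bytes, i.e. the request pushed the task past what the memory manager would grant. A minimal sketch of that kind of per-task quota check follows, assuming a fixed grant limit; the class and the limit value are illustrative, not the KQP/Mkql resource manager.

    class MemoryLimitExceeded(Exception):
        pass

    class TaskMemoryQuota:
        """Track per-task memory grants against a hard limit (sketch only)."""
        def __init__(self, limit_bytes):
            self.limit_bytes = limit_bytes
            self.allocated = 0
        def allocate(self, size_bytes):
            requested_total = self.allocated + size_bytes
            if requested_total > self.limit_bytes:
                raise MemoryLimitExceeded(
                    f"memory limit exceeded, allocated by task: {requested_total}")
            self.allocated = requested_total

    # Mirrors the numbers in the report above: 10 B initially granted, then a
    # 1 MiB (1048576 B) allocation brings the requested total to 1048586 B.
    quota = TaskMemoryQuota(limit_bytes=10)   # limit value is an assumption
    quota.allocate(10)                        # initial grant
    try:
        quota.allocate(1 << 20)               # 1048576 B
    except MemoryLimitExceeded as err:
        print(err)                            # ... allocated by task: 1048586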
2025-12-23T18:46:13.889901Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:46:13.914095Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:46:13.999669Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:14.240747Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:14.350126Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:17.712437Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126610465558581:2200];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:17.712511Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:17.838201Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126631940396767:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:17.838308Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:17.838721Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126631940396776:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:17.838781Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:17.949963Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:17.994152Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:18.029731Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:18.072892Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:18.116805Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:18.185378Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:18.248611Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:18.333513Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:18.440500Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126636235364945:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:18.440588Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:18.441074Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126636235364950:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:18.441138Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126636235364951:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:18.441185Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:18.483844Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:18.496794Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126636235364954:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:46:18.600547Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126636235365005:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:20.784748Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:25.837863Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=YWY3NjI4Yy1iNWViNGU4My1hNGQyYjQ5Ni03MzE3MDk0Zg==, ActorId: [5:7587126644825299932:2536], ActorState: ExecuteState, TraceId: 01kd68hc6n6ehvsqfaj3yct52r, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Query result size limit exceeded. (51202599 > 50331648)" issue_code: 2013 severity: 1 } |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] >> TNodeBrokerTest::Test1000NodesSubscribers [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test1000NodesSubscribers [GOOD] Test command err: 2025-12-23T18:44:22.536669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:22.536716Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] >> KqpLimits::ManyPartitions [GOOD] >> KqpLimits::ManyPartitionsSorting >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> PgCatalog::InformationSchema [GOOD] >> PgCatalog::CheckSetConfig >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] >> KqpTypes::DyNumberCompare [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CompoundKeyRange [GOOD] Test command err: 2025-12-23T18:45:10.259161Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126343606288301:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:10.259363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033cd/r3tmp/tmpvxu2Pl/pdisk_1.dat 2025-12-23T18:45:10.495407Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:10.534488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:10.534566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:10.542269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:10.598197Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13438, node 1 2025-12-23T18:45:10.705942Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:10.705968Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:10.705984Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:10.706107Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:10.714114Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:45:10.746814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) TClient is connected to server localhost:15872 2025-12-23T18:45:10.962010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:45:10.980656Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587126343885033227:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:10.980731Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:45:10.991436Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:45:10.991699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:10.991798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:10.994211Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:45:10.995151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:10.996684Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:45:11.028250Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:11.028341Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:11.028643Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:45:11.028825Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:45:11.029030Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:45:11.029154Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:45:11.029276Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:45:11.029386Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:45:11.029505Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:45:11.029639Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:45:11.029735Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:45:11.034385Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:11.103591Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:45:11.103641Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:45:11.146983Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:45:11.147068Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:45:11.147291Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:45:11.147334Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:45:11.147386Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:45:11.147418Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:45:11.147443Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:45:11.147473Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:45:11.148066Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:7587126348180001070:2294] 2025-12-23T18:45:11.150351Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:45:11.150467Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:45:11.150936Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:11.159275Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:7587126348180001122:2298] Owner: [2:7587126348180001115:2296]. Describe result: PathErrorUnknown 2025-12-23T18:45:11.159317Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:7587126348180001122:2298] Owner: [2:7587126348180001115:2296]. 
Creating table 2025-12-23T18:45:11.159379Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:7587126348180001122:2298] Owner: [2:7587126348180001115:2296]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:45:11.161076Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:45:11.166014Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:7587126348180001164:2359], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:45:11.168436Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587126348180001138:2346] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-12-23T18:45:11.174678Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:7587126348180001122:2298] Owner: [2:7587126348180001115:2296]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-12-23T18:45:11.176618Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:45:11.176663Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:7587126348180001206:2318], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:45:11.199450Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:7587126348180001256:2414] 2025-12-23T18:45:11.201596Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7587126348180001256:2414], schemeshard id = 72075186224037897 2025-12-23T18:45:11.265343Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:7587126348180001122:2298] Owner: [2:7587126348180001115:2296]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:45:11.267256Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:7587126348180001415:2562], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:45:11.270988Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:11.274841Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:11.288474Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:7587126348180001122:2298] Owner: [2:7587126348180001115:2296]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:45:11.288528Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. 
SelfId: [2:7587126348180001122:2298] Owner: [2:758 ... Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515567600 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:46:08.178602Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:46:08.201183Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:08.283221Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:08.419997Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:46:08.480020Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:08.571861Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:11.899323Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7587126608731051889:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:11.899435Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:11.899880Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7587126608731051899:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:11.899930Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:12.007800Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:12.065077Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:12.112498Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:12.154684Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:12.207771Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:12.259233Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:12.320353Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:12.349277Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7587126591551180937:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:12.349343Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:12.424856Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:12.544509Z node 7 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7587126613026020066:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:12.544617Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:12.545061Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7587126613026020071:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:12.545107Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7587126613026020072:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:12.545146Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:12.550217Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:12.566862Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7587126613026020075:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:46:12.654278Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7587126613026020126:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Scan":"Parallel","ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Logs","E-Rows":"1","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"0"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"lookup_by":["App (new_app_1)","Ts (49)"],"columns":["App","Host","Message","Ts"],"scan_by":["Host (null, xyz)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"E-Size":"0","Name":"TableRangeScan","E-Rows":"1","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] >> KqpLimits::TooBigColumn+useSink [GOOD] >> KqpLimits::ReadsetCountLimit >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> KqpPg::TempTablesDrop [FAIL] >> KqpPg::TempTablesWithCache >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds >> TNodeBrokerTest::Test999NodesSubscribers [GOOD] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> KqpLimits::ManyPartitionsSorting [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test999NodesSubscribers [GOOD] Test command err: 2025-12-23T18:44:28.168245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:44:28.168319Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSorting [GOOD] Test command err: Trying to start YDB, gRPC: 62621, MsgBus: 61907 2025-12-23T18:45:52.568742Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126525185295228:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:52.571596Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003358/r3tmp/tmpMBrYWz/pdisk_1.dat 2025-12-23T18:45:52.860327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:52.860447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:52.898352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:52.943917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:52.944167Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62621, node 1 2025-12-23T18:45:53.044954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:53.044977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:53.044985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:53.045113Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:53.229720Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61907 TClient is connected to server localhost:61907 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:45:53.620928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515553012 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:53.635625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:53.649348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:53.657219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:53.797334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:53.938432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:54.001223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:55.918913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126538070198944:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:55.919055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:55.919488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126538070198954:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:55.919537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:56.203548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:56.239507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:56.270033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:56.301309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:56.332368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:56.364281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:56.400316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:56.440533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:56.530177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126542365167120:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:56.530274Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:56.530375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126542365167125:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:56.530525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126542365167127:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:56.530563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:56.534307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:56.545524Z node 1 :KQP_WORKLOAD ... "Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"Data\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Table\":[{\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"ReadBytes\":{\"Count\":4,\"Sum\":8800,\"Max\":2208,\"Min\":2192}}],\"OutputRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"PhysicalStageId\":0,\"FinishedTasks\":4,\"Introspections\":[\"4 tasks from DSScanMinimalThreads setting\"],\"IngressRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"DurationUs\":{\"Count\":4,\"Sum\":405000,\"Max\":121000,\"Min\":88000},\"Mkql\":{},\"MaxMemoryUsage\":{\"Count\":4,\"Sum\":4194304,\"Max\":1048576,\"Min\":1048576,\"History\":[0,1048576,1,2097152,4,4194304,130,4194304]},\"BaseTimeMs\":1766515604544,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":421,\"Max\":127,\"Min\":89},\"ActiveMessageMs\":{\"Count\":4,\"Max\":127,\"Min\":8},\"FirstMessageMs\":{\"Count\":4,\"Sum\":32,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":4,\"Sum\":8168,\"Max\":2075,\"Min\":2004,\"History\":[20,520,23,1848,26,2346,54,3010,57,4089,69,4504,77,4978,81,5559,93,5974,101,5974,105,6515,110,6757,111,7338,130,8168]},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":389000,\"Max\":119000,\"Min\":81000}},\"Name\":\"4\",\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":421,\"Max\":127,\"Min\":89},\"Chunks\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":420,\"Max\":127,\"Min\":89},\"FirstMessageMs\":{\"Count\":4,\"Sum\":32,\"Max\":8,\"Min\":8},\"ActiveMessageMs\":{\"Count\":4,\"Max\":127,\"Min\":8},\"PauseMessageMs\":{\"Count\":4,\"Sum\":4,\"Max\":2,\"Min\":0},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":389000,\"Max\":119000,\"Min\":81000},\"WaitTimeUs\":{\"Count\":4,\"Sum\":415180,\"Max\":125555,\"Min\":88548,\"History\":[20,19780,23,63354,26,86735,54,113596,57,176059,69,197417,77,218742,81,241564,93,262113,101,262113,105,303638,110,311140,111,398390,130,415180]},\"WaitPeriods\":{\"Count\":4,\"Sum\":10,\"Max\":5,\"Min\":1}}}],\"CpuTimeUs\":{\"Count\":4,\"Sum\":8828,\"Max\":2784,\"Min\":1994,\"History\":[0,627,1,709,4,828,20,1544,23,2773,26,3231,54,3849,57,4870,69,5259,77,5695,81,6199,93,6625,101,6633,105,7220,110,7487,111,8039,130,8828]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":420,\"Max\":127,\"Min\":89},\"ActiveMessageMs\":{\"Count\":4,\"Max\":127,\"Min\":8},\"FirstMessageMs\":{\"Count\":4,\"Sum\":32,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768,\"History\":[20,2496,23,8128,26,10240,54,13056,57,17632,69,19392,77,21504,81,23968,93,25728,101,25728,105,281
92,110,29216,111,31680,130,35200]},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":388000,\"Max\":119000,\"Min\":81000}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":420,\"Max\":127,\"Min\":89},\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":420,\"Max\":127,\"Min\":89},\"FirstMessageMs\":{\"Count\":4,\"Sum\":31,\"Max\":8,\"Min\":7},\"ActiveMessageMs\":{\"Count\":4,\"Max\":127,\"Min\":7},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768,\"History\":[20,2496,23,8128,26,10240,54,13056,57,17632,69,19392,77,21504,81,23968,93,25728,101,25728,105,28192,110,29216,111,31680,130,35200]},\"PauseMessageMs\":{\"Count\":4,\"Sum\":4,\"Max\":2,\"Min\":0},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":389000,\"Max\":119000,\"Min\":82000},\"WaitTimeUs\":{\"Count\":4,\"Sum\":415337,\"Max\":125476,\"Min\":88662,\"History\":[20,19690,23,63191,26,86583,54,113508,57,175939,69,197331,77,218657,81,241506,93,262161,101,262161,105,303966,110,311284,111,398527,130,415337]},\"WaitPeriods\":{\"Count\":4,\"Sum\":11,\"Max\":4,\"Min\":2}}}],\"StageDurationUs\":122000,\"WaitInputTimeUs\":{\"Count\":4,\"Sum\":373296,\"Max\":114985,\"Min\":77642,\"History\":[20,11469,23,39222,26,55875,54,81721,57,142170,69,162751,77,183222,81,205053,93,224799,101,224799,105,265176,110,272023,111,358135,130,373296]},\"OutputBytes\":{\"Count\":4,\"Sum\":8168,\"Max\":2075,\"Min\":2004},\"UpdateTimeMs\":129,\"Tasks\":4}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Key (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"OutputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"Introspections\":[\"1 minimum tasks for 
compute\"],\"DurationUs\":{\"Count\":1,\"Sum\":122000,\"Max\":122000,\"Min\":122000},\"Mkql\":{},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[7,1048576,131,1048576]},\"BaseTimeMs\":1766515604544,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":37,\"Max\":37,\"Min\":37},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":129,\"Max\":129,\"Min\":129},\"ActiveMessageMs\":{\"Count\":1,\"Max\":129,\"Min\":8},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":7793,\"Max\":7793,\"Min\":7793,\"History\":[27,686,55,1184,69,1599,94,3863,114,5191,131,7793]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":121000,\"Max\":121000,\"Min\":121000}},\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":129,\"Max\":129,\"Min\":129},\"Chunks\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":127,\"Max\":127,\"Min\":127},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"ActiveMessageMs\":{\"Count\":1,\"Max\":129,\"Min\":8},\"PauseMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":121000,\"Max\":121000,\"Min\":121000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":116868,\"Max\":116868,\"Min\":116868,\"History\":[27,20777,55,41303,69,62575,94,83320,114,102974,131,116868]},\"WaitPeriods\":{\"Count\":1,\"Sum\":6,\"Max\":6,\"Min\":6},\"WaitMessageMs\":{\"Count\":1,\"Max\":127,\"Min\":8}}}],\"CpuTimeUs\":{\"Count\":1,\"Sum\":13982,\"Max\":13982,\"Min\":13982,\"History\":[7,4156,27,5593,55,6399,69,7069,94,10312,114,11634,131,13982]},\"StageDurationUs\":122000,\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResultBytes\":{\"Count\":1,\"Sum\":7793,\"Max\":7793,\"Min\":7793},\"OutputBytes\":{\"Count\":1,\"Sum\":7793,\"Max\":7793,\"Min\":7793},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":43,\"Max\":43,\"Min\":43},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":128,\"Max\":128,\"Min\":128},\"ActiveMessageMs\":{\"Count\":1,\"Max\":128,\"Min\":8},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[27,935,55,1433,69,1848,94,4172,114,5500,131,8168]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":120000,\"Max\":120000,\"Min\":120000}},\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":127,\"Max\":127,\"Min\":127},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":127,\"Max\":127,\"Min\":127},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"ActiveMessageMs\":{\"Count\":1,\"Max\":127,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[27,2761,55,4421,69,5664,94,6782,114,7421,131,8168]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":6,\"Max\":6,\"Min\":6},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":119000,\"Max\":119000,\"Min\":119000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":27967,\"Max\":27967,\"Min\":27967,\"History\":[27,4810,55,9741,69,14888,94,19904,114,24755,131,27967]},\"WaitPeriods\":{\"Count\":1,\"Sum\":38,\"Max\":38,\"Min\":38},\"WaitMessageMs\":{\"Count\":1,\"Max\":127,\"Min\":6}}}],\"UpdateTimeMs\":130,\"Tasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100}}}],\"Node 
Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":274427,\"CpuTimeUs\":260450},\"ProcessCpuTimeUs\":356,\"TotalDurationUs\":559699,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":133219},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"Data\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:38\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'368) \'(\'\"_id\" \'\"b721fd32-9a90bcb3-c9d5b250-df5ce61d\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'380) \'(\'\"_id\" \'\"ebbe23d4-9f0be957-a7cb6fbc-18fc92cb\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 559699 total_cpu_time_us: 400878 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/ManyShardsTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":38},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Data\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1766515604\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"296f040b-661f8d95-3bf086f2-60e698c7\",\"version\":\"1.0\"}" |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> 
test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] >> PgCatalog::CheckSetConfig [FAIL] >> PgCatalog::PgDatabase+useSink |95.7%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] |95.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-fifo] >> TSentinelTests::PDiskRackGuardHalfRack >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] >> KqpPg::TempTablesWithCache [FAIL] >> KqpPg::TableDeleteWhere+useSink >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] >> KqpLimits::ReadsetCountLimit [GOOD] >> KqpLimits::ReplySizeExceeded >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> KqpLimits::QueryExecTimeout [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: Trying to start YDB, gRPC: 10828, MsgBus: 13967 ... waiting for SysViewsRoster update finished 2025-12-23T18:45:01.612838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:45:01.718751Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:45:01.728051Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:45:01.728402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:45:01.728488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:45:01.985643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:01.985750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:02.045627Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515499231826 != 1766515499231830 2025-12-23T18:45:02.061095Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:02.105998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:02.271826Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033d4/r3tmp/tmp0ETVCZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10828, node 1 2025-12-23T18:45:02.590619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:02.590688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:02.590729Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:02.591238Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:02.603913Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13967 TClient is connected to server localhost:13967 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:45:02.978311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:03.074319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:03.403104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:03.760404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:04.052556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:04.836534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1908:3515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:04.836724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:04.837613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1982:3534], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:04.837700Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:04.867023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:05.059612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:05.338208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:05.597996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:05.826869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:06.096971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:06.378651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:06.674595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:06.990199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:06.990314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:06.990773Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2798:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:06.990847Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:06.990899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2801:4182], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't hav ... e_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:46:42.112127Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:46:42.118949Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587126734834636835:2081] 1766515601787115 != 1766515601787118 TServer::EnableGrpc on GrpcPort 6270, node 4 2025-12-23T18:46:42.146839Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:46:42.167671Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:46:42.407338Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:46:42.407367Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:46:42.407376Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:46:42.407455Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1805 2025-12-23T18:46:42.793609Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515602306 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:46:43.398335Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:46:43.422223Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:46:43.606304Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:43.948732Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:44.052315Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:46.804314Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126734834637059:2253];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:46.804394Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:48.151273Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126764899409784:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:48.151389Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:48.151958Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126764899409793:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:48.152015Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:48.271417Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:48.332194Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:48.382603Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:48.509464Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:48.561713Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:48.615802Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:48.683374Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:48.770881Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:48.895461Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126764899410672:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:48.895574Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:48.896125Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126764899410679:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:48.896208Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126764899410680:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:48.896528Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:48.901806Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:48.920922Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587126764899410683:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:46:48.998283Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126764899410734:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> PgCatalog::PgDatabase+useSink [GOOD] >> PgCatalog::PgDatabase-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QueryExecTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 8580, MsgBus: 23501 2025-12-23T18:45:05.149635Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126325794071478:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:05.149819Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033d0/r3tmp/tmp1rwSyE/pdisk_1.dat 2025-12-23T18:45:05.376134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:05.376239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:05.387827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:05.424543Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:05.446278Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:05.448175Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126325794071450:2081] 1766515505147801 != 1766515505147804 TServer::EnableGrpc on GrpcPort 8580, node 1 2025-12-23T18:45:05.536966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:05.536992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:05.536999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:05.537093Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:05.692613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23501 TClient is connected to server localhost:23501 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515505489 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:06.016006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:06.026403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:06.140076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:06.254780Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:06.305199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:06.365290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:08.020441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126338678975213:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.020551Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.020926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126338678975223:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.021012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.375845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:08.403281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:08.430547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:08.457407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:08.484181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:08.515845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:08.573167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:08.613269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:08.678352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126338678976091:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.678450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.678582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126338678976096:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.678628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126338678976098:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.678650Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:08.682068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:08.691991Z node 1 : ... tExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:46:12.145436Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:46:12.145463Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:46:12.145479Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:46:12.145589Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23567 2025-12-23T18:46:12.780901Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:23567 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515572038 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:46:13.040478Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:46:13.059953Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:46:13.167610Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:13.436190Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:13.544739Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:16.704621Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126630110167826:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:16.704737Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:16.714995Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126630110167836:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:16.715151Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:16.795622Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:16.804368Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126608635329530:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:16.804642Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:16.857107Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:16.953588Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:17.016374Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:17.111881Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:17.186697Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:17.261459Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:17.330396Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:17.452809Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126634405136012:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:17.452972Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:17.454615Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126634405136017:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:17.454686Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126634405136018:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:17.454895Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:17.460361Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:17.486156Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126634405136021:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:46:17.570188Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126634405136072:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:46:26.924327Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:46:26.924363Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:46:38.098146Z node 5 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=5&id=N2NjMWQ2MzQtODgzNjU1YzMtMTVlYmYxOC1mZjhkODg2ZA== }
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=5&id=N2NjMWQ2MzQtODgzNjU1YzMtMTVlYmYxOC1mZjhkODg2ZA== |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TSentinelTests::PDiskFaultyState >> TSentinelTests::PDiskPileGuardHalfPile >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-fifo] [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] >> TSentinelTests::PDiskPileGuardHalfPile [GOOD] >> TSentinelTests::PDiskPileGuardWithoutBridgeMode |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup >> TSentinelTests::PDiskFaultyState [GOOD] >> TSentinelTests::PDiskFaultyGuard >> PgCatalog::PgDatabase-useSink [GOOD] >> PgCatalog::PgRoles >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] |95.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TSentinelTests::PDiskPileGuardWithoutBridgeMode [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] >> KqpLimits::ReplySizeExceeded [GOOD] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> KqpBatchDelete::Large_2 |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] >> TSentinelTests::PDiskFaultyGuard [GOOD] >> TSentinelTests::PDiskFaultyGuardWithForced |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskPileGuardWithoutBridgeMode [GOOD] Test command err: 2025-12-23T18:47:00.265996Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2025-12-23T18:47:00.266071Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2025-12-23T18:47:00.266150Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:47:00.266202Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2025-12-23T18:47:00.266280Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-12-23T18:47:00.266420Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-12-23T18:47:00.267482Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { 
Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-12-23T18:47:00.274476Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" 
Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } 
VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
{ PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2025-12-23T18:47:04.899483Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2025-12-23T18:47:04.899647Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2025-12-23T18:47:04.899709Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:47:04.910456Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:47:04.910534Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2025-12-23T18:47:04.910654Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2025-12-23T18:47:04.910707Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-12-23T18:47:04.910772Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-12-23T18:47:04.910843Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-12-23T18:47:04.910892Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-12-23T18:47:04.910949Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-12-23T18:47:04.910981Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-12-23T18:47:04.911012Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-12-23T18:47:04.911463Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle 
TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2025-12-23T18:47:04.912053Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2025-12-23T18:47:04.912285Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2025-12-23T18:47:04.912428Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2025-12-23T18:47:04.912580Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: 
"/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2025-12-23T18:47:04.912742Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2025-12-23T18:47:04.912899Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2025-12-23T18:47:04.913058Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2025-12-23T18:47:04.913128Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:47:04.913646Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:56, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:47:04.913720Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:58, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:47:04.913757Z node 9 :CMS NOTICE: 
sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:57, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:47:04.913797Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:59, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:47:04.913842Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 4 2025-12-23T18:47:04.914146Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 4 2025-12-23T18:47:04.914197Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:56 2025-12-23T18:47:04.914236Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:57 2025-12-23T18:47:04.914260Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:58 2025-12-23T18:47:04.914284Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:59 |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] >> KqpPg::TableDeleteAllData-useSink [GOOD] >> KqpPg::PgUpdateCompoundKey+useSink >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> KqpLimits::CancelAfterRwTx+useSink [GOOD] >> KqpLimits::CancelAfterRwTx-useSink >> TSentinelTests::PDiskFaultyGuardWithForced [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 5450, MsgBus: 27459 2025-12-23T18:45:50.056200Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126518940505778:2148];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:50.056330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00336a/r3tmp/tmpzPljUu/pdisk_1.dat 2025-12-23T18:45:50.275614Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:50.277645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:50.277754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-12-23T18:45:50.282335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:50.371898Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:50.374815Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126518940505667:2081] 1766515550036904 != 1766515550036907 TServer::EnableGrpc on GrpcPort 5450, node 1 2025-12-23T18:45:50.385844Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:45:50.457549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:50.457595Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:50.457614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:50.457725Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:50.522845Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27459 TClient is connected to server localhost:27459 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515550485 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:51.018416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:51.029930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:45:51.044467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:51.058913Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:55.054522Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126518940505778:2148];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:55.054665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:45:59.283526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126557595213130:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.283636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.283662Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126557595213137:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.284044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126557595213144:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.284140Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:59.287308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:59.299504Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126557595213145:2447], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:45:59.377763Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126557595213197:3174] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:45:59.784196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:05.304864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:46:05.304897Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 8861, MsgBus: 6477 2025-12-23T18:46:09.126077Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587126599001157377:2250];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:09.126125Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00336a/r3tmp/tmpwmayDd/pdisk_1.dat 2025-12-23T18:46:09.143782Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:46:09.242978Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:46:09.261500Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587126599001157165:2081] 1766515569116410 != 1766515569116413 2025-12-23T18:46:09.269410Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:46:09.269515Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 8861, node 2 2025-12-23T18:46:09.275144Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:46:09.338886Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:46:09.338911Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:46:09.338918Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:46:09.339054Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:46:09.373562Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6477 TClient 
is connected to server localhost:6477 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766 ... ata/script_executions 2025-12-23T18:46:53.611499Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:46:53.611523Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:46:53.611534Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:46:53.611639Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21676 2025-12-23T18:46:54.264925Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515613513 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:46:54.407101Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:46:54.430030Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:46:54.551503Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:54.822613Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:54.934593Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:46:58.148574Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126811440493789:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:58.148704Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:58.150463Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126811440493799:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:58.150563Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:58.223017Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587126789965655487:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:46:58.223113Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:46:58.252248Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:58.316854Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:58.360300Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:58.408839Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:58.463139Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:58.515033Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:58.600099Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:58.678456Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:46:58.815419Z node 5 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126811440494681:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:58.815545Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:58.815639Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126811440494686:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:58.815834Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587126811440494688:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:58.815923Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:46:58.820084Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:46:58.833972Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587126811440494690:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:46:58.926431Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587126811440494741:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:01.378382Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:05.559238Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=OGM0ZTVhNzctZWExYjg5MTgtM2E1N2YwMjktYzRlYjA1M2I=, ActorId: [5:7587126824325396968:2537], ActorState: ExecuteState, TraceId: 01kd68jk4j89kx7sbn28w6nem8, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Query result size limit exceeded. (51202587 > 50331648)" issue_code: 2013 severity: 1 } |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpBatchUpdate::TableWithIndex >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskFaultyGuardWithForced [GOOD] Test command err: 2025-12-23T18:46:58.834920Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.008095s 2025-12-23T18:46:58.867014Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639240 Duration# 0.006606s 2025-12-23T18:46:59.513835Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2025-12-23T18:46:59.513906Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2025-12-23T18:46:59.513991Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:46:59.514035Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2025-12-23T18:46:59.514084Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-12-23T18:46:59.514170Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-12-23T18:46:59.514999Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices 
{ Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-12-23T18:46:59.520729Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } 
PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } 
GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSl ... 
oupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1000 } } Group { GroupId: 13 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1001 } } Group { GroupId: 14 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1002 } } Group { GroupId: 15 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1003 } } Group { GroupId: 16 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1000 } } Group { GroupId: 17 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1001 } } Group { GroupId: 18 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1002 } } Group { GroupId: 19 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1003 } } Group { GroupId: 20 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1000 } } Group { GroupId: 21 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1001 } } Group { GroupId: 22 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1002 } } Group { GroupId: 23 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1003 } } Group { GroupId: 24 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1000 } } Group { GroupId: 25 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1001 } } Group { GroupId: 26 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1002 } } Group { GroupId: 27 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1003 } } Group { GroupId: 28 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1000 } } Group { GroupId: 29 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1001 } } Group { GroupId: 30 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1002 } } Group { GroupId: 31 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1003 } } Group { GroupId: 32 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1000 } } Group { GroupId: 33 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1001 } } Group { GroupId: 34 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1002 } } Group { GroupId: 35 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1003 } } Group { GroupId: 36 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1000 } } Group { GroupId: 37 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1001 } } Group { GroupId: 38 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1002 } } Group { GroupId: 39 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1003 } } Group { GroupId: 40 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1000 } } Group { GroupId: 41 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1001 } } Group { GroupId: 42 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1002 } } Group { GroupId: 43 GroupGeneration: 1 
ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1003 } } Group { GroupId: 44 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1000 } } Group { GroupId: 45 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1001 } } Group { GroupId: 46 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1002 } } Group { GroupId: 47 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1003 } } Group { GroupId: 48 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1000 } } Group { GroupId: 49 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1001 } } Group { GroupId: 50 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1002 } } Group { GroupId: 51 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1003 } } Group { GroupId: 52 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1000 } } Group { GroupId: 53 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1001 } } Group { GroupId: 54 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1002 } } Group { GroupId: 55 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1003 } } Group { GroupId: 56 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1000 } } Group { GroupId: 57 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1001 } } Group { GroupId: 58 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1002 } } Group { GroupId: 59 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1003 } } Group { GroupId: 60 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1000 } } Group { GroupId: 61 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1001 } } Group { GroupId: 62 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1002 } } Group { GroupId: 63 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1003 } } Group { GroupId: 64 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1000 } } Group { GroupId: 65 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1001 } } Group { GroupId: 66 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1002 } } Group { GroupId: 67 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1003 } } Group { GroupId: 68 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1000 } } Group { GroupId: 69 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1001 } } Group { GroupId: 70 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1002 } } Group { GroupId: 71 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1003 } } Group { GroupId: 72 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1000 } } Group { GroupId: 73 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1001 } } Group { GroupId: 74 GroupGeneration: 1 ErasureSpecies: 
"none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1002 } } Group { GroupId: 75 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1003 } } Group { GroupId: 76 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1000 } } Group { GroupId: 77 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1001 } } Group { GroupId: 78 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1002 } } Group { GroupId: 79 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1003 } } } } Success: true 2025-12-23T18:47:07.575451Z node 13 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 0.000000s 2025-12-23T18:47:07.575506Z node 13 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2025-12-23T18:47:07.575637Z node 13 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-12-23T18:47:07.575690Z node 13 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-12-23T18:47:07.576588Z node 13 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 130 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-130.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 131 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-131.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 132 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-132.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 133 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-133.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 134 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-134.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 135 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-135.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 136 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-136.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 137 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-137.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 138 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-138.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 139 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-139.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2025-12-23T18:47:07.576991Z node 13 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 140 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-140.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 141 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-141.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 142 CreateTime: 0 ChangeTime: 0 Path: 
"/14/pdisk-142.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 143 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-143.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 144 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-144.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 145 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-145.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 146 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-146.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 147 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-147.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 148 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-148.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 149 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-149.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2025-12-23T18:47:07.577081Z node 13 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> KqpBatchDelete::TableNotExists |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimpleOnePartition >> TSentinelTests::PDiskRackGuardHalfRack [GOOD] >> TSentinelTests::PDiskRackGuardFullRack |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] >> KqpBatchUpdate::ManyPartitions_1 >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpBatchDelete::HasTxControl >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates >> KqpBatchUpdate::ManyPartitions_3 |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpBatchDelete::ManyPartitions_1 >> 
PgCatalog::PgRoles [GOOD] >> PgCatalog::PgTables >> KqpBatchDelete::TableNotExists [GOOD] >> KqpBatchUpdate::Large_1 |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableNotExists [GOOD] Test command err: Trying to start YDB, gRPC: 19167, MsgBus: 62664 2025-12-23T18:47:09.213031Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126855082186250:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:09.213275Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bfb/r3tmp/tmpHJXNbq/pdisk_1.dat 2025-12-23T18:47:09.510983Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:09.529962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:09.530027Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:09.579631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:09.625916Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126855082186141:2081] 1766515629193814 != 1766515629193817 2025-12-23T18:47:09.632421Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19167, node 1 2025-12-23T18:47:09.703277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:09.726838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:09.726857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:09.726862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:09.727124Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62664 TClient is connected to server localhost:62664 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:10.230824Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515629655 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:10.293607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:10.301226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:12.389757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126867967088916:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:12.389880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:12.390300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126867967088928:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:12.390353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126867967088929:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:12.390409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:12.394777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:12.409066Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126867967088932:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:47:12.489888Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126867967088983:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:12.757515Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126867967089000:2333], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiDeleteTable!
:2:35: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:47:12.757951Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OTQ5YzBhMzgtMzc2NzVhOTMtZTVjN2VlMmMtNTkxZGM2YTk=, ActorId: [1:7587126867967088903:2320], ActorState: ExecuteState, TraceId: 01kd68jt833xxhy8v201j3ymkd, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 35 } message: "At function: KiDeleteTable!" end_position { row: 2 column: 35 } severity: 1 issues { position { row: 2 column: 35 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 35 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:47:12.841628Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126867967089018:2339], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:41: Error: At function: KiDeleteTable!
:3:41: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:47:12.843570Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OTQ5YzBhMzgtMzc2NzVhOTMtZTVjN2VlMmMtNTkxZGM2YTk=, ActorId: [1:7587126867967088903:2320], ActorState: ExecuteState, TraceId: 01kd68jtkv0mxzq0frpcwpchwd, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 41 } message: "At function: KiDeleteTable!" end_position { row: 3 column: 41 } severity: 1 issues { position { row: 3 column: 41 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 41 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-12-23T18:47:02.501260Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126828044308693:2223];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:02.501606Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:47:02.537993Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:47:02.541677Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0034bc/r3tmp/tmp9MZkl5/pdisk_1.dat 2025-12-23T18:47:02.604934Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:47:02.915051Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:02.915181Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:47:02.961251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:03.032651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:03.037498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:03.039744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:03.039809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:03.097657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:03.099168Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:47:03.104224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:03.185717Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:03.201308Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 24038, node 1 2025-12-23T18:47:03.222621Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.005117s 2025-12-23T18:47:03.255172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:03.350084Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0034bc/r3tmp/yandexpKAt5h.tmp 2025-12-23T18:47:03.350119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0034bc/r3tmp/yandexpKAt5h.tmp 2025-12-23T18:47:03.350309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0034bc/r3tmp/yandexpKAt5h.tmp 2025-12-23T18:47:03.350433Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:03.405278Z INFO: TTestServer started on Port 28010 GrpcPort 24038 2025-12-23T18:47:03.503000Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:03.581326Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28010 PQClient connected to localhost:24038 === TenantModeEnabled() = 1 === Init PQ - start server on port 24038 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515623201 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:03.810629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:47:03.810828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:47:03.811016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T18:47:03.811041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T18:47:03.811188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:47:03.811277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:03.813615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T18:47:03.813777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T18:47:03.813926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:47:03.813960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T18:47:03.813976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 
ProgressState no shards to create, do next state 2025-12-23T18:47:03.813988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-12-23T18:47:03.814977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:03.814998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-12-23T18:47:03.815020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:03.819812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:47:03.819882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:47:03.819901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2025-12-23T18:47:03.823875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:47:03.823926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T18:47:03.823950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-12-23T18:47:03.823982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-12-23T18:47:03.828484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:47:03.831341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-12-23T18:47:03.831471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:47:03.834136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766515623880, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T18:47:03.834395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 28 ... 
722813Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:13.722848Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.722863Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:13.722880Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.722888Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:47:13.726351Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:13.726383Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.726397Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:13.726415Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.726425Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:47:13.744713Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:13.744749Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.744762Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:13.744780Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.744791Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:47:13.823181Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:13.823214Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.823226Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:13.823247Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.823258Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:47:13.826764Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:13.826794Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.826806Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 
2025-12-23T18:47:13.826822Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.826832Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:47:13.844949Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:13.844982Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.844995Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:13.845014Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.845025Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:47:13.923690Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:13.923720Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.923733Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:13.923759Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.923770Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:47:13.927143Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:13.927186Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.927203Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:13.927224Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.927235Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:47:13.945734Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:13.945766Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.945782Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:13.945815Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:13.945829Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:47:14.023923Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 
2025-12-23T18:47:14.023956Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:14.023980Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:14.024001Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:14.024013Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:47:14.027405Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:14.027439Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:14.027454Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:14.027472Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:14.027495Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:47:14.045931Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:14.045966Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:14.045980Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:14.046000Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:14.046012Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:47:14.124219Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:14.124260Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:14.124277Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:14.124298Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:14.124314Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2025-12-23T18:47:14.128537Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:14.128573Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:14.128586Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:14.128604Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:14.128615Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2025-12-23T18:47:14.146253Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:47:14.146295Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:14.146310Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:47:14.146327Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:47:14.146337Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2025-12-23T18:47:14.151339Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587126857779908734:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:14.151407Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.7%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> KqpPg::PgUpdateCompoundKey+useSink [GOOD] >> KqpPg::PgUpdateCompoundKey-useSink >> KqpBatchUpdate::Large_3 >> KqpBatchDelete::ManyPartitions_2 >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> KqpBatchUpdate::Large_2 >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates [GOOD] |95.7%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpBatchDelete::HasTxControl [GOOD] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] |95.7%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |95.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 21200, MsgBus: 2523 2025-12-23T18:47:11.166552Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126863532060080:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:11.166607Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bf5/r3tmp/tmpZ51SpM/pdisk_1.dat 2025-12-23T18:47:11.450891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:11.457918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:11.458076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:11.531749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:11.562111Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21200, node 1 2025-12-23T18:47:11.699759Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:11.699779Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:11.699791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:11.699891Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:11.703061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2523 TClient is connected to server localhost:2523 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:12.199184Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515631615 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:12.261178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:12.276834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:12.409544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:12.552301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:12.623737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:14.469723Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126876416963810:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.469842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.470208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126876416963820:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.470329Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.801041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.839574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.878667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.914010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.954298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.987621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.025472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.076158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.154996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126880711931981:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.155089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.155382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126880711931986:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.155382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126880711931987:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.155449Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.160010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:15.175271Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126880711931990:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:15.266571Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126880711932043:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:16.167211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126863532060080:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:16.168714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:16.974300Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126885006899609:2526], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029 2025-12-23T18:47:16.974655Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MmQ2MjdmMmUtNzdjZGM2ZDItODFkZGMzZWItZDlhMzYzYjA=, ActorId: [1:7587126885006899600:2520], ActorState: ExecuteState, TraceId: 01kd68jynkc5rdr9z8z883xaq5, ReplyQueryCompileError, status: PRECONDITION_FAILED, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }, remove tx with tx_id: |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::HasTxControl [GOOD] Test command err: Trying to start YDB, gRPC: 5585, MsgBus: 8637 2025-12-23T18:47:11.531494Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126866915616708:2169];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:11.531754Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bf4/r3tmp/tmph3dVHq/pdisk_1.dat 2025-12-23T18:47:11.769246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:11.769361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:11.770125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:11.775682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5585, node 1 2025-12-23T18:47:11.896607Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:11.909504Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126866915616577:2081] 1766515631517756 != 1766515631517759 2025-12-23T18:47:12.002486Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:12.033825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:12.033855Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:12.033866Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:12.033999Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8637 TClient is connected to server localhost:8637 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:12.547231Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515631916 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:12.578491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:12.585474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:12.595070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:12.725124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:47:12.869209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:12.938720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:14.711828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126879800520330:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.711961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.712473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126879800520340:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.712528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.059410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.095498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.128425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.166390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.203915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.241267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.279125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.360471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.459300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126884095488518:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.459394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.459642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126884095488523:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.459649Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126884095488524:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.459701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.464489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:15.479252Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126884095488527:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:15.553313Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126884095488580:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:16.523983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126866915616708:2169];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:16.524056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:17.177860Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=NGIyYjZjMmQtMmMxZmYxMzctNDI0ZWYyMDgtMmYzNDg2ZDU=, ActorId: [1:7587126892685423477:2530], ActorState: ExecuteState, TraceId: 01kd68jyt17ekz23dgnv0f35zb, Create QueryResponse for error on request, msg: BATCH operation can be executed only in the implicit transaction mode., status: PRECONDITION_FAILED |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 20800, MsgBus: 23126 2025-12-23T18:47:12.045731Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126870257833758:2217];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:12.046159Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bf3/r3tmp/tmpoESOWf/pdisk_1.dat 2025-12-23T18:47:12.270471Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:12.317201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:12.317361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:12.372802Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126870257833579:2081] 1766515632023456 != 1766515632023459 2025-12-23T18:47:12.383795Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:12.391532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20800, node 1 2025-12-23T18:47:12.553760Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:12.576290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:12.576325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:12.576335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:12.576440Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23126 TClient is connected to server localhost:23126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:13.044241Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515632441 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:13.118825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:13.130482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:13.275752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:13.445440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:13.513371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:15.431929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126883142737340:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.432033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.432398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126883142737350:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.432486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.808057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.840766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.875278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.909104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.942145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.979414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.040071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.086473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.157091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126887437705519:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.157174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.157384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126887437705524:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.157420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126887437705525:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.157534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.161436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:16.173916Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126887437705528:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:16.267030Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126887437705579:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:17.095415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126870257833758:2217];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:17.095730Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:18.007141Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126891732673188:2536], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029 2025-12-23T18:47:18.007802Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YjhkNDMwZjEtMTQ0MmRmMzUtN2U3ZjUwYWQtNmFhMjlhNTU=, ActorId: [1:7587126891732673179:2530], ActorState: ExecuteState, TraceId: 01kd68jzn20payd33h17963zga, ReplyQueryCompileError, status: PRECONDITION_FAILED, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }, remove tx with tx_id: |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::MultiStatement >> KqpBatchUpdate::TableWithIndex [GOOD] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpBatchDelete::Returning |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 20181, MsgBus: 8634 2025-12-23T18:47:08.941668Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126853948197243:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:08.941943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bfc/r3tmp/tmpVJziy6/pdisk_1.dat 2025-12-23T18:47:09.195388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:09.197128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:09.197269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:09.201750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:09.304806Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:09.306247Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126853948197126:2081] 1766515628935172 != 1766515628935175 TServer::EnableGrpc on GrpcPort 20181, node 1 2025-12-23T18:47:09.432467Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:09.446233Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:09.446267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:09.446276Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:09.446399Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8634 TClient is connected to server localhost:8634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:09.946675Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515629333 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:47:10.083163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:10.090917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:10.106273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:10.272356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:10.434356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:10.511340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:12.382526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126871128068183:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:12.382647Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:12.383141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126871128068193:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:12.383213Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:12.764497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:12.794678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:12.823184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:12.857886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:12.894145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:12.931931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:12.996005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:13.053654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:13.144944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126875423036364:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:13.145080Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:13.145387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126875423036370:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:13.145697Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126875423036369:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:13.145765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:13.149612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:13.161039Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126875423036373:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:13.261075Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126875423036424:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:13.941671Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126853948197243:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:13.941728Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:14.989509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.044475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:15.074758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:17.095904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpBatchDelete::SimplePartitions |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpBatchUpdate::Returning >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] |95.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpBatchDelete::ManyPartitions_3 |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpBatchDelete::TableWithIndex >> KqpBatchDelete::MultiStatement >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-fifo] [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-std] >> PgCatalog::PgTables [GOOD] |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates [GOOD] >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates >> KqpBatchUpdate::MultiStatement [GOOD] >> KqpBatchDelete::UnknownColumn >> TSentinelTests::PDiskRackGuardFullRack [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 11159, MsgBus: 29204 2025-12-23T18:47:19.370354Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126899046868515:2232];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:19.370451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005beb/r3tmp/tmp1agloB/pdisk_1.dat 2025-12-23T18:47:19.679060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:19.780260Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126899046868307:2081] 1766515639295147 != 1766515639295150 2025-12-23T18:47:19.785736Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11159, node 1 2025-12-23T18:47:19.794674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:19.794801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:19.803308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:19.902868Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:19.929477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:19.929509Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:19.929526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:19.929632Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29204 2025-12-23T18:47:20.369044Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:29204 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515639973 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:20.536918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:20.547471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:20.562465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:20.746311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:20.905259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:20.995105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:22.936853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126911931772078:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:22.937007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:22.937560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126911931772087:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:22.937632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.291250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:23.328272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:23.363936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:23.396487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:23.432874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:23.506529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:23.552655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:23.595444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:23.666100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126916226740255:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.666182Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.666185Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126916226740260:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.666512Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126916226740262:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.666596Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.669881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:23.680738Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126916226740263:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:23.770190Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126916226740315:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:24.368691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126899046868515:2232];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:24.368754Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:25.425866Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126924816675223:2537], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029 2025-12-23T18:47:25.426335Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YjNhNDA5YTgtZDVmNzcxNTQtM2NlZjAyYWQtMjIwNTNjNWI=, ActorId: [1:7587126924816675212:2530], ActorState: ExecuteState, TraceId: 01kd68k6xt6vc78prrcv7h8tfg, ReplyQueryCompileError, status: PRECONDITION_FAILED, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }, remove tx with tx_id: |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_2 [GOOD] >> KqpBatchDelete::Returning [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 27604, MsgBus: 11749 2025-12-23T18:47:20.574337Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126903126140901:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:20.575563Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:47:20.622247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bea/r3tmp/tmp54PyPo/pdisk_1.dat 2025-12-23T18:47:20.974693Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:20.980231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:20.980320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:20.981989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:21.025808Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27604, node 1 2025-12-23T18:47:21.092049Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:21.092075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:21.092082Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:21.092205Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:21.266840Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11749 2025-12-23T18:47:21.576017Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:11749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515641058 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:21.778077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:21.784636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:21.798603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:21.962933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:22.132131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:22.203697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:24.019394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126920306011890:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:24.019513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:24.023050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126920306011900:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:24.023151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:24.311752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:24.350329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:24.387967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:24.421112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:24.456038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:24.498507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:24.549724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:24.613488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:24.692372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126920306012771:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:24.692445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:24.692628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126920306012776:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:24.692686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126920306012777:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:24.692744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:24.696894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:24.708102Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126920306012780:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:47:24.780456Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126920306012831:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:25.566838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126903126140901:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:25.566908Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:26.386321Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126928895947738:2536], status: GENERIC_ERROR, issues:
:5:32: Error: BATCH can't be used with multiple writes or reads. 2025-12-23T18:47:26.387068Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OGIxMGVkMWQtOGY5MmIwZWYtNDJiNTc1NGQtYmRlNmEyOGM=, ActorId: [1:7587126928895947729:2530], ActorState: ExecuteState, TraceId: 01kd68k7v3e8zevkt69k1z9m2j, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 5 column: 32 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 5 column: 32 } severity: 1 }, remove tx with tx_id: 2025-12-23T18:47:26.416467Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126928895947742:2538], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-12-23T18:47:26.416732Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OGIxMGVkMWQtOGY5MmIwZWYtNDJiNTc1NGQtYmRlNmEyOGM=, ActorId: [1:7587126928895947729:2530], ActorState: ExecuteState, TraceId: 01kd68k7y1cphxzpvqkpvr9hq6, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 }, remove tx with tx_id: 2025-12-23T18:47:26.435588Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126928895947746:2540], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-12-23T18:47:26.435852Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OGIxMGVkMWQtOGY5MmIwZWYtNDJiNTc1NGQtYmRlNmEyOGM=, ActorId: [1:7587126928895947729:2530], ActorState: ExecuteState, TraceId: 01kd68k7yq5atxtqq4mb51wm0q, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 }, remove tx with tx_id: 2025-12-23T18:47:26.454943Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126928895947750:2542], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2025-12-23T18:47:26.456805Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OGIxMGVkMWQtOGY5MmIwZWYtNDJiNTc1NGQtYmRlNmEyOGM=, ActorId: [1:7587126928895947729:2530], ActorState: ExecuteState, TraceId: 01kd68k7z99ra908bmccd0kxt7, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 4 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 29 } severity: 1 }, remove tx with tx_id: 2025-12-23T18:47:26.474103Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126928895947754:2544], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2025-12-23T18:47:26.474363Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OGIxMGVkMWQtOGY5MmIwZWYtNDJiNTc1NGQtYmRlNmEyOGM=, ActorId: [1:7587126928895947729:2530], ActorState: ExecuteState, TraceId: 01kd68k7zxc666159eep95br5s, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 4 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 29 } severity: 1 }, remove tx with tx_id: >> KqpBatchUpdate::HasTxControl >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 23771, MsgBus: 9229 2025-12-23T18:43:08.512416Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125823376672615:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:08.512724Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005124/r3tmp/tmpElFPpE/pdisk_1.dat 2025-12-23T18:43:08.601526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:08.766966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:08.767086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:08.769310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:08.883435Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23771, node 1 2025-12-23T18:43:08.889227Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:43:08.984445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:08.984477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:08.984508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:08.984646Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9229 TClient is connected to server localhost:9229 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515388939 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:09.515182Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:09.518664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 16 2025-12-23T18:43:11.705123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:11.835169Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); 2025-12-23T18:43:11.864701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125836261575381:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:11.864711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125836261575372:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:11.864830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:11.865078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125836261575387:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:11.865137Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:11.868125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:11.877683Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125836261575386:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:43:11.931335Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125836261575439:2607] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] ); 18 2025-12-23T18:43:12.338836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:12.384457Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::"char", '0'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::"char", '1'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::"char", '2'::"char"] ); 21 2025-12-23T18:43:12.861326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:12.907370Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int2, '0'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int2, '1'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int2, '2'::int2] ); 23 2025-12-23T18:43:13.387095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:13.460513Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int4, '0'::int4] ); 2025-12-23T18:43:13.506824Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587125823376672615:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:13.506892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int4, '1'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int4, '2'::int4] ); 20 2025-12-23T18:43:14.052982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:14.087931Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int8, '0'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int8, '1'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int8, '2'::int8] ); 700 2025-12-23T18:43:14.531554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:43:14.578102Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPil ... age: "Cannot update primary key column: key1" end_position { row: 1 column: 1 } severity: 1 } issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key2" end_position { row: 1 column: 1 } severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:47:14.542481Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... Trying to start YDB, gRPC: 21390, MsgBus: 27554 2025-12-23T18:47:17.112584Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587126889023755655:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:17.112710Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005124/r3tmp/tmpJJMcMj/pdisk_1.dat 2025-12-23T18:47:17.133266Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:17.264715Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7587126889023755627:2081] 1766515637110959 != 1766515637110962 2025-12-23T18:47:17.265637Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:17.293470Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:17.293617Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:17.298375Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21390, node 10 2025-12-23T18:47:17.421276Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:17.451553Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:17.451580Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:17.451594Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:17.451731Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27554 2025-12-23T18:47:18.121392Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:27554 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515637411 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:18.336437Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:18.343821Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:22.115047Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7587126889023755655:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:22.115176Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:23.352700Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587126914793560298:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.352840Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.358024Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:23.373289Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587126914793560359:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.373444Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.373783Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587126914793560362:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.373875Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.445568Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587126914793560407:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.445728Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.446009Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587126914793560412:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.446036Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587126914793560413:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.446075Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:23.451803Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:23.466368Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7587126914793560416:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:47:23.553857Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7587126914793560467:2601] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:24.232191Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:7587126919088527833:2366], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2025-12-23T18:47:24.232781Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=10&id=ZjMxYmIzMC0xNmM1MTVkNi05YWFiMGUzOS00MmM3MzVlOA==, ActorId: [10:7587126919088527826:2362], ActorState: ExecuteState, TraceId: 01kd68k5rr78d6wcd0faj1ak16, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key1" end_position { row: 1 column: 1 } severity: 1 } issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key2" end_position { row: 1 column: 1 } severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:47:24.245007Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 28025, MsgBus: 63246 2025-12-23T18:47:06.857236Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126845310098632:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:06.857373Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bfe/r3tmp/tmpxP9yZJ/pdisk_1.dat 2025-12-23T18:47:07.078111Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:07.099029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:07.099131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:07.111321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:07.196686Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:07.197164Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126845310098606:2081] 1766515626855571 != 1766515626855574 TServer::EnableGrpc on GrpcPort 28025, node 1 2025-12-23T18:47:07.257871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:07.257899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:07.257910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2025-12-23T18:47:07.258007Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:07.378834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:63246 TClient is connected to server localhost:63246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515627240 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:07.744011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:07.759309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:07.866618Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:07.898977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:08.042873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:08.101019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:09.966763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126858195002374:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:09.966913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:09.967268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126858195002384:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:09.967337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:10.331154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:10.366345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:10.404204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:10.436283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:10.470588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:10.516402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:10.562619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:10.608917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:10.689942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126862489970557:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:10.690031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:10.690300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126862489970562:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:10.690348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126862489970563:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:10.690494Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:10.694195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:10.708184Z node 1 ... != 1766515637839961 2025-12-23T18:47:18.031557Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13628, node 2 2025-12-23T18:47:18.107875Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:18.132244Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:18.132281Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:18.132289Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:18.132366Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26203 TClient is connected to server localhost:26203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515638090 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:18.580154Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:18.591958Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:18.658039Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:18.840517Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:18.892953Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:18.920005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:21.518918Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126910198357246:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.519007Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.519343Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126910198357255:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.519386Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.606903Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.706873Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.744513Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.780095Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.820145Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.876280Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.923169Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.978479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:22.081913Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126914493325427:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:22.082022Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:22.082115Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126914493325432:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:22.082167Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126914493325434:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:22.082196Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:22.085526Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:22.095226Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587126914493325436:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:47:22.200075Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587126914493325487:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:22.851816Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587126893018486377:2212];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:22.851887Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:23.798238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskRackGuardFullRack [GOOD] Test command err: 2025-12-23T18:46:52.054987Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2025-12-23T18:46:52.055049Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2025-12-23T18:46:52.055128Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:46:52.055162Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2025-12-23T18:46:52.055218Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-12-23T18:46:52.055297Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-12-23T18:46:52.056878Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 
NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } Hosts { Name: "node-9" State: UNKNOWN Devices { Name: "pdisk-9-36" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-37" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-38" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-39" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 9 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-10" State: UNKNOWN Devices { Name: "pdisk-10-40" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-41" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-42" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-43" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 10 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-11" State: UNKNOWN Devices { Name: "pdisk-11-44" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-45" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-46" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-47" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 11 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-12" State: UNKNOWN Devices { Name: "pdisk-12-48" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-49" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-50" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-51" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 12 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-13" State: UNKNOWN Devices { Name: "pdisk-13-52" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-53" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-54" State: DOWN Timestamp: 0 } Devices { 
Name: "pdisk-13-55" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 13 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-14" State: UNKNOWN Devices { Name: "pdisk-14-56" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-57" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-58" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-59" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 14 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-15" State: UNKNOWN Devices { Name: "pdisk-15-60" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-61" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-62" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-63" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 15 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-16" State: UNKNOWN Devices { Name: "pdisk-16-64" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-65" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-66" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-67" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 16 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-12-23T18:46:52.070600Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { 
NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 36 Path: "/9/pdisk-36.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 37 Path: "/9/pdisk-37.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 38 Path: "/9/pdisk-38.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 39 Path: "/9/pdisk-39.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 40 Path: "/10/pdisk-40.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 41 Path: "/10/pdisk-41.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 42 Path: "/10/pdisk-42.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 43 Path: "/10/pdisk-43.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 44 Path: "/11/pdisk-44.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 45 Path: "/11/pdisk-45.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 46 Path: "/11/pdisk-46.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 47 Path: "/11/pdisk-47.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 48 Path: "/12/pdisk-48.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 49 Path: "/12/pdisk-49.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 50 Path: "/12/pdisk-50.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 51 Path: "/12/pdisk-51.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 52 Path: "/13/pdisk-52.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 53 Path: "/13/pdisk-53.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 54 Path: "/13/pdisk-54.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 55 Path: "/13/pdisk-55.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 56 Path: "/14/pdisk-56.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 57 Path: "/14/pdisk-57.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 58 Path: "/14/pdisk-58.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 59 Path: "/14/pdisk-59.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 60 Path: "/15/pdisk-60.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 61 Path: "/15/pdisk-61.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 62 Path: "/15/pdisk-62.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 63 Path: "/15/pdisk-63.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 64 Path: "/16/pdisk-64.data" G ... 
id: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2025-12-23T18:47:24.495635Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 112 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-112.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 113 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-113.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 114 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-114.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 115 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-115.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2025-12-23T18:47:24.495756Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 116 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-116.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 117 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-117.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 118 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-118.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 119 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-119.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2025-12-23T18:47:24.495874Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 120 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-120.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 121 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-121.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 122 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-122.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 123 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-123.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2025-12-23T18:47:24.495988Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 124 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-124.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 125 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-125.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 126 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-126.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 127 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-127.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2025-12-23T18:47:24.496105Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, 
response# PDiskStateInfo { PDiskId: 128 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-128.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 129 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-129.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 130 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-130.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 131 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-131.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2025-12-23T18:47:24.496231Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 80 CreateTime: 0 ChangeTime: 0 Path: "/20/pdisk-80.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 81 CreateTime: 0 ChangeTime: 0 Path: "/20/pdisk-81.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 82 CreateTime: 0 ChangeTime: 0 Path: "/20/pdisk-82.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 83 CreateTime: 0 ChangeTime: 0 Path: "/20/pdisk-83.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2025-12-23T18:47:24.496354Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 84 CreateTime: 0 ChangeTime: 0 Path: "/21/pdisk-84.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 85 CreateTime: 0 ChangeTime: 0 Path: "/21/pdisk-85.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 86 CreateTime: 0 ChangeTime: 0 Path: "/21/pdisk-86.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 87 CreateTime: 0 ChangeTime: 0 Path: "/21/pdisk-87.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2025-12-23T18:47:24.496470Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 88 CreateTime: 0 ChangeTime: 0 Path: "/22/pdisk-88.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 89 CreateTime: 0 ChangeTime: 0 Path: "/22/pdisk-89.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 90 CreateTime: 0 ChangeTime: 0 Path: "/22/pdisk-90.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 91 CreateTime: 0 ChangeTime: 0 Path: "/22/pdisk-91.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2025-12-23T18:47:24.496586Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 92 CreateTime: 0 ChangeTime: 0 Path: "/23/pdisk-92.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 93 CreateTime: 0 ChangeTime: 0 Path: "/23/pdisk-93.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 94 CreateTime: 0 ChangeTime: 0 Path: "/23/pdisk-94.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 95 CreateTime: 0 ChangeTime: 0 Path: "/23/pdisk-95.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2025-12-23T18:47:24.496719Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 96 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-96.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 97 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-97.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 98 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-98.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 99 CreateTime: 0 ChangeTime: 0 Path: "/24/pdisk-99.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2025-12-23T18:47:24.496774Z node 17 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:47:24.497578Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 24:96, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:47:24.497638Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 24:97, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:47:24.497671Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 24:98, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:47:24.497704Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 24:99, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:47:24.497738Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 32:128, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:47:24.497772Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 32:129, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:47:24.497806Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 32:130, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:47:24.497871Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 32:131, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-12-23T18:47:24.497913Z node 17 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 8 2025-12-23T18:47:24.498283Z node 17 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle 
TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 20 2025-12-23T18:47:24.498321Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 24:96 2025-12-23T18:47:24.498350Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 24:97 2025-12-23T18:47:24.498374Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 24:98 2025-12-23T18:47:24.498395Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 24:99 2025-12-23T18:47:24.498416Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 32:128 2025-12-23T18:47:24.498437Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 32:129 2025-12-23T18:47:24.498457Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 32:130 2025-12-23T18:47:24.498605Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 32:131 |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Returning [GOOD] Test command err: Trying to start YDB, gRPC: 25075, MsgBus: 26101 2025-12-23T18:47:21.941107Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126908846890948:2158];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:21.942155Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005be9/r3tmp/tmpQICpSa/pdisk_1.dat 2025-12-23T18:47:22.189227Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:22.223590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:22.226902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:22.289797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:22.360259Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:22.360626Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126908846890825:2081] 1766515641872943 != 1766515641872946 TServer::EnableGrpc on GrpcPort 25075, node 1 2025-12-23T18:47:22.390406Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2025-12-23T18:47:22.431293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:22.432974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:22.433016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:22.433170Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26101 TClient is connected to server localhost:26101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515642367 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:22.893746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:22.902962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:22.916180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:22.943898Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:23.073427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:47:23.239938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:23.307486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:25.273559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126926026761879:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:25.273687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:25.274433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126926026761888:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:25.274530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:25.655789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:25.685191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:25.716614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:25.747276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:25.781709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:25.822090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:25.854575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:25.927927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:26.019342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126930321730059:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:26.019437Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:26.019474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126930321730064:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:26.019643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126930321730066:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:26.019689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:26.023546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:26.034325Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126930321730068:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:26.093679Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126930321730119:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:26.969841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126908846890948:2158];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:26.970079Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:27.717505Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126934616697729:2536], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH DELETE is unsupported with RETURNING 2025-12-23T18:47:27.719058Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=ZjE2NDM2ZTYtYTc5NmI2ZjgtYWU2NjhlNzctNDg5M2RmZDU=, ActorId: [1:7587126934616697720:2530], ActorState: ExecuteState, TraceId: 01kd68k96b0pxzx839ahbdfca8, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 2 column: 22 } message: "BATCH DELETE is unsupported with RETURNING" end_position { row: 2 column: 22 } severity: 1 }, remove tx with tx_id: |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpBatchDelete::ColumnTable >> KqpBatchUpdate::SimplePartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 4921, MsgBus: 30777 2025-12-23T18:47:22.514152Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126911002092990:2159];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:22.514491Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005be7/r3tmp/tmpWmyvGE/pdisk_1.dat 2025-12-23T18:47:22.799405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:22.799545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:22.843575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:22.872318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:22.903681Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:22.906825Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126911002092869:2081] 1766515642499957 != 1766515642499960 TServer::EnableGrpc on GrpcPort 4921, node 1 2025-12-23T18:47:23.048303Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:23.083981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:23.084005Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:23.084012Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:23.084118Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got 
bad distributable configuration TClient is connected to server localhost:30777 TClient is connected to server localhost:30777 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:23.526466Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515642983 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:23.565318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:23.582356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:23.739245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:47:23.898009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:23.975816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:25.907927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126923886996624:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:25.908062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:25.908680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126923886996634:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:25.908744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:26.244083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:26.284224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:26.320020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:26.350992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:26.383226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:26.422564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:26.468339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:26.545910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:26.647377Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126928181964816:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:26.647502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:26.647898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126928181964821:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:26.647959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:26.647969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126928181964822:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:26.651348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:26.663882Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126928181964825:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:26.737344Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126928181964876:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:27.512834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126911002092990:2159];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:27.512918Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TA] $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpBatchUpdate::Returning [GOOD] >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates >> KqpBatchUpdate::ColumnTable >> KqpBatchUpdate::NotIdempotent >> KqpBatchUpdate::TableNotExists >> KqpBatchUpdate::ManyPartitions_2 >> KqpBatchDelete::MultiStatement [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TA] {RESULT} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] >> KqpBatchUpdate::SimpleOnePartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Returning [GOOD] Test command err: Trying to start YDB, gRPC: 15427, MsgBus: 20759 2025-12-23T18:47:24.412816Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126919880612152:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:24.413556Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005be2/r3tmp/tmpynopi0/pdisk_1.dat 2025-12-23T18:47:24.652267Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:24.652411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:24.682908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:24.688385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:24.720078Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:24.726870Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie 
mismatch for subscription [1:7587126919880612128:2081] 1766515644395377 != 1766515644395380 TServer::EnableGrpc on GrpcPort 15427, node 1 2025-12-23T18:47:24.806066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:24.806091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:24.806098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:24.806245Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:24.901035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20759 TClient is connected to server localhost:20759 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515644747 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:25.293992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:25.298831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:25.308096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:25.419322Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:25.460602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:25.612362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:47:25.691401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:27.784059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126932765515889:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:27.784221Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:27.784751Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126932765515899:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:27.784837Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.217486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.257925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.303087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.335244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.372309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.443660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.473923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.526147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.621265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126937060484066:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.621362Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.621414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126937060484071:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.625185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:28.627149Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126937060484073:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.627205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.640965Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126937060484074:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:28.730318Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126937060484126:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:29.416039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126919880612152:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:29.416172Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:30.427236Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126945650419035:2536], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH UPDATE is unsupported with RETURNING 2025-12-23T18:47:30.428974Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=NTdjZjAwODQtM2EyODRiYzMtM2UzYzlkOTYtNDY5ZDE2ZmY=, ActorId: [1:7587126945650419024:2530], ActorState: ExecuteState, TraceId: 01kd68kbv34hwwnpeh1fbszfxs, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 2 column: 22 } message: "BATCH UPDATE is unsupported with RETURNING" end_position { row: 2 column: 22 } severity: 1 }, remove tx with tx_id: |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 62008, MsgBus: 5250 2025-12-23T18:47:25.381578Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126925095934481:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:25.382161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bdf/r3tmp/tmpUAIUEZ/pdisk_1.dat 2025-12-23T18:47:25.642552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:25.644566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:25.644681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:25.669250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:25.750464Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126925095934451:2081] 1766515645379882 != 1766515645379885 2025-12-23T18:47:25.756310Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62008, node 1 2025-12-23T18:47:25.825113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:25.825133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:25.825139Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:25.825233Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:25.879226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5250 TClient is connected to server localhost:5250 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515645762 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:26.293103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:47:26.306834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:26.389900Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:26.414618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:26.576729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:26.641980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:28.471740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126937980838216:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.471831Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.472148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126937980838226:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.472201Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.824342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.866705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.903977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.936379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.973145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:29.028952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:29.107300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:29.175069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:29.266018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126942275806391:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.266071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.266381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126942275806396:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.266420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126942275806397:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.266650Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.270032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:29.281670Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126942275806400:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:29.374602Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126942275806451:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:30.382039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126925095934481:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:30.382130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:31.053909Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126946570774063:2536], status: GENERIC_ERROR, issues:
:4:32: Error: BATCH can't be used with multiple writes or reads. 2025-12-23T18:47:31.055075Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MWUwZDY1OGYtYjJlZTlmNGItYTkyMmY3NzctOTEwNTljYWM=, ActorId: [1:7587126946570774054:2530], ActorState: ExecuteState, TraceId: 01kd68kcdb9n2d2stae41afx5y, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 4 column: 32 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 32 } severity: 1 }, remove tx with tx_id: 2025-12-23T18:47:31.078577Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126950865741363:2538], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-12-23T18:47:31.079041Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MWUwZDY1OGYtYjJlZTlmNGItYTkyMmY3NzctOTEwNTljYWM=, ActorId: [1:7587126946570774054:2530], ActorState: ExecuteState, TraceId: 01kd68kcfq843n29g29hh194yr, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 }, remove tx with tx_id: 2025-12-23T18:47:31.114074Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126950865741367:2540], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2025-12-23T18:47:31.114402Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MWUwZDY1OGYtYjJlZTlmNGItYTkyMmY3NzctOTEwNTljYWM=, ActorId: [1:7587126946570774054:2530], ActorState: ExecuteState, TraceId: 01kd68kcgqa0247tkdgw5n5298, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 }, remove tx with tx_id: 2025-12-23T18:47:31.178183Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126950865741371:2542], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2025-12-23T18:47:31.180549Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MWUwZDY1OGYtYjJlZTlmNGItYTkyMmY3NzctOTEwNTljYWM=, ActorId: [1:7587126946570774054:2530], ActorState: ExecuteState, TraceId: 01kd68kchm4zxn930mad7hb2d4, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 4 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 29 } severity: 1 }, remove tx with tx_id: 2025-12-23T18:47:31.211861Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126950865741375:2544], status: GENERIC_ERROR, issues:
:3:29: Error: BATCH can't be used with multiple writes or reads. 2025-12-23T18:47:31.214110Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MWUwZDY1OGYtYjJlZTlmNGItYTkyMmY3NzctOTEwNTljYWM=, ActorId: [1:7587126946570774054:2530], ActorState: ExecuteState, TraceId: 01kd68kckza4std4r7128tkb73, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 3 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 3 column: 29 } severity: 1 }, remove tx with tx_id: |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates [GOOD] >> KqpPg::TableDeleteWhere+useSink [GOOD] >> KqpPg::TableDeleteWhere-useSink >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] >> KqpBatchDelete::DeleteOn >> KqpBatchDelete::UnknownColumn [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 24085, MsgBus: 32521 2025-12-23T18:47:26.494972Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126927482872792:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:26.495054Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bde/r3tmp/tmptzdEim/pdisk_1.dat 2025-12-23T18:47:26.732667Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:26.742973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:26.743107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:26.803358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:26.817364Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:26.820411Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126927482872764:2081] 1766515646493818 != 1766515646493821 TServer::EnableGrpc on GrpcPort 24085, node 1 2025-12-23T18:47:26.934267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:26.934293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:26.934301Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2025-12-23T18:47:26.934411Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:26.955686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:32521 TClient is connected to server localhost:32521 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515646889 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:27.438151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:27.444541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:27.457317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:27.514913Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:27.605942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:27.787566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:27.859901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:29.867206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126940367776519:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.867326Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.867701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126940367776529:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.867744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:30.233073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:30.273789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:30.311384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:30.353768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:30.462505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:30.508028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:30.554953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:30.603575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:30.689606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126944662744707:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:30.689694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:30.690147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126944662744712:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:30.690185Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126944662744713:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:30.690232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:30.693793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:30.708234Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126944662744716:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:47:30.808758Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126944662744767:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:31.495105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126927482872792:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:31.495169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:32.736263Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126953252679635:2528], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029 2025-12-23T18:47:32.739445Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=Yzk5OWRlZTYtMmM0MDZjMzctNDAxZDlhNmYtOWY4ODNkMWE=, ActorId: [1:7587126953252679626:2522], ActorState: ExecuteState, TraceId: 01kd68ke280949y02megr662bj, ReplyQueryCompileError, status: PRECONDITION_FAILED, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }, remove tx with tx_id: |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 19626, MsgBus: 14772 2025-12-23T18:43:10.554250Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125831947547705:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:10.556034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005118/r3tmp/tmpAFBooL/pdisk_1.dat 2025-12-23T18:43:10.783707Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:10.797751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:10.797881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:10.845294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:10.882256Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125831947547673:2081] 1766515390551579 != 1766515390551582 2025-12-23T18:43:10.901551Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19626, node 1 2025-12-23T18:43:11.018330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:11.018361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:11.018376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:11.018482Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:11.022208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
TClient is connected to server localhost:14772 TClient is connected to server localhost:14772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515390955 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:11.547996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:11.559231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:43:11.561010Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 1042 2025-12-23T18:43:13.673110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce_pgbpchar_17472595041006102391_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2025-12-23T18:43:13.915645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125844832450544:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.915781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.916157Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125844832450556:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.916228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587125844832450557:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.916368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:43:13.921113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:43:13.935673Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587125844832450560:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:43:14.011444Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587125849127417907:2595] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:43:14.351638Z node 1 :TX_DATASHARD CRIT: execute_kqp_data_tx_unit.cpp:484: Exception while executing KQP transaction [0:281474976710663] at 72075186224037888: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-12-23T18:43:14.353298Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710663 at tablet 72075186224037888 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-12-23T18:43:14.353574Z node 1 :KQP_EXECUTER ERROR: kqp_data_executer.cpp:843: ActorId: [1:7587125849127417958:2331] TxId: 281474976710663. Ctx: { TraceId: 01kd68bhbs2wfta8zsq1j2qavn, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NGUwMmQ1ZTMtYTgwMTIwNWEtMjYwMzFjMzMtMzE2ZGU4NzM=, PoolId: default, IsStreamingQuery: 0}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-12-23T18:43:14.364499Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=NGUwMmQ1ZTMtYTgwMTIwNWEtMjYwMzFjMzMtMzE2ZGU4NzM=, ActorId: [1:7587125844832450541:2331], ActorState: ExecuteState, TraceId: 01kd68bhbs2wfta8zsq1j2qavn, Create QueryResponse for error on request, msg: , status: GENERIC_ERROR, issues: { message: "Error executing transaction (ExecError): Execution failed" severity: 1 issues { message: "[UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2)\n" severity: 1 } }
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-12-23T18:43:14.397064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce__pgbpchar_17472595041006102391_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) 2025-12-23T18:43:14.693352Z node 1 :TX_DATASHARD CRIT: execute_kqp_data_tx_unit.cpp:484: Exception while executing KQP transaction [0:281474976710668] at 72075186224037889: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-12-23T18:43:14.694664Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710668 at tablet 72075186224037889 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-12-23T18:43:14.694797Z node 1 :KQP_EXECUTER ERR ... 63: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:11.060434Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587126864395315811:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:11.060518Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:11.064216Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:11.080694Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7587126864395315810:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:47:11.151911Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7587126864395315863:2541] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19234, MsgBus: 16704 2025-12-23T18:47:12.527390Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7587126870913601593:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:12.527454Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005118/r3tmp/tmpyJZpHT/pdisk_1.dat 2025-12-23T18:47:12.551717Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:12.682429Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:12.712377Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:12.712506Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:12.749946Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19234, node 11 2025-12-23T18:47:12.838615Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:12.838649Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:12.838660Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:12.838797Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:12.842349Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16704 2025-12-23T18:47:13.537871Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:16704 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515632812 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:47:13.708279Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:17.530254Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7587126870913601593:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:17.530393Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:19.105411Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126900978373525:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:19.105536Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:19.105624Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126900978373537:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:19.106992Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587126900978373540:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:19.107095Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:19.113080Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:19.149195Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7587126900978373539:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:47:19.210990Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7587126900978373592:2545] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:19.267741Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:19.355647Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:24.215387Z node 11 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 11, TabletId: 72075186224037888 not found 2025-12-23T18:47:24.268377Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:24.664239Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [11:7587126922453210608:2439], TxId: 281474976710671, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68k5xsec5yr7atrwzft84y. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=11&id=MTg3NzZjNzktNTI3MzZmMTYtNGFhZmZjMDAtNmJlZjgxN2M=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2025-12-23T18:47:24.664983Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [11:7587126922453210609:2440], TxId: 281474976710671, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68k5xsec5yr7atrwzft84y. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=11&id=MTg3NzZjNzktNTI3MzZmMTYtNGFhZmZjMDAtNmJlZjgxN2M=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [11:7587126922453210605:2435], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T18:47:24.666631Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=11&id=MTg3NzZjNzktNTI3MzZmMTYtNGFhZmZjMDAtNmJlZjgxN2M=, ActorId: [11:7587126922453210596:2435], ActorState: ExecuteState, TraceId: 01kd68k5xsec5yr7atrwzft84y, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: \"pg_proc\"\n\n" severity: 1 } |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> KqpBatchDelete::Large_3 |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] [GOOD] Test command err: run test with cloud_id=CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc folder_id=folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc iam_token=usr_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc cloud_account=acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc 2025-12-23T18:46:47.969576Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc]","tx_id":"281474976720695","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:46:48.088526Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc/000000000000000106hn]","tx_id":"281474976720701","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:46:48.122664Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc/000000000000000106hn/v2]","tx_id":"281474976720702","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:46:48.302644Z: {"request_id":"d7e7e8be-43125283-fee41de9-e00af9ba","cloud_id":"CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc"} ======================================== 2025-12-23T18:46:49.333468Z: {"request_id":"d7e7e8be-43125283-fee41de9-e00af9ba","permission":"ymq.queues.create","id":"14812710134322071584$CreateMessageQueue$2025-12-23T18:46:49.333270Z","idempotency_id":"14812710134322071584$CreateMessageQueue$2025-12-23T18:46:48.029000Z","cloud_id":"CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","masked_token":"*** hidden 
***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:46:48.029000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_bc3de0dd-e02f-11f0-9e87-d00d1a8d6ccc.fifo","resource_id":"000000000000000106hn","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:46:50.221529Z: {"request_id":"4229e9ec-2b6927f8-a5a63f0a-ddecc51b","permission":"ymq.queues.setAttributes","id":"5496767135299788635$UpdateMessageQueue$2025-12-23T18:46:50.221310Z","idempotency_id":"5496767135299788635$UpdateMessageQueue$2025-12-23T18:46:49.391000Z","cloud_id":"CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:46:49.391000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_bc3de0dd-e02f-11f0-9e87-d00d1a8d6ccc.fifo","resource_id":"000000000000000106hn","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:46:51.543564Z: {"request_id":"62f3b50a-4679ad9f-700419d8-68eed2be","permission":"ymq.queues.setAttributes","id":"7738528146863323480$UpdateMessageQueue$2025-12-23T18:46:51.543406Z","idempotency_id":"7738528146863323480$UpdateMessageQueue$2025-12-23T18:46:50.498000Z","cloud_id":"CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:46:50.498000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_bc3de0dd-e02f-11f0-9e87-d00d1a8d6ccc.fifo","resource_id":"000000000000000106hn","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:46:51.684616Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc/000000000000000106hn/v2]","tx_id":"281474976720719","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:46:51.701940Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc/000000000000000106hn]","tx_id":"281474976720720","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:46:51.725551Z: {"request_id":"493bdf9f-d9b150b0-4f997055-703f3218","cloud_id":"CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","subject":"fake_user_sid@as","queue":"000000000000000106hn","resource_id":"000000000000000106hn","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc"} ======================================== 2025-12-23T18:46:53.752033Z: 
{"request_id":"493bdf9f-d9b150b0-4f997055-703f3218","permission":"ymq.queues.delete","id":"17326927609229416182$DeleteMessageQueue$2025-12-23T18:46:53.751870Z","idempotency_id":"17326927609229416182$DeleteMessageQueue$2025-12-23T18:46:51.588000Z","cloud_id":"CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:46:51.588000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_bc3de0dd-e02f-11f0-9e87-d00d1a8d6ccc.fifo","resource_id":"000000000000000106hn","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:46:53.758258Z: {"request_id":"493bdf9f-d9b150b0-4f997055-703f3218","permission":"ymq.queues.delete","id":"17326927609229416182$DeleteMessageQueue$2025-12-23T18:46:53.758117Z","idempotency_id":"17326927609229416182$DeleteMessageQueue$2025-12-23T18:46:51.588000Z","cloud_id":"CLOUD_FOR_folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:46:51.588000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_bc3de0dd-e02f-11f0-9e87-d00d1a8d6ccc.fifo","resource_id":"000000000000000106hn","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_bc6bfbe1-e02f-11f0-b996-d00d1a8d6ccc","component":"ymq"} ======================================== ======================================== run test with cloud_id=CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc folder_id=folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc iam_token=usr_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc cloud_account=acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc 2025-12-23T18:47:03.828233Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc]","tx_id":"281474976720734","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:03.912586Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc/000000000000000306s6]","tx_id":"281474976720740","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:03.943716Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc/000000000000000306s6/v4]","tx_id":"281474976720741","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:04.020179Z: {"request_id":"9329fd16-47beaec2-cf63907e-a42cd7b3","cloud_id":"CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc"} ======================================== 2025-12-23T18:47:05.957758Z: 
{"request_id":"9329fd16-47beaec2-cf63907e-a42cd7b3","permission":"ymq.queues.create","id":"3497662984728791269$CreateMessageQueue$2025-12-23T18:47:05.957603Z","idempotency_id":"3497662984728791269$CreateMessageQueue$2025-12-23T18:47:03.866000Z","cloud_id":"CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:47:03.866000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_c5dbc1de-e02f-11f0-a561-d00d1a8d6ccc","resource_id":"000000000000000306s6","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:47:05.958163Z: {"request_id":"2e94c70c-7b07280-66c168b1-3622c5ae","permission":"ymq.queues.setAttributes","id":"6920312352250689611$UpdateMessageQueue$2025-12-23T18:47:05.957655Z","idempotency_id":"6920312352250689611$UpdateMessageQueue$2025-12-23T18:47:05.105000Z","cloud_id":"CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:47:05.105000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_c5dbc1de-e02f-11f0-a561-d00d1a8d6ccc","resource_id":"000000000000000306s6","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:47:07.277386Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc/000000000000000306s6/v4]","tx_id":"281474976720760","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:07.306785Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc/000000000000000306s6]","tx_id":"281474976720761","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:07.334083Z: {"request_id":"ed080b16-db16b54f-597aa9f1-a9112e84","cloud_id":"CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","subject":"fake_user_sid@as","queue":"000000000000000306s6","resource_id":"000000000000000306s6","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc"} ======================================== 2025-12-23T18:47:07.974877Z: {"request_id":"db7e355f-9091d8f0-b3e4e535-1ff0f06f","permission":"ymq.queues.setAttributes","id":"3027039104406736711$UpdateMessageQueue$2025-12-23T18:47:07.974657Z","idempotency_id":"3027039104406736711$UpdateMessageQueue$2025-12-23T18:47:06.177000Z","cloud_id":"CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","masked_token":"*** hidden 
***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:47:06.177000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_c5dbc1de-e02f-11f0-a561-d00d1a8d6ccc","resource_id":"000000000000000306s6","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:47:07.975262Z: {"request_id":"ed080b16-db16b54f-597aa9f1-a9112e84","permission":"ymq.queues.delete","id":"7213362001569738167$DeleteMessageQueue$2025-12-23T18:47:07.974711Z","idempotency_id":"7213362001569738167$DeleteMessageQueue$2025-12-23T18:47:07.223000Z","cloud_id":"CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:47:07.223000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_c5dbc1de-e02f-11f0-a561-d00d1a8d6ccc","resource_id":"000000000000000306s6","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:47:07.976083Z: {"request_id":"db7e355f-9091d8f0-b3e4e535-1ff0f06f","permission":"ymq.queues.setAttributes","id":"3027039104406736711$UpdateMessageQueue$2025-12-23T18:47:07.975831Z","idempotency_id":"3027039104406736711$UpdateMessageQueue$2025-12-23T18:47:06.177000Z","cloud_id":"CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:47:06.177000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_c5dbc1de-e02f-11f0-a561-d00d1a8d6ccc","resource_id":"000000000000000306s6","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:47:07.976398Z: {"request_id":"ed080b16-db16b54f-597aa9f1-a9112e84","permission":"ymq.queues.delete","id":"7213362001569738167$DeleteMessageQueue$2025-12-23T18:47:07.975925Z","idempotency_id":"7213362001569738167$DeleteMessageQueue$2025-12-23T18:47:07.223000Z","cloud_id":"CLOUD_FOR_folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:47:07.223000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_c5dbc1de-e02f-11f0-a561-d00d1a8d6ccc","resource_id":"000000000000000306s6","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_c5ea7e88-e02f-11f0-b6ee-d00d1a8d6ccc","component":"ymq"} ======================================== ======================================== |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpBatchUpdate::HasTxControl [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::UnknownColumn [GOOD] Test command err: Trying to start YDB, gRPC: 25243, MsgBus: 26764 2025-12-23T18:47:27.480120Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126932918289634:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:27.481469Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bdc/r3tmp/tmpDEPlvC/pdisk_1.dat 2025-12-23T18:47:27.741243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:27.779802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:27.779955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:27.819966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:27.842826Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126932918289608:2081] 1766515647477817 != 1766515647477820 2025-12-23T18:47:27.844569Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25243, node 1 2025-12-23T18:47:27.999413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:27.999448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:27.999456Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:27.999554Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:28.013985Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26764 TClient is connected to server localhost:26764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:28.493934Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515647904 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:47:28.541575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:28.558718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:28.704402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:28.877289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:28.951947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:30.953345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126945803193371:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:30.953523Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:30.955726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126945803193382:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:30.955838Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.307289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.351722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.385219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.417986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.451345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.505332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.583332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.628294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.731526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126950098161554:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.731604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.731951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126950098161560:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.731996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126950098161558:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.732250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.736539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:31.753736Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126950098161563:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:31.818618Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126950098161614:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:32.480031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126932918289634:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:32.480097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:33.536982Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126958688096523:2537], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:31: Error: At lambda, At function: Coalesce
:3:37: Error: At function: ==
:3:23: Error: At function: Member
:3:23: Error: Member not found: UnknownColumn 2025-12-23T18:47:33.539229Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YzhhYTNmNjAtYjAwOGVjYTYtNzkwMjQ4YWUtMTNkZjcyOTU=, ActorId: [1:7587126958688096512:2530], ActorState: ExecuteState, TraceId: 01kd68kevgb4y1am7y4g425tkm, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 31 } message: "At lambda, At function: Coalesce" end_position { row: 2 column: 31 } severity: 1 issues { position { row: 3 column: 37 } message: "At function: ==" end_position { row: 3 column: 37 } severity: 1 issues { position { row: 3 column: 23 } message: "At function: Member" end_position { row: 3 column: 23 } severity: 1 issues { position { row: 3 column: 23 } message: "Member not found: UnknownColumn" end_position { row: 3 column: 23 } severity: 1 } } } } }, remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 7545, MsgBus: 6261 2025-12-23T18:47:27.378198Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126934066019411:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:27.378825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bdd/r3tmp/tmptxzqC2/pdisk_1.dat 2025-12-23T18:47:27.603238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:27.606343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:27.606486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:27.608337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:27.713939Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126934066019382:2081] 1766515647376400 != 1766515647376403 2025-12-23T18:47:27.729663Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7545, node 1 2025-12-23T18:47:27.876016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:27.876039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:27.876046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:27.876131Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:27.877229Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions TClient is connected to server localhost:6261 TClient is connected to server localhost:6261 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-12-23T18:47:28.404302Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515647883 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:28.506127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:28.525579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:28.700302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:28.875433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:28.975798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:31.000190Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126946950923150:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.000308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.000934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126946950923160:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.000976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.348698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.379585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.417207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.463069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.503211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.556355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.597312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.644054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:31.744741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126951245891329:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.744836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.745263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126951245891334:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.745315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126951245891335:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.745478Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:31.749795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:31.767723Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126951245891338:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:31.862525Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126951245891389:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:32.378399Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126934066019411:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:32.378450Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:33.797837Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126959835826258:2528], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029 2025-12-23T18:47:33.798318Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=ZWU2MjFlMjktOTA3N2ZkYjktNDM5ZDIyYi1mOTI5YzY4Yw==, ActorId: [1:7587126959835826249:2522], ActorState: ExecuteState, TraceId: 01kd68kf2jcmaxdr9gbz5t0jr9, ReplyQueryCompileError, status: PRECONDITION_FAILED, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }, remove tx with tx_id: >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableWithIndex [GOOD] >> KqpBatchUpdate::TableNotExists [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> KqpBatchUpdate::UpdateOn >> KqpBatchDelete::Large_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::HasTxControl [GOOD] Test command err: Trying to start YDB, gRPC: 6957, MsgBus: 18443 2025-12-23T18:47:28.813381Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126937377990374:2262];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:28.818346Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:47:28.868896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bdb/r3tmp/tmpeSAAao/pdisk_1.dat 2025-12-23T18:47:29.127425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:29.194386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:29.194525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:29.220647Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:29.230847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:29.234933Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126937377990131:2081] 1766515648777028 != 1766515648777031 TServer::EnableGrpc on GrpcPort 6957, node 1 2025-12-23T18:47:29.345463Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:29.353454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:29.353477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:29.353483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:29.353555Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18443 TClient is connected to server localhost:18443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:29.808201Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515649290 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:29.892808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:29.911802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:30.026500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:30.182789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:47:30.248468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:32.237423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126954557861189:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:32.237539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:32.238136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126954557861199:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:32.238203Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:32.625303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:32.667788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:32.706401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:32.748087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:32.799942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:32.860231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:32.938123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:32.993619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:33.101792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126958852829378:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:33.101895Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:33.102199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126958852829383:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:33.102243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126958852829384:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:33.102274Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:33.106567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:33.117396Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126958852829387:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:33.223648Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126958852829440:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:33.813208Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126937377990374:2262];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:33.813289Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:35.234951Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=MzljODU2NzQtOTU2ZDJhYTItNGE0MWQwNTMtNGZkYzkyOGU=, ActorId: [1:7587126963147797046:2532], ActorState: ExecuteState, TraceId: 01kd68kgb85x7ha44qe4sr92k8, Create QueryResponse for error on request, msg: BATCH operation can be executed only in the implicit transaction mode., status: PRECONDITION_FAILED |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableNotExists [GOOD] Test command err: Trying to start YDB, gRPC: 8950, MsgBus: 1829 2025-12-23T18:47:32.300626Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126953273979934:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:32.317353Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bd4/r3tmp/tmpc08nhJ/pdisk_1.dat 2025-12-23T18:47:32.582873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:32.671094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:32.671198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:32.673147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:32.727462Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8950, node 1 2025-12-23T18:47:32.866236Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:32.883244Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:32.883265Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:32.883271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:32.883344Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1829 2025-12-23T18:47:33.321987Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1829 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515652748 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:33.494654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:33.500903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:35.835051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126966158882658:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.835555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126966158882650:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.835690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.837625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126966158882669:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.837716Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.839547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:35.849983Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126966158882664:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:47:35.946783Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126966158882717:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:36.259665Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126966158882734:2333], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:34: Error: At function: KiUpdateTable!
:3:34: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:47:36.260411Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MTAwZTk1YzAtYjI0MmMyNTQtZDNhZjBhMjMtN2NiMjhmNjk=, ActorId: [1:7587126966158882637:2321], ActorState: ExecuteState, TraceId: 01kd68kh4r1j7am7zyp8mdhk4d, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 34 } message: "At function: KiUpdateTable!" end_position { row: 3 column: 34 } severity: 1 issues { position { row: 3 column: 34 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 34 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:47:36.321884Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126970453850048:2339], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:4:41: Error: At function: KiUpdateTable!
:4:41: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:47:36.323186Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MTAwZTk1YzAtYjI0MmMyNTQtZDNhZjBhMjMtN2NiMjhmNjk=, ActorId: [1:7587126966158882637:2321], ActorState: ExecuteState, TraceId: 01kd68khjc3y8b4fc6f82w6st4, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 41 } message: "At function: KiUpdateTable!" end_position { row: 4 column: 41 } severity: 1 issues { position { row: 4 column: 41 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 4 column: 41 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 8419, MsgBus: 4825 2025-12-23T18:47:25.191300Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126924280043453:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:25.196451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005be1/r3tmp/tmpvirdJ9/pdisk_1.dat 2025-12-23T18:47:25.422942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:25.423049Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:25.461619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:25.499620Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:25.547187Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126924280043327:2081] 1766515645155568 != 1766515645155571 2025-12-23T18:47:25.553845Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8419, node 1 2025-12-23T18:47:25.625570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:25.625594Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:25.625603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:25.625696Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:25.745885Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4825 TClient is connected to server localhost:4825 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515645566 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:26.099446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:26.104008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:47:26.109044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:26.200191Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:26.237484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:47:26.384186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:26.451187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:28.394082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126937164947085:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.394198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.395046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126937164947095:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.395126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.769761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.805662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.836123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.872805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.903819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.938327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.990538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:29.037652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:29.122333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126941459915260:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.122681Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.123106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126941459915265:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.123192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126941459915266:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.123248Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.128636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:29.147074Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126941459915269:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:29.246465Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126941459915322:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:30.190223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126924280043453:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:30.190279Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:30.819546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:30.880873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:30.924460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:33.026077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> Worker::Basic |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> KqpBatchDelete::ColumnTable [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates [GOOD] >> KqpBatchUpdate::NotIdempotent [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> KqpBatchUpdate::ColumnTable [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ColumnTable [GOOD] Test command err: Trying to start YDB, gRPC: 27243, MsgBus: 21778 2025-12-23T18:47:30.358273Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126948336554721:2201];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:30.358443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bd9/r3tmp/tmpUYKlJj/pdisk_1.dat 2025-12-23T18:47:30.662351Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:30.704309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:30.704395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:30.716476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:30.806516Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:30.810712Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126948336554556:2081] 1766515650284207 != 1766515650284210 TServer::EnableGrpc on GrpcPort 27243, node 1 2025-12-23T18:47:30.999342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:30.999368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:30.999375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:30.999457Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:31.000530Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21778 2025-12-23T18:47:31.357060Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21778 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515650830 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:31.579133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:31.591490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:33.911989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126961221457341:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:33.911993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126961221457330:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:33.912111Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:33.912488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126961221457345:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:33.912559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:33.917038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:33.927066Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126961221457344:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:47:34.016376Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126965516424693:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:34.287371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:47:34.892942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587126965516425199:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:47:34.893163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587126965516425199:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:47:34.893267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587126965516425136:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:47:34.893322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587126965516425136:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:47:34.893438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587126965516425199:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:47:34.893548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587126965516425136:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:47:34.893590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587126965516425199:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:47:34.893654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587126965516425136:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:47:34.893707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587126965516425199:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:47:34.893757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7587126965516425136:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:47:34.893836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587126965516425199:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:47:34.893856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587126965516425136:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:47:34.893946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587126965516425199:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:47:34.893959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587126965516425136:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:47:34.894075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587126965516425136:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:47:34.894081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587126965516425199:2341];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:47:34.894219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:758712696551 ... 
976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.879699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.879754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.879768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.886393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.886472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.886490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.888000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.888060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.888082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.895133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.895176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.895187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.895747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.895821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.895837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.901350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.901410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.901426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037916;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.904855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.904916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.904931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.907527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.907594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.907608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.913269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.913323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.913337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.914913Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.914973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.914990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.919588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.919665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.919680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.922603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.922667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.922681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.927062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.927121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.927142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.931740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.931820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.931834Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.934056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.934117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:36.934130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.853422Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=NjhmMDVjYmItZDM3M2VkNjgtOGI3ZmI2YjktOTZiNjRmMw==, ActorId: [1:7587126961221457300:2318], ActorState: ExecuteState, TraceId: 01kd68kjqf5tfzg54qq0gzh0rg, Create QueryResponse for error on request, msg: BATCH operations are not supported for column tables at the current time., status: PRECONDITION_FAILED >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> KqpRm::SingleSnapshotByExchanger |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::NotIdempotent [GOOD] Test command err: Trying to start YDB, gRPC: 20255, MsgBus: 15008 2025-12-23T18:47:32.281311Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126957069395287:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:32.281526Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bd5/r3tmp/tmpfPS3Rc/pdisk_1.dat 2025-12-23T18:47:32.631726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:32.762006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:32.762152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:32.764505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:32.781304Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:32.805892Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7587126957069395152:2081] 1766515652204496 != 1766515652204499 TServer::EnableGrpc on GrpcPort 20255, node 1 2025-12-23T18:47:32.892003Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:32.922691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:32.922721Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:32.922733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:32.922862Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15008 2025-12-23T18:47:33.287777Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:15008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515652818 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:33.533015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:33.540338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:33.554829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:33.686466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:33.831334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:33.896880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:35.721224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126969954298919:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.721323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.721638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126969954298929:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.721681Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.037029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.078143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.134284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.209629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.245673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.333143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.397923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.471199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.583125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126974249267105:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.583206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.583449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126974249267111:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.583504Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126974249267110:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.583623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.587439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:36.599666Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126974249267114:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:47:36.691161Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126974249267165:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:37.278210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126957069395287:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:37.278278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:38.505469Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126982839202076:2538], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:43: Error: Batch update is only supported for idempotent updates. 2025-12-23T18:47:38.507101Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=N2FiNDM1YWUtMTRjZDQ2MGYtZjg5ZmQxZTgtNTZlZTI5Ng==, ActorId: [1:7587126982839202067:2532], ActorState: ExecuteState, TraceId: 01kd68kkpz1srwp4r5pc7eg851, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Table intent determination" issue_code: 1040 severity: 1 issues { position { row: 3 column: 43 } message: "Batch update is only supported for idempotent updates." end_position { row: 3 column: 43 } severity: 1 } }, remove tx with tx_id: 2025-12-23T18:47:38.534132Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126982839202080:2540], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:43: Error: Batch update is only supported for idempotent updates. 2025-12-23T18:47:38.535187Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=N2FiNDM1YWUtMTRjZDQ2MGYtZjg5ZmQxZTgtNTZlZTI5Ng==, ActorId: [1:7587126982839202067:2532], ActorState: ExecuteState, TraceId: 01kd68kkrj3csgsh8bwcq3jse5, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Table intent determination" issue_code: 1040 severity: 1 issues { position { row: 3 column: 43 } message: "Batch update is only supported for idempotent updates." end_position { row: 3 column: 43 } severity: 1 } }, remove tx with tx_id: 2025-12-23T18:47:38.559051Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126982839202084:2542], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:51: Error: Batch update is only supported for idempotent updates. 2025-12-23T18:47:38.559399Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=N2FiNDM1YWUtMTRjZDQ2MGYtZjg5ZmQxZTgtNTZlZTI5Ng==, ActorId: [1:7587126982839202067:2532], ActorState: ExecuteState, TraceId: 01kd68kksg8gyqcvrt4hc2srpm, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Table intent determination" issue_code: 1040 severity: 1 issues { position { row: 3 column: 51 } message: "Batch update is only supported for idempotent updates." end_position { row: 3 column: 51 } severity: 1 } }, remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 30513, MsgBus: 26336 2025-12-23T18:47:31.803075Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126951296094017:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:31.803153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bd6/r3tmp/tmpWnLOIW/pdisk_1.dat 2025-12-23T18:47:32.159143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:32.159241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:32.168393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:32.215488Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:32.257335Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30513, node 1 2025-12-23T18:47:32.343141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:32.343159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:32.343165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:32.343219Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:32.514815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26336 2025-12-23T18:47:32.814331Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:26336 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515652391 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:32.924373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:32.931308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:32.938352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:33.106224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:33.290209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:33.364846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:35.436986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126968475965042:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.437098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.438630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126968475965052:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.438695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.834554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:35.870628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:35.903037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:35.956696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:35.990213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.030931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.070173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.120574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.213926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126972770933216:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.213987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.214224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126972770933221:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.214269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126972770933222:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.214290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.218257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:36.230151Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126972770933225:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:36.303733Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126972770933276:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:36.804248Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126951296094017:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:36.804331Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> KqpBatchDelete::DeleteOn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ColumnTable [GOOD] Test command err: Trying to start YDB, gRPC: 18485, MsgBus: 4436 2025-12-23T18:47:31.734705Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126949333377467:2133];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:31.736853Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bd7/r3tmp/tmpnXGv5w/pdisk_1.dat 2025-12-23T18:47:32.003356Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:32.010696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:32.013801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:32.146174Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126949333377363:2081] 1766515651693818 != 1766515651693821 2025-12-23T18:47:32.152106Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:32.159552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18485, node 1 2025-12-23T18:47:32.281778Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:32.309011Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-12-23T18:47:32.309030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:32.309037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:32.309116Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4436 TClient is connected to server localhost:4436 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-12-23T18:47:32.739444Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515652209 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:32.873766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:34.974675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126962218280149:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:34.974806Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126962218280141:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:34.974976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:34.978970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126962218280156:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:34.979106Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:34.980209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:34.994801Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126962218280155:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:47:35.068347Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126966513247504:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:35.371015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:47:35.813890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:47:35.814443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:47:35.814717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:47:35.814941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:47:35.815073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:47:35.815204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:47:35.815360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:47:35.815505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:47:35.815615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:47:35.815763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:47:35.815906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:47:35.816101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:47:35.816220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;self_id=[1:7587126966513247930:2338];tablet_id=72075186224037951;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:47:35.823983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7587126966513247933:2341];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:47:35.824043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7587126966513247933:2341];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:47:35.824207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7587126966513247933:2341];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:47:35.824364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7587126966513247933:2341];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:47:35.82 ... 
976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.887527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.887577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.887589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.889476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.889514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.889526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.895951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.896006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.896024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.896636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.896681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.896692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.906398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.906453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.906470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.908606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.908686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.908701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.914644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.914725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.914760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.916503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.916551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.916563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.923851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.923908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.923922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.924488Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.924534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.924542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.931354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.931412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.931426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.933010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.933083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.933102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.938348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.938384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.938392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.944103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.944154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.944167Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.951427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.951490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:37.951505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2025-12-23T18:47:38.561122Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=OWI2ZTE3MjctZWJmZjE1YjgtNDk0NTBlMzktODI4NGQ0YmQ=, ActorId: [1:7587126962218280127:2320], ActorState: ExecuteState, TraceId: 01kd68kkf7df3aaztczjr89c4n, Create QueryResponse for error on request, msg: BATCH operations are not supported for column tables at the current time., status: PRECONDITION_FAILED |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> BasicUsage::WaitEventBlocksBeforeDiscovery >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> KqpRm::ManyTasks >> BasicUsage::RetryDiscoveryWithCancel |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DeleteOn [GOOD] Test command err: Trying to start YDB, gRPC: 23897, MsgBus: 12138 2025-12-23T18:47:34.110686Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126964198308486:2148];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:34.110808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bce/r3tmp/tmpqLt3w1/pdisk_1.dat 2025-12-23T18:47:34.355106Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:34.357256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:34.357435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:34.388269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:34.433357Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126964198308375:2081] 1766515654103406 != 1766515654103409 2025-12-23T18:47:34.451794Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23897, node 1 2025-12-23T18:47:34.551643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:34.551682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:34.551694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:34.551817Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:34.657657Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12138 TClient is connected to server localhost:12138 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515654505 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:35.140604Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:35.146425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:35.153558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:35.165873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:35.339210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:35.513428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:35.591155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:37.539710Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126977083212144:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.539828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.540313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126977083212154:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.540366Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.867395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.913665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.956657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.992167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:38.034993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:38.127296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:38.188529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:38.234029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:38.320618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126981378180328:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:38.320707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:38.321211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126981378180333:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:38.321280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126981378180334:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:38.321324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:38.325495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:38.346984Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126981378180337:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:38.447070Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126981378180388:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:39.112453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126964198308486:2148];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:39.112546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:40.211573Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126989968115299:2538], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH DELETE is unsupported with ON 2025-12-23T18:47:40.211922Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=ODg1MjRjODYtNmM4ZjI4NzgtOTBhMDE5NWEtMjI3YjhkYzg=, ActorId: [1:7587126989968115290:2532], ActorState: ExecuteState, TraceId: 01kd68knd24na803k7dsp7b5vd, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 2 column: 22 } message: "BATCH DELETE is unsupported with ON" end_position { row: 2 column: 22 } severity: 1 }, remove tx with tx_id: |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2025-12-23T18:40:27.059868Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:27.059940Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:27.319144Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:28.590644Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:28.590718Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:28.688209Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:33.053316Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:33.053389Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:33.168110Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:34.161056Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:34.161134Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:34.255841Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:35.336280Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:35.336359Z node 10 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:35.442167Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:36.650314Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:36.650392Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:36.752038Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:37.981369Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:37.981452Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:38.112420Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:39.296425Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:39.296509Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:39.388741Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:40.489454Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:40.489538Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:40.606765Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:41.924103Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:41.924185Z node 15 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:42.047173Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:44.163087Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to 
console configs 2025-12-23T18:40:44.163179Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:44.284386Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:46.395443Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:46.395530Z node 17 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:46.507175Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:48.935444Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:48.935539Z node 18 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:49.048953Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:51.339522Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:51.339616Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:51.459055Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:53.914657Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:53.914762Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:54.040510Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:56.849553Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:56.849656Z node 21 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:57.062238Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:40:59.320384Z node 22 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:40:59.320481Z node 22 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:40:59.424917Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:01.605249Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:01.605357Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:01.712753Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:04.530793Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:04.530884Z node 24 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:04.701537Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:05.504769Z node 24 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1240: Unexpected config sender died for subscription id=1 2025-12-23T18:41:06.440318Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:06.440429Z node 25 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:06.708540Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:08.848512Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:08.848617Z node 26 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:08.963551Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:10.953526Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:41:10.953629Z node 27 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:41:11.064696Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:41:19.083514Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:41:19.083633Z node 27 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:28.748575Z node 27 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[100:28538277257700723:0] nodeid=100 host=host100 tenant=tenant-100 nodetype=type100 kinds=2 lastprovidedconfig= 2025-12-23T18:45:29.620254Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:45:29.620396Z node 28 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:29.721983Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:45:36.579165Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:45:36.579292Z node 28 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:40.926265Z node 29 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:47:40.926379Z node 29 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:41.027314Z node 29 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> KqpRm::ManyTasks [GOOD] >> KqpRm::SingleSnapshotByExchanger [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-std] [GOOD] >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2025-12-23T18:47:41.276319Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:47:41.276801Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c4e/r3tmp/tmpnkjwjs/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T18:47:41.278058Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c4e/r3tmp/tmpnkjwjs/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c4e/r3tmp/tmpnkjwjs/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15716943162539139739 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:47:41.330125Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:47:41.330485Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:47:41.348345Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:471:2102] with ResourceBroker at [2:441:2101] 2025-12-23T18:47:41.348487Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:472:2103] 2025-12-23T18:47:41.348541Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:469:2349] with ResourceBroker at [1:440:2330] 2025-12-23T18:47:41.348633Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:470:2350] 2025-12-23T18:47:41.348711Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:47:41.348747Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-12-23T18:47:41.348792Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:47:41.348813Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-12-23T18:47:41.349122Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:47:41.370160Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515661 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:47:41.370409Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:47:41.370521Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515661 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:47:41.370938Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:47:41.371137Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:47:41.371174Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:47:41.371305Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515661 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:47:41.371522Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:47:41.371552Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:47:41.371627Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515661 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:47:41.371741Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:47:41.372359Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-12-23T18:47:41.372459Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:47:41.373026Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:47:41.373486Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-12-23T18:47:41.373696Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:47:41.373799Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:47:41.374023Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:47:41.374144Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:47:41.374353Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:47:41.374454Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:47:41.377553Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-1 (1 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:47:41.377624Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-1 (1 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:41.377681Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:47:41.377726Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-1 (1 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:41.377783Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:469:2349])) 2025-12-23T18:47:41.378002Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:47:41.378098Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-2-1-2 (2 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:47:41.378142Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-2-1-2 (2 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:41.378187Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:47:41.378226Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-2-1-2 (2 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:41.378285Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:469:2349])) 2025-12-23T18:47:41.378375Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 2, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:47:41.378580Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:47:41.378709Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515661 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-12-23T18:47:41.379339Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:47:42.482172Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2025-12-23T18:47:42.482305Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-2-1 (1 by [1:469:2349]) (release resources {0, 100}) 2025-12-23T18:47:42.482376Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.300050 (remove task kqp-1-2-1 (1 by [1:469:2349])) 2025-12-23T18:47:42.482413Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.100100 2025-12-23T18:47:42.482460Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-12-23T18:47:42.482504Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-2-1-2 (2 by [1:469:2349]) (release resources {0, 100}) 2025-12-23T18:47:42.482546Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.300050 to 0.100100 (remove task kqp-2-1-2 (2 by [1:469:2349])) 2025-12-23T18:47:42.482582Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-12-23T18:47:42.482819Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:47:42.482974Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515662 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:47:42.483336Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:47:42.766952Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2025-12-23T18:47:42.696849Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T18:47:42.697355Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c4d/r3tmp/tmpo398zp/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-12-23T18:47:42.698131Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c4d/r3tmp/tmpo398zp/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c4d/r3tmp/tmpo398zp/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13401715002564173702 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T18:47:42.748430Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:47:42.748798Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2025-12-23T18:47:42.766440Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:471:2102] with ResourceBroker at [2:441:2101] 2025-12-23T18:47:42.766553Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:472:2103] 2025-12-23T18:47:42.766609Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:469:2349] with ResourceBroker at [1:440:2330] 2025-12-23T18:47:42.766699Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:470:2350] 2025-12-23T18:47:42.766804Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:47:42.766839Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-12-23T18:47:42.766884Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-12-23T18:47:42.766932Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-12-23T18:47:42.767175Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:47:42.782326Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515662 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:47:42.782593Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:47:42.782683Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515662 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:47:42.783050Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:47:42.783249Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:47:42.783287Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:47:42.783417Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1766515662 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:47:42.783625Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-12-23T18:47:42.783654Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2025-12-23T18:47:42.783724Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1766515662 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-12-23T18:47:42.783851Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-12-23T18:47:42.784518Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-12-23T18:47:42.784605Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:47:42.784945Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:47:42.785465Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-12-23T18:47:42.785718Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:47:42.785852Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:47:42.786109Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-12-23T18:47:42.786248Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:47:42.786440Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2025-12-23T18:47:42.786575Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2025-12-23T18:47:42.789404Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:47:42.789471Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.789523Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:47:42.789572Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.789629Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:469:2349])) 2025-12-23T18:47:42.789817Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:47:42.790022Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-2 (2 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:47:42.790056Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-2 (2 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.790087Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-2 (2 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:47:42.790131Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-2 (2 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.790197Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-1-2-2 (2 by [1:469:2349])) 2025-12-23T18:47:42.790247Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:47:42.790382Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-3-3 (3 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:47:42.790420Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-3-3 (3 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.790450Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-3-3 (3 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:47:42.790533Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-3-3 (3 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.790568Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.750000 (insert task kqp-1-3-3 (3 by [1:469:2349])) 2025-12-23T18:47:42.790589Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 3. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:47:42.790680Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-4-4 (4 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:47:42.790706Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-4-4 (4 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.790733Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-4-4 (4 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:47:42.790775Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-4-4 (4 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.790799Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.750000 to 1.000000 (insert task kqp-1-4-4 (4 by [1:469:2349])) 2025-12-23T18:47:42.790824Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 4. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:47:42.790926Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-5-5 (5 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:47:42.790952Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-5-5 (5 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.790992Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-5-5 (5 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:47:42.791048Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-5-5 (5 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.791084Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.000000 to 1.250000 (insert task kqp-1-5-5 (5 by [1:469:2349])) 2025-12-23T18:47:42.791112Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 5. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:47:42.791216Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-6-6 (6 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:47:42.791241Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-6-6 (6 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.791296Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-6-6 (6 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:47:42.791328Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-6-6 (6 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.791356Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.250000 to 1.500000 (insert task kqp-1-6-6 (6 by [1:469:2349])) 2025-12-23T18:47:42.791383Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 6. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:47:42.791505Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-7-7 (7 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:47:42.791535Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-7-7 (7 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.791560Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-7-7 (7 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:47:42.791596Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-7-7 (7 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.791625Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.500000 to 1.750000 (insert task kqp-1-7-7 (7 by [1:469:2349])) 2025-12-23T18:47:42.791643Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 7. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:47:42.791728Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-8-8 (8 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:47:42.791772Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-8-8 (8 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.791798Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-8-8 (8 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:47:42.791813Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-8-8 (8 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.791827Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.750000 to 2.000000 (insert task kqp-1-8-8 (8 by [1:469:2349])) 2025-12-23T18:47:42.791842Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 8. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:47:42.791900Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-9-9 (9 by [1:469:2349]) priority=0 resources={0, 100} 2025-12-23T18:47:42.791922Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-9-9 (9 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.791957Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-9-9 (9 by [1:469:2349]) from queue queue_kqp_resource_manager 2025-12-23T18:47:42.791996Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-9-9 (9 by [1:469:2349]) to queue queue_kqp_resource_manager 2025-12-23T18:47:42.792045Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 2.000000 to 2.250000 (insert task kqp-1-9-9 (9 by [1:469:2349])) 2025-12-23T18:47:42.792090Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 9. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-12-23T18:47:42.792153Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [1:469:2349]) (release resources {0, 100}) 2025-12-23T18:47:42.792185Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 2.250000 to 2.000000 (remove task kqp-1-1-1 (1 by [1:469:2349])) 2025-12-23T18:47:42.792217Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest |95.8%| [TA] $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpBatchUpdate::UpdateOn [GOOD] |95.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-anonymous |95.8%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 7214, MsgBus: 62464 2025-12-23T18:47:36.308539Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126970745476900:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:36.308607Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bcb/r3tmp/tmpyviHzd/pdisk_1.dat 2025-12-23T18:47:36.707009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:36.707143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:36.715280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:36.755038Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:36.757798Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:36.762545Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126970745476870:2081] 1766515656306544 != 1766515656306547 TServer::EnableGrpc on GrpcPort 7214, node 1 2025-12-23T18:47:36.956037Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:36.967492Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:36.967523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:36.967530Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:36.967607Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62464 2025-12-23T18:47:37.319129Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:62464 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515656787 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:47:37.589601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:37.630021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.806279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:37.999910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:38.082015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:40.139788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126987925347937:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.140037Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.140666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126987925347947:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.140727Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.481486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.515894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.546029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.607775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.643182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.678209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.706904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.752199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.828495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126987925348817:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.828587Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.828780Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126987925348822:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.828812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126987925348823:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.828857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.832855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:40.844237Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126987925348826:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:40.903914Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126987925348877:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:41.308810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126970745476900:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:41.308855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:42.690086Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587126996515283743:2528], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029 2025-12-23T18:47:42.690536Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YWQ3Y2ZlYzMtZDFjNDQ0M2EtZDA0Yjc3YTMtYjViYTI3ZTY=, ActorId: [1:7587126996515283734:2522], ActorState: ExecuteState, TraceId: 01kd68kqrx15610r5k1mdbm36n, ReplyQueryCompileError, status: PRECONDITION_FAILED, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }, remove tx with tx_id: |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UpdateOn [GOOD] Test command err: Trying to start YDB, gRPC: 9595, MsgBus: 11167 2025-12-23T18:47:37.443486Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126976960257083:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:37.443734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bca/r3tmp/tmpTUHjnr/pdisk_1.dat 2025-12-23T18:47:37.764509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:37.764629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:37.812330Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:37.835112Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:37.839641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9595, node 1 2025-12-23T18:47:37.960718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:37.960740Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:37.960746Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:37.960890Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:37.976348Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11167 TClient is connected to server localhost:11167 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-12-23T18:47:38.452806Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515657893 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:38.541912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:38.549424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:47:38.563937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:38.725898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:38.909312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:38.989864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:41.044971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126994140128112:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.045085Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.045773Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126994140128122:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.045846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.371700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.399039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.430345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.464306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.499086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.543979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.576812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.624551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.704116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126994140128996:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.704199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.704224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126994140129001:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.704339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126994140129003:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.704369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.708263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:41.721677Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126994140129005:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:41.804177Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126994140129056:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:42.443866Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587126976960257083:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:42.443929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:43.324651Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587127002730063962:2536], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH UPDATE is unsupported with ON 2025-12-23T18:47:43.326147Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=ZTI0OTQzOGEtZWFkMzUwMjgtMzBjZWMwZWMtYWY2ZWNkNDE=, ActorId: [1:7587127002730063952:2530], ActorState: ExecuteState, TraceId: 01kd68kre762vz6txpk2qtc36j, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 2 column: 22 } message: "BATCH UPDATE is unsupported with ON" end_position { row: 2 column: 22 } severity: 1 }, remove tx with tx_id: |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_2 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-ordinaryuser |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBillingLabels >> TSchemeShardServerLess::Fake [GOOD] >> TSchemeShardServerLess::StorageBilling >> TSchemeShardServerLess::TestServerlessComputeResourcesMode >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true >> Worker::Basic [GOOD] >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] >> KqpBatchDelete::SimpleOnePartition [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-anonymous >> KqpBatchUpdate::Large_1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 15064, MsgBus: 1268 2025-12-23T18:47:17.643692Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126891181681732:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:17.643764Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bec/r3tmp/tmpdxkxYN/pdisk_1.dat 2025-12-23T18:47:17.985905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:18.000273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:18.000394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-12-23T18:47:18.004854Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:18.006431Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126891181681707:2081] 1766515637642017 != 1766515637642020 2025-12-23T18:47:18.012878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15064, node 1 2025-12-23T18:47:18.175385Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:18.175420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:18.175433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:18.175525Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:18.276979Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1268 TClient is connected to server localhost:1268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:18.698300Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515638062 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:18.706794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:18.721900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:18.866883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:19.039346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:19.127110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:21.155769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126908361552760:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.155890Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.156231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126908361552770:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.156265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.529781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.571814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.601842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.643995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.690275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.729899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.773878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.823865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.902036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126908361553644:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.902118Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.902193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126908361553649:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.902406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126908361553651:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.902479Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.905522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:21.916336Z node 1 :K ... nected -> Connecting 2025-12-23T18:47:33.422507Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21516, node 2 2025-12-23T18:47:33.519769Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:33.553041Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:33.553062Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:33.553070Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:33.553146Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25236 TClient is connected to server localhost:25236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515653469 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:47:33.979655Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:33.993109Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:34.050333Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:47:34.198867Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:34.284121Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:34.295110Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:36.891841Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126970714727971:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.891931Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.892500Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126970714727981:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.892546Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.976025Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.018223Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.054363Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.098780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.140941Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.197586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.240528Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.310260Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.426889Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126975009696150:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.426988Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.427093Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126975009696155:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.427283Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587126975009696157:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.427331Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.431847Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:37.460533Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587126975009696159:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:37.546720Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587126975009696210:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:38.285271Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587126957829824252:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:38.285340Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:39.406338Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2025-12-23T18:47:38.943492Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126981613619226:2223];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:38.943577Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003dd5/r3tmp/tmpBIa9zG/pdisk_1.dat 2025-12-23T18:47:39.370187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:39.457317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:39.457436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:39.470037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:39.513444Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:39.523020Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126981613619038:2081] 1766515658909835 != 1766515658909838 2025-12-23T18:47:39.616977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16932 TServer::EnableGrpc on GrpcPort 1742, node 1 2025-12-23T18:47:39.830564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:39.830598Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:39.830612Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:39.830681Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:39.947967Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:16932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515659545 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:40.205316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:40.216241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:40.393255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766515660483 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... 
(TRUNCATED) 2025-12-23T18:47:40.529759Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7587126990203554649:2607] Handshake: worker# [1:7587126990203554648:2607] 2025-12-23T18:47:40.530472Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handshake: worker# [1:7587126990203554648:2607] 2025-12-23T18:47:40.531384Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:47:40.533076Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-12-23T18:47:40.534237Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Send handshake: worker# [1:7587126990203554648:2607] 2025-12-23T18:47:40.534316Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7587126990203554648:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-12-23T18:47:40.534358Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:173: [Worker][1:7587126990203554648:2607] Handshake with writer: sender# [1:7587126990203554650:2607] 2025-12-23T18:47:40.537533Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7587126990203554649:2607] Create read session: session# [1:7587126990203554656:2487] 2025-12-23T18:47:40.537571Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7587126990203554648:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-12-23T18:47:40.537582Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:165: [Worker][1:7587126990203554648:2607] Handshake with reader: sender# [1:7587126990203554649:2607] 2025-12-23T18:47:40.537605Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7587126990203554649:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-12-23T18:47:40.624455Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.624495Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7587126990203554649:2607] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_1_1811927380476702920_v1 } } 2025-12-23T18:47:42.142716Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126998793489425:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.142716Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126998793489417:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.142833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126998793489426:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.142929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.144088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126998793489438:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.144255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.147931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but ... pp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:47:45.572025Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7587126990203554649:2607] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-12-23T18:47:45.557000Z WriteTime: 2025-12-23T18:47:45.561000Z MessageGroupId: producer ProducerId: producer }] } } 2025-12-23T18:47:45.572149Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7587126990203554648:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-12-23T18:47:45.557000Z WriteTime: 2025-12-23T18:47:45.561000Z MessageGroupId: producer ProducerId: producer }] } 2025-12-23T18:47:45.572232Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-12-23T18:47:45.557000Z WriteTime: 2025-12-23T18:47:45.561000Z MessageGroupId: producer ProducerId: producer }] } 2025-12-23T18:47:45.572361Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2025-12-23T18:47:45.572531Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7587127011678392071:2607] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-12-23T18:47:45.572578Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-12-23T18:47:45.573370Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7587127011678392071:2607] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-12-23T18:47:45.576800Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7587127011678392071:2607] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:47:45.576866Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-12-23T18:47:45.576944Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 
72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2025-12-23T18:47:45.577027Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7587126990203554648:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-12-23T18:47:45.577074Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7587126990203554649:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-12-23T18:47:45.776918Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7587126990203554649:2607] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-12-23T18:47:45.728000Z WriteTime: 2025-12-23T18:47:45.726000Z MessageGroupId: producer ProducerId: producer }] } } 2025-12-23T18:47:45.777001Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7587126990203554648:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-12-23T18:47:45.728000Z WriteTime: 2025-12-23T18:47:45.726000Z MessageGroupId: producer ProducerId: producer }] } 2025-12-23T18:47:45.777055Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-12-23T18:47:45.728000Z WriteTime: 2025-12-23T18:47:45.726000Z MessageGroupId: producer ProducerId: producer }] } 2025-12-23T18:47:45.777171Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2025-12-23T18:47:45.777308Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7587127011678392071:2607] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-12-23T18:47:45.778965Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7587127011678392071:2607] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:47:45.779029Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-12-23T18:47:45.779071Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-12-23T18:47:45.779129Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7587126990203554648:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-12-23T18:47:45.779181Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: 
[RemoteTopicReader][/Root/topic][0][1:7587126990203554649:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-12-23T18:47:45.911620Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7587126990203554649:2607] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-12-23T18:47:45.900000Z WriteTime: 2025-12-23T18:47:45.903000Z MessageGroupId: producer ProducerId: producer }] } } 2025-12-23T18:47:45.911705Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7587126990203554648:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-12-23T18:47:45.900000Z WriteTime: 2025-12-23T18:47:45.903000Z MessageGroupId: producer ProducerId: producer }] } 2025-12-23T18:47:45.911769Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-12-23T18:47:45.900000Z WriteTime: 2025-12-23T18:47:45.903000Z MessageGroupId: producer ProducerId: producer }] } 2025-12-23T18:47:45.911892Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2025-12-23T18:47:45.912009Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7587127011678392071:2607] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-12-23T18:47:45.913822Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7587127011678392071:2607] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-12-23T18:47:45.913878Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-12-23T18:47:45.913914Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7587126990203554650:2607] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 2025-12-23T18:47:45.914004Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7587126990203554648:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-12-23T18:47:45.914055Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7587126990203554649:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-12-23T18:47:46.080961Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:125: [RemoteTopicReader][/Root/topic][0][1:7587126990203554649:2607] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2025-12-23T18:47:46.081017Z node 1 :REPLICATION_SERVICE INFO: topic_reader.cpp:138: [RemoteTopicReader][/Root/topic][0][1:7587126990203554649:2607] Leave 2025-12-23T18:47:46.081123Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:267: [Worker][1:7587126990203554648:2607] Reader has gone: sender# [1:7587126990203554649:2607]: NKikimr::NReplication::NService::TEvWorker::TEvGone { Status: UNAVAILABLE ErrorDescription: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } 2025-12-23T18:47:46.081180Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7587127015973359538:2607] Handshake: worker# [1:7587126990203554648:2607] 2025-12-23T18:47:46.085148Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7587127015973359538:2607] Create read session: session# [1:7587127015973359539:2487] 2025-12-23T18:47:46.085197Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7587126990203554648:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-12-23T18:47:46.085210Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:165: [Worker][1:7587126990203554648:2607] Handshake with reader: sender# [1:7587127015973359538:2607] 2025-12-23T18:47:46.085246Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7587127015973359538:2607] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] |95.8%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimpleOnePartition [GOOD] Test command err: Trying to start YDB, gRPC: 20754, MsgBus: 20515 2025-12-23T18:47:09.774983Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126857812996711:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:09.778401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:47:09.821556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bf7/r3tmp/tmpjsyonS/pdisk_1.dat 2025-12-23T18:47:10.183679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:10.183848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:10.244727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:10.255364Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:10.280933Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:10.282810Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126857812996672:2081] 1766515629740068 != 1766515629740071 TServer::EnableGrpc on GrpcPort 20754, node 1 2025-12-23T18:47:10.353072Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:10.353106Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:10.353113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:10.353227Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:10.511917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20515 TClient is connected to server localhost:20515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:10.782631Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515630334 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:10.829484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:10.844773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:11.005046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:11.153824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:11.232682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:13.260950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126874992867732:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:13.261058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:13.261336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126874992867742:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:13.261412Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:13.638266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:13.681747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:13.716910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:13.751684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:13.785966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:13.857407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:13.901923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:13.944341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.022092Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126879287835916:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.022172Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.022232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126879287835921:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.022514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126879287835923:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.022564Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.026098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... 4037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:37.700332Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:37.705668Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17951, node 4 2025-12-23T18:47:37.768919Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:37.768935Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:37.768939Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:37.768990Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:37.792971Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16979 TClient is connected to server localhost:16979 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515657753 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:38.223494Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:38.229722Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:38.245180Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:38.329043Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:38.478115Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:38.549024Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:38.605195Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:41.170422Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126995776940495:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.170512Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.170840Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126995776940505:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.170938Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.246844Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.280819Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.313679Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.353284Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.382886Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.419464Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.468468Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.526226Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.623850Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126995776941373:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.623949Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.624248Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126995776941379:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.624286Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.624335Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587126995776941378:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.628055Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:41.640811Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587126995776941382:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:41.714983Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587126995776941433:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:42.541109Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587126978597069476:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:42.541178Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-ordinaryuser >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:47:47.035632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:47:47.035756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.035814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:47:47.035858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:47:47.035905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:47:47.035937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:47:47.035994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.036071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:47:47.036891Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:47.037919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:47:47.137972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:47:47.138028Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:47.148941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:47:47.149165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:47:47.149863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:47:47.165916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:47:47.167227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:47:47.171128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:47.172993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:47.182718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.182934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:47:47.189171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:47.189247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.189426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:47:47.189492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:47:47.189710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:47:47.189850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:47:47.196773Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:243:2058] recipient: [1:15:2062] 2025-12-23T18:47:47.317717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-12-23T18:47:47.319428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:47.320680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:47:47.320740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:47:47.321972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:47:47.322053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:47.327874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:47.330179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:47:47.330404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:47.330557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:47:47.330604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:47:47.330640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:47:47.332809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:47.332862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:47:47.332922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:47:47.335007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:47.335057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:47.335128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:47:47.335180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-12-23T18:47:47.340999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:47:47.343359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:47:47.345165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:47:47.346498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:47.346645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:47:47.346694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:47:47.348424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:47:47.348495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:47:47.348700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:47:47.349598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:47:47.360128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:47.360190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
:HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409551 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409551 2025-12-23T18:47:48.253885Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409550 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409550 2025-12-23T18:47:48.254321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-12-23T18:47:48.254631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409551 2025-12-23T18:47:48.255964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-12-23T18:47:48.256536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-12-23T18:47:48.256756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186234409550 2025-12-23T18:47:48.257583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-12-23T18:47:48.258187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:47:48.258275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-12-23T18:47:48.258431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-12-23T18:47:48.259189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:47:48.259244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-12-23T18:47:48.259321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:47:48.262382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2025-12-23T18:47:48.262459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 
tabletId 72075186234409549 2025-12-23T18:47:48.262561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2025-12-23T18:47:48.262606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409551 2025-12-23T18:47:48.264456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2025-12-23T18:47:48.264538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409550 2025-12-23T18:47:48.264810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:47:48.264936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-12-23T18:47:48.265289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-12-23T18:47:48.265345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-12-23T18:47:48.265893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-12-23T18:47:48.266000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:47:48.266046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:943:2802] TestWaitNotification: OK eventTxId 106 2025-12-23T18:47:48.266721Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:47:48.267062Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 317us result status StatusPathDoesNotExist 2025-12-23T18:47:48.267283Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2025-12-23T18:47:48.267926Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:47:48.268144Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 215us result status StatusPathDoesNotExist 2025-12-23T18:47:48.268318Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:47:48.269044Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:47:48.269266Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 219us result status StatusSuccess 2025-12-23T18:47:48.269775Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 
MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409549 is deleted wait until 72075186234409550 is deleted wait until 72075186234409551 is deleted wait until 72075186234409552 is deleted 2025-12-23T18:47:48.271992Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 2025-12-23T18:47:48.273148Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409550 2025-12-23T18:47:48.273224Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409551 2025-12-23T18:47:48.273317Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409552 Deleted tabletId 72075186234409549 Deleted tabletId 72075186234409550 Deleted tabletId 72075186234409551 Deleted tabletId 72075186234409552 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_1 [GOOD] Test command err: Trying to start YDB, gRPC: 13172, MsgBus: 23284 2025-12-23T18:47:13.799710Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126872952437836:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:13.799878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bf0/r3tmp/tmpBciTIs/pdisk_1.dat 2025-12-23T18:47:14.033141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:14.033293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:14.089907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:14.095765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:14.100969Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:14.101060Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126872952437810:2081] 1766515633796779 != 1766515633796782 TServer::EnableGrpc on GrpcPort 13172, node 1 2025-12-23T18:47:14.206023Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:14.206044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:14.206053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed 
to initialize from file: (empty maybe) 2025-12-23T18:47:14.206139Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:14.267045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23284 TClient is connected to server localhost:23284 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515634170 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:14.674288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:47:14.692407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.810035Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:14.811397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:14.971859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:47:15.032343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.926987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126885837341571:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.927131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.931132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126885837341581:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.931207Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:17.276237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:17.317844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:17.397054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:17.431882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:17.462025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:17.538445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:17.597045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:17.646014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:17.736141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126890132309754:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:17.736250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:17.736651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126890132309759:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:17.736703Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126890132309760:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:17.737114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:17.741004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:17.752621Z node 1 ... n GrpcPort 4342, node 3 2025-12-23T18:47:38.644937Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:38.644970Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:38.644983Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:38.645087Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:38.725853Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17441 TClient is connected to server localhost:17441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515658600 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:39.265422Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:39.274199Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:39.288226Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:39.347900Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:39.394034Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:47:39.603899Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:39.682644Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:42.359044Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587126999083652352:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.359141Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.359738Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587126999083652361:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.359809Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.472069Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:42.562065Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:42.599857Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:42.637439Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:42.674175Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:42.718007Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:42.762409Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:42.810440Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:42.888654Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587126999083653234:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.888750Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.888872Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587126999083653240:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.888888Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587126999083653239:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.888910Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.892977Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:42.904744Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587126999083653243:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:42.967132Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587126999083653294:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:43.334573Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587126981903781392:2137];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:43.334645Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:44.934084Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:47:47.034423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:47:47.034519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.034574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:47:47.034641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:47:47.034687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:47:47.034715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:47:47.034793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.034917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:47:47.035880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:47.037804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:47:47.133877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:47:47.133932Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:47.146531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:47:47.146914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:47:47.149553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:47:47.166025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:47:47.167165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:47:47.170264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:47.173024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:47.181295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.181484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:47:47.189174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:47.189245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.189432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:47:47.189491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:47:47.189611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:47:47.189766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:47:47.356473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.357723Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.359034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.359136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.359224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T18:47:48.645911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2025-12-23T18:47:48.645974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-12-23T18:47:48.646076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:47:48.646174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 39], Generation: 2, ActorId:[1:1055:2987], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-12-23T18:47:48.648126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:48.648167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:47:48.648388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:48.648420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 104, path id: 39 2025-12-23T18:47:48.648753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:47:48.648816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-12-23T18:47:48.648853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 240 -> 240 2025-12-23T18:47:48.649428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:47:48.649514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:47:48.649551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:47:48.649585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2025-12-23T18:47:48.649618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 6 2025-12-23T18:47:48.649691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-12-23T18:47:48.652558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:47:48.652633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:47:48.652772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:47:48.652813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:47:48.652852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:47:48.652892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:47:48.652945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-12-23T18:47:48.652984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:47:48.653024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:47:48.653054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:47:48.653268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:47:48.653996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:47:48.655941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:47:48.655987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:47:48.656430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:47:48.656542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:47:48.656578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1262:3142] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-12-23T18:47:48.659853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:47:48.660070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2025-12-23T18:47:48.660131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2025-12-23T18:47:48.660286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-12-23T18:47:48.660348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-12-23T18:47:48.662825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:47:48.663109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-12-23T18:47:48.666581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:47:48.667078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2025-12-23T18:47:48.667139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2025-12-23T18:47:48.667286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: 
can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-12-23T18:47:48.667343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-12-23T18:47:48.670236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:47:48.670434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:47:47.060724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:47:47.060844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.060881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:47:47.060912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:47:47.060956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:47:47.060981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:47:47.061034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.061116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:47:47.062006Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:47.062268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:47:47.154716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:47:47.154806Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:47.164446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:47:47.164680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:47:47.164813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:47:47.168260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:47:47.168442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:47:47.170255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:47.173119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:47.178549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.180028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:47:47.189069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:47.189184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.189348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:47:47.189401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:47:47.189634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:47:47.189796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:47:47.369560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.370532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.370672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.370760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.370828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.370920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.370984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.371065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.371142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour 
} } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.371217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.371280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.371362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.371472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.371554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.371639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
pathId: [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:47:48.659330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6433: Update domain reply, message: Origin: 72075186233409546 TxId: 106, at schemeshard: 72057594046678944 2025-12-23T18:47:48.659369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-12-23T18:47:48.659489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-12-23T18:47:48.659534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:795: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2025-12-23T18:47:48.659574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 106:0 138 -> 240 2025-12-23T18:47:48.664037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:47:48.664242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:47:48.664293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2025-12-23T18:47:48.664394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2025-12-23T18:47:48.664432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:47:48.664470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2025-12-23T18:47:48.664499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:47:48.664555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-12-23T18:47:48.664612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:47:48.664656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2025-12-23T18:47:48.664696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 106:0 2025-12-23T18:47:48.664752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-12-23T18:47:48.667978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-12-23T18:47:48.668043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-12-23T18:47:48.668639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 
2025-12-23T18:47:48.668760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:47:48.668819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1350:3231] TestWaitNotification: OK eventTxId 106 2025-12-23T18:47:48.669501Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:47:48.669764Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 226us result status StatusSuccess 2025-12-23T18:47:48.670202Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:47:48.671095Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2025-12-23T18:47:48.671303Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 203us result status StatusSuccess 2025-12-23T18:47:48.671804Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186234409549 PathType: EPathTypeDir CreateFinished: false CreateTxId: 281474976725657 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 2025-12-23T18:47:48.672498Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:47:48.672704Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 196us result status StatusSuccess 2025-12-23T18:47:48.673058Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 
TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:47:48.673542Z node 1 :HIVE INFO: tablet_helpers.cpp:1652: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:39 |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-ordinaryuser |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:47:47.473224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:47:47.473335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.473379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:47:47.473425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:47:47.473465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:47:47.473495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:47:47.473552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.473667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:47:47.474596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:47.474913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:47:47.563303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:47:47.563385Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:47.569736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:47:47.569957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:47:47.570090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:47:47.573317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:47:47.573526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:47:47.574263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:47.574485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:47.576619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.576805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:47:47.577962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:47.578022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.578162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:47:47.578216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:47:47.578327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:47:47.578480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:47:47.731193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.732164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.732296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.732374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.732435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.732497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.732540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.732597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.732688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.732766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.732844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.732930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.733015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.733076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.733170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
86234409549, txId: 281474976725658 2025-12-23T18:47:48.982657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186234409549, txId: 281474976725658, pathId: [OwnerId: 72075186234409549, LocalPathId: 1], version: 6 2025-12-23T18:47:48.982676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186234409549, LocalPathId: 1] was 5 2025-12-23T18:47:48.982723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725658, ready parts: 0/1, is published: true 2025-12-23T18:47:48.985964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725657 2025-12-23T18:47:48.986065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725657 2025-12-23T18:47:48.986140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-12-23T18:47:48.986215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2025-12-23T18:47:48.986495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725658 2025-12-23T18:47:48.986622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:48.986707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:47:48.986754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-12-23T18:47:48.987034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2025-12-23T18:47:48.987099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-12-23T18:47:48.987201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:47:48.987301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 39], Generation: 2, ActorId:[1:1056:2986], 
EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-12-23T18:47:48.989303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:48.989333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:47:48.989437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:48.989467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 104, path id: 39 2025-12-23T18:47:48.989800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:47:48.989837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-12-23T18:47:48.989873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 240 -> 240 2025-12-23T18:47:48.990535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:47:48.990664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:47:48.990691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:47:48.990718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2025-12-23T18:47:48.990765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 6 2025-12-23T18:47:48.990845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-12-23T18:47:48.993233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:47:48.993298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:47:48.993403Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:47:48.993442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:47:48.993491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:47:48.993528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:47:48.993569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-12-23T18:47:48.993605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:47:48.993643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:47:48.993676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:47:48.993821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:47:48.994269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:47:48.995927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:47:48.995978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:47:48.996387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:47:48.996476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:47:48.996527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1260:3141] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-12-23T18:47:48.999860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:47:49.000022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2025-12-23T18:47:49.000062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2025-12-23T18:47:49.000233Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-12-23T18:47:49.000293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-12-23T18:47:49.002430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:47:49.002656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:47:47.036577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:47:47.036688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.036769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:47:47.036822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:47:47.036874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:47:47.036915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:47:47.037014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.037114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:47:47.038163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:47.038525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:47:47.135036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:47:47.135095Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:47.147934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:47:47.148143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:47:47.149546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:47:47.167283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:47:47.167523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:47:47.170530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:47.172992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:47.183166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.183335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:47:47.189486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:47.189554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.189737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:47:47.189784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:47:47.189987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:47:47.190113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:47:47.363544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.364540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.364668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.364767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.364830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.364923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.364992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.365080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.365161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.365229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.365301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.365388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.365474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.365567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.365671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
pg_class' 2025-12-23T18:47:49.156941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720662 2025-12-23T18:47:49.157040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720675 2025-12-23T18:47:49.157094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720662 2025-12-23T18:47:49.157174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/partition_stats' 2025-12-23T18:47:49.157208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720675 2025-12-23T18:47:49.157307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_queries_by_request_units_one_hour' 2025-12-23T18:47:49.161452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720674 2025-12-23T18:47:49.161580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720676 2025-12-23T18:47:49.161702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720674 2025-12-23T18:47:49.161812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_by_tli_one_hour' 2025-12-23T18:47:49.161866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720676 2025-12-23T18:47:49.161923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720677 2025-12-23T18:47:49.161979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_one_hour' 2025-12-23T18:47:49.162106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720678 2025-12-23T18:47:49.162166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720678 2025-12-23T18:47:49.162256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Handle 
TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_queries_by_duration_one_minute' 2025-12-23T18:47:49.162311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720677 2025-12-23T18:47:49.162412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/pg_tables' 2025-12-23T18:47:49.166305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720680 2025-12-23T18:47:49.166465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720680 2025-12-23T18:47:49.166580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_users' 2025-12-23T18:47:49.166664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720679 2025-12-23T18:47:49.166788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720682 2025-12-23T18:47:49.166851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720679 2025-12-23T18:47:49.166943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_group_members' 2025-12-23T18:47:49.167000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720682 2025-12-23T18:47:49.167079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/streaming_queries' 2025-12-23T18:47:49.167130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720681 2025-12-23T18:47:49.167175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720681 2025-12-23T18:47:49.167268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/resource_pools' 2025-12-23T18:47:49.171075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720683 2025-12-23T18:47:49.171199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720684 2025-12-23T18:47:49.171292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720683 2025-12-23T18:47:49.171387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_permissions' 2025-12-23T18:47:49.171456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720684 2025-12-23T18:47:49.171533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_one_minute' 2025-12-23T18:47:49.171569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:867:2820] at schemeshard: 72075186233409546 Send TEvRosterUpdateFinished TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-12-23T18:47:49.176173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerlessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" PartitionConfig { ColumnFamilies { Id: 0 ColumnCacheMode: ColumnCacheModeTryKeepInMemory } ColumnFamilies { Id: 1 Name: "Other" ColumnCacheMode: ColumnCacheModeRegular } } } } TxId: 108 TabletId: 72075186233409549 , at schemeshard: 72075186233409549 2025-12-23T18:47:49.176434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/ServerlessDB/Table, pathId: , opId: 108:0, at schemeshard: 72075186233409549 2025-12-23T18:47:49.176704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 0 name: , at schemeshard: 72075186233409549 2025-12-23T18:47:49.179436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 0 name: " TxId: 108 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2025-12-23T18:47:49.179726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/ServerlessDB, subject: , status: StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. 
ColumnFamily id: 0 name: , operation: ALTER TABLE, path: /MyRoot/ServerlessDB/Table TestModificationResult got TxId: 108, wait until txId: 108 TestModificationResults wait txId: 109 2025-12-23T18:47:49.183407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerlessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" PartitionConfig { ColumnFamilies { Id: 0 ColumnCacheMode: ColumnCacheModeRegular } ColumnFamilies { Id: 1 Name: "Other" ColumnCacheMode: ColumnCacheModeTryKeepInMemory } } } } TxId: 109 TabletId: 72075186233409549 , at schemeshard: 72075186233409549 2025-12-23T18:47:49.183680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/ServerlessDB/Table, pathId: , opId: 109:0, at schemeshard: 72075186233409549 2025-12-23T18:47:49.183999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 1 name: Other, at schemeshard: 72075186233409549 2025-12-23T18:47:49.186566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 1 name: Other" TxId: 109 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2025-12-23T18:47:49.186855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot/ServerlessDB, subject: , status: StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. 
ColumnFamily id: 1 name: Other, operation: ALTER TABLE, path: /MyRoot/ServerlessDB/Table TestModificationResult got TxId: 109, wait until txId: 109 |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TMiniKQLEngineFlatHostTest::ShardId [GOOD] >> TMiniKQLEngineFlatHostTest::Basic >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey >> TMiniKQLEngineFlatTest::TestEmptyProgram >> TMiniKQLEngineFlatTest::TestPureProgram >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists >> TMiniKQLEngineFlatHostTest::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestAbort [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow >> TMiniKQLEngineFlatTest::TestPureProgram [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-false >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportIntegralYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNullKey >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive >> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLEngineFlatTest::TestAcquireLocks >> 
TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange >> TMiniKQLEngineFlatTest::TestEraseRowNoShards >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidYdb >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange >> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo |95.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda >> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidYdb >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown >> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleYdb >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown >> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown >> 
TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable >> TMiniKQLProtoTestYdb::TestExportStructYdb >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantYdb >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-ordinaryuser [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-dbadmin >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-true >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [FAIL] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD] >> 
TMiniKQLProtoTestYdb::TestExportBoolYdb >> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail >> KqpSinkTx::OlapDeferredEffects >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-ordinaryuser >> TExtSubDomainTest::GenericCases |95.9%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-false [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-dbadmin >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> KqpSinkTx::TIsolationSettingTest-IsOlap-UsePragma >> KqpSinkMvcc::LostUpdate+IsOlap |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpTx::TooManyTx >> KqpSinkLocks::TInvalidate >> KqpSinkTx::SnapshotRO >> KqpTx::RollbackManyTx >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-false [GOOD] Test command err: 2025-12-23T18:47:51.044909Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127035201692689:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:51.044972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003919/r3tmp/tmpcy5GAB/pdisk_1.dat 2025-12-23T18:47:51.310316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:47:51.319812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:51.319961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:51.323124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:51.436239Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:51.605835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:26709 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:47:51.652487Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587127035201692925:2105] Handle TEvNavigate describe path dc-1 2025-12-23T18:47:51.652542Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587127035201693229:2267] HANDLE EvNavigateScheme dc-1 2025-12-23T18:47:51.652657Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587127035201692932:2108], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:51.652764Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587127035201693092:2196][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587127035201692932:2108], cookie# 1 2025-12-23T18:47:51.654345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127035201693124:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127035201693121:2196], cookie# 1 2025-12-23T18:47:51.654372Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127035201693125:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127035201693122:2196], cookie# 1 2025-12-23T18:47:51.654383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127035201693126:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127035201693123:2196], cookie# 1 2025-12-23T18:47:51.654426Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127035201692630:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127035201693124:2196], cookie# 1 2025-12-23T18:47:51.654446Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127035201692633:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127035201693125:2196], cookie# 1 2025-12-23T18:47:51.654474Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127035201692636:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127035201693126:2196], cookie# 1 2025-12-23T18:47:51.654517Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127035201693124:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587127035201692630:2049], cookie# 1 2025-12-23T18:47:51.654535Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127035201693125:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587127035201692633:2052], cookie# 1 2025-12-23T18:47:51.654551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127035201693126:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587127035201692636:2055], cookie# 1 2025-12-23T18:47:51.654590Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127035201693092:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587127035201693121:2196], cookie# 1 2025-12-23T18:47:51.654608Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587127035201693092:2196][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:47:51.654626Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127035201693092:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587127035201693122:2196], cookie# 1 2025-12-23T18:47:51.654642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587127035201693092:2196][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:47:51.654682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127035201693092:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7587127035201693123:2196], cookie# 1 2025-12-23T18:47:51.654694Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587127035201693092:2196][/dc-1] Sync cookie mismatch: sender# [1:7587127035201693123:2196], cookie# 1, current cookie# 0 2025-12-23T18:47:51.654732Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587127035201692932:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:47:51.665976Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587127035201692932:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587127035201693092:2196] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:47:51.666125Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587127035201692932:2108], cacheItem# { Subscriber: { Subscriber: [1:7587127035201693092:2196] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:47:51.669597Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127035201693230:2268], recipient# [1:7587127035201693229:2267], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:47:51.669679Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587127035201693229:2267] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:47:51.714210Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587127035201693229:2267] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:47:51.717500Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587127035201693229:2267] Handle TEvDescribeSchemeResult Forward to# [1:7587127035201693228:2266] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 C ... 
IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:47:51.980100Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127035201693284:2308], recipient# [1:7587127035201693276:2306], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:47:51.980144Z node 1 :TX_PROXY INFO: describe.cpp:354: Actor# [1:7587127035201693276:2306] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515671970 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766515672012 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 ... 
(TRUNCATED) 2025-12-23T18:47:51.983489Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587127035201692925:2105] Handle TEvNavigate describe path /dc-1 2025-12-23T18:47:51.983527Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587127035201693286:2310] HANDLE EvNavigateScheme /dc-1 2025-12-23T18:47:51.983594Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587127035201692932:2108], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:51.983716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587127035201693092:2196][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587127035201692932:2108], cookie# 4 2025-12-23T18:47:51.983766Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127035201693124:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127035201693121:2196], cookie# 4 2025-12-23T18:47:51.983797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127035201693125:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127035201693122:2196], cookie# 4 2025-12-23T18:47:51.983812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127035201693126:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127035201693123:2196], cookie# 4 2025-12-23T18:47:51.983833Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127035201692630:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127035201693124:2196], cookie# 4 2025-12-23T18:47:51.983864Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127035201692633:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127035201693125:2196], cookie# 4 2025-12-23T18:47:51.983958Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127035201692636:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127035201693126:2196], cookie# 4 2025-12-23T18:47:51.984004Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127035201693124:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127035201692630:2049], cookie# 4 2025-12-23T18:47:51.984040Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127035201693125:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127035201692633:2052], cookie# 4 2025-12-23T18:47:51.984058Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127035201693126:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127035201692636:2055], cookie# 4 2025-12-23T18:47:51.984087Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127035201693092:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127035201693121:2196], cookie# 4 2025-12-23T18:47:51.984106Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587127035201693092:2196][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:47:51.984144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127035201693092:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127035201693122:2196], cookie# 4 2025-12-23T18:47:51.984164Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587127035201693092:2196][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:47:51.984187Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127035201693092:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127035201693123:2196], cookie# 4 2025-12-23T18:47:51.984202Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587127035201693092:2196][/dc-1] Sync cookie mismatch: sender# [1:7587127035201693123:2196], cookie# 4, current cookie# 0 2025-12-23T18:47:51.984265Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587127035201692932:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:47:51.984331Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587127035201692932:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587127035201693092:2196] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515671970 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:47:51.984386Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587127035201692932:2108], cacheItem# { Subscriber: { Subscriber: [1:7587127035201693092:2196] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515671970 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-12-23T18:47:51.984516Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127035201693287:2311], recipient# [1:7587127035201693286:2310], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty 
maybe) Users: [] Groups: [] } }] } 2025-12-23T18:47:51.984542Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587127035201693286:2310] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:47:51.984604Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587127035201693286:2310] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:47:51.985306Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587127035201693286:2310] Handle TEvDescribeSchemeResult Forward to# [1:7587127035201693285:2309] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515671970 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> KqpSinkMvcc::WriteSkewInsert+IsOlap >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-anonymous >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [FAIL] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-true [GOOD] Test command err: 2025-12-23T18:47:52.019169Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127039728907694:2146];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:52.022416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for 
PDisk # /home/runner/.ya/build/build_root/8ptz/003914/r3tmp/tmpePXBBH/pdisk_1.dat 2025-12-23T18:47:52.387691Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:47:52.409034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:52.409200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:52.453229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:52.508116Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:52.530904Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127035433940278:2081] 1766515672005489 != 1766515672005492 2025-12-23T18:47:52.606791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:1178 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:47:52.725063Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587127039728907835:2105] Handle TEvNavigate describe path dc-1 2025-12-23T18:47:52.725127Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587127039728908337:2464] HANDLE EvNavigateScheme dc-1 2025-12-23T18:47:52.725219Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587127039728907858:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:52.725296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587127039728908023:2206][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587127039728907858:2118], cookie# 1 2025-12-23T18:47:52.745246Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127039728908057:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127039728908054:2206], cookie# 1 2025-12-23T18:47:52.745328Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127039728908058:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127039728908055:2206], cookie# 1 2025-12-23T18:47:52.745345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127039728908059:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127039728908056:2206], cookie# 1 2025-12-23T18:47:52.745373Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127035433940249:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127039728908058:2206], cookie# 1 2025-12-23T18:47:52.745378Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127035433940246:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: 
/dc-1 }: sender# [1:7587127039728908057:2206], cookie# 1 2025-12-23T18:47:52.745431Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127035433940252:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127039728908059:2206], cookie# 1 2025-12-23T18:47:52.745496Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127039728908057:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127035433940246:2049], cookie# 1 2025-12-23T18:47:52.745520Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127039728908058:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127035433940249:2052], cookie# 1 2025-12-23T18:47:52.745558Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127039728908059:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127035433940252:2055], cookie# 1 2025-12-23T18:47:52.745602Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127039728908023:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127039728908054:2206], cookie# 1 2025-12-23T18:47:52.745625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587127039728908023:2206][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:47:52.745642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127039728908023:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127039728908055:2206], cookie# 1 2025-12-23T18:47:52.745674Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587127039728908023:2206][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:47:52.745721Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127039728908023:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127039728908056:2206], cookie# 1 2025-12-23T18:47:52.745745Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587127039728908023:2206][/dc-1] Sync cookie mismatch: sender# [1:7587127039728908056:2206], cookie# 1, current cookie# 0 2025-12-23T18:47:52.745803Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587127039728907858:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:47:52.745897Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587127039728907858:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587127039728908023:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:47:52.746005Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: 
self# [1:7587127039728907858:2118], cacheItem# { Subscriber: { Subscriber: [1:7587127039728908023:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:47:52.748632Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127039728908338:2465], recipient# [1:7587127039728908337:2464], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:47:52.748696Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587127039728908337:2464] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:47:52.805683Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587127039728908337:2464] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:47:52.809037Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587127039728908337:2464] Handle TEvDescribeSchemeResult Forward to# [1:7587127039728908336:2463] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 
ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ... rtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:47:53.196669Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127044023875692:2504], recipient# [1:7587127044023875684:2502], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:38:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 38] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 38] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:47:53.196708Z node 1 :TX_PROXY INFO: describe.cpp:354: Actor# [1:7587127044023875684:2502] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-12-23T18:47:53.201365Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587127039728907835:2105] Handle TEvNavigate describe path /dc-1 2025-12-23T18:47:53.201410Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587127044023875694:2506] HANDLE EvNavigateScheme /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515673202 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515672523 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "USER_0" PathId: ... 
(TRUNCATED) 2025-12-23T18:47:53.201512Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587127039728907858:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:53.201654Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587127039728908023:2206][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587127039728907858:2118], cookie# 4 2025-12-23T18:47:53.201714Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127039728908057:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127039728908054:2206], cookie# 4 2025-12-23T18:47:53.201737Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127039728908058:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127039728908055:2206], cookie# 4 2025-12-23T18:47:53.201750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127039728908059:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127039728908056:2206], cookie# 4 2025-12-23T18:47:53.201764Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127035433940246:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127039728908057:2206], cookie# 4 2025-12-23T18:47:53.201792Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127035433940249:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127039728908058:2206], cookie# 4 2025-12-23T18:47:53.201814Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127035433940252:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127039728908059:2206], cookie# 4 2025-12-23T18:47:53.201825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127039728908057:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7587127035433940246:2049], cookie# 4 2025-12-23T18:47:53.201843Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127039728908058:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7587127035433940249:2052], cookie# 4 2025-12-23T18:47:53.201861Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127039728908059:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7587127035433940252:2055], cookie# 4 2025-12-23T18:47:53.201909Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127039728908023:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7587127039728908054:2206], cookie# 4 2025-12-23T18:47:53.201937Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587127039728908023:2206][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:47:53.201965Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: 
[main][1:7587127039728908023:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7587127039728908055:2206], cookie# 4 2025-12-23T18:47:53.201988Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587127039728908023:2206][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:47:53.202025Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127039728908023:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7587127039728908056:2206], cookie# 4 2025-12-23T18:47:53.202038Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587127039728908023:2206][/dc-1] Sync cookie mismatch: sender# [1:7587127039728908056:2206], cookie# 4, current cookie# 0 2025-12-23T18:47:53.202135Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587127039728907858:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:47:53.202204Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587127039728907858:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587127039728908023:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515673202 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:47:53.202304Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587127039728907858:2118], cacheItem# { Subscriber: { Subscriber: [1:7587127039728908023:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515673202 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-12-23T18:47:53.202467Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127044023875695:2507], recipient# [1:7587127044023875694:2506], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:47:53.202500Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587127044023875694:2506] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:47:53.202561Z node 1 :TX_PROXY DEBUG: 
describe.cpp:433: Actor# [1:7587127044023875694:2506] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:47:53.203314Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587127044023875694:2506] Handle TEvDescribeSchemeResult Forward to# [1:7587127044023875693:2505] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 128 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515673202 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-ordinaryuser >> KqpSinkTx::OlapExplicitTcl >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false [FAIL] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-clusteradmin >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [FAIL] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink >> test_restarts.py::test_basic [FAIL] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [FAIL] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [FAIL] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true >> KqpSnapshotRead::TestReadOnly+withSink >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-ordinaryuser [GOOD] >> 
TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-anonymous >> TExtSubDomainTest::GenericCases [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::GenericCases [GOOD] Test command err: 2025-12-23T18:47:54.619224Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127050153797376:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:54.619272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:47:54.698484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00390b/r3tmp/tmpwejUlX/pdisk_1.dat 2025-12-23T18:47:54.964224Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:47:55.001392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:55.001471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:55.050354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:55.103561Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:55.239746Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:12045 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:47:55.306399Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587127050153797613:2120] Handle TEvNavigate describe path dc-1 2025-12-23T18:47:55.306508Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587127054448765688:2729] HANDLE EvNavigateScheme dc-1 2025-12-23T18:47:55.306632Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587127050153797631:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:55.306767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587127050153797877:2290][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587127050153797631:2129], cookie# 1 2025-12-23T18:47:55.308746Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127050153797912:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127050153797909:2290], cookie# 1 2025-12-23T18:47:55.308801Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127050153797913:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127050153797910:2290], cookie# 1 2025-12-23T18:47:55.308819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127050153797914:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127050153797911:2290], cookie# 1 2025-12-23T18:47:55.308865Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127050153797295:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127050153797912:2290], cookie# 1 2025-12-23T18:47:55.308890Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127050153797298:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127050153797913:2290], cookie# 1 2025-12-23T18:47:55.308906Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127050153797301:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127050153797914:2290], cookie# 1 2025-12-23T18:47:55.308985Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127050153797912:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127050153797295:2050], cookie# 1 2025-12-23T18:47:55.309012Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127050153797913:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127050153797298:2053], cookie# 1 2025-12-23T18:47:55.309032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127050153797914:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127050153797301:2056], cookie# 1 2025-12-23T18:47:55.309071Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127050153797877:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127050153797909:2290], cookie# 1 2025-12-23T18:47:55.309112Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587127050153797877:2290][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:47:55.309162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127050153797877:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127050153797910:2290], cookie# 1 2025-12-23T18:47:55.309189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587127050153797877:2290][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:47:55.309231Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127050153797877:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127050153797911:2290], cookie# 1 2025-12-23T18:47:55.309246Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587127050153797877:2290][/dc-1] Sync cookie mismatch: sender# [1:7587127050153797911:2290], cookie# 1, current cookie# 0 2025-12-23T18:47:55.309366Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587127050153797631:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:47:55.309513Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587127050153797631:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587127050153797877:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:47:55.309668Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587127050153797631:2129], cacheItem# { Subscriber: { Subscriber: [1:7587127050153797877:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:47:55.312535Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127054448765689:2730], recipient# [1:7587127054448765688:2729], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:47:55.312639Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587127054448765688:2729] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:47:55.354358Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587127054448765688:2729] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:47:55.358002Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587127054448765688:2729] Handle TEvDescribeSchemeResult Forward to# [1:7587127054448765687:2728] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 ... 
able RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:58.529172Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587127050153797295:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587127067333668467:3396] 2025-12-23T18:47:58.529187Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587127050153797295:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587127067333668468:3395] 2025-12-23T18:47:58.529203Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587127050153797295:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7587127067333668477:3397] 2025-12-23T18:47:58.529210Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587127050153797295:2050] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-12-23T18:47:58.529232Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587127050153797295:2050] Subscribe: subscriber# [1:7587127067333668477:3397], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:47:58.530970Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7587127067333668478:3397][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7587127050153797298:2053] 2025-12-23T18:47:58.530990Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587127050153797301:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7587127067333668479:3397] 2025-12-23T18:47:58.531003Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587127050153797301:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-12-23T18:47:58.531012Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7587127067333668477:3397][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7587127050153797295:2050] 2025-12-23T18:47:58.531062Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7587127067333668473:3397][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7587127067333668475:3397] 2025-12-23T18:47:58.531066Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587127050153797301:2056] Subscribe: subscriber# [1:7587127067333668479:3397], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 
2025-12-23T18:47:58.531105Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587127050153797298:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587127067333668478:3397] 2025-12-23T18:47:58.531129Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587127050153797295:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587127067333668477:3397] 2025-12-23T18:47:58.531136Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7587127067333668473:3397][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7587127067333668474:3397] 2025-12-23T18:47:58.531180Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587127067333668473:3397][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7587127050153797631:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:47:58.531236Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7587127067333668479:3397][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7587127050153797301:2056] 2025-12-23T18:47:58.531256Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587127050153797631:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-12-23T18:47:58.531270Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7587127067333668473:3397][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7587127067333668476:3397] 2025-12-23T18:47:58.531302Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7587127067333668473:3397][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [1:7587127050153797631:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:47:58.531336Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587127050153797631:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7587127067333668473:3397] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:47:58.531338Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587127050153797301:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587127067333668479:3397] 2025-12-23T18:47:58.531429Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587127050153797631:2129], cacheItem# { Subscriber: { Subscriber: [1:7587127067333668473:3397] 
DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:47:58.531533Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127067333668481:3399], recipient# [1:7587127067333668460:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:58.636895Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587127050153797631:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:58.637027Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587127050153797631:2129], cacheItem# { Subscriber: { Subscriber: [1:7587127054448765816:2834] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:47:58.637116Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127067333668489:3400], recipient# [1:7587127067333668488:2331], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:59.534533Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587127050153797631:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:59.534667Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587127050153797631:2129], cacheItem# { 
Subscriber: { Subscriber: [1:7587127067333668473:3397] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:47:59.534785Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127071628635799:3404], recipient# [1:7587127071628635798:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> KqpTx::TooManyTx [GOOD] >> KqpTx::SnapshotROInteractive1 >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [FAIL] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [FAIL] >> KqpSinkTx::SnapshotRO [GOOD] >> KqpSinkTx::SnapshotROInteractive1 >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-dbadmin >> KqpSinkLocks::TInvalidate [GOOD] >> KqpSinkLocks::TInvalidateOlap >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-ordinaryuser |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] >> KqpBatchDelete::Large_1 [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [FAIL] |95.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpSinkMvcc::TxReadsCommitted+IsOlap >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_1 [GOOD] Test command err: Trying to start YDB, gRPC: 9688, MsgBus: 27008 2025-12-23T18:47:37.854182Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126977930122139:2258];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:37.854264Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bc9/r3tmp/tmpNsYRcc/pdisk_1.dat 2025-12-23T18:47:38.206156Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:38.281787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:38.282077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:38.283423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:38.339312Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:38.341700Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126977930121909:2081] 1766515657828902 != 1766515657828905 TServer::EnableGrpc on GrpcPort 9688, node 1 2025-12-23T18:47:38.419579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:38.419606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:38.419618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:38.419731Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:38.494782Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27008 TClient is connected to server localhost:27008 2025-12-23T18:47:38.862901Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515658355 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:39.014346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:47:39.023634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:39.049711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:39.229686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:39.423495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:39.519653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:41.401874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126995109992967:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.401971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.402396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126995109992976:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.402483Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:41.697368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.733160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.771417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.811326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.847482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.902528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:41.958890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:42.006406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:42.076856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126999404961147:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.076948Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.077322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126999404961152:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.077378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126999404961153:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.077510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:42.081470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sche ... GrpcPort 21351, node 3 2025-12-23T18:47:56.351304Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:56.351326Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:56.351333Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:56.351469Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:56.390852Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17374 TClient is connected to server localhost:17374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515676282 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:47:56.819605Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:56.826789Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:56.838052Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:56.913057Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:57.064032Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:47:57.115520Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:57.172358Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:59.674209Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127071026189446:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.674285Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.674798Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127071026189456:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.674874Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.739951Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.779321Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.809169Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.837501Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.871173Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.912492Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.964876Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:00.063269Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:00.166231Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127075321157626:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.166344Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.166928Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127075321157632:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.167199Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127075321157631:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.167237Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.171137Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:00.184896Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587127075321157635:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:48:00.248702Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587127075321157686:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:01.058878Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587127058141285897:2246];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:01.059653Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:02.344569Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-std] [GOOD] Test command err: run test with cloud_id=CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc folder_id=folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc iam_token=usr_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc cloud_account=acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc 2025-12-23T18:47:11.232621Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc]","tx_id":"281474976720692","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:11.333925Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060]","tx_id":"281474976720698","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:11.361476Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2]","tx_id":"281474976720699","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:11.397455Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/Groups]","tx_id":"281474976720703","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} 
======================================== 2025-12-23T18:47:11.397663Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/Deduplication]","tx_id":"281474976720702","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:11.398205Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/SentTimestampIdx]","tx_id":"281474976720707","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:11.398376Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/Messages]","tx_id":"281474976720705","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:11.398493Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/Data]","tx_id":"281474976720700","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:11.398602Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/Attributes]","tx_id":"281474976720701","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:11.399037Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/State]","tx_id":"281474976720706","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:11.400236Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/Reads]","tx_id":"281474976720704","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:11.763468Z: {"request_id":"bc67d703-8298f453-77868e7f-d90d139f","cloud_id":"CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc"} ======================================== 2025-12-23T18:47:13.638431Z: 
{"request_id":"bc67d703-8298f453-77868e7f-d90d139f","permission":"ymq.queues.create","id":"8438842895600781646$CreateMessageQueue$2025-12-23T18:47:13.638193Z","idempotency_id":"8438842895600781646$CreateMessageQueue$2025-12-23T18:47:11.283000Z","cloud_id":"CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:47:11.283000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ca355846-e02f-11f0-b44b-d00d1a8d6ccc.fifo","resource_id":"00000000000000010060","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:47:13.641450Z: {"request_id":"bd13efdf-9c154fac-b380f1eb-4a1b4a00","permission":"ymq.queues.setAttributes","id":"5625545811864658259$UpdateMessageQueue$2025-12-23T18:47:13.638262Z","idempotency_id":"5625545811864658259$UpdateMessageQueue$2025-12-23T18:47:12.835000Z","cloud_id":"CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:47:12.835000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ca355846-e02f-11f0-b44b-d00d1a8d6ccc.fifo","resource_id":"00000000000000010060","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:47:13.674543Z: {"request_id":"bc67d703-8298f453-77868e7f-d90d139f","permission":"ymq.queues.create","id":"8438842895600781646$CreateMessageQueue$2025-12-23T18:47:13.674300Z","idempotency_id":"8438842895600781646$CreateMessageQueue$2025-12-23T18:47:11.283000Z","cloud_id":"CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:47:11.283000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ca355846-e02f-11f0-b44b-d00d1a8d6ccc.fifo","resource_id":"00000000000000010060","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:47:13.677536Z: {"request_id":"bd13efdf-9c154fac-b380f1eb-4a1b4a00","permission":"ymq.queues.setAttributes","id":"5625545811864658259$UpdateMessageQueue$2025-12-23T18:47:13.674365Z","idempotency_id":"5625545811864658259$UpdateMessageQueue$2025-12-23T18:47:12.835000Z","cloud_id":"CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:47:12.835000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ca355846-e02f-11f0-b44b-d00d1a8d6ccc.fifo","resource_id":"00000000000000010060","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:47:15.030072Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/SentTimestampIdx]","tx_id":"281474976720723","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:15.090841Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/State]","tx_id":"281474976720724","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:15.138350Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/Reads]","tx_id":"281474976720725","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:15.175659Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/Messages]","tx_id":"281474976720726","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:15.227456Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/Groups]","tx_id":"281474976720727","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:15.279550Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/Deduplication]","tx_id":"281474976720728","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:15.319991Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/Data]","tx_id":"281474976720729","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:15.399313Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ca54e5c9-e02f-11f0-a223-d00d1a8d6ccc/00000000000000010060/v2/Attributes]","tx_id":"281474976720730","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ====== ... 
:"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:31.920881Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/3/Infly]","tx_id":"281474976720798","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:31.962124Z: {"request_id":"81e79fb9-ab69e3d9-dfc62d7d-2988370e","permission":"ymq.queues.setAttributes","id":"11053968771417430062$UpdateMessageQueue$2025-12-23T18:47:31.961992Z","idempotency_id":"11053968771417430062$UpdateMessageQueue$2025-12-23T18:47:30.641000Z","cloud_id":"CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:47:30.641000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_d3ffc20a-e02f-11f0-9e97-d00d1a8d6ccc","resource_id":"000000000000000307nu","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:47:31.962399Z: {"request_id":"12570f5b-5edff9ef-b0b5e0eb-938bb1eb","permission":"ymq.queues.delete","id":"728492704314792282$DeleteMessageQueue$2025-12-23T18:47:31.962029Z","idempotency_id":"728492704314792282$DeleteMessageQueue$2025-12-23T18:47:31.716000Z","cloud_id":"CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2025-12-23T18:47:31.716000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_d3ffc20a-e02f-11f0-9e97-d00d1a8d6ccc","resource_id":"000000000000000307nu","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc","component":"ymq"} ======================================== 2025-12-23T18:47:32.003334Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/3/Messages]","tx_id":"281474976720801","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.058869Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/3/MessageData]","tx_id":"281474976720802","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.162441Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/2/SentTimestampIdx]","tx_id":"281474976720803","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.260783Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/2/Infly]","tx_id":"281474976720804","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.312197Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/2/Messages]","tx_id":"281474976720805","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.371473Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/2/MessageData]","tx_id":"281474976720806","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.453167Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/1/SentTimestampIdx]","tx_id":"281474976720807","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.538482Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/1/Infly]","tx_id":"281474976720808","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.592219Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/1/Messages]","tx_id":"281474976720809","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.638722Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/1/MessageData]","tx_id":"281474976720810","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.756434Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/0/SentTimestampIdx]","tx_id":"281474976720811","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.824379Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/0/Infly]","tx_id":"281474976720812","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.886454Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/0/Messages]","tx_id":"281474976720813","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:32.938571Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/0/MessageData]","tx_id":"281474976720814","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:33.012673Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/State]","tx_id":"281474976720815","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:33.121786Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/Attributes]","tx_id":"281474976720816","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2025-12-23T18:47:33.185529Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/3]","tx_id":"281474976720817","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:33.227834Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/2]","tx_id":"281474976720820","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:33.247350Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/1]","tx_id":"281474976720821","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:33.268897Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4/0]","tx_id":"281474976720822","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:33.292072Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu/v4]","tx_id":"281474976720823","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:33.361781Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc/000000000000000307nu]","tx_id":"281474976720824","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2025-12-23T18:47:33.393565Z: {"request_id":"12570f5b-5edff9ef-b0b5e0eb-938bb1eb","cloud_id":"CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc","subject":"fake_user_sid@as","queue":"000000000000000307nu","resource_id":"000000000000000307nu","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_d40f8867-e02f-11f0-8495-d00d1a8d6ccc"} ======================================== ======================================== >> KqpSinkMvcc::WriteSkewUpsert+IsOlap |95.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpSnapshotRead::TestReadOnly+withSink [GOOD] >> KqpSnapshotRead::TestReadOnly-withSink >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpTx::RollbackManyTx [GOOD] >> KqpTx::RollbackRoTx >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-clusteradmin >> KqpSinkTx::OlapDeferredEffects [GOOD] >> KqpSinkMvcc::InsertConflictingKey-IsOlap-CommitOnInsert >> KqpSinkTx::LocksAbortOnCommit >> KqpTx::SnapshotROInteractive1 [GOOD] >> KqpTx::SnapshotROInteractive2 >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-system >> KqpLocks::InvalidateOnCommit |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> KqpSinkTx::SnapshotROInteractive1 [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-clusteradmin [GOOD] >> 
TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [FAIL] Test command err: 2025-12-23T18:47:52.486373Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127040723629660:2261];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:52.486437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003912/r3tmp/tmpKRYKJw/pdisk_1.dat 2025-12-23T18:47:52.858843Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:47:52.915175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:52.915316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:52.945970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:52.983236Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:53.138821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:26362 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:47:53.289265Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587127040723629692:2117] Handle TEvNavigate describe path dc-1 2025-12-23T18:47:53.289337Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587127045018597782:2733] HANDLE EvNavigateScheme dc-1 2025-12-23T18:47:53.289478Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587127040723629705:2120], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:53.289579Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587127040723629963:2288][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587127040723629705:2120], cookie# 1 2025-12-23T18:47:53.291529Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127040723629991:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127040723629988:2288], cookie# 1 2025-12-23T18:47:53.291590Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127040723629992:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127040723629989:2288], cookie# 1 2025-12-23T18:47:53.291606Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127040723629993:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127040723629990:2288], cookie# 1 2025-12-23T18:47:53.291659Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127040723629390:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127040723629991:2288], cookie# 1 2025-12-23T18:47:53.291693Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127040723629393:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127040723629992:2288], cookie# 1 2025-12-23T18:47:53.291713Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127040723629396:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127040723629993:2288], cookie# 1 2025-12-23T18:47:53.291797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127040723629991:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127040723629390:2049], cookie# 1 2025-12-23T18:47:53.291844Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127040723629992:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127040723629393:2052], cookie# 1 2025-12-23T18:47:53.291863Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127040723629993:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127040723629396:2055], cookie# 1 2025-12-23T18:47:53.291932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127040723629963:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127040723629988:2288], cookie# 1 2025-12-23T18:47:53.291960Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587127040723629963:2288][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:47:53.291999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127040723629963:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127040723629989:2288], cookie# 1 2025-12-23T18:47:53.292053Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587127040723629963:2288][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:47:53.292089Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127040723629963:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127040723629990:2288], cookie# 1 2025-12-23T18:47:53.292114Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587127040723629963:2288][/dc-1] Sync cookie mismatch: sender# [1:7587127040723629990:2288], cookie# 1, current cookie# 0 2025-12-23T18:47:53.292175Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587127040723629705:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:47:53.292625Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587127040723629705:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587127040723629963:2288] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:47:53.292770Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587127040723629705:2120], cacheItem# { Subscriber: { Subscriber: [1:7587127040723629963:2288] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:47:53.299865Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127045018597783:2734], recipient# [1:7587127045018597782:2733], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:47:53.299972Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587127045018597782:2733] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:47:53.343282Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587127045018597782:2733] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:47:53.346893Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587127045018597782:2733] Handle TEvDescribeSchemeResult Forward to# [1:7587127045018597781:2732] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 Create ... 
BUG: proxy_impl.cpp:402: actor# [2:7587127061313456472:2107] Handle TEvNavigate describe path /dc-1 2025-12-23T18:47:58.342349Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7587127065608424906:2966] HANDLE EvNavigateScheme /dc-1 2025-12-23T18:47:58.342439Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7587127061313456506:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:58.342516Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7587127061313456619:2181][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7587127061313456506:2127], cookie# 4 2025-12-23T18:47:58.342598Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7587127061313456623:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587127061313456620:2181], cookie# 4 2025-12-23T18:47:58.342625Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7587127061313456624:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587127061313456621:2181], cookie# 4 2025-12-23T18:47:58.342635Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7587127061313456188:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587127061313456623:2181], cookie# 4 2025-12-23T18:47:58.342645Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7587127061313456625:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587127061313456622:2181], cookie# 4 2025-12-23T18:47:58.342662Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7587127061313456191:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587127061313456624:2181], cookie# 4 2025-12-23T18:47:58.342676Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7587127061313456623:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [2:7587127061313456188:2049], cookie# 4 2025-12-23T18:47:58.342688Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7587127061313456194:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7587127061313456625:2181], cookie# 4 2025-12-23T18:47:58.342693Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7587127061313456624:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [2:7587127061313456191:2052], cookie# 4 2025-12-23T18:47:58.342713Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7587127061313456625:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [2:7587127061313456194:2055], cookie# 4 2025-12-23T18:47:58.342777Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7587127061313456619:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [2:7587127061313456620:2181], cookie# 4 2025-12-23T18:47:58.342795Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: 
[main][2:7587127061313456619:2181][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:47:58.342817Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7587127061313456619:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [2:7587127061313456621:2181], cookie# 4 2025-12-23T18:47:58.342835Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7587127061313456619:2181][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:47:58.342887Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7587127061313456619:2181][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [2:7587127061313456622:2181], cookie# 4 2025-12-23T18:47:58.342909Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7587127061313456506:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:47:58.342917Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7587127061313456619:2181][/dc-1] Sync cookie mismatch: sender# [2:7587127061313456622:2181], cookie# 4, current cookie# 0 2025-12-23T18:47:58.342979Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7587127061313456506:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7587127061313456619:2181] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515677766 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:47:58.343056Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7587127061313456506:2127], cacheItem# { Subscriber: { Subscriber: [2:7587127061313456619:2181] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1766515677766 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-12-23T18:47:58.343172Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7587127065608424907:2967], recipient# [2:7587127065608424906:2966], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 
2025-12-23T18:47:58.343213Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7587127065608424906:2966] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:47:58.343279Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7587127065608424906:2966] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:47:58.343952Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7587127065608424906:2966] Handle TEvDescribeSchemeResult Forward to# [2:7587127065608424905:2965] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 128 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515677766 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515677766 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515677514 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "USER_0" PathId: ... 
(TRUNCATED) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:71, void DeclareAndDefineWithoutNodes(TTestEnvWithPoolsSupport &): (ver.Version == 5) failed: (8 != 5) TBackTrace::Capture()+28 (0x1B0C63EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B5B77AC) DeclareAndDefineWithoutNodes(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&)+14700 (0x1AC23B2C) void NTestSuiteTExtSubDomainTest::TestDeclareAndDefineWithoutNodes(NUnitTest::TTestContext&)+209 (0x1ACDD811) std::__y1::__function::__func, void ()>::operator()()+280 (0x1ACC6308) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B5F043A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B5BE488) NTestSuiteTExtSubDomainTest::TCurrentTest::Execute()+1229 (0x1ACC540D) NUnitTest::TTestFactory::Execute()+2176 (0x1B5BFC40) NUnitTest::RunMain(int, char**)+5805 (0x1B5EA29D) ??+0 (0x7FB873702D90) __libc_start_main+128 (0x7FB873702E40) _start+41 (0x18622029) |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [FAIL] Test command err: 2025-12-23T18:47:52.293280Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127043104635320:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:52.297535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003913/r3tmp/tmpucSAnv/pdisk_1.dat 2025-12-23T18:47:52.600968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:47:52.636293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:52.636423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:52.645722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:52.728236Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:52.822792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:19675 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:47:53.006115Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587127043104635539:2117] Handle TEvNavigate describe path dc-1 2025-12-23T18:47:53.006185Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587127047399603632:2743] HANDLE EvNavigateScheme dc-1 2025-12-23T18:47:53.006328Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587127043104635545:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:53.006415Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587127043104635810:2294][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587127043104635545:2119], cookie# 1 2025-12-23T18:47:53.008130Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127043104635869:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127043104635866:2294], cookie# 1 2025-12-23T18:47:53.008164Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127043104635870:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127043104635867:2294], cookie# 1 2025-12-23T18:47:53.008212Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127043104635871:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127043104635868:2294], cookie# 1 2025-12-23T18:47:53.008250Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127043104635228:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127043104635869:2294], cookie# 1 2025-12-23T18:47:53.008304Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127043104635231:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127043104635870:2294], cookie# 1 2025-12-23T18:47:53.008327Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127043104635234:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127043104635871:2294], cookie# 1 2025-12-23T18:47:53.008393Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127043104635869:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127043104635228:2050], cookie# 1 2025-12-23T18:47:53.008418Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127043104635870:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127043104635231:2053], cookie# 1 2025-12-23T18:47:53.008432Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127043104635871:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127043104635234:2056], cookie# 1 2025-12-23T18:47:53.008488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127043104635810:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127043104635866:2294], cookie# 1 2025-12-23T18:47:53.008524Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587127043104635810:2294][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:47:53.008553Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127043104635810:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127043104635867:2294], cookie# 1 2025-12-23T18:47:53.008575Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587127043104635810:2294][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:47:53.008613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127043104635810:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127043104635868:2294], cookie# 1 2025-12-23T18:47:53.008628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587127043104635810:2294][/dc-1] Sync cookie mismatch: sender# [1:7587127043104635868:2294], cookie# 1, current cookie# 0 2025-12-23T18:47:53.008680Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587127043104635545:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:47:53.008759Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587127043104635545:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587127043104635810:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:47:53.008855Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587127043104635545:2119], cacheItem# { Subscriber: { Subscriber: [1:7587127043104635810:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:47:53.012252Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127047399603633:2744], recipient# [1:7587127047399603632:2743], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:47:53.012344Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587127047399603632:2743] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:47:53.066896Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587127047399603632:2743] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:47:53.070099Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587127047399603632:2743] Handle TEvDescribeSchemeResult Forward to# [1:7587127047399603631:2742] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 Create ... 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:01.570587Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7587127066643938541:2053] Subscribe: subscriber# [3:7587127079528842091:3156], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:48:01.570599Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7587127066643938857:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-12-23T18:48:01.570616Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7587127066643938541:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7587127079528842097:3157] 2025-12-23T18:48:01.570626Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7587127066643938541:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-12-23T18:48:01.570646Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7587127066643938857:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7587127079528842085:3157] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:48:01.570648Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7587127066643938541:2053] Subscribe: subscriber# [3:7587127079528842097:3157], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:48:01.570679Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7587127079528842091:3156][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7587127066643938541:2053] 2025-12-23T18:48:01.570699Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587127066643938857:2121], cacheItem# { Subscriber: { Subscriber: [3:7587127079528842085:3157] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:01.570706Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587127079528842084:3156][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7587127079528842088:3156] 2025-12-23T18:48:01.570786Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: 
cache.cpp:268: Send result: self# [3:7587127079528842086:3158], recipient# [3:7587127079528842066:2303], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:48:01.570830Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7587127079528842097:3157][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587127066643938541:2053] 2025-12-23T18:48:01.570874Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587127079528842085:3157][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587127079528842094:3157] 2025-12-23T18:48:01.570908Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7587127079528842085:3157][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7587127066643938857:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:48:01.570932Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127066643938541:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127079528842091:3156] 2025-12-23T18:48:01.570951Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127066643938541:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127079528842097:3157] 2025-12-23T18:48:01.571012Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587127079528842099:3159], recipient# [3:7587127079528842075:2311], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:48:01.572255Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7587127079528842084:3156][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7587127066643938857:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:48:02.403496Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587127066643938857:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath 
Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:48:02.403646Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587127066643938857:2121], cacheItem# { Subscriber: { Subscriber: [3:7587127070938907412:3140] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:02.403747Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587127083823809412:3163], recipient# [3:7587127083823809411:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:48:02.572439Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587127066643938857:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:48:02.572600Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587127066643938857:2121], cacheItem# { Subscriber: { Subscriber: [3:7587127079528842073:3155] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:02.572760Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587127083823809417:3164], recipient# [3:7587127083823809416:2315], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:96, void DeclareAndDefineWithNodes(TTestEnvWithPoolsSupport &): (ver.Version == (env.GetSettings().FeatureFlags.GetEnableAlterDatabaseCreateHiveFirst() ? 
5 : 4)) failed: (8 != 5) TBackTrace::Capture()+28 (0x1B0C63EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B5B77AC) DeclareAndDefineWithNodes(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&)+15042 (0x1AC29422) void NTestSuiteTExtSubDomainTest::TestDeclareAndDefineWithNodes(NUnitTest::TTestContext&)+212 (0x1ACDDE34) std::__y1::__function::__func, void ()>::operator()()+280 (0x1ACC6308) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B5F043A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B5BE488) NTestSuiteTExtSubDomainTest::TCurrentTest::Execute()+1229 (0x1ACC540D) NUnitTest::TTestFactory::Execute()+2176 (0x1B5BFC40) NUnitTest::RunMain(int, char**)+5805 (0x1B5EA29D) ??+0 (0x7FA991C29D90) __libc_start_main+128 (0x7FA991C29E40) _start+41 (0x18622029) >> KqpSinkLocks::VisibleUncommittedRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [FAIL] Test command err: 2025-12-23T18:47:53.183159Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127045256114153:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:53.218566Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:47:53.273277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00390f/r3tmp/tmps0mCPH/pdisk_1.dat 2025-12-23T18:47:53.643136Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:47:53.715668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:53.715750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:53.734858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:53.824817Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:53.832766Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127045256114091:2082] 1766515673133045 != 1766515673133048 2025-12-23T18:47:53.837825Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:22320 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:47:54.090776Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587127045256114370:2118] Handle TEvNavigate describe path dc-1 2025-12-23T18:47:54.090840Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587127049551082445:2728] HANDLE EvNavigateScheme dc-1 2025-12-23T18:47:54.090998Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587127045256114391:2126], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:54.091098Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587127045256114633:2288][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587127045256114391:2126], cookie# 1 2025-12-23T18:47:54.092796Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127045256114659:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127045256114656:2288], cookie# 1 2025-12-23T18:47:54.092843Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127045256114660:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127045256114657:2288], cookie# 1 2025-12-23T18:47:54.092859Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127045256114661:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127045256114658:2288], cookie# 1 2025-12-23T18:47:54.092895Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127045256114059:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127045256114659:2288], cookie# 1 2025-12-23T18:47:54.092939Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127045256114062:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127045256114660:2288], cookie# 1 2025-12-23T18:47:54.092964Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127045256114065:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127045256114661:2288], cookie# 1 2025-12-23T18:47:54.093014Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127045256114659:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127045256114059:2050], cookie# 1 2025-12-23T18:47:54.093068Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127045256114660:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127045256114062:2053], cookie# 1 2025-12-23T18:47:54.093086Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127045256114661:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127045256114065:2056], cookie# 1 2025-12-23T18:47:54.093121Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127045256114633:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127045256114656:2288], cookie# 1 2025-12-23T18:47:54.093148Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587127045256114633:2288][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:47:54.093182Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127045256114633:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127045256114657:2288], cookie# 1 2025-12-23T18:47:54.093209Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587127045256114633:2288][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:47:54.093237Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127045256114633:2288][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127045256114658:2288], cookie# 1 2025-12-23T18:47:54.093250Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587127045256114633:2288][/dc-1] Sync cookie mismatch: sender# [1:7587127045256114658:2288], cookie# 1, current cookie# 0 2025-12-23T18:47:54.093301Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587127045256114391:2126], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:47:54.093402Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587127045256114391:2126], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587127045256114633:2288] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:47:54.093507Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587127045256114391:2126], cacheItem# { Subscriber: { Subscriber: [1:7587127045256114633:2288] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:47:54.096282Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127049551082446:2729], recipient# [1:7587127049551082445:2728], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:47:54.096370Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587127049551082445:2728] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:47:54.130984Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587127049551082445:2728] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:47:54.134064Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587127049551082445:2728] Handle TEvDescribeSchemeResult Forward to# [1:7587127049551082444:2727] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 ... 
BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7587127082227499752:3147][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7587127069342596351:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:48:02.805564Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7587127082227499753:3148][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7587127069342596351:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:48:02.805572Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127069342596209:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082227499757:3146] 2025-12-23T18:48:02.805586Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127069342596215:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082227499759:3146] 2025-12-23T18:48:02.805588Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127069342596209:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082227499768:3148] 2025-12-23T18:48:02.805601Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127069342596209:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082227499763:3147] 2025-12-23T18:48:02.805604Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127069342596215:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082227499771:3148] 2025-12-23T18:48:02.805617Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127069342596215:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082227499770:3147] 2025-12-23T18:48:02.805639Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127069342596212:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082227499769:3148] 2025-12-23T18:48:02.805657Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127069342596212:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082227499767:3147] 2025-12-23T18:48:02.805684Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7587127069342596351:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-12-23T18:48:02.805794Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7587127069342596351:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7587127082227499751:3146] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:48:02.805893Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587127069342596351:2129], cacheItem# { Subscriber: { Subscriber: [3:7587127082227499751:3146] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: 
StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:02.805927Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7587127069342596351:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-12-23T18:48:02.805972Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7587127069342596351:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7587127082227499752:3147] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:48:02.806016Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587127069342596351:2129], cacheItem# { Subscriber: { Subscriber: [3:7587127082227499752:3147] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:02.806074Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7587127069342596351:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-12-23T18:48:02.806142Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7587127069342596351:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7587127082227499753:3148] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:48:02.806181Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587127082227499772:3149], recipient# [3:7587127082227499745:2308], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: 
true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:48:02.806190Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587127069342596351:2129], cacheItem# { Subscriber: { Subscriber: [3:7587127082227499753:3148] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:02.806240Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587127082227499773:3150], recipient# [3:7587127082227499747:2310], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:48:03.467159Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587127069342596351:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:48:03.467348Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587127069342596351:2129], cacheItem# { Subscriber: { Subscriber: [3:7587127073637565049:3097] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:03.467459Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587127086522467083:3154], recipient# [3:7587127086522467082:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:518, void CreateTableInsideAndAlterDomainAndTable(TTestEnvWithPoolsSupport &): (ver.Version == 4) failed: (6 != 4) TBackTrace::Capture()+28 (0x1B0C63EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B5B77AC) 
CreateTableInsideAndAlterDomainAndTable(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&)+28275 (0x1AC91223) void NTestSuiteTExtSubDomainTest::TestCreateTableInsideAndAlterDomainAndTable(NUnitTest::TTestContext&)+212 (0x1ACDF6B4) std::__y1::__function::__func, void ()>::operator()()+280 (0x1ACC6308) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B5F043A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B5BE488) NTestSuiteTExtSubDomainTest::TCurrentTest::Execute()+1229 (0x1ACC540D) NUnitTest::TTestFactory::Execute()+2176 (0x1B5BFC40) NUnitTest::RunMain(int, char**)+5805 (0x1B5EA29D) ??+0 (0x7F80CDD79D90) __libc_start_main+128 (0x7F80CDD79E40) _start+41 (0x18622029) |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> KqpSinkMvcc::WriteSkewInsert+IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewInsert-IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [FAIL] Test command err: 2025-12-23T18:47:52.719092Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127042142648859:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:52.719139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003910/r3tmp/tmpivKLog/pdisk_1.dat 2025-12-23T18:47:53.066817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:47:53.097373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:53.097474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:53.104587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:53.215650Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:53.235729Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:11524 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:47:53.522978Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587127042142649107:2143] Handle TEvNavigate describe path dc-1 2025-12-23T18:47:53.523043Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587127046437617161:2742] HANDLE EvNavigateScheme dc-1 2025-12-23T18:47:53.523129Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587127042142649113:2145], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:53.523199Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587127046437616630:2287][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587127042142649113:2145], cookie# 1 2025-12-23T18:47:53.524902Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127046437616691:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127046437616688:2287], cookie# 1 2025-12-23T18:47:53.524957Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127046437616692:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127046437616689:2287], cookie# 1 2025-12-23T18:47:53.524972Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127046437616693:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127046437616690:2287], cookie# 1 2025-12-23T18:47:53.525003Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127042142648756:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127046437616691:2287], cookie# 1 2025-12-23T18:47:53.525039Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127042142648759:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127046437616692:2287], cookie# 1 2025-12-23T18:47:53.525095Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127042142648762:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127046437616693:2287], cookie# 1 2025-12-23T18:47:53.525154Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127046437616691:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127042142648756:2050], cookie# 1 2025-12-23T18:47:53.525172Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127046437616692:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127042142648759:2053], cookie# 1 2025-12-23T18:47:53.525195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127046437616693:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127042142648762:2056], cookie# 1 2025-12-23T18:47:53.525255Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127046437616630:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127046437616688:2287], cookie# 1 2025-12-23T18:47:53.525276Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587127046437616630:2287][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:47:53.525292Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127046437616630:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127046437616689:2287], cookie# 1 2025-12-23T18:47:53.525319Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587127046437616630:2287][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:47:53.525368Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127046437616630:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127046437616690:2287], cookie# 1 2025-12-23T18:47:53.525381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587127046437616630:2287][/dc-1] Sync cookie mismatch: sender# [1:7587127046437616690:2287], cookie# 1, current cookie# 0 2025-12-23T18:47:53.525427Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587127042142649113:2145], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:47:53.525528Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587127042142649113:2145], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587127046437616630:2287] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:47:53.525640Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587127042142649113:2145], cacheItem# { Subscriber: { Subscriber: [1:7587127046437616630:2287] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:47:53.529969Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127046437617162:2743], recipient# [1:7587127046437617161:2742], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:47:53.530053Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587127046437617161:2742] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:47:53.576180Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587127046437617161:2742] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:47:53.580495Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587127046437617161:2742] Handle TEvDescribeSchemeResult Forward to# [1:7587127046437617160:2741] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 Create ... 
12-23T18:48:02.507551Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7587127082793661684:3176][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [3:7587127069908758421:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:48:02.507577Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7587127082793661691:3176][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7587127065613790816:2056] 2025-12-23T18:48:02.507602Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587127082793661684:3176][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7587127082793661688:3176] 2025-12-23T18:48:02.507633Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7587127082793661684:3176][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7587127069908758421:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:48:02.507659Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7587127069908758421:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-12-23T18:48:02.507669Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127065613790816:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082793661691:3176] 2025-12-23T18:48:02.507686Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127065613790810:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082793661696:3177] 2025-12-23T18:48:02.507700Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127065613790810:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082793661689:3176] 2025-12-23T18:48:02.507715Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7587127069908758421:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7587127082793661684:3176] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:48:02.507720Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7587127082793661697:3177][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587127065613790813:2053] 2025-12-23T18:48:02.507739Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7587127082793661698:3177][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587127065613790816:2056] 2025-12-23T18:48:02.507765Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: 
[main][3:7587127082793661685:3177][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587127082793661694:3177] 2025-12-23T18:48:02.507780Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587127069908758421:2121], cacheItem# { Subscriber: { Subscriber: [3:7587127082793661684:3176] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:02.507789Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7587127082793661685:3177][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7587127069908758421:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:48:02.507806Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127065613790816:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082793661698:3177] 2025-12-23T18:48:02.507808Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587127082793661685:3177][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587127082793661695:3177] 2025-12-23T18:48:02.507827Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7587127082793661685:3177][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7587127069908758421:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:48:02.507829Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7587127069908758421:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-12-23T18:48:02.507842Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127065613790813:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082793661690:3176] 2025-12-23T18:48:02.507863Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127065613790813:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127082793661697:3177] 2025-12-23T18:48:02.507890Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7587127069908758421:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7587127082793661685:3177] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:48:02.507934Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for 
TNavigate: self# [3:7587127069908758421:2121], cacheItem# { Subscriber: { Subscriber: [3:7587127082793661685:3177] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:02.508090Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587127082793661699:3179], recipient# [3:7587127082793661674:2311], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:48:03.059730Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587127069908758421:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:48:03.059869Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587127069908758421:2121], cacheItem# { Subscriber: { Subscriber: [3:7587127074203726892:3043] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:03.059956Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587127087088629009:3183], recipient# [3:7587127087088629008:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:129, void CreateTableInsideAndLs(TTestEnvWithPoolsSupport &): (ver.Version == (env.GetSettings().FeatureFlags.GetEnableAlterDatabaseCreateHiveFirst() ? 
5 : 4)) failed: (8 != 5) TBackTrace::Capture()+28 (0x1B0C63EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B5B77AC) CreateTableInsideAndLs(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&)+17562 (0x1AC3115A) void NTestSuiteTExtSubDomainTest::TestCreateTableInsideAndLs(NUnitTest::TTestContext&)+212 (0x1ACDE454) std::__y1::__function::__func, void ()>::operator()()+280 (0x1ACC6308) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B5F043A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B5BE488) NTestSuiteTExtSubDomainTest::TCurrentTest::Execute()+1229 (0x1ACC540D) NUnitTest::TTestFactory::Execute()+2176 (0x1B5BFC40) NUnitTest::RunMain(int, char**)+5805 (0x1B5EA29D) ??+0 (0x7F1859D20D90) __libc_start_main+128 (0x7F1859D20E40) _start+41 (0x18622029) |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> KqpSinkMvcc::LostUpdate+IsOlap [GOOD] >> KqpSinkMvcc::LostUpdate-IsOlap |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [FAIL] Test command err: 2025-12-23T18:47:51.634197Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127036837510648:2153];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:51.634354Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003918/r3tmp/tmpuxr08D/pdisk_1.dat 2025-12-23T18:47:51.985817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:47:52.018098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:52.018219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:52.022279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:52.092866Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:52.278471Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:30805 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:47:52.341010Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587127036837510779:2119] Handle TEvNavigate describe path dc-1 2025-12-23T18:47:52.341104Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587127041132478881:2752] HANDLE EvNavigateScheme dc-1 2025-12-23T18:47:52.341233Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587127036837510785:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:47:52.341341Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587127036837511046:2291][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587127036837510785:2121], cookie# 1 2025-12-23T18:47:52.343017Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127036837511098:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127036837511095:2291], cookie# 1 2025-12-23T18:47:52.343101Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127036837511099:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127036837511096:2291], cookie# 1 2025-12-23T18:47:52.343118Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587127036837511100:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127036837511097:2291], cookie# 1 2025-12-23T18:47:52.343159Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127036837510466:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127036837511098:2291], cookie# 1 2025-12-23T18:47:52.343186Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127036837510469:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127036837511099:2291], cookie# 1 2025-12-23T18:47:52.343201Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587127036837510472:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587127036837511100:2291], cookie# 1 2025-12-23T18:47:52.343254Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127036837511098:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127036837510466:2050], cookie# 1 2025-12-23T18:47:52.343276Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127036837511099:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127036837510469:2053], cookie# 1 2025-12-23T18:47:52.343291Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587127036837511100:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127036837510472:2056], cookie# 1 2025-12-23T18:47:52.343333Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127036837511046:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127036837511095:2291], cookie# 1 2025-12-23T18:47:52.343379Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587127036837511046:2291][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:47:52.343404Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127036837511046:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127036837511096:2291], cookie# 1 2025-12-23T18:47:52.343428Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587127036837511046:2291][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:47:52.343459Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587127036837511046:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587127036837511097:2291], cookie# 1 2025-12-23T18:47:52.343472Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587127036837511046:2291][/dc-1] Sync cookie mismatch: sender# [1:7587127036837511097:2291], cookie# 1, current cookie# 0 2025-12-23T18:47:52.343525Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587127036837510785:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:47:52.343656Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587127036837510785:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587127036837511046:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:47:52.343749Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587127036837510785:2121], cacheItem# { Subscriber: { Subscriber: [1:7587127036837511046:2291] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:47:52.346588Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587127041132478882:2753], recipient# [1:7587127041132478881:2752], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:47:52.346653Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587127041132478881:2752] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:47:52.384484Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587127041132478881:2752] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:47:52.388020Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587127041132478881:2752] Handle TEvDescribeSchemeResult Forward to# [1:7587127041132478880:2751] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 Create ... 
: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587127067345896700:2121], cacheItem# { Subscriber: { Subscriber: [3:7587127080230800059:3269] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:01.592027Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7587127067345896382:2050] Subscribe: subscriber# [3:7587127080230800076:3270], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:48:01.592030Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7587127067345896385:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7587127080230800077:3270] 2025-12-23T18:48:01.592041Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7587127067345896385:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-12-23T18:48:01.592055Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127067345896382:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127080230800070:3269] 2025-12-23T18:48:01.592069Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7587127067345896385:2053] Subscribe: subscriber# [3:7587127080230800077:3270], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:48:01.592074Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7587127067345896388:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7587127080230800078:3270] 2025-12-23T18:48:01.592081Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7587127067345896388:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-12-23T18:48:01.592093Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127067345896385:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127080230800071:3269] 2025-12-23T18:48:01.592100Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7587127067345896388:2056] Subscribe: subscriber# [3:7587127080230800078:3270], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:48:01.592121Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127067345896388:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127080230800072:3269] 2025-12-23T18:48:01.592121Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7587127080230800076:3270][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587127067345896382:2050] 2025-12-23T18:48:01.592137Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127067345896382:2050] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127080230800076:3270] 2025-12-23T18:48:01.592142Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7587127080230800077:3270][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587127067345896385:2053] 2025-12-23T18:48:01.592157Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127067345896385:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127080230800077:3270] 2025-12-23T18:48:01.592162Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7587127080230800078:3270][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587127067345896388:2056] 2025-12-23T18:48:01.592175Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7587127067345896388:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7587127080230800078:3270] 2025-12-23T18:48:01.592197Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587127080230800060:3270][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587127080230800073:3270] 2025-12-23T18:48:01.592230Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587127080230800060:3270][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587127080230800074:3270] 2025-12-23T18:48:01.592259Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7587127080230800060:3270][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7587127067345896700:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:48:01.592293Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7587127080230800060:3270][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7587127080230800075:3270] 2025-12-23T18:48:01.592315Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7587127080230800060:3270][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7587127067345896700:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:48:01.592342Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7587127067345896700:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-12-23T18:48:01.592382Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7587127067345896700:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7587127080230800060:3270] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 
CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:48:01.592427Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587127067345896700:2121], cacheItem# { Subscriber: { Subscriber: [3:7587127080230800060:3270] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:01.592490Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587127080230800080:3272], recipient# [3:7587127080230800051:2309], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:48:02.135091Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587127067345896700:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:48:02.135229Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587127067345896700:2121], cacheItem# { Subscriber: { Subscriber: [3:7587127071640865319:3188] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:48:02.135315Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587127084525767390:3276], recipient# [3:7587127084525767389:2316], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:209, void DeclareAndAlterPools(TTestEnvWithPoolsSupport &): (ver.PathId == 2) failed: (32 != 2) 
TBackTrace::Capture()+28 (0x1B0C63EC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B5B77AC) DeclareAndAlterPools(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&)+27411 (0x1AC45443) void NTestSuiteTExtSubDomainTest::TestDeclareAndAlterPools(NUnitTest::TTestContext&)+212 (0x1ACDEA74) std::__y1::__function::__func, void ()>::operator()()+280 (0x1ACC6308) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B5F043A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B5BE488) NTestSuiteTExtSubDomainTest::TCurrentTest::Execute()+1229 (0x1ACC540D) NUnitTest::TTestFactory::Execute()+2176 (0x1B5BFC40) NUnitTest::RunMain(int, char**)+5805 (0x1B5EA29D) ??+0 (0x7FC4B761AD90) __libc_start_main+128 (0x7FC4B761AE40) _start+41 (0x18622029) |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> KqpSinkTx::OlapExplicitTcl [GOOD] >> KqpSinkTx::OlapInvalidateOnError >> KqpBatchDelete::Large_3 [GOOD] |95.9%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSnapshotRead::TestReadOnly-withSink [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration+withSink |95.9%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpTx::RollbackRoTx [GOOD] >> KqpTx::RollbackInvalidated >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-anonymous >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink >> KqpBatchUpdate::SimpleOnePartition [GOOD] >> KqpLocksTricky::TestNoWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_3 [GOOD] Test command err: Trying to start YDB, gRPC: 21256, MsgBus: 18578 2025-12-23T18:47:35.608139Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126969764862194:2134];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:35.608197Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bcc/r3tmp/tmpTFGrMj/pdisk_1.dat 2025-12-23T18:47:35.910730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:35.910864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:35.912736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:35.965603Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:36.009847Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126969764862099:2081] 
1766515655604203 != 1766515655604206 2025-12-23T18:47:36.009968Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21256, node 1 2025-12-23T18:47:36.151289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:36.151313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:36.151340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:36.151407Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:36.242238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18578 TClient is connected to server localhost:18578 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:36.619233Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515656080 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:47:36.708317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:36.734178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:36.920462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:37.134425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:37.211055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:39.404624Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126986944733160:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:39.404789Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:39.407190Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126986944733170:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:39.407286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:39.796770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:39.840799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:39.878389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:39.953262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:39.985125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.018550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.057239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.096364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:40.187164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126991239701348:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.187271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.187709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126991239701353:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.187757Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.187800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126991239701354:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:40.191814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:40.203793Z node 1 ... riptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2629 TClient is connected to server localhost:2629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515675183 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:47:55.660887Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:55.689470Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:55.773883Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:55.927467Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:56.002643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:56.022446Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:58.768776Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127065245016052:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.769129Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.769558Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127065245016062:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.769629Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.769884Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127065245016064:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.769930Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.840976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:58.879590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:58.951272Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:58.994707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.036268Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.099000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.150920Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.222642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.353404Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127069539984234:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.353515Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.354101Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127069539984239:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.354193Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127069539984240:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.354287Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.358422Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:59.372469Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587127069539984243:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:59.469280Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127069539984294:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:59.973393Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587127048065145095:2135];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:59.973481Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:01.360539Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:10.090515Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:48:10.090546Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpPg::TableDeleteWhere-useSink [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-system >> KqpSinkMvcc::InsertConflictingKey-IsOlap-CommitOnInsert [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap-CommitOnInsert >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp [GOOD] >> KqpSnapshotIsolation::TUniqueSecondaryWriteIndexOltp >> KqpTx::SnapshotROInteractive2 [GOOD] >> KqpSinkTx::LocksAbortOnCommit [GOOD] >> KqpSinkTx::InvalidateOnError >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-ordinaryuser >> KqpBatchUpdate::Large_3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimpleOnePartition [GOOD] Test command err: Trying to start YDB, gRPC: 61319, MsgBus: 21357 2025-12-23T18:47:32.858228Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126953898108013:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:32.858280Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bcf/r3tmp/tmpyT2Rfw/pdisk_1.dat 2025-12-23T18:47:33.189578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:33.189713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:33.209025Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:33.221518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:33.237407Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61319, node 1 2025-12-23T18:47:33.384890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:33.384915Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:33.384926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:33.385040Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:33.514821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21357 TClient is connected to server localhost:21357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:33.879194Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515653308 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:47:33.919918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:33.936106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:34.077817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:34.231848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:47:34.307697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.517088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126971077979029:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.517223Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.517799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126971077979039:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.517876Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.952626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.008259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.048018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.087126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.168567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.214156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.245806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.332252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:37.450651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126975372947218:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.450708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.450862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126975372947223:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.450914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126975372947224:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.450997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:37.455763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:37.474634Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126975372947227:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Tra ... 776079 2025-12-23T18:48:04.894644Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:04.949788Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:04.949879Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:04.956504Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5952, node 4 2025-12-23T18:48:05.007381Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:05.007408Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:05.007416Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:05.007496Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:05.029416Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2613 TClient is connected to server localhost:2613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515685004 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:05.406583Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:05.430091Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:05.492622Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:05.703126Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:05.772805Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:05.787378Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:08.616190Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127109357042703:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:08.616281Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:08.616734Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127109357042713:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:08.616788Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:08.683224Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:08.713410Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:08.743736Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:08.786574Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:08.836364Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:08.880993Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:08.924098Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:08.977445Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:09.057438Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127113652010880:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:09.057519Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:09.057798Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127113652010885:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:09.057830Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127113652010886:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:09.057871Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:09.061939Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:09.075206Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587127113652010889:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:48:09.170700Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587127113652010940:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:09.781017Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587127092177171710:2082];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:09.781716Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpLocks::InvalidateOnCommit [GOOD] >> KqpLocks::TwoPhaseTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 4077, MsgBus: 19561 2025-12-23T18:47:55.392903Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127054854975213:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:55.393056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040fc/r3tmp/tmpgeofFf/pdisk_1.dat 2025-12-23T18:47:55.743067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:55.757339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:55.757479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:55.836703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:55.917050Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4077, node 1 2025-12-23T18:47:55.976159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:56.051493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:56.051519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:56.051526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:56.051623Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19561 2025-12-23T18:47:56.403237Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515675918 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:56.693823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:56.703983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:56.718055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:56.847371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:47:57.010643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:57.077454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:58.871467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127067739878933:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.871549Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.871993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127067739878943:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.872050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.179822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.220002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.256126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.300440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.339090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.382464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.450790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.507354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.590477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127072034847112:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.590624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.590971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127072034847117:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.590991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127072034847118:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.591014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.595721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:59.605705Z node 1 :KQP_WORKLOAD_S ... 594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:09.409853Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:09.414074Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21565, node 3 2025-12-23T18:48:09.531076Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:09.595408Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:09.595432Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:09.595441Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:09.595529Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9027 TClient is connected to server localhost:9027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515689463 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:10.135769Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:10.144631Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:48:10.160406Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:10.239268Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:10.266118Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:10.423305Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:10.512113Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:12.976089Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127126431606542:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:12.976196Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:12.979031Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127126431606552:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:12.979148Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:13.069073Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.113462Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.192976Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.239787Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.280794Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.325573Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.375801Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.431403Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.518624Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127130726574722:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:13.518770Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:13.518825Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127130726574727:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:13.519153Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127130726574729:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:13.519214Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:13.523966Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:13.535244Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587127130726574730:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:48:13.633964Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587127130726574782:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:14.217936Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587127113546702808:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:14.218003Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test >> test_restarts.py::test_basic [FAIL] |95.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> KqpSinkMvcc::TxReadsCommitted+IsOlap [GOOD] >> KqpSinkMvcc::TxReadsCommitted-IsOlap >> KqpSinkLocks::TInvalidateOlap [GOOD] >> KqpSinkLocks::UncommittedRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_3 [GOOD] Test command err: Trying to start YDB, gRPC: 25126, MsgBus: 17164 2025-12-23T18:47:16.965104Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126885068973275:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:16.965649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bee/r3tmp/tmpB3C78T/pdisk_1.dat 2025-12-23T18:47:17.252237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:17.268648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:17.268763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:17.278046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:17.361327Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126885068973171:2081] 1766515636960953 != 1766515636960956 2025-12-23T18:47:17.366985Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:17.421074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 25126, node 1 2025-12-23T18:47:17.467337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:17.467365Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:17.467377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:17.467490Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17164 TClient is connected to server localhost:17164 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:17.975845Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515637418 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:18.036541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:18.043650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:18.053383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:18.200511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:18.367192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:18.446618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:20.409093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126902248844227:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:20.409238Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:20.409800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126902248844237:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:20.409852Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:20.812389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:20.849705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:20.893753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:20.932824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:20.973619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.051208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.090985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.141303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.250954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126906543812413:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.251050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.251382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126906543812416:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.251464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.251592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126906543812420:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.256398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... _CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:45.167604Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:45.167704Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24557 2025-12-23T18:47:45.472402Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515665243 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:47:45.626670Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:45.634044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:45.646013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:45.710454Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:45.879433Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:45.953631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:45.955043Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:47:48.814862Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127023682439918:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:48.814981Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:48.815338Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127023682439928:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:48.815406Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:48.884734Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:48.915306Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:48.949086Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:48.984543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:49.016625Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:49.054266Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:49.090853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:49.140646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:49.235182Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127027977408093:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:49.235293Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:49.237044Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127027977408098:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:49.237093Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127027977408099:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:49.237154Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:49.241799Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:49.273453Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587127027977408102:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:47:49.376098Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127027977408153:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:49.947667Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587127006502568889:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:49.947739Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:47:51.171431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:00.163438Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:48:00.163475Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |95.9%| [TA] $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |95.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... 
results_accumulator.log} |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TestSqsTopicHttpProxy::TestGetQueueUrlEmpty >> TestSqsTopicHttpProxy::TestReceiveMessageNonExistingQueue >> TestSqsTopicHttpProxy::TestGetQueueUrlWithConsumer >> TestSqsTopicHttpProxy::TestSendMessage >> TestSqsTopicHttpProxy::TestSendMessageBatch >> TestSqsTopicHttpProxy::TestDeleteMessage >> TestSqsTopicHttpProxy::TestGetQueueUrl >> KqpSinkLocks::VisibleUncommittedRows [GOOD] >> KqpSinkLocks::VisibleUncommittedRowsUpdate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 3928, MsgBus: 28277 2025-12-23T18:43:10.729037Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587125831073245228:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:43:10.729872Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005115/r3tmp/tmpJN2oMP/pdisk_1.dat 2025-12-23T18:43:10.919636Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:43:10.945715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:43:10.945804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:43:11.027392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:43:11.076123Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587125831073245175:2081] 1766515390725863 != 1766515390725866 2025-12-23T18:43:11.080892Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3928, node 1 2025-12-23T18:43:11.152336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:43:11.152363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:43:11.152375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:43:11.152478Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:43:11.170329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28277 TClient is connected to server localhost:28277 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515391088 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:43:11.635101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:43:11.737281Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:43:13.886014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-12-23T18:43:14.053707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-12-23T18:43:14.109370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2025-12-23T18:43:14.206028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2025-12-23T18:43:14.333851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2025-12-23T18:43:14.462919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"abcd ","abcd "} 2025-12-23T18:43:14.606878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2025-12-23T18:43:14.664229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2025-12-23T18:43:14.717901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2025-12-23T18:43:14.804015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2025-12-23T18:43:14.890986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2025-12-23T18:43:14.971690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2025-12-23T18:43:15.059968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2025-12-23T18:43:15.113817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2025-12-23T18:43:15.171093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 1111 2025-12-23T18:43:15.383190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {1111,1111} 2025-12-23T18:43:15.478230Z node 1 :FLAT_TX_SCH ... ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715844 at tablet 72075186224037943 status: ERROR errors: WRONG_SHARD_STATE (cannot reach sink actor) | 2025-12-23T18:48:11.552277Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715845:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:11.680518Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715846:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 869 2025-12-23T18:48:11.773278Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:48:11.804880Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715848:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:11.890898Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:48:11.917223Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715850:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:12.009391Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 650 2025-12-23T18:48:12.057119Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715852:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:12.158947Z node 11 
:READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:48:12.194431Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715854:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 829 2025-12-23T18:48:12.325990Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715855:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:12.444473Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715856:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 774 2025-12-23T18:48:12.590672Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715857:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:12.683530Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:48:12.723571Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715859:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:12.822577Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2950 2025-12-23T18:48:12.850322Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715861:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:12.989664Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715862:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.088082Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 114 2025-12-23T18:48:13.125831Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715865:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.222681Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: 
ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:48:13.263859Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715867:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 3802 2025-12-23T18:48:13.389400Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715868:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.562646Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715869:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 4072 2025-12-23T18:48:13.702535Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:48:13.736606Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715871:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.912791Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715872:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.031753Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 142 2025-12-23T18:48:14.067526Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715874:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.208741Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715875:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.295459Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 3615 2025-12-23T18:48:14.334735Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715877:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.441672Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 
2025-12-23T18:48:14.471672Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715879:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.565711Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 3614 2025-12-23T18:48:14.684024Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715881:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.777240Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2025-12-23T18:48:14.822342Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715883:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.926379Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 22 2025-12-23T18:48:14.956264Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715885:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:15.081485Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715886:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:15.154996Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-system >> KqpSinkMvcc::WriteSkewInsert-IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewReplace+IsOlap >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-ordinaryuser |95.9%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSinkMvcc::LostUpdate-IsOlap [GOOD] >> KqpSinkMvcc::OlapNamedStatement >> KqpTx::RollbackInvalidated [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageEmpty >> KqpSinkMvcc::WriteSkewUpsert+IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewUpsert-IsOlap >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-anonymous >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackInvalidated [GOOD] Test command err: Trying to start YDB, gRPC: 22116, MsgBus: 22109 2025-12-23T18:47:55.653741Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127053128888417:2254];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:55.653809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040f3/r3tmp/tmpcypxaE/pdisk_1.dat 2025-12-23T18:47:55.951540Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:56.060641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:56.060757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:56.071463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:56.101628Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:56.104360Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127053128888189:2081] 1766515675608416 != 1766515675608419 TServer::EnableGrpc on GrpcPort 22116, node 1 2025-12-23T18:47:56.214644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:56.232442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:56.232466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:56.232473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:56.232555Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22109 2025-12-23T18:47:56.651694Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event 
for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515676121 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:56.829864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:47:56.848880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:57.012944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:57.165155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:57.239634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:59.345911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127070308759235:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.346032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.347194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127070308759245:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.347308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.788526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.829401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.875194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.923396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.961931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:00.022779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:00.071327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:00.139089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:00.257524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127074603727412:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.257615Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.258418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127074603727417:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.258492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127074603727418:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.258825Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.264152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:00.282878Z node 1 ... Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515695532 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:48:15.997423Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:48:16.002904Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:48:16.008932Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:16.068939Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:16.218764Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:16.324101Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:16.370346Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:18.808264Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127152991184850:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:18.808372Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:18.808798Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127152991184860:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:18.808896Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:18.885025Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:18.969312Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.000077Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.027095Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.059006Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.091235Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.123345Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.184931Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.260639Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127157286153028:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.260719Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.260806Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127157286153033:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.260916Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127157286153035:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.260968Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.266098Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:19.275761Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587127157286153037:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:48:19.354011Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587127157286153088:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:20.357491Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587127140106281131:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:20.357567Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:21.240574Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7587127165876088019:2540], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:48:21.241070Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=3&id=ZWM2Yzc2NWQtZWM2Y2I3ZWEtOTUzYjNlOWMtOTljNWExYWE=, ActorId: [3:7587127165876087989:2530], ActorState: ExecuteState, TraceId: 01kd68mxf4ch3rte49qk3rzjm5, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 13 } message: "At function: KiReadTable!" end_position { row: 2 column: 13 } severity: 1 issues { position { row: 2 column: 13 } message: "Cannot find table \'db.[/Root/BadTable]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 13 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 01kd68mxekfsfhdtmz2k7xp2np 2025-12-23T18:48:21.254806Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=ZWM2Yzc2NWQtZWM2Y2I3ZWEtOTUzYjNlOWMtOTljNWExYWE=, ActorId: [3:7587127165876087989:2530], ActorState: ReadyState, TraceId: 01kd68mxg61xp6kes1wyw494p4, Create QueryResponse for error on request, msg: , status: NOT_FOUND, issues: { message: "Transaction not found: 01kd68mxekfsfhdtmz2k7xp2np" issue_code: 2015 severity: 1 } |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 22390, MsgBus: 18988 2025-12-23T18:47:58.325503Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127067407529697:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:58.326047Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040e8/r3tmp/tmpgLnKaF/pdisk_1.dat 2025-12-23T18:47:58.646622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:58.646764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:58.673137Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:58.701243Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:58.705143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22390, node 1 2025-12-23T18:47:58.839029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:58.839048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:58.839055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2025-12-23T18:47:58.839119Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:58.955305Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18988 2025-12-23T18:47:59.332640Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18988 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515678753 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:59.460757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:59.466666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:59.477482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:59.661740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:59.844019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:47:59.928814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:01.820260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127080292433411:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:01.820415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:01.822583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127080292433421:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:01.822648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:02.183626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:02.227749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:02.259111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:02.291554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:02.323997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:02.362947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:02.414226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:02.466615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:02.538551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127084587401585:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:02.538621Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:02.538945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127084587401591:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:02.538979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:02.539046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127084587401590:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:02.542113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:02.551927Z node 1 :KQP_WORKLOAD ... stributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:15.906960Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:15.906966Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:15.907039Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:16.002161Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10234 TClient is connected to server localhost:10234 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515695917 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:16.399855Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:16.405225Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:48:16.418562Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:16.490025Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:16.640330Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:16.700597Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:16.744184Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:18.973978Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127152263080902:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:18.974078Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:18.974642Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127152263080912:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:18.974713Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.065702Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.106495Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.147606Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.198365Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.229566Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.317689Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.359376Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.417100Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.497648Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127156558049079:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.497757Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.497806Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127156558049084:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.497954Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127156558049086:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.497993Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.502330Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:19.516261Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587127156558049087:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:48:19.586334Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587127156558049139:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:20.739024Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587127139378177175:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:20.739134Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:22.231778Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=YzkyOWQ5NGMtZjk1YzZjZGEtYWZkZmFmOTYtZDU4MzRlZDk=, ActorId: [3:7587127165147983996:2520], ActorState: ExecuteState, TraceId: 01kd68mybv31r72e4tm3g6ycmt, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken, status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/TwoShard`" issue_code: 2001 severity: 1 } |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpLocks::TwoPhaseTx [GOOD] >> KqpLocks::MixedTxFail+useSink >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-dbadmin >> KqpSinkTx::InvalidateOnError [GOOD] >> KqpSnapshotIsolation::TUniqueSecondaryWriteIndexOltp [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-anonymous |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::PassAwayOnCreatingReadSession |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrlEmpty [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueUrlWithConsumer [GOOD] >> KqpSinkMvcc::TxReadsCommitted-IsOlap [GOOD] >> KqpSinkMvcc::TxReadsItsOwnWrites+IsOlap >> 
TestSqsTopicHttpProxy::TestReceiveMessageNonExistingQueue [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueUrl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 23764, MsgBus: 3453 2025-12-23T18:47:53.805671Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127044732924298:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:53.805781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004100/r3tmp/tmpMnMFE2/pdisk_1.dat 2025-12-23T18:47:54.111042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:54.111164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:54.164963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:54.194937Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:54.202015Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127044732924272:2081] 1766515673800940 != 1766515673800943 2025-12-23T18:47:54.207187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23764, node 1 2025-12-23T18:47:54.346078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:54.346095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:54.346109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:54.346174Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:54.396431Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3453 TClient is connected to server localhost:3453 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:54.839128Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515674259 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:47:54.936313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:54.943608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:57.191001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127061912794331:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:57.191005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127061912794345:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:57.191155Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:57.191478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127061912794356:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:57.191534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:57.196208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:57.209010Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127061912794355:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:47:57.283500Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127061912794408:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:57.566051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:47:57.716033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:47:57.716290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:47:57.716503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:47:57.716616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:47:57.716695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:47:57.716773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:47:57.716860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:47:57.716941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:47:57.717036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:47:57.717143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:47:57.717289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:47:57.717375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:47:57.717458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127061912794574:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:47:57.720267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127061912794589:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:47:57.720318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127061912794589:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:47:57.720487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127061912794589:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:47:57.720580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587 ... 
issue_code: 2001 severity: 1 } Trying to start YDB, gRPC: 16512, MsgBus: 31763 2025-12-23T18:48:17.566367Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127146570852934:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:17.566405Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004100/r3tmp/tmpvBu8Hc/pdisk_1.dat 2025-12-23T18:48:17.625871Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:17.710344Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:17.731615Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:17.731687Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:17.746846Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16512, node 3 2025-12-23T18:48:17.790065Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:17.790088Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:17.790095Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:17.790168Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:17.925128Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:31763 TClient is connected to server localhost:31763 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515697793 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:48:18.271079Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:18.278803Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:18.606972Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:21.368899Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127163750722923:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:21.368973Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127163750722943:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:21.369058Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:21.369421Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127163750722961:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:21.369465Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:21.372529Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:21.383164Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587127163750722960:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:48:21.438474Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587127163750723013:2535] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:21.493711Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:21.557739Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:22.625405Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587127146570852934:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:22.630018Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:22.684263Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:24.083392Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-12-23T18:48:24.083665Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037889 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:48:24.083853Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037889 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:48:24.084089Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [3:7587127176635632956:2963], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [3:7587127172340665635:2963]Got CONSTRAINT VIOLATION for table `/Root/KV`. ShardID=72075186224037889, Sink=[3:7587127176635632956:2963].{
: Error: Conflict with existing key., code: 2012 } 2025-12-23T18:48:24.084185Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [3:7587127176635632948:2963], SessionActorId: [3:7587127172340665635:2963], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7587127172340665635:2963]. 2025-12-23T18:48:24.084452Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=3&id=YWViMmZkZWMtZDJjMjZkMjQtOGYzNGE1MDItNDJlZjcyYTg=, ActorId: [3:7587127172340665635:2963], ActorState: ExecuteState, TraceId: 01kd68n05yffd9f1xfk7fdevg3, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7587127176635632950:2963] from: [3:7587127176635632948:2963] 2025-12-23T18:48:24.084542Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [3:7587127176635632950:2963] TxId: 281474976715664. Ctx: { TraceId: 01kd68n05yffd9f1xfk7fdevg3, Database: /Root, SessionId: ydb://session/3?node_id=3&id=YWViMmZkZWMtZDJjMjZkMjQtOGYzNGE1MDItNDJlZjcyYTg=, PoolId: default, IsStreamingQuery: 0}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KV`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-12-23T18:48:24.084893Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=YWViMmZkZWMtZDJjMjZkMjQtOGYzNGE1MDItNDJlZjcyYTg=, ActorId: [3:7587127172340665635:2963], ActorState: ExecuteState, TraceId: 01kd68n05yffd9f1xfk7fdevg3, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `/Root/KV`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } }
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Conflict with existing key., code: 2012 2025-12-23T18:48:24.174711Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=YWViMmZkZWMtZDJjMjZkMjQtOGYzNGE1MDItNDJlZjcyYTg=, ActorId: [3:7587127172340665635:2963], ActorState: ExecuteState, TraceId: 01kd68n097ccxfm4zrx4t74ank, Create QueryResponse for error on request, msg: , status: NOT_FOUND, issues: { message: "Transaction not found: 01kd68n05qds7jt7680q2t47jw" issue_code: 2015 severity: 1 }
: Error: Transaction not found: 01kd68n05qds7jt7680q2t47jw, code: 2015 |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TestSqsTopicHttpProxy::TestDeleteMessage [GOOD] >> TestSqsTopicHttpProxy::TestSendMessage [GOOD] >> KqpSinkLocks::UncommittedRead [GOOD] >> KqpSinkLocks::UpdateLocksOneShard >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TestSqsTopicHttpProxy::TestReceiveMessage >> TestSqsTopicHttpProxy::TestDeleteMessageInvalid >> TestSqsTopicHttpProxy::TestDeleteMessageBatch >> KqpSinkLocks::VisibleUncommittedRowsUpdate [GOOD] >> KqpSinkLocks::UpdateLocksTwoShards >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl >> TestSqsTopicHttpProxy::TestSendMessageBatch [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNamesCore::NameListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::PrefixListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::ExceptionsListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::SystemNamesForbiddenForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesForbiddenForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForSystemUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForSystemUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForSystemUser [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpLocksTricky::TestNoWrite [GOOD] >> KqpLocksTricky::TestSnapshotIfInsertRead >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-ordinaryuser >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] >> KqpSinkTx::OlapInvalidateOnError [GOOD] >> KqpSinkTx::OlapInteractive >> RemoteTopicReader::ReadTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForSystemUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] 
recipient: [1:113:2144] 2025-12-23T18:47:44.676546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:47:44.676646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:44.676698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:47:44.676749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:47:44.676790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:47:44.676818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:47:44.676870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:44.676935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:47:44.677799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:44.678085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:47:44.791619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:47:44.791705Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:44.792559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:44.807473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:47:44.807623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:47:44.807798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:47:44.819682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:47:44.819979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:47:44.820691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:44.821061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:44.825270Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:44.825475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:47:44.826699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:44.826779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:44.826929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:47:44.826971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:47:44.827009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:47:44.827175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:47:44.834056Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-12-23T18:47:44.969899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:47:44.970117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:44.970321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:47:44.970375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:47:44.970617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:47:44.970666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:44.972720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:44.972934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:47:44.973148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:44.973221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:47:44.973268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:47:44.973307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:47:44.975066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:44.975123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:47:44.975161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:47:44.976684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:44.976727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:44.976779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:47:44.976824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:47:44.986654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:47:44.991025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:47:44.991245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:47:44.992302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:44.992437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:47:44.992495Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:47:44.992750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:47:44.992802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:47:44.992973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:47:44.993043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:44.995754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard ... as 2 2025-12-23T18:48:28.383108Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2025-12-23T18:48:28.383143Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2025-12-23T18:48:28.383177Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2025-12-23T18:48:28.383236Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 5 2025-12-23T18:48:28.383268Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2025-12-23T18:48:28.386038Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:48:28.386146Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:48:28.386189Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:48:28.386229Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2025-12-23T18:48:28.386271Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2025-12-23T18:48:28.387057Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:48:28.387150Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:48:28.387186Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:48:28.387222Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2025-12-23T18:48:28.387262Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2025-12-23T18:48:28.388713Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:48:28.388806Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:48:28.388844Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:48:28.388877Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 5 2025-12-23T18:48:28.388912Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2025-12-23T18:48:28.390110Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:48:28.390199Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:48:28.390232Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:48:28.390265Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2025-12-23T18:48:28.390298Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 1 2025-12-23T18:48:28.390369Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2025-12-23T18:48:28.393692Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:48:28.393853Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:48:28.396108Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:48:28.396221Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2025-12-23T18:48:28.397770Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2025-12-23T18:48:28.397820Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2025-12-23T18:48:28.399611Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2025-12-23T18:48:28.399737Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2025-12-23T18:48:28.399778Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [13:2739:4728] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2025-12-23T18:48:28.401113Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2025-12-23T18:48:28.401161Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2025-12-23T18:48:28.401277Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2025-12-23T18:48:28.401319Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2025-12-23T18:48:28.401392Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2025-12-23T18:48:28.401425Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2025-12-23T18:48:28.401486Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2025-12-23T18:48:28.401512Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2025-12-23T18:48:28.401570Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2025-12-23T18:48:28.401599Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2025-12-23T18:48:28.403538Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2025-12-23T18:48:28.403735Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2025-12-23T18:48:28.403799Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [13:2742:4731] 2025-12-23T18:48:28.404279Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2025-12-23T18:48:28.404436Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2025-12-23T18:48:28.404515Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2025-12-23T18:48:28.404569Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2025-12-23T18:48:28.404605Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [13:2742:4731] 2025-12-23T18:48:28.404678Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2025-12-23T18:48:28.404713Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [13:2742:4731] 2025-12-23T18:48:28.404916Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2025-12-23T18:48:28.404978Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2025-12-23T18:48:28.405009Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [13:2742:4731] 2025-12-23T18:48:28.405202Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2025-12-23T18:48:28.405237Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [13:2742:4731] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> RemoteTopicReader::PassAwayOnCreatingReadSession [GOOD] >> KqpSinkMvcc::WriteSkewUpsert-IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewReplace-IsOlap >> TestSqsTopicHttpProxy::TestReceiveMessageEmpty [GOOD] >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 18974, MsgBus: 20304 2025-12-23T18:47:55.454766Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127054054905733:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:55.454831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:47:55.512096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040f6/r3tmp/tmpqGms1y/pdisk_1.dat 2025-12-23T18:47:55.747838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:55.747979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:55.750820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:55.793142Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:55.815776Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127054054905706:2081] 1766515675452474 != 1766515675452477 2025-12-23T18:47:55.836234Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18974, node 1 2025-12-23T18:47:56.044665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:56.075448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:56.075474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:56.075481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:56.075580Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20304 2025-12-23T18:47:56.457065Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:20304 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515676100 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:47:56.776196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:56.782086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:58.852009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127066939808483:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.852216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.853505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127066939808496:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.860506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127066939808508:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.860571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.861004Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127066939808521:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.861046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.865770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:58.878846Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127066939808498:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:47:58.943399Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127066939808553:2541] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:59.310465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.407370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:00.455224Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127054054905733:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:00.455367Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:00.456974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:02.640921Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=NWRlOTVhZDAtZDI3ZTlhMGMtNjQzM2NiN2YtNmU2OGY5MmY=, ActorId: [1:7587127079824718344:2964], ActorState: ExecuteState, TraceId: 01kd68mb84a681gd6tzjhk20de, Create QueryResponse for error on request, msg: , status: GENERIC_ERROR, issues: { position { row: 3 column: 29 } message: "Operation \'Upsert\' can\'t be performed in read only transaction" end_position { row: 3 column: 29 } issue_code: 2008 severity: 1 }
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 Trying to start YDB, gRPC: 64868, MsgBus: 12928 2025-12-23T18:48:03.796033Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127089177442708:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:03.796111Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040f6/r3tmp/tmp95FjEb/pdisk_1.dat 2025-12-23T18:48:03.858919Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:03.982600Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:03.982708Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:03.984750Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:04.000003Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64868, node 2 2025-12-23T18:48:04.105336Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:04.105356Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:04.105371Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:04.105441Z node 2 :NET_CLASSI ... 
nternal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.600024Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.602248Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.602290Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.602306Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.607970Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.608029Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.608045Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.610581Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.610626Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.610641Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.616375Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.616431Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.616445Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.618332Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.618386Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.618401Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.624977Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.625036Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.625053Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.626218Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.626277Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.626291Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.632794Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.632857Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.632872Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.633556Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.633608Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.633623Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.640983Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.641046Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.641061Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.641170Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.641228Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.641248Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.650002Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038055;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.650062Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038055;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.650077Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038055;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.650333Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.650376Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.650390Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.658901Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.658988Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.659007Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.659560Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.659624Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:24.659639Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:26.462525Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:48:26.462564Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::PassAwayOnCreatingReadSession [GOOD] Test command err: 2025-12-23T18:48:27.010799Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127190106024858:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:27.010883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00401c/r3tmp/tmpKaOblr/pdisk_1.dat 2025-12-23T18:48:27.276882Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:27.343543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:27.343625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:27.387930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:27.426713Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:27.514720Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5021 TServer::EnableGrpc on GrpcPort 26747, node 1 2025-12-23T18:48:27.869015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-12-23T18:48:27.869086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:27.869093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:27.869152Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:28.020919Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515707481 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:28.499541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:28.510776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:48:28.519443Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7587127194400992886:2494] Handshake: worker# [1:7587127194400992884:2492] 2025-12-23T18:48:28.519899Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7587127194400992886:2494] Create read session: session# [1:7587127194400992887:2495] >> TestSqsTopicHttpProxy::TestReceiveMessageGroup |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpSinkMvcc::InsertConflictingKey+IsOlap-CommitOnInsert [GOOD] >> KqpSinkMvcc::InsertConflictingKey-IsOlap+CommitOnInsert >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] >> KqpLocks::MixedTxFail+useSink [GOOD] >> KqpLocks::MixedTxFail-useSink >> TTicketParserTest::LoginGood >> AuthorizeRequestToAccessService::CanAuthorizeYdbInAccessService >> TTicketParserTest::TicketFromCertificateWithValidationGood >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerGood >> TTicketParserTest::LoginRefreshGroupsWithError >> TTicketParserTest::NebiusAuthenticationUnavailable >> TTicketParserTest::BulkAuthorizationRetryError >> TestSqsTopicHttpProxy::TestDeleteMessageBatch [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] >> TTicketParserTest::AuthorizationRetryError >> TestSqsTopicHttpProxy::TestDeleteMessageInvalid [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 23688, MsgBus: 64772 2025-12-23T18:47:59.540355Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127070741516372:2251];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:59.540508Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040e7/r3tmp/tmppZaMV3/pdisk_1.dat 2025-12-23T18:47:59.918960Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:59.932356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:59.932490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:00.015414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:00.097716Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:00.102856Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127070741516147:2081] 1766515679512991 != 1766515679512994 TServer::EnableGrpc on GrpcPort 23688, node 1 2025-12-23T18:48:00.271104Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:00.299456Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:00.299491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:00.299500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:00.299580Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64772 2025-12-23T18:48:00.549482Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:64772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515680090 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:00.999172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:01.008394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:48:01.019718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:01.180989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:01.383161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:01.470729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:03.325709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127087921387200:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:03.325883Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:03.326922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127087921387209:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:03.326998Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:03.671442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:03.706243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:03.739010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:03.779204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:03.812602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:03.880490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:03.922081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:03.972297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:04.059999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127092216355383:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:04.060090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:04.060332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127092216355388:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:04.060372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127092216355389:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:04.060498Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:04.064160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... hemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:15.941030Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:16.004080Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:16.154336Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:16.237032Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:16.263073Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:18.880533Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127152766845702:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:18.880638Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:18.880904Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127152766845712:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:18.880946Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:18.955543Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:18.994411Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.029471Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.066630Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.097867Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.128815Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.161672Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.224175Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:19.301315Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127157061813876:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.301403Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.301492Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127157061813881:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.301773Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127157061813883:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.301830Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:19.304680Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:19.314066Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587127157061813884:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:48:19.388057Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587127157061813936:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:20.224714Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587127139881942056:2146];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:20.224770Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:30.297889Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:48:30.297939Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:31.945937Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2061: SelfId: [3:7587127208601422220:2674], TxId: 281474976715682, task: 1. Ctx: { TraceId : 01kd68n7pg6psmgy6q9fx1xg9t. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=MzI5NGVkYzItYmFhOWY1YWItZDA1YjYyY2EtMjNiMThmMDY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 38 has no snapshot at v1766515701000/18446744073709551615 shard 72075186224037888 with lowWatermark v1766515701461/18446744073709551615 (node# 3 state# Ready) } } 2025-12-23T18:48:31.946606Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [3:7587127208601422220:2674], TxId: 281474976715682, task: 1. Ctx: { TraceId : 01kd68n7pg6psmgy6q9fx1xg9t. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=MzI5NGVkYzItYmFhOWY1YWItZDA1YjYyY2EtMjNiMThmMDY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 38 has no snapshot at v1766515701000/18446744073709551615 shard 72075186224037888 with lowWatermark v1766515701461/18446744073709551615 (node# 3 state# Ready) } }. 2025-12-23T18:48:31.947039Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [3:7587127208601422221:2675], TxId: 281474976715682, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68n7pg6psmgy6q9fx1xg9t. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=MzI5NGVkYzItYmFhOWY1YWItZDA1YjYyY2EtMjNiMThmMDY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7587127208601422216:2530], status: ABORTED, reason: {
: Error: Terminate execution } 2025-12-23T18:48:31.947589Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=MzI5NGVkYzItYmFhOWY1YWItZDA1YjYyY2EtMjNiMThmMDY=, ActorId: [3:7587127161356781539:2530], ActorState: ExecuteState, TraceId: 01kd68n7pg6psmgy6q9fx1xg9t, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Read request aborted" severity: 1 issues { message: "Table id 38 has no snapshot at v1766515701000/18446744073709551615 shard 72075186224037888 with lowWatermark v1766515701461/18446744073709551615 (node# 3 state# Ready)" severity: 1 } } |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl [GOOD] >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty [GOOD] >> TTicketParserTest::CanGetErrorIfAppropriateLoginProviderIsAbsent ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] Test command err: 2025-12-23T18:48:20.499638Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127160653939996:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:20.499676Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:20.556222Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0058db/r3tmp/tmphSi3Sd/pdisk_1.dat 2025-12-23T18:48:20.849476Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:20.898335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:20.898771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:20.904386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:20.993123Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14886, node 1 2025-12-23T18:48:21.034264Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:21.262492Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:21.262525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:21.262581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:21.262656Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:21.519276Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10161 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515700985 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:21.768021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:21.777207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:10161 2025-12-23T18:48:21.976071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:48:21.998984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:48:22.008115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:48:22.036823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-12-23T18:48:22.046087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:22.213151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.272118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.317598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.351525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.383320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:22.415688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.449474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.486835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:22.524580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:23.790094Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127173538843502:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.790140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127173538843514:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.790264Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.790576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127173538843517:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.790647Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.796286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:23.809845Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127173538843516:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:48:23.891121Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127173538843569:3101] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], ty ... ist { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:48:32.559485Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 14ms 2025-12-23T18:48:32.559697Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:48:32.559730Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-12-23T18:48:32.559794Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 14ms 2025-12-23T18:48:32.560033Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:48:32.561167Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { 
Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:48:32.561190Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 18ms 2025-12-23T18:48:32.561569Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: 
Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:48:32.561607Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-12-23T18:48:32.561687Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 18ms 2025-12-23T18:48:32.562066Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:48:32.713590Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587127215039195642:2437]: Pool not found 2025-12-23T18:48:32.713827Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-12-23T18:48:32.904050Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587127193564356772:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:32.904109Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:32.971101Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587127215039195625:2433]: Pool not found 2025-12-23T18:48:32.971387Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 
2025-12-23T18:48:32.974100Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127215039195742:2453], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:32.974184Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587127215039195743:2454], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:48:32.974236Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:32.978865Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127215039195746:2455], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:32.978947Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:33.256821Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587127215039195740:2452]: Pool not found 2025-12-23T18:48:33.257151Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete >> TestSqsTopicHttpProxy::TestReceiveMessage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 61325, MsgBus: 21521 2025-12-23T18:48:08.269929Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127109392851724:2227];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:08.270140Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040e0/r3tmp/tmpYcpxRI/pdisk_1.dat 2025-12-23T18:48:08.567723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:08.567816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:08.595581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:08.629316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:08.680654Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127109392851534:2081] 1766515688230383 != 1766515688230386 2025-12-23T18:48:08.681285Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61325, node 1 2025-12-23T18:48:08.749240Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:08.749266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:08.749273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:08.749421Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:08.867717Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21521 TClient is connected to server localhost:21521 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515688700 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:09.274766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:09.287060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:09.287984Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:11.384466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127122277754306:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:11.384476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127122277754318:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:11.384555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:11.384854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127122277754321:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:11.384895Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:11.388748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:11.399833Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127122277754320:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:48:11.460895Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127122277754374:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:11.728433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:11.852104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:12.842893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.573178Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127109392851724:2227];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:13.601927Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:13.912790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:15.830718Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=ZDA5NGNmZDctN2VhNjVjODEtMTU4OWMwNjktMTM2MTk4M2U=, ActorId: [1:7587127130867696908:2960], ActorState: ExecuteState, TraceId: 01kd68mqg41zm0zphbzt13vejv, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken, status: ABORTED, issues: { message: "Transaction locks invalidated. 
Table: `/Root/TestTable/IndexUniq/indexImplTable`" issue_code: 2001 severity: 1 } Trying to start YDB, gRPC: 21094, MsgBus: 16267 2025-12-23T18:48:17.042197Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127150346250400:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:17.046183Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040e0/r3tmp/tmpq7rwys/pdisk_1.dat 2025-12-23T18:48:17.062029Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:17.148031Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:17.177150Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:17.177236Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21094, node 2 2025-12-23T18:48:17.183415Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:17.236130Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:17.236156Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:17.236162Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:17.236237Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:17.351053Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16267 TClient is connected to server localhost:16267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Roo ... 
4: Notification cookie mismatch for subscription [3:7587127185179292076:2081] 1766515706211166 != 1766515706211169 2025-12-23T18:48:26.383344Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:26.383432Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:26.400556Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5360, node 3 2025-12-23T18:48:26.462878Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:26.462902Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:26.462910Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:26.463008Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:26.586957Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1781 TClient is connected to server localhost:1781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515706452 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:26.958864Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:26.977212Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:27.042922Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:27.186101Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:27.239803Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:27.287604Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:29.761916Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127198064195835:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:29.761978Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:29.762224Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127198064195844:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:29.762282Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:29.911178Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:29.945213Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:29.986357Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:30.015878Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:30.050060Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:30.087663Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:30.133289Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:30.186524Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:30.290484Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127202359164013:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:30.290601Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:30.290937Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127202359164018:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:30.290959Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127202359164019:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:30.291009Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:30.296144Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:30.306608Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587127202359164022:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:48:30.406171Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587127202359164073:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:31.212355Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587127185179292103:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:31.212428Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::UpdateLocksTwoShards [GOOD] >> KqpSinkLocks::UpdateLocksOneShardRowExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2025-12-23T18:48:20.522431Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127159510030350:2259];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:20.522629Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0058d0/r3tmp/tmpipmgNU/pdisk_1.dat 2025-12-23T18:48:20.845695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:20.845807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:20.941832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:21.081430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:21.085348Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22800, node 1 2025-12-23T18:48:21.261126Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:21.261329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:21.261337Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:21.261408Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:21.353015Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:21.522877Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:14527 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515701097 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:21.757872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:21.769497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:14527 2025-12-23T18:48:21.979050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:48:21.984358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:48:21.988722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:48:22.033543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.211225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:48:22.263914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-12-23T18:48:22.268107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.306242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.343175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.381675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.412405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.449790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:22.491233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.527323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:24.047859Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127176689900952:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.048022Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.048233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127176689900964:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.048529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127176689900966:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.048571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.052713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:24.064774Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127176689900967:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:48:24.128218Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127176689901019:3105] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:24.770650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, subope ... 4. Send blob request. 2025-12-23T18:48:33.885883Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:48:33.885910Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#36,[::1]:36356) <- (200 , 436 bytes) 2025-12-23T18:48:33.885975Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 175 from pos 0 cbcount 3 2025-12-23T18:48:33.886033Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#36,[::1]:36356) connection closed 2025-12-23T18:48:33.886043Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 3 parts_count 0 source 1 size 195 accessed 1 times before, last time 2025-12-23T18:48:33.000000Z 2025-12-23T18:48:33.886059Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 4. All 1 blobs are from cache. 2025-12-23T18:48:33.886079Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1026: Topic 'topic1' partition 0 user consumer readTimeStamp done, result 1766515713879 queuesize 0 startOffset 0 2025-12-23T18:48:33.886131Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:48:33.886186Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' 2025-12-23T18:48:33.886198Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 175 from pos 0 cbcount 3 2025-12-23T18:48:33.886264Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-12-23T18:48:33.886982Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:36372) incoming connection opened 2025-12-23T18:48:33.887054Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:36372) -> (POST /Root, 101 bytes) 2025-12-23T18:48:33.887152Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f808:8c33:ad7b:0:e008:8c33:ad7b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 606f08b6-59e4420f-c4feea65-f4fc0b 2025-12-23T18:48:33.887595Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [606f08b6-59e4420f-c4feea65-f4fc0b] got new request from [f808:8c33:ad7b:0:e008:8c33:ad7b:0] database '/Root' stream '' 2025-12-23T18:48:33.888024Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [606f08b6-59e4420f-c4feea65-f4fc0b] [auth] Authorized successfully 2025-12-23T18:48:33.888069Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [606f08b6-59e4420f-c4feea65-f4fc0b] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:48:33.889981Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1766515718888 VisibilityDeadlineMilliseconds: 1766515743888 MaxNumberOfMessages: 10 Http output full {"Messages":[{"MD5OfBody":"3bf7e6d806a0b8062135ae945eca30bf","Attributes":{"SentTimestamp":"1766515713879","MessageGroupId":"MessageGroupId-1"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-1","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"},{"MD5OfBody":"7034dd2039d12b6dd94a9e6dfb820b77","Attributes":{"SentTimestamp":"1766515713881"},"ReceiptHandle":"CAAQAQ==","Body":"MessageBody-2","MessageId":"E43C2D66-7305-5166-8EE2-D86E421B5F98"},{"MD5OfBody":"f23251df60f088df56a4be0a5fb1ae75","Attributes":{"SentTimestamp":"1766515713881"},"ReceiptHandle":"CAAQAg==","Body":"MessageBody-3","MessageId":"F4B995B7-701A-5815-BE20-755014677779"}]} 2025-12-23T18:48:33.891188Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'topic1' requestId: 2025-12-23T18:48:33.891223Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2025-12-23T18:48:33.891308Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 5 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 3 size 26214400 endOffset 3 max time lag 0ms effective offset 0 2025-12-23T18:48:33.891457Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 195 count 3 last offset 0, current partition end offset: 3 2025-12-23T18:48:33.891469Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2025-12-23T18:48:33.891496Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. 
Partition 0 offset 0 partno 0 count 3 parts_count 0 source 1 size 195 accessed 2 times before, last time 2025-12-23T18:48:33.000000Z 2025-12-23T18:48:33.891517Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 2025-12-23T18:48:33.891538Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:48:33.891601Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 175 from pos 0 cbcount 3 2025-12-23T18:48:33.891660Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-12-23T18:48:33.891835Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' 2025-12-23T18:48:33.892398Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [606f08b6-59e4420f-c4feea65-f4fc0b] reply ok 2025-12-23T18:48:33.892539Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:36372) <- (200 , 641 bytes) 2025-12-23T18:48:33.892625Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:36372) connection closed 2025-12-23T18:48:33.893616Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#40,[::1]:36376) incoming connection opened 2025-12-23T18:48:33.893687Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#40,[::1]:36376) -> (POST /Root, 397 bytes) 2025-12-23T18:48:33.893808Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [7898:8b33:ad7b:0:6098:8b33:ad7b:0] request [DeleteMessageBatch] url [/Root] database [/Root] requestId: b87227ef-bdc0edbc-81d464e7-4107349 2025-12-23T18:48:33.894340Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [DeleteMessageBatch] requestId [b87227ef-bdc0edbc-81d464e7-4107349] got new request from [7898:8b33:ad7b:0:6098:8b33:ad7b:0] database '/Root' stream '' 2025-12-23T18:48:33.894952Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DeleteMessageBatch] requestId [b87227ef-bdc0edbc-81d464e7-4107349] [auth] Authorized successfully 2025-12-23T18:48:33.895025Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [DeleteMessageBatch] requestId [b87227ef-bdc0edbc-81d464e7-4107349] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:48:33.898244Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:33.898296Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:33.898312Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:33.898329Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:33.898338Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:33.899478Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:50: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPCommitRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 Offset: 2 Offset: 0 Offset: 1 2025-12-23T18:48:33.900457Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx 
events 2025-12-23T18:48:33.900477Z node 2 :PERSQUEUE DEBUG: partition.cpp:2409: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:33.900498Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:48:33.900510Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:33.900525Z node 2 :PERSQUEUE DEBUG: partition.cpp:2473: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:33.900569Z node 2 :PERSQUEUE DEBUG: partition.cpp:3880: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer offset is set to 3 (startOffset 0) session 2025-12-23T18:48:33.900581Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:48:33.900593Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:48:33.900605Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:33.900795Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:48:33.901401Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DeleteMessageBatch] requestId [b87227ef-bdc0edbc-81d464e7-4107349] reply ok 2025-12-23T18:48:33.901762Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#40,[::1]:36376) <- (200 , 219 bytes) 2025-12-23T18:48:33.901852Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#40,[::1]:36376) connection closed Http output full {"Successful":[{"Id":"delete-id-2"},{"Id":"delete-id-0"},{"Id":"delete-id-1"}],"Failed":[{"Message":"The specified receipt handle isn't valid.","Id":"delete-invalid","Code":"ReceiptHandleIsInvalid","SenderFault":true}]} 2025-12-23T18:48:33.902714Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:33.902792Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:934: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:33.902873Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:48:33.902909Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:33.902921Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:33.902931Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:33.902945Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:33.902955Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:33.903023Z node 2 :PERSQUEUE DEBUG: 
partition_compaction.cpp:193: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl [GOOD] Test command err: 2025-12-23T18:48:20.516543Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127163445611909:2189];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:20.519070Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0058da/r3tmp/tmpGJuIyi/pdisk_1.dat 2025-12-23T18:48:20.912952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:20.918890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:20.919009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:20.989847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:21.053858Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10883, node 1 2025-12-23T18:48:21.216822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:21.263493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:21.263518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:21.263523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:21.263582Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:21.514592Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:11984 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515701055 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:21.806695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:21.832606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:11984 2025-12-23T18:48:22.110927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:48:22.129139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:48:22.139355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:48:22.171748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-12-23T18:48:22.183900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.302900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:48:22.340069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-12-23T18:48:22.343715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.387964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-12-23T18:48:22.393173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.431798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.472170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.511147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.584333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.623914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.678585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:23.982675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127176330515297:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.982783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127176330515288:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.982887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.984908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127176330515303:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.984999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.986088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:23.998803Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127176330515302:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:48:24.102597Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127180625482651:3102] txid# 281474976710674, issues: { message: "Check failed: path: ... : List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:48:33.369238Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587127215719577811:2436]: Pool not found 2025-12-23T18:48:33.369405Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-12-23T18:48:33.586909Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587127215719577802:2434]: Pool not found 2025-12-23T18:48:33.587213Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-12-23T18:48:33.590151Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127215719577924:2453], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:33.590185Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7587127215719577925:2454], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:48:33.590243Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:33.590459Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127215719577928:2455], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:33.590551Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:33.880815Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7587127215719577922:2452]: Pool not found 2025-12-23T18:48:33.881434Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2025-12-23T18:48:34.199138Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:40122) incoming connection opened 2025-12-23T18:48:34.199226Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:40122) -> (POST /Root, 52 bytes) 2025-12-23T18:48:34.199392Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f863:7c31:b07b:0:e063:7c31:b07b:0] request [SendMessage] url [/Root] database [/Root] requestId: d6bf08f6-eed0a4db-6b33114c-9827f1d0 2025-12-23T18:48:34.199776Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessage] requestId [d6bf08f6-eed0a4db-6b33114c-9827f1d0] got new request from [f863:7c31:b07b:0:e063:7c31:b07b:0] database '/Root' stream '' 2025-12-23T18:48:34.204428Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2025-12-23T18:48:34.204528Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2025-12-23T18:48:34.204564Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2025-12-23T18:48:34.204600Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2025-12-23T18:48:34.204630Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2025-12-23T18:48:34.204660Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2025-12-23T18:48:34.204690Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) Http output full {"__type":"MissingParameter","message":"No QueueUrl parameter."} 2025-12-23T18:48:34.251558Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 2025-12-23T18:48:34.251669Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:48:34.252563Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.create now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:48:34.252606Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:48:34.252631Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:48:34.252654Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:48:34.252677Z 
node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:48:34.252758Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2025-12-23T18:48:34.252904Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessage] requestId [d6bf08f6-eed0a4db-6b33114c-9827f1d0] [auth] Authorized successfully 2025-12-23T18:48:34.252955Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessage] requestId [d6bf08f6-eed0a4db-6b33114c-9827f1d0] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:48:34.253399Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessage] requestId [d6bf08f6-eed0a4db-6b33114c-9827f1d0] Not retrying GRPC response. Code: 400, Error: MissingParameter 2025-12-23T18:48:34.253485Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessage] requestId [d6bf08f6-eed0a4db-6b33114c-9827f1d0] reply with status: STATUS_UNDEFINED message: No QueueUrl parameter. 2025-12-23T18:48:34.253709Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:40122) <- (400 MissingParameter, 64 bytes) 2025-12-23T18:48:34.253772Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:40122) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"", "MessageBody":"MessageBody-0" } 2025-12-23T18:48:34.253810Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:40122) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: d6bf08f6-eed0a4db-6b33114c-9827f1d0 Content-Type: application/x-amz-json-1.1 Content-Length: 64 2025-12-23T18:48:34.253930Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:40122) connection closed 2025-12-23T18:48:34.255327Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:40126) incoming connection opened 2025-12-23T18:48:34.255400Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:40126) -> (POST /Root, 100 bytes) 2025-12-23T18:48:34.255556Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [9800:8330:b07b:0:8000:8330:b07b:0] request [SendMessage] url [/Root] database [/Root] requestId: 190bbbf3-f12deca8-bc805809-e6837596 2025-12-23T18:48:34.255915Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessage] requestId [190bbbf3-f12deca8-bc805809-e6837596] got new request from [9800:8330:b07b:0:8000:8330:b07b:0] database '/Root' stream '' 2025-12-23T18:48:34.256758Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessage] requestId [190bbbf3-f12deca8-bc805809-e6837596] [auth] Authorized successfully 2025-12-23T18:48:34.256805Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessage] requestId [190bbbf3-f12deca8-bc805809-e6837596] sending grpc request to '' database: '/Root' iam token size: 0 Http output full {"__type":"AWS.SimpleQueueService.NonExistentQueue","message":"You do not have access or the '/Root/ExampleQueueName' does not exist"} 2025-12-23T18:48:34.259003Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessage] requestId [190bbbf3-f12deca8-bc805809-e6837596] Not retrying GRPC response. 
Code: 400, Error: AWS.SimpleQueueService.NonExistentQueue 2025-12-23T18:48:34.259079Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessage] requestId [190bbbf3-f12deca8-bc805809-e6837596] reply with status: STATUS_UNDEFINED message: You do not have access or the '/Root/ExampleQueueName' does not exist 2025-12-23T18:48:34.259479Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:40126) <- (400 AWS.SimpleQueueService.NonExistentQueue, 134 bytes) 2025-12-23T18:48:34.259550Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:40126) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"/v1/5//Root/16/ExampleQueueName/13/user_consumer", "MessageBody":"MessageBody-0" } 2025-12-23T18:48:34.259598Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:40126) Response: HTTP/1.1 400 AWS.SimpleQueueService.NonExistentQueue Connection: close x-amzn-requestid: 190bbbf3-f12deca8-bc805809-e6837596 Content-Type: application/x-amz-json-1.1 Content-Length: 134 2025-12-23T18:48:34.259752Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:40126) connection closed |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue [GOOD] >> KqpSinkLocks::UpdateLocksOneShard [GOOD] >> AuthorizeRequestToAccessService::CanAuthorizeYdbInAccessService [GOOD] >> AuthorizeRequestToAccessService::CanRefreshTokenForAccessService >> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad >> TTicketParserTest::NebiusAuthenticationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryError >> TTicketParserTest::LoginGood [GOOD] >> TTicketParserTest::LoginGoodWithGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty [GOOD] Test command err: 2025-12-23T18:48:20.502252Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127159625800713:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:20.502341Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0058d3/r3tmp/tmpVxo31G/pdisk_1.dat 2025-12-23T18:48:20.781403Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:20.877469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:20.877577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:20.937855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:21.017467Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23187, node 1 2025-12-23T18:48:21.076799Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:21.264609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:21.264655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:21.264673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:21.264739Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:21.538881Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5213 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515701006 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:21.801255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:21.808847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:5213 waiting... 2025-12-23T18:48:21.991586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2025-12-23T18:48:21.999356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:48:22.002213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 
2025-12-23T18:48:22.037640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.195338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.240807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-12-23T18:48:22.245047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.287501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-12-23T18:48:22.292177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.327588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.365541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.401092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.446908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:22.485987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.532585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:23.788511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127172510704213:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.788656Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.789174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127172510704227:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.789251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.790630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127172510704225:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:23.796152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:23.808224Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127172510704229:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:48:23.892049Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127172510704280:3102] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNo ... 07][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:48:35.102480Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:35.102629Z node 2 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1766515715146, TxId 281474976715690 2025-12-23T18:48:35.102647Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.102657Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:48:35.102673Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.102697Z node 2 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2025-12-23T18:48:35.106889Z node 2 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037907][Partition][0][StateIdle] Schedule reply tx done 281474976715690 2025-12-23T18:48:35.106919Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:48:35.106930Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:48:35.106943Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.107203Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:48:35.109836Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:35.110025Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2025-12-23T18:48:35.110039Z node 2 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037907][Partition][0][StateIdle] Updateing MLP consumer 'consumer' config 2025-12-23T18:48:35.110129Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:48:35.110149Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.110162Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.110172Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.110186Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.110196Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.110217Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:35.110551Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic1" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/topic1" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 0 DefaultProcessingTimeoutSeconds: 20 } MonitoringProjectId: "" 2025-12-23T18:48:35.110654Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:946: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS 2025-12-23T18:48:35.110825Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:48:35.115354Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) { status: SUCCESS, issues: }ct.IsSuccess() = 1, ct.IsTransportError() = 0, ct.GetEndpoint() = [::]:17287 { status: SUCCESS, issues: }consumer 2025-12-23T18:48:35.143127Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:56120) incoming connection opened 2025-12-23T18:48:35.143219Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:56120) -> (POST /Root, 76 bytes) 
2025-12-23T18:48:35.143436Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [18e2:e57f:547c:0:e2:e57f:547c:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: b836bc24-31b8fe47-ff513496-60f624b1 2025-12-23T18:48:35.143848Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessageBatch] requestId [b836bc24-31b8fe47-ff513496-60f624b1] got new request from [18e2:e57f:547c:0:e2:e57f:547c:0] database '/Root' stream '' 2025-12-23T18:48:35.146051Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2025-12-23T18:48:35.146128Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2025-12-23T18:48:35.146157Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2025-12-23T18:48:35.146183Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2025-12-23T18:48:35.146208Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2025-12-23T18:48:35.146238Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2025-12-23T18:48:35.146270Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) 2025-12-23T18:48:35.172791Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:48:35.187342Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 2025-12-23T18:48:35.187420Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:48:35.192846Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:48:35.192908Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.create now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:48:35.193023Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:48:35.193249Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" retryable:0 2025-12-23T18:48:35.193318Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2025-12-23T18:48:35.193432Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessageBatch] requestId [b836bc24-31b8fe47-ff513496-60f624b1] [auth] Authorized successfully 2025-12-23T18:48:35.193466Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessageBatch] requestId [b836bc24-31b8fe47-ff513496-60f624b1] sending grpc request to '' database: '/Root' iam token size: 0 
2025-12-23T18:48:35.193993Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessageBatch] requestId [b836bc24-31b8fe47-ff513496-60f624b1] Not retrying GRPC response. Code: 400, Error: AWS.SimpleQueueService.EmptyBatchRequest 2025-12-23T18:48:35.194089Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessageBatch] requestId [b836bc24-31b8fe47-ff513496-60f624b1] reply with status: STATUS_UNDEFINED message: The batch request doesn't contain any entries. 2025-12-23T18:48:35.194444Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:56120) <- (400 AWS.SimpleQueueService.EmptyBatchRequest, 112 bytes) 2025-12-23T18:48:35.194508Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:56120) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"/v1/5//Root/6/topic1/8/consumer", "Entries": [ ] } 2025-12-23T18:48:35.194542Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:56120) Response: HTTP/1.1 400 AWS.SimpleQueueService.EmptyBatchRequest Connection: close x-amzn-requestid: b836bc24-31b8fe47-ff513496-60f624b1 Content-Type: application/x-amz-json-1.1 Content-Length: 112 2025-12-23T18:48:35.194629Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:56120) connection closed Http output full {"__type":"AWS.SimpleQueueService.EmptyBatchRequest","message":"The batch request doesn't contain any entries."} 2025-12-23T18:48:35.202477Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.202507Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.202518Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.202535Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.202546Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.302860Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.302904Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.302915Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.302932Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.302943Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessage [GOOD] Test command err: 2025-12-23T18:48:20.499367Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127159549564869:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:20.499429Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0058d1/r3tmp/tmptdW4Jc/pdisk_1.dat 2025-12-23T18:48:20.850957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:20.900464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:20.900578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:20.991520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:21.065727Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:21.074904Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127159549564840:2081] 1766515700495090 != 1766515700495093 TServer::EnableGrpc on GrpcPort 6688, node 1 2025-12-23T18:48:21.135856Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:21.261147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:21.261177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:21.261299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:21.261371Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:21.521186Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:27610 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515701090 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:21.813704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:27610 2025-12-23T18:48:22.010763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:48:22.017213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:48:22.018880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:48:22.047299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.202922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:48:22.251748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-12-23T18:48:22.258953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.300881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.334799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:22.371933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.402460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.433834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.467864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.508840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:24.016725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127176729435680:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.016864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.017038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127176729435691:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.017304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127176729435694:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.017356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.020853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:24.032110Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127176729435696:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:48:24.097228Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127176729435747:3107] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:24.767687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo u ... gDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 530 2025-12-23T18:48:34.850878Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:34.850910Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:34.850927Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:34.850942Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:34.850951Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:34.952852Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:34.952880Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:34.952891Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:34.952906Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:34.952916Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.053416Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.053451Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.053463Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.053479Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.053490Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.155507Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.155532Z node 2 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.155539Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.155551Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.155557Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.257104Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.257133Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.257145Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.257162Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.257171Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.357525Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.357556Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.357568Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.357584Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.357593Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.458226Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.458257Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.458266Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.458278Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.458285Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.558592Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.558623Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.558631Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.558644Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2025-12-23T18:48:35.558651Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.659015Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.659051Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.659063Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.659079Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.659090Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.759530Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.759565Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.759576Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.759595Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.759606Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.843588Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:72: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 1 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 84 2025-12-23T18:48:35.844045Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [83a61283-7fe36dc7-81a31e2f-b2137ee9] reply ok 2025-12-23T18:48:35.844212Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:39254) <- (200 , 2 bytes) Http output full {} 2025-12-23T18:48:35.844317Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:39254) connection closed 2025-12-23T18:48:35.860743Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.860774Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.860785Z node 2 
:PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.860802Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.860813Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:35.961106Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:35.961138Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.961150Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:35.961166Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:35.961179Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:36.061442Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:36.061474Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.061486Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:36.061503Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.061515Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageGroup [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-dbadmin >> RemoteTopicReader::ReadTopic [GOOD] >> TTicketParserTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] >> TTicketParserTest::LoginBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue [GOOD] Test command err: 2025-12-23T18:48:20.499325Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127159385653938:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:20.499378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0058d2/r3tmp/tmpelPLHL/pdisk_1.dat 2025-12-23T18:48:20.837541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:20.853690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-12-23T18:48:20.853812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:20.935495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:21.007666Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:21.054832Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 29226, node 1 2025-12-23T18:48:21.263594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:21.263631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:21.263638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:21.263736Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:21.506907Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:17947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515700992 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:21.892218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:21.903619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:17947 2025-12-23T18:48:22.173994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:48:22.188035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:48:22.190931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:48:22.224786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.368609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:48:22.413167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.466930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-12-23T18:48:22.472309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.513589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.552481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.585414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:22.624039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.665928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.716952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:24.064151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127176565524738:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.064154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127176565524746:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.064311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.064730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127176565524753:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.064838Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.068301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:24.080150Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127176565524752:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:48:24.135336Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127176565524805:3103] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:24.763648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, subope ... n and tx pending commits 2025-12-23T18:48:36.179967Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.179977Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:36.280307Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:36.280342Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.280354Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:36.280372Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.280383Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:36.382899Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:36.382939Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.382952Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:36.382972Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.382983Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:36.483532Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:36.483557Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.483566Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:36.483582Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.483592Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:36.583977Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:36.584010Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 
0, PendingWrites: 0 2025-12-23T18:48:36.584022Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:36.584039Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.584050Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:36.684300Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:36.684332Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.684344Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:36.684361Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.684381Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:36.784411Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:36.784442Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.784454Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:36.784473Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.784488Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:36.884796Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:36.884833Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.884847Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:36.884866Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.884882Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:36.985173Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:36.985213Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.985226Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:36.985243Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:36.985254Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:37.050790Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:72: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 1 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 1 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 1 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 60 2025-12-23T18:48:37.051761Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'topic1' requestId: 2025-12-23T18:48:37.051796Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2025-12-23T18:48:37.051881Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 6 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 1 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-12-23T18:48:37.052074Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 6 added 1 blobs, size 72 count 1 last offset 0, current partition end offset: 1 2025-12-23T18:48:37.052098Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 6. Send blob request. 2025-12-23T18:48:37.052143Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 72 accessed 3 times before, last time 2025-12-23T18:48:34.000000Z 2025-12-23T18:48:37.052181Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 6. All 1 blobs are from cache. 2025-12-23T18:48:37.052213Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:48:37.052293Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2025-12-23T18:48:37.052381Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-12-23T18:48:37.052598Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:48:37.052944Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [279e634a-95bcc94e-838a8ab9-7d469c86] reply ok 2025-12-23T18:48:37.053108Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:41264) <- (200 , 211 bytes) 2025-12-23T18:48:37.053235Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:41264) connection closed Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SentTimestamp":"1766515714107"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-0","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"}]} jsonReceived = { "Messages": [ { "Attributes": { "SentTimestamp":"1766515714107" }, "Body":"MessageBody-0", "MD5OfBody":"94a29778a1f1f41bf68142847b2e6106", "MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125", "ReceiptHandle":"CAAQAA==" } ] } 2025-12-23T18:48:37.085442Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:37.085470Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:37.085482Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:37.085496Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:37.085507Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:37.185776Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:37.185799Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:37.185807Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:37.185820Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:37.185827Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::UpdateLocksOneShard [GOOD] Test command err: Trying to start YDB, gRPC: 13690, MsgBus: 12672 2025-12-23T18:47:55.498583Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127055868249931:2163];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:55.500689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040f4/r3tmp/tmpbBE8uF/pdisk_1.dat 2025-12-23T18:47:55.782374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-12-23T18:47:55.821904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:55.822015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:55.828729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:55.897837Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:55.934815Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127055868249801:2081] 1766515675492378 != 1766515675492381 TServer::EnableGrpc on GrpcPort 13690, node 1 2025-12-23T18:47:56.005329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:56.028875Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:56.028911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:56.028921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:56.029033Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12672 TClient is connected to server localhost:12672 2025-12-23T18:47:56.527048Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515675876 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:56.677834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:58.903124Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127068753152556:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.903247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.903431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127068753152589:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.903621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127068753152594:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.903675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.908191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:58.922844Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127068753152596:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:47:59.027765Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127073048119943:2538] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:59.353227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:59.488000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:00.498849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127055868249931:2163];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:00.498960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:00.731754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:02.638157Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587127085933029824:2963], SessionActorId: [1:7587127085933029774:2963], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 . sessionActorId=[1:7587127085933029774:2963]. 2025-12-23T18:48:02.662023Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=ODZkYjI0YTYtNDljYTRkZGItNTkwMDEyODItNjRjZmVmYzc=, ActorId: [1:7587127085933029774:2963], ActorState: ExecuteState, TraceId: 01kd68mb7rcbz1k9wt7795h776, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7587127085933029825:2963] from: [1:7587127085933029824:2963] 2025-12-23T18:48:02.662171Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587127085933029825:2963] TxId: 281474976715665. Ctx: { TraceId: 01kd68mb7rcbz1k9wt7795h776, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODZkYjI0YTYtNDljYTRkZGItNTkwMDEyODItNjRjZmVmYzc=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 } 2025-12-23T18:48:02.662565Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=ODZkYjI0YTYtNDljYTRkZGItNTkwMDEyODItNjRjZmVmYzc=, ActorId: [1:7587127085933029774:2963], ActorState: ExecuteState, TraceId: 01kd68mb7rcbz1k9wt7795h776, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 }
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 13469, MsgBus: 12730 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040f4/r3tmp/tmp2HmY85/pdisk_1.dat 2025-12-23T18:48:04.069047Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:04.069144Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:48:04.156102Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:04.158941Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127091328955985:2081] 1766515684011638 != 1766515684011641 2025-12-23T18:48:04.188735Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:04.188823Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13469, node 2 2025-12-23T18:48:04.203062Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:04.313963Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:04.332655Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:04.332681Z node 2 :NET_CLASSIFIER WAR ... 
e 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587127157106452298:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:24.528311Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:24.742135Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 1353, MsgBus: 4575 2025-12-23T18:48:27.849972Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587127189803227515:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:27.850126Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040f4/r3tmp/tmp8gb2rR/pdisk_1.dat 2025-12-23T18:48:27.883707Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:28.013784Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:28.055566Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:28.055687Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:28.092313Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1353, node 4 2025-12-23T18:48:28.116664Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:28.222551Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:28.222570Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:28.222580Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:28.222657Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4575 TClient is connected to server localhost:4575 2025-12-23T18:48:28.863640Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515708139 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:28.877199Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:32.151273Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127211278064860:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:32.151274Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127211278064851:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:32.151355Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:32.151596Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127211278064866:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:32.151662Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:32.154659Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:32.166209Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587127211278064865:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:48:32.251984Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587127211278064918:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:32.313198Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:32.360780Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:33.244847Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587127189803227515:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:33.257419Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:33.630338Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:35.583150Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-12-23T18:48:35.583448Z node 4 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 5 at tablet 72075186224037889 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-12-23T18:48:35.583634Z node 4 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037889 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-12-23T18:48:35.583908Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [4:7587127224162974816:2963], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [4:7587127224162974790:2963]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[4:7587127224162974816:2963].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-12-23T18:48:35.584045Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [4:7587127224162974809:2963], SessionActorId: [4:7587127224162974790:2963], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7587127224162974790:2963]. 2025-12-23T18:48:35.584342Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=4&id=MmI0N2FkOS1kOTZlNmY0Ny0zZDEyMzM1NC0xNDY4YjdiNQ==, ActorId: [4:7587127224162974790:2963], ActorState: ExecuteState, TraceId: 01kd68nbcwf6f9cc6nbtbxw493, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7587127224162974847:2963] from: [4:7587127224162974809:2963] 2025-12-23T18:48:35.584458Z node 4 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [4:7587127224162974847:2963] TxId: 281474976710666. Ctx: { TraceId: 01kd68nbcwf6f9cc6nbtbxw493, Database: /Root, SessionId: ydb://session/3?node_id=4&id=MmI0N2FkOS1kOTZlNmY0Ny0zZDEyMzM1NC0xNDY4YjdiNQ==, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-12-23T18:48:35.584835Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=MmI0N2FkOS1kOTZlNmY0Ny0zZDEyMzM1NC0xNDY4YjdiNQ==, ActorId: [4:7587127224162974790:2963], ActorState: ExecuteState, TraceId: 01kd68nbcwf6f9cc6nbtbxw493, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::WriteSkewReplace-IsOlap [GOOD] >> KqpSinkMvcc::InsertConflictingKey-IsOlap+CommitOnInsert [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert >> KqpSinkMvcc::WriteSkewReplace+IsOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2025-12-23T18:48:30.258540Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127202474796293:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:30.258592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ff4/r3tmp/tmp7SKxE4/pdisk_1.dat 2025-12-23T18:48:30.484465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:30.508182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:30.508314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:30.517918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:30.585641Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:30.672089Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5569 TServer::EnableGrpc on GrpcPort 11188, node 1 2025-12-23T18:48:30.807563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:30.807591Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:30.807630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:30.807746Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5569 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515710652 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:31.131958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:31.277060Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:31.487091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:33.312838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127215359699248:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:33.312924Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127215359699263:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:33.312941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127215359699264:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:33.313034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:33.313746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127215359699269:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:33.313814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:33.319408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:33.325582Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127215359699271:2633] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:48:33.332965Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127215359699267:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:48:33.332972Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127215359699268:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:48:33.388817Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127215359699318:2664] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:33.431654Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127215359699336:2672] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:34.985455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:48:35.258870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127202474796293:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:35.258947Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:35.399001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:35.819434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:48:36.275401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:48:37.241371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:48:38.173896Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7587127236834536518:2978] Handshake: worker# [1:7587127206769764368:2488] 2025-12-23T18:48:38.179917Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7587127236834536518:2978] Create read session: session# [1:7587127236834536519:2487] 2025-12-23T18:48:38.185031Z node 1 
:REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7587127236834536518:2978] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-12-23T18:48:38.227727Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7587127236834536518:2978] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_1_3359016584301600678_v1 } } 2025-12-23T18:48:38.243789Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7587127236834536518:2978] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 0 SeqNo: 1 CreateTime: 2025-12-23T18:48:38.060000Z WriteTime: 2025-12-23T18:48:38.063000Z MessageGroupId: producer ProducerId: producer }] } } 2025-12-23T18:48:38.245713Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7587127236834536518:2978] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-12-23T18:48:38.367722Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7587127236834536518:2978] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-12-23T18:48:38.356000Z WriteTime: 2025-12-23T18:48:38.356000Z MessageGroupId: producer ProducerId: producer }] } } 2025-12-23T18:48:38.459478Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7587127236834536631:3015] Handshake: worker# [1:7587127206769764368:2488] 2025-12-23T18:48:38.464223Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7587127236834536631:3015] Create read session: session# [1:7587127236834536632:2487] 2025-12-23T18:48:38.464531Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7587127236834536631:3015] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-12-23T18:48:38.474736Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7587127236834536631:3015] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_2_4576939537076072563_v1 } } 2025-12-23T18:48:38.476771Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7587127236834536631:3015] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-12-23T18:48:38.356000Z WriteTime: 2025-12-23T18:48:38.356000Z MessageGroupId: producer ProducerId: producer }] } } |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpLocksTricky::TestSnapshotIfInsertRead [GOOD] >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageGroup [GOOD] Test command err: 2025-12-23T18:48:22.869920Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127168157073496:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:22.870007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0058cf/r3tmp/tmpY9Jvsq/pdisk_1.dat 2025-12-23T18:48:23.165680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:23.184014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:23.184121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:23.260129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:23.351371Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18873, node 1 2025-12-23T18:48:23.394044Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:23.410718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:23.410761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:23.410778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:23.410895Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515703316 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:23.658371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:23.664224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:3969 2025-12-23T18:48:23.828042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:48:23.835488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:48:23.837260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:48:23.857413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-12-23T18:48:23.865294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:23.880252Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:24.014818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:24.062677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:24.106367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:24.149925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:24.183602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:24.220635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:24.258149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:24.292558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:24.337713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:26.215530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127185336944288:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:26.215698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:26.216230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127185336944299:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:26.216969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127185336944302:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:26.217035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:26.221370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:26.239758Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127185336944303:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-12-23T18:48:26.300271Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127185336944355:3101] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:26.639316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, subopera ... -FC22-50FE-8DFD-BD2858742EA9"}]} 2025-12-23T18:48:38.895944Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:56348) incoming connection opened 2025-12-23T18:48:38.896056Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:56348) -> (POST /Root, 100 bytes) 2025-12-23T18:48:38.896192Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f8e9:2cd4:c77b:0:e0e9:2cd4:c77b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: efbb9453-d0dafb6c-cde04ed2-9c97270f 2025-12-23T18:48:38.896628Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [efbb9453-d0dafb6c-cde04ed2-9c97270f] got new request from [f8e9:2cd4:c77b:0:e0e9:2cd4:c77b:0] database '/Root' stream '' 2025-12-23T18:48:38.897175Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [efbb9453-d0dafb6c-cde04ed2-9c97270f] [auth] Authorized successfully 2025-12-23T18:48:38.897177Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [efbb9453-d0dafb6c-cde04ed2-9c97270f] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:48:38.897961Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1766515723897 VisibilityDeadlineMilliseconds: 1766515748897 MaxNumberOfMessages: 7 2025-12-23T18:48:38.899131Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'topic1' requestId: 2025-12-23T18:48:38.899179Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2025-12-23T18:48:38.899319Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 75 Topic 'topic1' partition 0 user consumer offset 290 partno 0 count 7 size 26214400 endOffset 300 max time lag 0ms effective offset 290 2025-12-23T18:48:38.899539Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 75 added 1 blobs, size 377 count 10 last offset 290, current partition end offset: 300 2025-12-23T18:48:38.899561Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 75. Send blob request. 2025-12-23T18:48:38.899624Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 290 partno 0 count 10 parts_count 0 source 1 size 377 accessed 1 times before, last time 2025-12-23T18:48:38.000000Z 2025-12-23T18:48:38.899660Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 75. All 1 blobs are from cache. 
2025-12-23T18:48:38.899693Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:48:38.899753Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 290 partno 0 count 10 parts 0 suffix '63' 2025-12-23T18:48:38.899816Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 290 totakecount 10 count 10 size 356 from pos 0 cbcount 10 2025-12-23T18:48:38.899940Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-12-23T18:48:38.901287Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [efbb9453-d0dafb6c-cde04ed2-9c97270f] reply ok Http output full {"Messages":[{"MD5OfBody":"4ebde9bfeb2c142908b6897295e27d7d","Attributes":{"SentTimestamp":"1766515718505"},"ReceiptHandle":"CAAQogI=","Body":"MessageBody-290","MessageId":"B665CFF5-0D15-5E6E-B795-7F7F031BADFC"},{"MD5OfBody":"00716a52e19ced3758e9add7738a4de6","Attributes":{"SentTimestamp":"1766515718506"},"ReceiptHandle":"CAAQowI=","Body":"MessageBody-291","MessageId":"686D0DEA-9B4B-5B7D-A517-ECDB47DF33C7"},{"MD5OfBody":"6b2cce807faa840ccd5a8f944df80bad","Attributes":{"SentTimestamp":"1766515718506"},"ReceiptHandle":"CAAQpAI=","Body":"MessageBody-292","MessageId":"A9887F98-B0B1-5F36-BEA7-95EBF1DD25E9"},{"MD5OfBody":"c59fd6ecc9a283019c9179d342110fcb","Attributes":{"SentTimestamp":"1766515718506"},"ReceiptHandle":"CAAQpQI=","Body":"MessageBody-293","MessageId":"DCE69C82-8CDA-5A5D-91F0-0AF6FFD574C6"},{"MD5OfBody":"000dd65dc815f7e13c7ab8922f0418be","Attributes":{"SentTimestamp":"1766515718506"},"ReceiptHandle":"CAAQpgI=","Body":"MessageBody-294","MessageId":"46CD018C-4816-5E0E-9483-13F4A15BAB58"},{"MD5OfBody":"042479648840e5a3c4e86196590acb75","Attributes":{"SentTimestamp":"1766515718506"},"ReceiptHandle":"CAAQpwI=","Body":"MessageBody-295","MessageId":"FAE15508-B62A-5219-9518-9937762A66B2"},{"MD5OfBody":"a11e0f7a28004b695b04e3899672981b","Attributes":{"SentTimestamp":"1766515718506"},"ReceiptHandle":"CAAQqAI=","Body":"MessageBody-296","MessageId":"95BA32E8-4312-5A6C-85D8-3477673707AA"}]} 2025-12-23T18:48:38.901493Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:56348) <- (200 , 1407 bytes) 2025-12-23T18:48:38.901584Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:56348) connection closed 2025-12-23T18:48:38.902570Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:56362) incoming connection opened 2025-12-23T18:48:38.902665Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:56362) -> (POST /Root, 100 bytes) 2025-12-23T18:48:38.902815Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [38d7:2cd4:c77b:0:20d7:2cd4:c77b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 8942ff2b-99aad183-bc26d2fc-f1528f0b 2025-12-23T18:48:38.903199Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [8942ff2b-99aad183-bc26d2fc-f1528f0b] got new request from [38d7:2cd4:c77b:0:20d7:2cd4:c77b:0] database '/Root' stream '' 2025-12-23T18:48:38.903579Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [8942ff2b-99aad183-bc26d2fc-f1528f0b] [auth] Authorized successfully 2025-12-23T18:48:38.903588Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [8942ff2b-99aad183-bc26d2fc-f1528f0b] sending grpc request to '' database: '/Root' iam token size: 0 
2025-12-23T18:48:38.904187Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1766515723904 VisibilityDeadlineMilliseconds: 1766515748904 MaxNumberOfMessages: 8 2025-12-23T18:48:38.905309Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'topic1' requestId: 2025-12-23T18:48:38.905354Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2025-12-23T18:48:38.905464Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 76 Topic 'topic1' partition 0 user consumer offset 297 partno 0 count 3 size 26214400 endOffset 300 max time lag 0ms effective offset 297 2025-12-23T18:48:38.905701Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 76 added 1 blobs, size 0 count 3 last offset 290, current partition end offset: 300 2025-12-23T18:48:38.905727Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 76. Send blob request. 2025-12-23T18:48:38.905782Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 290 partno 0 count 10 parts_count 0 source 1 size 377 accessed 2 times before, last time 2025-12-23T18:48:38.000000Z 2025-12-23T18:48:38.905825Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 76. All 1 blobs are from cache. 2025-12-23T18:48:38.905861Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:48:38.905955Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 290 totakecount 10 count 10 size 356 from pos 7 cbcount 10 2025-12-23T18:48:38.906055Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-12-23T18:48:38.906285Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 290 partno 0 count 10 parts 0 suffix '63' 2025-12-23T18:48:38.906978Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [8942ff2b-99aad183-bc26d2fc-f1528f0b] reply ok 2025-12-23T18:48:38.907162Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:56362) <- (200 , 611 bytes) 2025-12-23T18:48:38.907296Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:56362) connection closed Http output full {"Messages":[{"MD5OfBody":"ed83ae0894ecf1dcca98701cefa96b63","Attributes":{"SentTimestamp":"1766515718506"},"ReceiptHandle":"CAAQqQI=","Body":"MessageBody-297","MessageId":"A9F7CE8B-9B04-59FC-8942-BC1375C9CABC"},{"MD5OfBody":"1199cbe1edbbd44c325cfce6309d033e","Attributes":{"SentTimestamp":"1766515718506"},"ReceiptHandle":"CAAQqgI=","Body":"MessageBody-298","MessageId":"42A487A9-840A-5640-8D1B-996F67C88717"},{"MD5OfBody":"5b9e997bca262b61080f0ec85590ea89","Attributes":{"SentTimestamp":"1766515718506"},"ReceiptHandle":"CAAQqwI=","Body":"MessageBody-299","MessageId":"ABB10D4F-48AA-55E2-BEC7-BD08AF90AFDC"}]} batchSizesHistogram (9): 1: 6 2: 7 3: 8 4: 7 5: 7 6: 7 7: 7 8: 6 9: 6 2025-12-23T18:48:38.914636Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:38.914675Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:38.914701Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:38.914769Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:38.914791Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:39.015004Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:39.015050Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:39.015064Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:39.015082Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:39.015095Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:39.118363Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:39.118401Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:39.118414Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:39.118433Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:39.118447Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |96.0%| [TM] {BAZEL_UPLOAD} 
ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TTicketParserTest::LoginGoodWithGroups [GOOD] >> TTicketParserTest::LoginGoodWithDelayUpdateSecurityState >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-ordinaryuser >> AuthorizeRequestToAccessService::CanRefreshTokenForAccessService [GOOD] >> TTicketParserTest::AccessServiceAuthenticationOk |95.9%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad >> ReadSessionImplTest::ProperlyOrdersDecompressedData >> PersQueueSdkReadSessionTest::SettingsValidation >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> KqpLocks::MixedTxFail-useSink [GOOD] >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::WriteSkewReplace-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 3711, MsgBus: 63904 2025-12-23T18:48:06.815753Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127103438151493:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:06.816948Z node 1 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040e2/r3tmp/tmpEaL0gi/pdisk_1.dat 2025-12-23T18:48:07.012439Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:07.035426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:07.035533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:07.043516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:07.110476Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:07.111907Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127103438151470:2081] 1766515686813213 != 1766515686813216 TServer::EnableGrpc on GrpcPort 3711, node 1 2025-12-23T18:48:07.221062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:07.221090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:07.221101Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:07.221181Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:07.277555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:63904 TClient is connected to server localhost:63904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515687188 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:48:07.671427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:07.823003Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:09.854579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127116323054251:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:09.854667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127116323054243:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:09.854850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:09.864875Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127116323054258:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:09.865012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:09.877856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:09.892383Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127116323054257:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:48:09.964195Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127116323054310:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:10.254901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:48:10.419344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127120618021739:2337];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:48:10.419361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587127120618021747:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:48:10.419601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127120618021739:2337];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:48:10.419822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127120618021739:2337];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:48:10.419905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127120618021739:2337];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:48:10.419985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127120618021739:2337];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:48:10.420083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127120618021739:2337];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:48:10.420134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587127120618021747:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:48:10.420239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127120618021739:2337];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:48:10.420343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7587127120618021747:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:48:10.420375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127120618021739:2337];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:48:10.420447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587127120618021747:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:48:10.420489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127120618021739:2337];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:48:10.420560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587127120618021747:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:48:10.420609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127120618021739:2337];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:48:10.420663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587127120618021747:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:48:10.420708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127120618021739:2337];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:48:10.420825Z ... emeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:31.942756Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:32.180492Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:34.787908Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127223494280017:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:34.788029Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:34.788434Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127223494280051:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:34.790465Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127223494280061:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:34.790574Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:34.790821Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587127223494280078:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:34.790950Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:34.792538Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:34.802707Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587127223494280053:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:48:34.902069Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587127223494280108:2531] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:34.969631Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:35.028767Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:36.180877Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587127210609377318:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:36.184098Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:36.191603Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:38.248499Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715668; 2025-12-23T18:48:38.272051Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [3:7587127240674158009:2966], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [3:7587127236379190082:2966]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[3:7587127240674158009:2966].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-12-23T18:48:38.272178Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7587127236379190082:2966]. 2025-12-23T18:48:38.272365Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.272384Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.272395Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.272415Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.272430Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.272511Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=3&id=YWU0MDdmMmItZWExZWY3YmQtZmI3ZmJmMy1jZTBkMDM2NA==, ActorId: [3:7587127236379190082:2966], ActorState: ExecuteState, TraceId: 01kd68ne2j3tgre2kz0wq2c472, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7587127240674158003:2966] from: [3:7587127240674158002:2966] 2025-12-23T18:48:38.272529Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.272544Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.272645Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [3:7587127240674158003:2966] TxId: 281474976715668. Ctx: { TraceId: 01kd68ne2j3tgre2kz0wq2c472, Database: /Root, SessionId: ydb://session/3?node_id=3&id=YWU0MDdmMmItZWExZWY3YmQtZmI3ZmJmMy1jZTBkMDM2NA==, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-12-23T18:48:38.272804Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.272819Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.272836Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.273072Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=YWU0MDdmMmItZWExZWY3YmQtZmI3ZmJmMy1jZTBkMDM2NA==, ActorId: [3:7587127236379190082:2966], ActorState: ExecuteState, TraceId: 01kd68ne2j3tgre2kz0wq2c472, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } 2025-12-23T18:48:38.273452Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.273470Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.273484Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.273496Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.273508Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.273522Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.273536Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.273550Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.273563Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 2025-12-23T18:48:38.273618Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [3:7587127240674158002:2966], SessionActorId: [3:7587127236379190082:2966], StateRollback: unknown message 278003713 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad >> ReadSessionImplTest::SimpleDataHandlers [GOOD] 
>> ReadSessionImplTest::SimpleDataHandlersWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-12-23T18:48:41.474606Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.474638Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.474660Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.475139Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.475819Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.488297Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.488766Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.490729Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.490773Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.490797Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.491438Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.491990Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.492228Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.492427Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.492792Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-12-23T18:48:41.493791Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.493813Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.493830Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.494131Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.494708Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.494912Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.495729Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.500668Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.503762Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:48:41.503918Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-12-23T18:48:41.503975Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-12-23T18:48:41.505214Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.505325Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.505364Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.505847Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.506333Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.506472Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.506676Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-12-23T18:48:41.507743Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:48:41.508014Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-12-23T18:48:41.508393Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-12-23T18:48:41.508640Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-12-23T18:48:41.508793Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.508837Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:48:41.508874Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-12-23T18:48:41.509015Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-12-23T18:48:41.509131Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-12-23T18:48:41.509155Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-12-23T18:48:41.509180Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:48:41.509323Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-12-23T18:48:41.509400Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-12-23T18:48:41.509424Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-12-23T18:48:41.509445Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-12-23T18:48:41.509545Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-12-23T18:48:41.509610Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-12-23T18:48:41.509661Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-12-23T18:48:41.509687Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:48:41.509795Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-12-23T18:48:41.511208Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.511245Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.511267Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.511625Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-12-23T18:48:41.512080Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.512241Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.512451Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-12-23T18:48:41.513408Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:48:41.513624Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-12-23T18:48:41.513925Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-12-23T18:48:41.514147Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-12-23T18:48:41.514277Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.514318Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:48:41.514338Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-12-23T18:48:41.514357Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-12-23T18:48:41.514401Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:48:41.514618Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). Partition stream id: 1 GOT RANGE 0 5 Getting new event 2025-12-23T18:48:41.514728Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-12-23T18:48:41.514770Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-12-23T18:48:41.514795Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-12-23T18:48:41.514824Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-12-23T18:48:41.514862Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:48:41.515087Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2025-12-23T18:48:41.516365Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.516392Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.516429Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.516728Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.517272Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.517428Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.517598Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.518543Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:48:41.519798Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:48:41.520123Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-12-23T18:48:41.520258Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-12-23T18:48:41.520360Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.520406Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:48:41.520429Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-12-23T18:48:41.520447Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-12-23T18:48:41.520477Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 2, size 16 bytes 2025-12-23T18:48:41.520510Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-12-23T18:48:41.520657Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2025-12-23T18:48:41.520799Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 |96.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-12-23T18:48:41.619433Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.619491Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.619516Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.619861Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-12-23T18:48:41.619901Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.619927Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.620872Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008573s 2025-12-23T18:48:41.621584Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-12-23T18:48:41.622158Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-12-23T18:48:41.622288Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.623315Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.623333Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.623350Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.623706Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-12-23T18:48:41.623751Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.623787Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.623866Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009772s 2025-12-23T18:48:41.624562Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.624919Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-12-23T18:48:41.625027Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.626050Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.626074Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.626099Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.626385Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-12-23T18:48:41.626413Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.626432Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.626492Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.181948s 2025-12-23T18:48:41.626942Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.627276Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-12-23T18:48:41.627350Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.628173Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.628212Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.628228Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.628516Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-12-23T18:48:41.628539Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.628561Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.628608Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.220257s 2025-12-23T18:48:41.628987Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.629337Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-12-23T18:48:41.629450Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.630285Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.630306Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.630328Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.630564Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.631008Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.640197Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.643149Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-12-23T18:48:41.643189Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.643207Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.643254Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.298199s 2025-12-23T18:48:41.643396Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-12-23T18:48:41.645859Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.645880Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.645900Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.646316Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.646697Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.646901Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.647368Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.748304Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.750966Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-12-23T18:48:41.751072Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.751132Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-12-23T18:48:41.751208Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-12-23T18:48:41.851611Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-12-23T18:48:41.854827Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-12-23T18:48:41.858608Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.858647Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.858672Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.859079Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.859533Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.859681Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.860134Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.963747Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.964016Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-12-23T18:48:41.964089Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.964130Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-12-23T18:48:41.964207Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-12-23T18:48:41.964332Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-12-23T18:48:41.970525Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-12-23T18:48:41.970645Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-12-23T18:48:41.970971Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |96.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> KqpSinkMvcc::OlapNamedStatement [GOOD] >> KqpSinkMvcc::OlapMultiSinks >> TTicketParserTest::LoginBad [GOOD] >> TTicketParserTest::LoginCheckRemovedUser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::WriteSkewReplace+IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 5229, MsgBus: 17098 2025-12-23T18:47:56.236472Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127059761783242:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:56.236508Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040ec/r3tmp/tmpu8gzGK/pdisk_1.dat 2025-12-23T18:47:56.711679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:56.711804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:56.771614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:56.780467Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:56.795640Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5229, node 1 2025-12-23T18:47:56.953192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:56.953211Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:56.953227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:56.953348Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:56.965437Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17098 2025-12-23T18:47:57.251256Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
TClient is connected to server localhost:17098 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515676821 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:57.541674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:57.559718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:59.841734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127072646685957:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.842026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.842533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127072646685986:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.842579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127072646685985:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.842626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:59.847095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:59.860204Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127072646685989:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:47:59.947106Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127072646686040:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:00.271820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:48:00.478039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127076941653473:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:48:00.478301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127076941653473:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:48:00.478517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127076941653473:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:48:00.478609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127076941653473:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:48:00.478708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127076941653473:2338];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:48:00.479434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127076941653472:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:48:00.479523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127076941653472:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:48:00.479660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127076941653472:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:48:00.479743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127076941653472:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:48:00.479825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7587127076941653472:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:48:00.479943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127076941653472:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:48:00.480048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127076941653472:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:48:00.480144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127076941653472:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:48:00.480234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127076941653472:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:48:00.480361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127076941653472:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:48:00.480469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127076941653472:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:48:00.480575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127076941653472:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:48:00.480687Z node 1 :TX_COLUMNS ... 
OLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179151Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179163Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179248Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179267Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179339Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179360Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179430Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179454Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179524Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179541Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179617Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179628Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179714Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179718Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179832Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179846Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179944Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.179954Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180031Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180047Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180118Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180143Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180207Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180257Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180297Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: 
fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180364Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180384Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180474Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180493Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180563Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180587Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180653Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180697Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180747Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180801Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180845Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180892Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180939Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.180997Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181029Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181136Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181155Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181254Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181255Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181361Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181381Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181510Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181558Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181620Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181716Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: 
fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181832Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181841Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181937Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.181943Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182020Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182117Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182211Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182307Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182400Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182489Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182568Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182573Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182644Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182657Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182732Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182822Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.182932Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.183035Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.183151Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.183264Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.183350Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.183455Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.183562Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.183670Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.183799Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: 
fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.183929Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.184029Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.184110Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.184215Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.184240Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.184319Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.184365Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.184390Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.184445Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; 2025-12-23T18:48:38.184463Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715673; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::MixedTxFail-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 8774, MsgBus: 28500 2025-12-23T18:48:10.664107Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127117109018947:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:10.664344Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:10.703409Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040dc/r3tmp/tmpX9DDlD/pdisk_1.dat 2025-12-23T18:48:11.006941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:11.035132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:11.035244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:11.107597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:11.121700Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127117109018921:2081] 1766515690663026 != 
1766515690663029 2025-12-23T18:48:11.137408Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8774, node 1 2025-12-23T18:48:11.282839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:11.311430Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:11.311459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:11.311480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:11.311568Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28500 2025-12-23T18:48:11.679197Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515691192 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:11.816204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:11.824123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:48:11.841619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:48:12.000234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:12.195982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:12.276026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:14.183519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127134288889983:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:14.183619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:14.184268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127134288889993:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:14.184310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:14.529896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.561579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.590323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.618400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.643401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.718462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.791371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.837243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.904594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127134288890870:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:14.904673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:14.904970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127134288890875:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:14.904987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127134288890876:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:14.905047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025 ... bles_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.658557Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.658574Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.661463Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.661522Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.661537Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.665099Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.665156Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.665170Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.669005Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.669060Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.669075Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.671336Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.671386Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.671401Z node 4 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.676934Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.676990Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.677006Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.678140Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.678195Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.678210Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.684302Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.684354Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.684371Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.684671Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.684723Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.684736Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.690660Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.690710Z node 4 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.690726Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.692422Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.692473Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.692487Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.697162Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.697213Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.697230Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.699954Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.700007Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.700023Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.703398Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037952;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.703447Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037952;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.703461Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037952;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 
2025-12-23T18:48:39.708711Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.708804Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:39.708821Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2025-12-23T18:48:40.380219Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=ZDA2ZGY4NzEtMjEyNzUwZGMtZmQ2MjRiMmMtODNkM2U0Nzg=, ActorId: [4:7587127243167191273:2686], ActorState: ExecuteState, TraceId: 01kd68ng3e012brqxxwb2vghhj, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/DataShard`" issue_code: 2001 severity: 1 } 2025-12-23T18:48:40.383369Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976710670; |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TPQTabletTests::Multiple_PQTablets_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence [GOOD] Test command err: 2025-12-23T18:48:20.541715Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127163139267776:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:20.542281Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0058dc/r3tmp/tmpzRdndn/pdisk_1.dat 2025-12-23T18:48:20.898861Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:20.944065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:20.944214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:21.010069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:21.106541Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13306, node 1 2025-12-23T18:48:21.147160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:21.263491Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-12-23T18:48:21.263533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:21.263540Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:21.263634Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:21.539462Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:63541 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515701076 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:21.838038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:21.849590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:63541 2025-12-23T18:48:22.172684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:48:22.183221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:48:22.185517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:48:22.221450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:22.344586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:48:22.390721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.440496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-12-23T18:48:22.447634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.490326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.533758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.589742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.634122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:48:22.682205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:48:22.724262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:24.117609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127180319138490:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.117735Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.117965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127180319138500:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.118306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127180319138504:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.118352Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:24.122920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:24.135368Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127180319138506:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:48:24.216385Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127180319138557:3104] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:24.763353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, subope ... g cookie 4. Send blob request. Http output full {"SequenceNumber":"0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"} 2025-12-23T18:48:41.363206Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:48:41.363330Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2025-12-23T18:48:41.363425Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 72 accessed 1 times before, last time 2025-12-23T18:48:41.000000Z 2025-12-23T18:48:41.363465Z node 3 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 4. All 1 blobs are from cache. 2025-12-23T18:48:41.363495Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1026: Topic 'topic1' partition 0 user consumer readTimeStamp done, result 1766515721358 queuesize 0 startOffset 0 2025-12-23T18:48:41.363495Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:48:41.363572Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:48:41.363641Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2025-12-23T18:48:41.363709Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-12-23T18:48:41.363926Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:42002) incoming connection opened 2025-12-23T18:48:41.364028Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:42002) -> (POST /Root, 74 bytes) 2025-12-23T18:48:41.364200Z node 3 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [783f:5867:697b:0:603f:5867:697b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: c3912a37-70754efd-db8c022-2966f604 2025-12-23T18:48:41.364636Z node 3 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [c3912a37-70754efd-db8c022-2966f604] got new request from [783f:5867:697b:0:603f:5867:697b:0] database '/Root' stream '' 2025-12-23T18:48:41.365037Z node 3 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [c3912a37-70754efd-db8c022-2966f604] [auth] Authorized successfully 2025-12-23T18:48:41.365050Z node 3 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [c3912a37-70754efd-db8c022-2966f604] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:48:41.365927Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 
WaitDeadlineMilliseconds: 1766515741365 VisibilityDeadlineMilliseconds: 1766515751365 MaxNumberOfMessages: 1 2025-12-23T18:48:41.366903Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'topic1' requestId: 2025-12-23T18:48:41.366947Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2025-12-23T18:48:41.367049Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 5 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 1 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-12-23T18:48:41.367249Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 72 count 1 last offset 0, current partition end offset: 1 2025-12-23T18:48:41.367270Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2025-12-23T18:48:41.367308Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 72 accessed 2 times before, last time 2025-12-23T18:48:41.000000Z 2025-12-23T18:48:41.367340Z node 3 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 2025-12-23T18:48:41.367370Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2025-12-23T18:48:41.367392Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2025-12-23T18:48:41.367472Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2025-12-23T18:48:41.367548Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-12-23T18:48:41.368315Z node 3 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [c3912a37-70754efd-db8c022-2966f604] reply ok 2025-12-23T18:48:41.368504Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:42002) <- (200 , 211 bytes) 2025-12-23T18:48:41.368625Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:42002) connection closed Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SentTimestamp":"1766515721358"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-0","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"}]} 2025-12-23T18:48:41.369389Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:42006) incoming connection opened 2025-12-23T18:48:41.369493Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:42006) -> (POST /Root, 80 bytes) 2025-12-23T18:48:41.369624Z node 3 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d874:5667:697b:0:c074:5667:697b:0] request [DeleteMessage] url [/Root] database [/Root] requestId: 20a20de-d6e5e824-8b6b3420-64532224 2025-12-23T18:48:41.369975Z node 3 :HTTP_PROXY INFO: http_req.cpp:1324: http request [DeleteMessage] requestId [20a20de-d6e5e824-8b6b3420-64532224] got new request from [d874:5667:697b:0:c074:5667:697b:0] database '/Root' stream '' 2025-12-23T18:48:41.370394Z node 3 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DeleteMessage] requestId [20a20de-d6e5e824-8b6b3420-64532224] [auth] Authorized successfully 2025-12-23T18:48:41.370413Z node 3 :HTTP_PROXY INFO: 
http_req.cpp:1077: http request [DeleteMessage] requestId [20a20de-d6e5e824-8b6b3420-64532224] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:48:41.371153Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:50: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPCommitRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 Offset: 0 2025-12-23T18:48:41.372079Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:41.372108Z node 3 :PERSQUEUE DEBUG: partition.cpp:2409: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:41.372140Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:48:41.372153Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:41.372178Z node 3 :PERSQUEUE DEBUG: partition.cpp:2473: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:41.372230Z node 3 :PERSQUEUE DEBUG: partition.cpp:3880: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer offset is set to 1 (startOffset 0) session 2025-12-23T18:48:41.372245Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:48:41.372259Z node 3 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:48:41.372283Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:41.372429Z node 3 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DeleteMessage] requestId [20a20de-d6e5e824-8b6b3420-64532224] reply ok Http output full {} 2025-12-23T18:48:41.372555Z node 3 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:48:41.372567Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:42006) <- (200 , 2 bytes) 2025-12-23T18:48:41.372670Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:42006) connection closed 2025-12-23T18:48:41.373121Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:41.373168Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:934: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer readTimeStamp for offset 1 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:41.373205Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:48:41.373226Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:41.373238Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:41.373247Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:41.373268Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:41.373282Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:41.373309Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:41.427331Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:41.427368Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:41.427380Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:41.427398Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:41.427410Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2025-12-23T18:48:41.527722Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:41.527755Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:41.527770Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:41.527789Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:41.527802Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime |96.0%| [TM] {BAZEL_UPLOAD} 
ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> TPQTabletTests::Multiple_PQTablets_1 [GOOD] >> TPQTest::TestCmdReadWithLastOffset >> ReadSessionImplTest::SuccessfulInit [GOOD] >> TPQTabletTests::Multiple_PQTablets_2 >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease |96.0%| [TA] $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-dbadmin |96.0%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-12-23T18:48:41.322356Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.322416Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.322451Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.323868Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.325941Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.337650Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.341519Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.347028Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:48:41.347427Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:48:41.347615Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-12-23T18:48:41.347717Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:48:41.347845Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.347884Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-12-23T18:48:41.347914Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-12-23T18:48:41.347931Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-12-23T18:48:41.350292Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.350318Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.350359Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.350633Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.351038Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.351208Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.351404Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-12-23T18:48:41.352202Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:48:41.352450Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-12-23T18:48:41.352739Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-12-23T18:48:41.352998Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-12-23T18:48:41.353125Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.353178Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:48:41.353220Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-12-23T18:48:41.353472Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-12-23T18:48:41.354612Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-12-23T18:48:41.354638Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-12-23T18:48:41.354669Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:48:41.354858Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-12-23T18:48:41.354922Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-12-23T18:48:41.354942Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-12-23T18:48:41.354961Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-12-23T18:48:41.355048Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-12-23T18:48:41.355110Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-12-23T18:48:41.355143Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-12-23T18:48:41.355185Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:48:41.355303Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-12-23T18:48:41.356710Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.356747Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.356778Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.357104Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.357468Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.365820Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.366142Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. 
Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-12-23T18:48:41.367541Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:48:41.367879Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-12-23T18:48:41.368326Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-12-23T18:48:41.368607Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-12-23T18:48:41.368783Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.368834Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-12-23T18:48:41.368951Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2025-12-23T18:48:41.369039Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:48:41.369060Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-12-23T18:48:41.369134Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2025-12-23T18:48:41.369217Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-12-23T18:48:41.369248Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-12-23T18:48:41.369306Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2025-12-23T18:48:41.369364Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-12-23T18:48:41.369388Z :DEBUG: [db] [sessionid] [cluster] The application data ... er". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:48:43.601588Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). 
Partition stream id: 1 GOT RANGE 0 201 2025-12-23T18:48:43.680134Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-12-23T18:48:43.680211Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-12-23T18:48:43.680300Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:43.681953Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:43.682569Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:43.683001Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-12-23T18:48:43.683999Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-12-23T18:48:43.802943Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-12-23T18:48:43.807809Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:43.809899Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:48:43.812974Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-12-23T18:48:43.813929Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-12-23T18:48:43.819194Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-12-23T18:48:43.820141Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-12-23T18:48:43.821063Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-12-23T18:48:43.821944Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-12-23T18:48:43.831035Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-12-23T18:48:43.831773Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-12-23T18:48:43.831861Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-12-23T18:48:43.832046Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-12-23T18:48:43.835636Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2025-12-23T18:48:43.856801Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:43.856850Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:43.856893Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:43.857264Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:43.857874Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:43.858060Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:43.858344Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-12-23T18:48:43.858944Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-12-23T18:48:43.862037Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:43.862129Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:43.862164Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:43.862536Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:43.863000Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:43.863152Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:43.863793Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:43.868670Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:48:43.868868Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:43.868954Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-12-23T18:48:43.874880Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 |96.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTabletTests::Multiple_PQTablets_2 [GOOD] >> TPartitionTests::UserActCount >> TPQTabletTests::One_Tablet_For_All_Partitions |96.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> TTicketParserTest::AccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk >> TTicketParserTest::LoginGoodWithDelayUpdateSecurityState [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions >> KqpSinkLocks::UpdateLocksOneShardRowExists [GOOD] >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> TPQTabletTests::One_New_Partition_In_Another_Tablet >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test >> PQCountersLabeled::Partition ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-12-23T18:48:44.304332Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.304363Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.304383Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:44.304993Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
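Editor's note: the trace around this point repeatedly cycles through `In Reconnect`, `Reconnecting session to cluster ... in 0.000000s`, and then either `Successfully connected` or `Got error ... Attempts done: 1` followed by `Closing session`. The sketch below is a generic bounded-retry-with-backoff loop that mirrors that shape; it is illustrative only and does not reproduce the SDK's actual retry policy, delays, or attempt limits.

```cpp
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Generic sketch: retry the connection a limited number of times and, once the
// attempts are exhausted, give up with the last error ("Attempts done: 1" above).
// Not the SDK implementation; the function and its parameters are assumptions.
bool ConnectWithRetries(const std::function<bool()>& tryConnect,
                        int maxAttempts,
                        std::chrono::milliseconds initialDelay) {
    auto delay = initialDelay;
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        std::cout << "Reconnecting session in " << delay.count() << "ms\n";
        std::this_thread::sleep_for(delay);
        if (tryConnect()) {
            std::cout << "Successfully connected. Initializing session\n";
            return true;
        }
        std::cout << "Got error. Attempts done: " << attempt << "\n";
        delay *= 2; // exponential backoff between attempts
    }
    std::cout << "Closing session: failed to establish connection\n";
    return false;
}

int main() {
    int calls = 0;
    // Fail twice, then succeed, to exercise the backoff path.
    bool ok = ConnectWithRetries([&] { return ++calls >= 3; },
                                 5, std::chrono::milliseconds(0));
    return ok ? 0 : 1;
}
```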
Initializing session 2025-12-23T18:48:44.305646Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-12-23T18:48:44.305713Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.306637Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.306677Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.306697Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:44.307014Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:44.307317Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-12-23T18:48:44.307368Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.308210Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.308296Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.308331Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:44.308624Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-12-23T18:48:44.308689Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.308720Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.309065Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-12-23T18:48:44.309953Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.309974Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.309991Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:44.310419Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-12-23T18:48:44.310450Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.310483Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.310543Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-12-23T18:48:44.315510Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-12-23T18:48:44.315531Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-12-23T18:48:44.315546Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:44.315943Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:44.316487Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:44.329826Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-12-23T18:48:44.330345Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:44.330758Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-12-23T18:48:44.334257Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-12-23T18:48:44.334541Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:44.334586Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:48:44.334610Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-12-23T18:48:44.334630Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-12-23T18:48:44.334658Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-12-23T18:48:44.334679Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-12-23T18:48:44.334712Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-12-23T18:48:44.334779Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-12-23T18:48:44.334824Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-12-23T18:48:44.334855Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-12-23T18:48:44.334873Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-12-23T18:48:44.334891Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-12-23T18:48:44.334907Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-12-23T18:48:44.334923Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-12-23T18:48:44.334948Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-12-23T18:48:44.334972Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-12-23T18:48:44.335023Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-12-23T18:48:44.335043Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-12-23T18:48:44.335061Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-12-23T18:48:44.335079Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-12-23T18:48:44.335099Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-12-23T18:48:44.335116Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-12-23T18:48:44.335133Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-12-23T18:48:44.335154Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-12-23T18:48:44.335170Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-12-23T18:48:44.335193Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-12-23T18:48:44.335210Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-12-23T18:48:44.335227Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-12-23T18:48:44.335244Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-12-23T18:48:44.335261Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-12-23T18:48:44.335288Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-12-23T18:48:44.335311Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-12-23T18:48:44.335387Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-12-23T18:48:44.335407Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-12-23T18:48:44.335423Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-12-23T18:48:44.335451Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-12-23T18:48:44.335475Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-12-23T18:48:44.335492Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-12-23T18:48:44.335534Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-12-23T18:48:44.335558Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-12-23T18:48:44.335576Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-12-23T18:48:44.335597Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-12-23T18:48:44.335614Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-12-23T18:48:44.335630Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-12-23T18:48:44.335660Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-12-23T18:48:44.335678Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-12-23T18:48:44.335693Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-12-23T18:48:44.335710Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-12-23T18:48:44.335726Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-12-23T18:48:44.335748Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-12-23T18:48:44.335808Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-12-23T18:48:44.338627Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-12-23T18:48:44.339020Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-12-23T18:48:44.339063Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-12-23T18:48:44.339087Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-12-23T18:48:44.339119Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-12-23T18:48:44.339147Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-12-23T18:48:44.339186Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-12-23T18:48:44.339208Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-12-23T18:48:44.339227Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-12-23T18:48:44.339262Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-12-23T18:48:44.339292Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-12-23T18:48:44.339310Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-12-23T18:48:44.339330Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-12-23T18:48:44.339347Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-12-23T18:48:44.339364Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-12-23T18:48:44.339380Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-12-23T18:48:44.339402Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-12-23T18:48:44.339445Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-12-23T18:48:44.339464Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-12-23T18:48:44.339481Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-12-23T18:48:44.339565Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-12-23T18:48:44.339588Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-12-23T18:48:44.339605Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-12-23T18:48:44.339622Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-12-23T18:48:44.339641Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-12-23T18:48:44.339658Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-12-23T18:48:44.339690Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-12-23T18:48:44.339707Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-12-23T18:48:44.339725Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-12-23T18:48:44.339741Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-12-23T18:48:44.339758Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-12-23T18:48:44.339781Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-12-23T18:48:44.339806Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-12-23T18:48:44.339881Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-12-23T18:48:44.339905Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-12-23T18:48:44.339932Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-12-23T18:48:44.339960Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-12-23T18:48:44.339981Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-12-23T18:48:44.339999Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-12-23T18:48:44.340016Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-12-23T18:48:44.340034Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-12-23T18:48:44.340051Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-12-23T18:48:44.340081Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-12-23T18:48:44.340097Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-12-23T18:48:44.340115Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-12-23T18:48:44.340133Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-12-23T18:48:44.340150Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-12-23T18:48:44.340167Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-12-23T18:48:44.340188Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-12-23T18:48:44.340218Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-12-23T18:48:44.340236Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-12-23T18:48:44.340288Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-12-23T18:48:44.340425Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-12-23T18:48:44.341759Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.341796Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.341817Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:44.342145Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-12-23T18:48:44.342546Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:44.342942Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.343363Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:44.447764Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.448035Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-12-23T18:48:44.448100Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:44.448147Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-12-23T18:48:44.448217Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-12-23T18:48:44.648673Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-12-23T18:48:44.750951Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-12-23T18:48:44.751113Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-12-23T18:48:44.754939Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-12-23T18:48:44.756161Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.756182Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.756203Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:44.756530Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:44.757019Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:44.757168Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.758642Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:44.859775Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:44.860007Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-12-23T18:48:44.860059Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:44.860099Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-12-23T18:48:44.860171Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-12-23T18:48:44.860282Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-12-23T18:48:44.860462Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-12-23T18:48:44.860525Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
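Editor's note: the session trace above reduces to a small event loop — confirm partition stream creation, take received data and commit it, confirm partition stream destruction, and stop once the session is closed or aborted. The sketch below models that dispatch with stand-in event types; these structs are assumptions for illustration, not the SDK's event classes.

```cpp
#include <cstddef>
#include <iostream>
#include <string>
#include <type_traits>
#include <variant>
#include <vector>

// Stand-in event types mirroring what the log prints for a read session.
struct TCreatePartitionStream  { std::string Topic; int Partition; };
struct TDataReceived           { int Messages; std::size_t Bytes; };
struct TDestroyPartitionStream { int PartitionStreamId; };
struct TSessionClosed          { std::string Status; };

using TEvent = std::variant<TCreatePartitionStream, TDataReceived,
                            TDestroyPartitionStream, TSessionClosed>;

// Dispatch one event the way the session log does: confirm lifecycle events,
// hand data to the application and commit it, and stop on session close.
bool HandleEvent(const TEvent& event) {
    return std::visit([](const auto& ev) -> bool {
        using T = std::decay_t<decltype(ev)>;
        if constexpr (std::is_same_v<T, TCreatePartitionStream>) {
            std::cout << "Confirm partition stream create. Topic: " << ev.Topic
                      << " Partition: " << ev.Partition << "\n";
        } else if constexpr (std::is_same_v<T, TDataReceived>) {
            std::cout << "Transferred " << ev.Messages << " messages, "
                      << ev.Bytes << " bytes; committing\n";
        } else if constexpr (std::is_same_v<T, TDestroyPartitionStream>) {
            std::cout << "Confirm partition stream destroy. Id: "
                      << ev.PartitionStreamId << "\n";
        } else { // TSessionClosed
            std::cout << "Session closed: " << ev.Status << "\n";
            return false;
        }
        return true;
    }, event);
}

int main() {
    std::vector<TEvent> events{
        TCreatePartitionStream{"TestTopic", 1},
        TDataReceived{2, 6},
        TDestroyPartitionStream{1},
        TSessionClosed{"ABORTED"},
    };
    for (const auto& ev : events) {
        if (!HandleEvent(ev)) break; // stop the loop once the session is closed
    }
    return 0;
}
```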
Partition: 1 2025-12-23T18:48:44.860619Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> KqpSinkTx::TIsolationSettingTest-IsOlap-UsePragma [GOOD] >> KqpSinkTx::TIsolationSettingTest-IsOlap+UsePragma >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] |96.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> TPQTabletTests::Limit_On_The_Number_Of_Transactons >> BasicUsage::SimpleHandlers [GOOD] >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood >> TTicketParserTest::LoginRefreshGroupsWithError [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TQuotaTracker::TestSmallMessages [GOOD] >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> TSourceIdTests::ExpensiveCleanup >> KqpSinkMvcc::TxReadsItsOwnWrites+IsOlap [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::UpdateLocksOneShardRowExists [GOOD] Test command err: Trying to start YDB, gRPC: 16501, MsgBus: 5081 2025-12-23T18:48:13.177213Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127130495260351:2153];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:13.180009Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040d2/r3tmp/tmpRl8kfT/pdisk_1.dat 2025-12-23T18:48:13.535937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:13.536031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:13.539463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:13.587276Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:13.588083Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:13.594872Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127130495260236:2081] 1766515693153145 != 1766515693153148 TServer::EnableGrpc on GrpcPort 16501, node 1 2025-12-23T18:48:13.731189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:13.731214Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:13.731226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:13.731286Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:13.838448Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5081 2025-12-23T18:48:14.178872Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515693614 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:14.321988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:14.328545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:16.361316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127143380163007:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:16.361425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:16.361567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127143380163015:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:16.361653Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127143380163021:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:16.361745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:16.365991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:16.379777Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127143380163023:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:48:16.441222Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127143380163074:2533] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:16.680932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:16.772340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:17.684381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:18.423686Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127130495260351:2153];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:18.458269Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 63018, MsgBus: 18903 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040d2/r3tmp/tmpwp7nNC/pdisk_1.dat 2025-12-23T18:48:20.534898Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:20.535012Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:48:20.642878Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:20.646874Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127161415285698:2081] 1766515700494933 != 1766515700494936 2025-12-23T18:48:20.673534Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:20.673635Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:20.678320Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63018, node 2 2025-12-23T18:48:20.789821Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:20.880968Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:20.881009Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:20.881018Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:20.881104Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18903 TClient is connected to server localhost:18903 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515700719 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057 ... ; 2025-12-23T18:48:34.524714Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [3:7587127219972257172:2961] TxId: 281474976715666. Ctx: { TraceId: 01kd68nab51vktj52amy7rbb83, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NTc2N2U1NmEtNGE5ODA2MzYtYTFiNWI4YWEtN2FhZmM4NTQ=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-12-23T18:48:34.524898Z node 3 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [1766515714537 : 281474976715666] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2025-12-23T18:48:34.525153Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=NTc2N2U1NmEtNGE5ODA2MzYtYTFiNWI4YWEtN2FhZmM4NTQ=, ActorId: [3:7587127219972257102:2961], ActorState: ExecuteState, TraceId: 01kd68nab51vktj52amy7rbb83, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } Trying to start YDB, gRPC: 3203, MsgBus: 64449 2025-12-23T18:48:36.229918Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587127231916856097:2169];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:36.230087Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040d2/r3tmp/tmpcTtpir/pdisk_1.dat 2025-12-23T18:48:36.249882Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:36.350187Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:36.350296Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:36.353602Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:36.403514Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3203, node 4 2025-12-23T18:48:36.507362Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:36.507391Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:36.507400Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:36.507487Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:36.551683Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:64449 TClient is connected to server localhost:64449 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515716455 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:37.104891Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:37.233304Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:40.024766Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127249096726002:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:40.024875Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:40.025243Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127249096726029:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:40.032941Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:40.053615Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587127249096726032:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:48:40.131581Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587127249096726094:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:40.191063Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:40.237002Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:41.336413Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587127231916856097:2169];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.339905Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:41.391211Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:43.408992Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-12-23T18:48:43.409405Z node 4 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 5 at tablet 72075186224037889 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-12-23T18:48:43.411281Z node 4 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037889 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-12-23T18:48:43.411566Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [4:7587127261981635994:2963], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [4:7587127257686668672:2963]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[4:7587127261981635994:2963].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-12-23T18:48:43.411703Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [4:7587127261981635987:2963], SessionActorId: [4:7587127257686668672:2963], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7587127257686668672:2963]. 2025-12-23T18:48:43.411979Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=4&id=NzE0ZTUwMzMtNWUxNTIxYmYtNTg0YWFmMDAtZGJjZWFhNDg=, ActorId: [4:7587127257686668672:2963], ActorState: ExecuteState, TraceId: 01kd68nk1p5becz1gdjr12zcv6, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7587127261981636026:2963] from: [4:7587127261981635987:2963] 2025-12-23T18:48:43.412078Z node 4 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [4:7587127261981636026:2963] TxId: 281474976710666. Ctx: { TraceId: 01kd68nk1p5becz1gdjr12zcv6, Database: /Root, SessionId: ydb://session/3?node_id=4&id=NzE0ZTUwMzMtNWUxNTIxYmYtNTg0YWFmMDAtZGJjZWFhNDg=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-12-23T18:48:43.412414Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=NzE0ZTUwMzMtNWUxNTIxYmYtNTg0YWFmMDAtZGJjZWFhNDg=, ActorId: [4:7587127257686668672:2963], ActorState: ExecuteState, TraceId: 01kd68nk1p5becz1gdjr12zcv6, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TPQTabletTests::Limit_On_The_Number_Of_Transactons [GOOD] |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageTestClean >> TSourceIdTests::ExpensiveCleanup [GOOD] >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TPQTabletTests::Non_Kafka_Transaction_Supportive_Partitions_Should_Not_Be_Deleted_After_Timeout >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: processed_blobs=41800 quoted_time=9.980000s Iteration 0 Iteration 1 Iteration 2 Iteration 3 Iteration 4 Iteration 5 Iteration 6 Iteration 7 Iteration 8 Iteration 9 Iteration 10 Iteration 11 Iteration 12 Iteration 13 Iteration 14 Iteration 15 Iteration 16 Iteration 17 Iteration 18 Iteration 19 Iteration 20 Iteration 21 Iteration 22 Iteration 23 Iteration 24 Iteration 25 Iteration 26 Iteration 27 Iteration 28 Iteration 29 Iteration 30 Iteration 31 Iteration 32 Iteration 33 Iteration 34 Iteration 35 Iteration 36 Iteration 37 Iteration 38 Iteration 39 Iteration 40 Iteration 41 Iteration 42 Iteration 43 Iteration 44 Iteration 45 Iteration 46 Iteration 47 Iteration 48 Iteration 49 Iteration 50 Iteration 51 Iteration 52 Iteration 53 Iteration 54 Iteration 55 Iteration 56 Iteration 57 Iteration 58 Iteration 59 Iteration 60 Iteration 61 Iteration 62 Iteration 63 Iteration 64 Iteration 65 Iteration 66 Iteration 67 Iteration 68 Iteration 69 Iteration 70 Iteration 71 Iteration 72 Iteration 73 Iteration 74 Iteration 75 Iteration 76 Iteration 77 Iteration 78 Iteration 79 Iteration 80 Iteration 81 Iteration 82 Iteration 83 Iteration 84 Iteration 85 Iteration 86 Iteration 87 Iteration 88 Iteration 89 Iteration 90 Iteration 91 Iteration 92 Iteration 93 Iteration 94 Iteration 95 Iteration 96 Iteration 97 Iteration 98 Iteration 99 Iteration 100 Iteration 101 Iteration 102 Iteration 103 Iteration 104 Iteration 105 Iteration 106 Iteration 107 Iteration 108 Iteration 109 Iteration 110 Iteration 111 Iteration 112 Iteration 113 Iteration 114 Iteration 115 Iteration 116 Iteration 117 Iteration 118 Iteration 119 Iteration 120 Iteration 121 Iteration 122 Iteration 123 Iteration 124 Iteration 125 Iteration 126 Iteration 127 Iteration 128 Iteration 129 Iteration 130 Iteration 131 Iteration 132 Iteration 133 Iteration 134 Iteration 135 Iteration 136 Iteration 137 Iteration 138 Iteration 139 Iteration 140 Iteration 141 Iteration 142 Iteration 143 
Iteration 144 Iteration 145 ... Iteration 998 Iteration 999 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTest::TestCmdReadWithLastOffset [GOOD] >> TPQTest::TestAccountReadQuota >> TTicketParserTest::BulkAuthorizationRetryError [GOOD] >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageTestClean [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2025-12-23T18:47:41.518010Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1766515661517976 2025-12-23T18:47:41.814466Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126994166241290:2212];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:41.814520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:47:41.851529Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587126994019313458:2151];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056e8/r3tmp/tmp5BDIQM/pdisk_1.dat 2025-12-23T18:47:41.859841Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:47:41.871792Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:47:41.872750Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:47:42.067053Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:42.069698Z node 1 :KQP_PROXY WARN:
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:42.100882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:42.100983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:42.104004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:42.104086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:42.124670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:42.125038Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:47:42.138715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:42.191547Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:42.250567Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 15643, node 1 2025-12-23T18:47:42.330204Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:42.375941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0056e8/r3tmp/yandexB9iOfz.tmp 2025-12-23T18:47:42.375984Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0056e8/r3tmp/yandexB9iOfz.tmp 2025-12-23T18:47:42.376166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0056e8/r3tmp/yandexB9iOfz.tmp 2025-12-23T18:47:42.376275Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:42.413059Z INFO: TTestServer started on Port 6323 GrpcPort 15643 TClient is connected to server localhost:6323 PQClient connected to localhost:15643 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515662254 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:42.688447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-12-23T18:47:42.814391Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:42.858902Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:45.183387Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127011199182886:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:45.183485Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127011199182860:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:45.183556Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:45.185393Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127011199182898:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:45.185517Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:45.199631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:45.233552Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587127011199182897:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-12-23T18:47:45.474321Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127011199182927:2138] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:45.512291Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587127011346111690:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:47:45.512794Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=M2ZiNjRiMWMtNmY3OWYzOTQtZDBkZjg3YTktOGQ1Y2IxNmM=, ActorId: [1:7587127011346111664:2328], ActorState: ExecuteState, TraceId: 01kd68ktacc78kda32s598vms5, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:47:45.513613Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587127011199182942:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:47:45.513999Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=ZWQ0YWVlZDctNzNjNjYyY2EtNTg1YWVjYjgtOGZkMDFiOWQ=, ActorId: [2:7587127011199182858:2300], ActorState: ExecuteState, TraceId: 01kd68kt8t3sbd9gz688w37jfm, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:47:45.515118Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Clu ... ed/user session shared/user_3_2_8902199394056779233_v1 grpc closed 2025-12-23T18:48:44.281642Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer shared/user session shared/user_3_3_13818388757849533163_v1 grpc read done: success# 0, data# { } 2025-12-23T18:48:44.281655Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer shared/user session shared/user_3_2_8902199394056779233_v1 is DEAD 2025-12-23T18:48:44.281657Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer shared/user session shared/user_3_3_13818388757849533163_v1 grpc read failed 2025-12-23T18:48:44.281681Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 3 consumer shared/user session shared/user_3_3_13818388757849533163_v1 grpc closed 2025-12-23T18:48:44.281715Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer shared/user session shared/user_3_3_13818388757849533163_v1 is DEAD 2025-12-23T18:48:44.282589Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_13119113009436732740_v1 grpc read done: success# 0, data# { } 2025-12-23T18:48:44.282606Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_3_1_13119113009436732740_v1 grpc read failed 2025-12-23T18:48:44.282623Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_3_1_13119113009436732740_v1 grpc closed 2025-12-23T18:48:44.282638Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_13119113009436732740_v1 is DEAD 2025-12-23T18:48:44.282726Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: src_id|ba3d3792-48c18162-1a2f7b8f-459cd13c_0 grpc read done: success: 0 data: 2025-12-23T18:48:44.282753Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: 
src_id|ba3d3792-48c18162-1a2f7b8f-459cd13c_0 grpc read failed 2025-12-23T18:48:44.282782Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: src_id|ba3d3792-48c18162-1a2f7b8f-459cd13c_0 grpc closed 2025-12-23T18:48:44.282808Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: src_id|ba3d3792-48c18162-1a2f7b8f-459cd13c_0 is DEAD 2025-12-23T18:48:44.283281Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:48:44.283830Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037892] Destroy direct read session shared/user_3_3_13818388757849533163_v1 2025-12-23T18:48:44.283888Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [3:7587127258892970627:2482] destroyed 2025-12-23T18:48:44.283910Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [3:7587127258892970650:2472] destroyed 2025-12-23T18:48:44.283933Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2025-12-23T18:48:44.283956Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:44.283968Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:44.283977Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:44.283989Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:44.283998Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:48:44.284051Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_3_13818388757849533163_v1 2025-12-23T18:48:44.284214Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127258892970602:2470] disconnected. 2025-12-23T18:48:44.284246Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127258892970602:2470] disconnected; active server actors: 1 2025-12-23T18:48:44.284275Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127258892970602:2470] client user disconnected session shared/user_3_2_8902199394056779233_v1 2025-12-23T18:48:44.284323Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-12-23T18:48:44.284377Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127258892970604:2471] disconnected. 
2025-12-23T18:48:44.284397Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127258892970604:2471] disconnected; active server actors: 1 2025-12-23T18:48:44.284410Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127258892970604:2471] client user disconnected session shared/user_3_3_13818388757849533163_v1 2025-12-23T18:48:44.284455Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127258892970603:2469] disconnected. 2025-12-23T18:48:44.284467Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127258892970603:2469] disconnected; active server actors: 1 2025-12-23T18:48:44.284480Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127258892970603:2469] client user disconnected session shared/user_3_1_13119113009436732740_v1 2025-12-23T18:48:44.294257Z :INFO: [/Root] [/Root] [77df8615-61c65041-6dc5bd7e-15927d16] Closing read session. Close timeout: 0.000000s 2025-12-23T18:48:44.294320Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-12-23T18:48:44.294356Z :INFO: [/Root] [/Root] [77df8615-61c65041-6dc5bd7e-15927d16] Counters: { Errors: 0 CurrentSessionLifetimeMs: 955 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:48:44.294400Z :INFO: [/Root] [/Root] [98153317-ba713e74-660c63c9-9b8460c6] Closing read session. Close timeout: 0.000000s 2025-12-23T18:48:44.294425Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-12-23T18:48:44.294446Z :INFO: [/Root] [/Root] [98153317-ba713e74-660c63c9-9b8460c6] Counters: { Errors: 0 CurrentSessionLifetimeMs: 945 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:48:44.294464Z :INFO: [/Root] [/Root] [67e70c5c-3ecda0e3-c4a29594-3b8c7eec] Closing read session. Close timeout: 0.000000s 2025-12-23T18:48:44.294485Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-12-23T18:48:44.294505Z :INFO: [/Root] [/Root] [67e70c5c-3ecda0e3-c4a29594-3b8c7eec] Counters: { Errors: 0 CurrentSessionLifetimeMs: 941 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:48:44.294528Z :INFO: [/Root] [/Root] [67e70c5c-3ecda0e3-c4a29594-3b8c7eec] Closing read session. 
Close timeout: 0.000000s 2025-12-23T18:48:44.294558Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-12-23T18:48:44.294594Z :INFO: [/Root] [/Root] [67e70c5c-3ecda0e3-c4a29594-3b8c7eec] Counters: { Errors: 0 CurrentSessionLifetimeMs: 941 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:48:44.294686Z :NOTICE: [/Root] [/Root] [67e70c5c-3ecda0e3-c4a29594-3b8c7eec] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:48:44.295666Z :INFO: [/Root] [/Root] [98153317-ba713e74-660c63c9-9b8460c6] Closing read session. Close timeout: 0.000000s 2025-12-23T18:48:44.295701Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-12-23T18:48:44.295726Z :INFO: [/Root] [/Root] [98153317-ba713e74-660c63c9-9b8460c6] Counters: { Errors: 0 CurrentSessionLifetimeMs: 946 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:48:44.295769Z :NOTICE: [/Root] [/Root] [98153317-ba713e74-660c63c9-9b8460c6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:48:44.296201Z :INFO: [/Root] [/Root] [77df8615-61c65041-6dc5bd7e-15927d16] Closing read session. Close timeout: 0.000000s 2025-12-23T18:48:44.296229Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-12-23T18:48:44.296252Z :INFO: [/Root] [/Root] [77df8615-61c65041-6dc5bd7e-15927d16] Counters: { Errors: 0 CurrentSessionLifetimeMs: 957 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:48:44.296292Z :NOTICE: [/Root] [/Root] [77df8615-61c65041-6dc5bd7e-15927d16] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:48:44.336231Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:44.336286Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:44.336302Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:44.336326Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:44.336343Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:48:44.436623Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:44.436661Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:44.436674Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:44.436692Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:44.436703Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TTicketParserTest::AuthorizationRetryError [GOOD] >> TTicketParserTest::AuthorizationRetryErrorImmediately >> TPartitionTests::TooManyImmediateTxs >> TPQTabletTests::Non_Kafka_Transaction_Supportive_Partitions_Should_Not_Be_Deleted_After_Timeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::TxReadsItsOwnWrites+IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 19719, MsgBus: 22960 2025-12-23T18:48:04.544988Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127093628509969:2164];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:04.545212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040e3/r3tmp/tmphJhbt8/pdisk_1.dat 2025-12-23T18:48:04.810371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:04.810432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:04.869338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:04.870141Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:04.871156Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127093628509840:2081] 1766515684535215 != 1766515684535218 2025-12-23T18:48:04.871203Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 19719, node 1 2025-12-23T18:48:05.008610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:05.008637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:05.008645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:05.008769Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:05.051875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22960 TClient is connected to server localhost:22960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515684934 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:05.485341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:05.496148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:05.554870Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:07.391289Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127106513412611:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:07.391385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:07.391575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127106513412623:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:07.391626Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127106513412625:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:07.391667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:07.395419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:07.407707Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127106513412627:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:48:07.495280Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127106513412678:2534] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:07.787966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:48:07.956708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127106513412856:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:48:07.956708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587127106513412855:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:48:07.957019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587127106513412855:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:48:07.957020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127106513412856:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:48:07.957239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587127106513412855:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:48:07.957241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127106513412856:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:48:07.957347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127106513412856:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:48:07.957356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587127106513412855:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:48:07.957473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587127106513412855:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:48:07.957485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;self_id=[1:7587127106513412856:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:48:07.957606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127106513412856:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:48:07.957638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587127106513412855:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:48:07.957761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587127106513412855:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:48:07.957761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127106513412856:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:48:07.957874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7587127106513412856:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:48:07.957878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7587127106513412855:2341];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:48:07.957979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:758712710651 ... 
e;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.326478Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.326550Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.326569Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.329522Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.329585Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.329604Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.335048Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.335112Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.335130Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038072;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.337146Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.337208Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.337229Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.344428Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.344495Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.344507Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.344529Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.344529Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.344543Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.352378Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038021;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.352441Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038021;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.352463Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038021;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.354459Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.354522Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.354540Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.360697Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.360782Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.360799Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.363038Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.363099Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.363116Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.367354Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.367402Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.367416Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.372361Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.372433Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.372452Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038031;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.375035Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038020;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.375088Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038020;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.375116Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038020;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:41.402194Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd68n48q21fqthp3kb5hchdb", SessionId: ydb://session/3?node_id=3&id=ZjQwYzM5NjktOGJiMjI4NzItYjBhZmUwNzUtZTEyYTFiMA==, Slow query, duration: 10.148009s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n 
);\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2025-12-23T18:48:42.577344Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:48:42.577375Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk [GOOD] >> TTicketParserTest::AuthenticationRetryError >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Non_Kafka_Transaction_Supportive_Partitions_Should_Not_Be_Deleted_After_Timeout [GOOD] Test command err: 2025-12-23T18:48:43.909711Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:43.988895Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:48:43.992687Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:48:43.993039Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:43.993100Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:43.993155Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:48:43.993205Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:48:43.993260Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:43.993338Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:48:44.045402Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:210:2214], now have 1 active actors on pipe 2025-12-23T18:48:44.045601Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:48:44.063284Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: 
METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-12-23T18:48:44.069181Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-12-23T18:48:44.069311Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:44.070240Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-12-23T18:48:44.070387Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:44.070773Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:44.071212Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:218:2142] 2025-12-23T18:48:44.072199Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:44.072251Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 
2025-12-23T18:48:44.072288Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:218:2142] 2025-12-23T18:48:44.072340Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:48:44.072388Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:44.072915Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:44.073345Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:48:44.073408Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:44.073444Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:44.073501Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:44.073540Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2025-12-23T18:48:44.073573Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:44.073622Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:44.073735Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-12-23T18:48:44.073777Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:48:44.073807Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:44.073832Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-12-23T18:48:44.073851Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-12-23T18:48:44.073872Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2025-12-23T18:48:44.073899Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2025-12-23T18:48:44.073936Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:44.074167Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp 
for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:44.074222Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:44.074326Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:48:44.074487Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:48:44.074656Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:44.079693Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:44.079841Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:48:44.079903Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:44.079958Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:44.079992Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:44.080023Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:44.080071Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:44.080120Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:44.080537Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:225:2222], now have 1 active actors on pipe 2025-12-23T18:48:44.082805Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:228:2224], now have 1 active actors on pipe 2025-12-23T18:48:44.086350Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3135: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 181 RawX2: 4294969490 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-12-23T18:48:44.086493Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3321: [PQ: 72057594037927937] distributed transaction 2025-12-23T18:48:44.086592Z node 1 :PQ_TX INFO: pq_impl.cpp:3645: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-12-23T18:48:44.086645Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-12-23T18:48:44.086688Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-12-23T18:48:44.086759Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3934: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-12-23T18:48:44.086810Z node 1 :PQ_TX INFO: pq_impl.cpp:4264: [ ... 
T topic topic partitition {0, KafkaTransactionWriteId{1, 0}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:48:47.434358Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:47.434393Z node 6 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process pending events. Count 0 2025-12-23T18:48:47.434424Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:48:47.434461Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:47.434495Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:47.434532Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:47.434564Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2025-12-23T18:48:47.434634Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:48:47.434810Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2025-12-23T18:48:47.435014Z node 6 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie -=[ 0wn3r ]=-|909f5680-8be1bc59-afc31ee6-a577bceb_0 generated for partition {0, KafkaTransactionWriteId{1, 0}, 100001} topic 'topic' owner -=[ 0wn3r ]=- 2025-12-23T18:48:47.435075Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:48:47.435119Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:48:47.435173Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:48:47.435216Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:47.435286Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:48:47.435342Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:48:47.435381Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Batch completed (1) 
2025-12-23T18:48:47.435425Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2025-12-23T18:48:47.435487Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ReplyOwnerOk. Partition: {0, KafkaTransactionWriteId{1, 0}, 100001} 2025-12-23T18:48:47.435565Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 4 2025-12-23T18:48:47.436204Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3052: [PQ: 72057594037927937] Transaction for Kafka producer {Id: 1, Epoch: 0} is expired 2025-12-23T18:48:47.436258Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5270: [PQ: 72057594037927937] send TEvPQ::TEvDeletePartition to partition {0, KafkaTransactionWriteId{1, 0}, 100001} 2025-12-23T18:48:47.440525Z node 6 :PERSQUEUE DEBUG: partition.cpp:4402: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Handle TEvPQ::TEvDeletePartition 2025-12-23T18:48:47.441027Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:48:47.441085Z node 6 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from D0000100001(+) to D0000100002(-) 2025-12-23T18:48:47.441590Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:47.441758Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2025-12-23T18:48:47.441849Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:48:47.441891Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:47.441932Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:47.441981Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:47.442027Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2025-12-23T18:48:47.442079Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2025-12-23T18:48:47.442439Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1198: [PQ: 72057594037927937] Topic 'topic' counters. 
CacheSize 0 CachedBlobs 0 2025-12-23T18:48:47.442569Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5192: [PQ: 72057594037927937] Handle TEvPQ::TEvDeletePartitionDone {0, KafkaTransactionWriteId{1, 0}, 100001} 2025-12-23T18:48:47.442627Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:5186: [PQ: 72057594037927937] DeletePartition {0, KafkaTransactionWriteId{1, 0}, 100001} 2025-12-23T18:48:47.442685Z node 6 :PQ_TX INFO: pq_impl.cpp:4605: [PQ: 72057594037927937] delete WriteId KafkaTransactionWriteId{1, 0} 2025-12-23T18:48:47.442790Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:48:47.451183Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:48:47.463011Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:47.496602Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:47.507296Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:47.507387Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:47.507434Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:47.507486Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:47.507530Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:47.517947Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx events 2025-12-23T18:48:47.518037Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:47.518078Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:47.518118Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:47.518154Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Try persist 2025-12-23T18:48:47.646341Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:47.656767Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] No data for blobs compaction 2025-12-23T18:48:47.921543Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:47.942381Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:47.942445Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:47.942477Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:47.942512Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:47.942540Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:47.953414Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx events 2025-12-23T18:48:47.953476Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:47.953511Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:47.953547Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:47.953575Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Try persist 2025-12-23T18:48:47.966968Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:47.988107Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Parallel_Transactions_1 >> TPQTest::TestCompaction >> TPQTabletTests::Parallel_Transactions_1 [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryError >> TPQTabletTests::Parallel_Transactions_2 >> KqpSinkTx::OlapInteractive [GOOD] >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_After_Timeout >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad >> TPQTabletTests::Parallel_Transactions_2 [GOOD] >> TPartitionTests::TooManyImmediateTxs [GOOD] >> TPQTabletTests::Partition_Send_Predicate_With_False >> TPQTest::TestPartitionTotalQuota >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> TPartitionTests::TestNonConflictingActsBatchOk >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3c >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_After_Timeout [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryError [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately >> TPQTabletTests::ProposeTx_Missing_Operations >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_With_Delete_Partition_Done_Event_Drop >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3c [GOOD] >> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::AuthorizationWithRequiredPermissions >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5c >> TPQTabletTests::DropTablet_And_Tx >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] >> 
TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::BulkAuthorization >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead >> TPQTabletTests::ProposeTx_Unknown_Partition_1 >> TPQTest::TestAccountReadQuota [GOOD] >> TPQTest::TestAlreadyWritten >> TPQTabletTests::DropTablet_And_Tx [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5c [GOOD] >> TPQTabletTests::DropTablet_Before_Write >> TPQTabletTests::ProposeTx_Unknown_WriteId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 9786, MsgBus: 30221 2025-12-23T18:47:58.012210Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127066841226976:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:58.012298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040ea/r3tmp/tmp480wMw/pdisk_1.dat 2025-12-23T18:47:58.290837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:58.294704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:58.294937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:58.319850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:58.411762Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9786, node 1 2025-12-23T18:47:58.559448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:58.559474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:58.559487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:58.559590Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:58.566649Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30221 2025-12-23T18:47:59.015765Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:30221 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515678466 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:59.162940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:59.171739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:48:01.453727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127079726129610:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:01.453744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127079726129620:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:01.453839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:01.454470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127079726129640:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:01.454517Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:01.459235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:01.472404Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127079726129639:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:48:01.575656Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127079726129692:2542] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:01.902267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:48:02.085269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:48:02.085512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:48:02.085721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:48:02.085868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:48:02.085978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:48:02.086057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:48:02.086175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:48:02.086300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:48:02.086425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:48:02.086549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:48:02.086673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:48:02.086806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:48:02.086961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7587127084021097156:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:48:02.091278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127084021097158:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:48:02.091369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127084021097158:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:48:02.091527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127084021097158:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:48:02.091630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7587127084021097158:2339];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:48:02.091750Z node 1 ... 
e;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.563198Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.563203Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.563220Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.573077Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.573137Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.573155Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.573481Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038032;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.573542Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038032;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.573563Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038032;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.582850Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038028;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.582922Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038028;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.582940Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.582946Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038028;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.582991Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.583010Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038074;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.592483Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.592548Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.592571Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.592651Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.592723Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.592749Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.602092Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037998;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.602143Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037998;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.602160Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037998;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.602707Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.602794Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.602814Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.611279Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.611347Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.611366Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.611933Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.611990Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.612007Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.621123Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.621153Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.621216Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.621217Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.621241Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.621241Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:44.651105Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd68n6wf8pcsg97m3kjdk0n2", SessionId: ydb://session/3?node_id=3&id=MjE1M2IyZmYtNWNiMGNkYzAtMjNjNDg2MzAtNDllNzE1YzI=, Slow query, duration: 10.662049s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n 
);\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2025-12-23T18:48:45.247345Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:48:45.247389Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5a |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] >> TPQTabletTests::DropTablet_Before_Write [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 >> TPQTabletTests::ProposeTx_Unknown_Partition_2 >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5a [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-dbadmin >> TPQTabletTests::ProposeTx_Command_After_Propose >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_With_Delete_Partition_Done_Event_Drop [GOOD] >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction [GOOD] >> TPQTabletTests::ReadQuoter_ExclusiveLock >> TPQTabletTests::Huge_ProposeTransacton >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_TEvDeletePartitionDone_Came_Should_Be_Processed_After_Previous_Complete_Erasure >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_TEvDeletePartitionDone_Came_Should_Be_Processed_After_Previous_Complete_Erasure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5a [GOOD] Test command err: 2025-12-23T18:48:49.139223Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:49.200293Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:48:49.203751Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:48:49.204060Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:49.204138Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:49.204176Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:48:49.204220Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:48:49.204278Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:49.204350Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:48:49.230930Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:210:2214], now have 1 active actors on pipe 2025-12-23T18:48:49.231088Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:48:49.250607Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-12-23T18:48:49.253277Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-12-23T18:48:49.253391Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:49.254310Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-12-23T18:48:49.254447Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:49.254758Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:49.255154Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:218:2142] 2025-12-23T18:48:49.256152Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:49.256200Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2025-12-23T18:48:49.256253Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:218:2142] 2025-12-23T18:48:49.256304Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:48:49.256356Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:49.256927Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:49.257344Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:48:49.257404Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:49.257456Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:49.257512Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:49.257543Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2025-12-23T18:48:49.257590Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:49.257640Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:49.257711Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-12-23T18:48:49.257748Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:48:49.257787Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:49.257807Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-12-23T18:48:49.257822Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-12-23T18:48:49.257840Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2025-12-23T18:48:49.257864Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2025-12-23T18:48:49.257891Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:49.258075Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:49.258114Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:49.258190Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:48:49.258324Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:48:49.258495Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:49.261355Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:49.261482Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:48:49.261535Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:49.261566Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:49.261596Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:49.261628Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:49.261666Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:49.261714Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:49.262069Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:225:2222], now have 1 active actors on pipe 2025-12-23T18:48:49.262714Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:228:2224], now have 1 active actors on pipe 2025-12-23T18:48:49.263592Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3135: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 181 RawX2: 4294969490 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-12-23T18:48:49.263671Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3321: [PQ: 72057594037927937] distributed transaction 2025-12-23T18:48:49.263743Z node 1 :PQ_TX INFO: pq_impl.cpp:3645: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-12-23T18:48:49.263797Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-12-23T18:48:49.263839Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-12-23T18:48:49.263882Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3934: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-12-23T18:48:49.263918Z node 1 :PQ_TX INFO: pq_impl.cpp:4264: [ ... 
TxId 67890 State WAIT_RS FrontTxId 67890 2025-12-23T18:48:52.199591Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4468: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-12-23T18:48:52.199666Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-12-23T18:48:52.199711Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] Responses received from the partitions 0/1 2025-12-23T18:48:52.199763Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-12-23T18:48:52.199807Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:52.199841Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:48:52.199874Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:52.199934Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2025-12-23T18:48:52.200027Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 67890 2025-12-23T18:48:52.200073Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2025-12-23T18:48:52.200109Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:48:52.200144Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:48:52.200181Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:52.200389Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:48:52.202931Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:52.203053Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:48:52.203095Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:52.203136Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:52.203177Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:52.203220Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:52.203255Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:52.203296Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:52.203369Z node 6 :PQ_TX INFO: pq_impl.cpp:3483: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2025-12-23T18:48:52.203410Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-12-23T18:48:52.203447Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-12-23T18:48:52.203483Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4310: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-12-23T18:48:52.203518Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] Responses received from the partitions 1/1 2025-12-23T18:48:52.203557Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4196: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-12-23T18:48:52.203595Z node 6 :PQ_TX INFO: pq_impl.cpp:4504: [PQ: 72057594037927937] Complete TxId 67890 2025-12-23T18:48:52.203635Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-12-23T18:48:52.203705Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3981: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-12-23T18:48:52.203764Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22225 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:48:52.203819Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22226 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:48:52.203850Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:48:52.203878Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22223 TabletDest# 72057594037927937 SetTabletConsumer# 
72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:48:52.203907Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22224 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:48:52.203937Z node 6 :PQ_TX INFO: pq_impl.cpp:4535: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-12-23T18:48:52.203986Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-12-23T18:48:52.204022Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2025-12-23T18:48:52.204050Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4544: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-12-23T18:48:52.204084Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2025-12-23T18:48:52.225082Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:52.259046Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:52.259157Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:52.259211Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:52.259264Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:52.259308Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:52.269684Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:52.290987Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:52.291062Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:52.291098Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:52.291139Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:52.291172Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:52.291266Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:52.303018Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:52.327029Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:52.327097Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2025-12-23T18:48:52.327135Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:52.327174Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:52.327207Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:52.348125Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:52.359264Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:52.359333Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:52.359370Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:52.359409Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:52.359444Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:52.380095Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:52.380168Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:52.380217Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:52.380262Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:52.380296Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:52.390653Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:52.411616Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:52.422562Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [6:386:2330], now have 1 active actors on pipe |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Deleting_State >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot [GOOD] >> KqpLocksTricky::TestSnapshotWithDependentReads+UseSink >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Deleting_State [GOOD] >> TPQTest::TestCompaction [GOOD] >> TPQTest::TestDirectReadHappyWay >> PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAuthorization >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Successful_Commit >> TPQTest::TestUserInfoCompatibility ------- 
[TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] Test command err: 2025-12-23T18:48:34.031893Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127223737501617:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:34.031960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f8/r3tmp/tmph3Xwvv/pdisk_1.dat 2025-12-23T18:48:34.113103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:48:34.427771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:34.427888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:34.428999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:34.480357Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:34.481087Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 28323, node 1 2025-12-23T18:48:34.634470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:34.634495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:34.634505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:34.634560Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:34.777108Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:14419 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:48:35.064091Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515714509 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:35.132351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:35.139644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:48:35.150437Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AD101C7FDDCA0CDCAC4F4307607CFF447899B4F902D6E2D0066FAA84FB8231E0 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-12-23T18:48:37.920862Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127234958884828:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:37.920913Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f8/r3tmp/tmp07b4m4/pdisk_1.dat 2025-12-23T18:48:37.982528Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:38.070877Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127234958884803:2081] 1766515717919045 != 1766515717919048 2025-12-23T18:48:38.072414Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:38.114828Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:38.114954Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:38.120600Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28261, node 2 2025-12-23T18:48:38.215630Z node 2 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:38.215659Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:38.215666Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:38.215744Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:38.240777Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21302 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515718170 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:38.424064Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:38.432705Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket 91208685E28DB20B797BECF96E5314456947E73032BC2351208620724D7A80A0 () has now permanent error message 'Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers' 2025-12-23T18:48:38.433451Z node 2 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket 91208685E28DB20B797BECF96E5314456947E73032BC2351208620724D7A80A0: Cannot create token from certificate. 
Client`s certificate and server`s certificate have different issuers 2025-12-23T18:48:41.608576Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127250753664621:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.608640Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f8/r3tmp/tmpFULaj6/pdisk_1.dat 2025-12-23T18:48:41.630850Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 20595, node 3 2025-12-23T18:48:41.738725Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:41.738888Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:41.754771Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:41.759570Z node 3 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:48:41.759598Z node 3 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:48:41.764350Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:41.787846Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:41.787868Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:41.787875Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:41.787937Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:41.838242Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11966 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" Effect ... 759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:45.504303Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:45.515074Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-12-23T18:48:45.515152Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d1c69a13a50] Connect to grpc://localhost:14387 2025-12-23T18:48:45.519895Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d1c69a13a50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } 0: "OK" 2025-12-23T18:48:45.535184Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d1c69a13a50] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } } 2025-12-23T18:48:45.535555Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1292: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-12-23T18:48:45.535671Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:48:45.536391Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-12-23T18:48:45.536641Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d1c69a13a50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } 0: "OK" 2025-12-23T18:48:45.538826Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d1c69a13a50] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } } 2025-12-23T18:48:45.540144Z node 4 :TICKET_PARSER TRACE: 
ticket_parser_impl.h:1292: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-12-23T18:48:45.540200Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED' 2025-12-23T18:48:48.948724Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587127283626291368:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:48.948793Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:48.959604Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f8/r3tmp/tmp4Saeud/pdisk_1.dat 2025-12-23T18:48:49.055440Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:49.056779Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7587127283626291339:2081] 1766515728947869 != 1766515728947872 2025-12-23T18:48:49.089091Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:49.091463Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:49.091535Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 16291, node 5 2025-12-23T18:48:49.097229Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:49.122780Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:49.122802Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:49.122810Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:49.122914Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29295 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515729139 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:49.303069Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:49.319981Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-12-23T18:48:49.320065Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d1c69a49e50] Connect to grpc://localhost:17083 2025-12-23T18:48:49.321137Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d1c69a49e50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } 14: "Service Unavailable" 2025-12-23T18:48:49.329144Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d1c69a49e50] Status 14 Service Unavailable 2025-12-23T18:48:49.329333Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-12-23T18:48:49.329364Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-12-23T18:48:49.329399Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:49.329477Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-12-23T18:48:49.329858Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d1c69a49e50] 
Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2025-12-23T18:48:49.332819Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d1c69a49e50] Status 1 CANCELLED 2025-12-23T18:48:49.332948Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2025-12-23T18:48:49.332979Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2025-12-23T18:48:49.333017Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TTicketParserTest::LoginCheckRemovedUser [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Successful_Commit [GOOD] >> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_Is_In_DELETED_State_Should_Be_Processed_After_Previous_Complete_Erasure >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_Is_In_DELETED_State_Should_Be_Processed_After_Previous_Complete_Erasure [GOOD] >> TTicketParserTest::BulkAuthorization [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] Test command err: 2025-12-23T18:48:34.095976Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127221127864079:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:34.096018Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032fb/r3tmp/tmpMDWB7T/pdisk_1.dat 2025-12-23T18:48:34.485456Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:34.489600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:34.489737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:34.572051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:34.577261Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7128, node 1 
2025-12-23T18:48:34.700437Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:34.770938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:34.770969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:34.770975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:34.771097Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:48:35.126975Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515714628 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:35.177527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:35.183965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:48:35.191291Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket 8E45637209435CB3AAE23E3ABAE80D5B14BEA1FFFB4208F1ECC97BB4961A3757 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-12-23T18:48:38.356955Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127238601645543:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:38.357052Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032fb/r3tmp/tmpr6Qozp/pdisk_1.dat 2025-12-23T18:48:38.390766Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:38.475637Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:38.475752Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:38.498853Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:38.500268Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127238601645515:2081] 1766515718355976 != 1766515718355979 2025-12-23T18:48:38.515842Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2003, node 2 2025-12-23T18:48:38.576233Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:38.576264Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:38.576270Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:38.576353Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:38.636655Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20532 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515718576 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:48:38.767292Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:48:38.775148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:38.783078Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket E5F290C749255E9DBDBED260D31097533ACBC15E86AB794C9E59F6F5E46153ED () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-12-23T18:48:42.208512Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127256118617833:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:42.208560Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032fb/r3tmp/tmpa9Zy2F/pdisk_1.dat 2025-12-23T18:48:42.256252Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:42.390499Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:42.399432Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587127256118617791:2081] 1766515722184346 != 1766515722184349 2025-12-23T18:48:42.401278Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:42.401355Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:42.449760Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3426, node 3 2025-12-23T18:48:42.545260Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:42.586962Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:42.586985Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:42.586991Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:42.587064Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515722503 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:42.786506Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:42.796748Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:42.804276Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket 2BCFB4F909776A6BCFAE740F7C42D1D2DBDB43650DF8A7471C205EA0DA385ECD () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-12-23T18:48:42.804753Z node 3 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket 2BCFB4F909776A6BCFAE740F7C42D1D2DBDB43650DF8A7471C205EA0DA385ECD: Cannot create token from certificate. 
Client certificate failed verification 2025-12-23T18:48:46.433949Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587127272181094933:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:46.434094Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032fb/r3tmp/tmpmICaEL/pdisk_1.dat 2025-12-23T18:48:46.450529Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:46.559156Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587127272181094904:2081] 1766515726432917 != 1766515726432920 2025-12-23T18:48:46.561782Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:46.565156Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:46.565234Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:46.587224Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5415, node 4 2025-12-23T18:48:46.635532Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:46.635566Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:46.635573Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:46.635657Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:46.749078Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1343 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515726640 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:48:46.891883Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:46.900303Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket CC426ED8669479CCFC6A9FAE542B2878EFBA9625C24E5E01254B3B4FB9E9E588 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-12-23T18:48:50.044797Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587127292017339490:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:50.045790Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032fb/r3tmp/tmps0k5vs/pdisk_1.dat 2025-12-23T18:48:50.064504Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:50.131699Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:50.161045Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:50.161119Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:50.187635Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13627, node 5 2025-12-23T18:48:50.253256Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:50.253284Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:50.253292Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:50.253397Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:50.340519Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7499 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515730238 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:50.485891Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:50.494492Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket 52A3D901EA09320FC85486E8C2A309C99C594A00840DE3104167194242FC3C5D () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-12-23T18:48:50.495321Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket 52A3D901EA09320FC85486E8C2A309C99C594A00840DE3104167194242FC3C5D: Cannot create token from certificate. Client certificate failed verification |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-dbadmin >> TPQTest::TestDirectReadHappyWay [GOOD] >> TPQTest::TestMessageNo >> TPartitionTests::TestNonConflictingActsBatchOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_Is_In_DELETED_State_Should_Be_Processed_After_Previous_Complete_Erasure [GOOD] Test command err: 2025-12-23T18:48:49.932083Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:50.004883Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:48:50.012011Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:48:50.012384Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:50.012442Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:50.012493Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:48:50.012544Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:48:50.012596Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:50.012649Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:48:50.030882Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2025-12-23T18:48:50.031006Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:48:50.046933Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:48:50.053231Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:48:50.053371Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:50.054426Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:48:50.054604Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:50.055089Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:50.055665Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2025-12-23T18:48:50.056703Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:50.056755Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2025-12-23T18:48:50.056802Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:190:2142] 2025-12-23T18:48:50.056872Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:48:50.056937Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:50.057537Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:48:50.057586Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:50.057626Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:50.057696Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:48:50.057734Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:50.057778Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:50.057889Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-12-23T18:48:50.057931Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:48:50.057989Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:48:50.058033Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:48:50.058079Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:50.058296Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:50.058362Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:48:50.058532Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:48:50.058905Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:50.063714Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:50.063836Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:48:50.063888Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:50.063954Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:50.064001Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:50.064041Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:50.064081Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:50.064141Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:50.064544Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:197:2203], now have 1 active actors on pipe 2025-12-23T18:48:50.065370Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:200:2205], now have 1 active actors on pipe 2025-12-23T18:48:50.065490Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'topic' requestId: 2025-12-23T18:48:50.065602Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-12-23T18:48:50.065668Z node 1 :PQ_TX INFO: pq_impl.cpp:2565: [PQ: 72057594037927937] partition {0, KafkaTransactionWriteId{1, 0}, 100000} for WriteId KafkaTransactionWriteId{1, 0} 2025-12-23T18:48:50.065813Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:48:50.068688Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:48:50.069488Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:50.069857Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:50.070134Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateInit] bootstrapping {0, KafkaTransactionWriteId{1, 0}, 100000} [1:206:2142] 2025-12-23T18:48:50.071035Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitDiskStatusStep 2025-12-23T18:48:50.072351Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, 
KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitMetaStep 2025-12-23T18:48:50.072706Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitInfoRangeStep 2025-12-23T18:48:50.072841Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From M0000100000 to M0000100001 2025-12-23T18:48:50.073139Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitDataRangeStep 2025-12-23T18:48:50.073216Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From D0000100000 to D0000100001 2025-12-23T18:48:50.073446Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitDataStep 2025-12-23T ... 72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:54.858587Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:54.858618Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:54.858645Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Try persist 2025-12-23T18:48:54.858680Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:173: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Blobs compaction is stopped 2025-12-23T18:48:54.859142Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5192: [PQ: 72057594037927937] Handle TEvPQ::TEvDeletePartitionDone {0, KafkaTransactionWriteId{1, 0}, 100000} 2025-12-23T18:48:54.859200Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:5186: [PQ: 72057594037927937] DeletePartition {0, KafkaTransactionWriteId{1, 0}, 100000} 2025-12-23T18:48:54.859252Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state WAIT_RS_ACKS 2025-12-23T18:48:54.859290Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State WAIT_RS_ACKS 2025-12-23T18:48:54.859335Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2025-12-23T18:48:54.859373Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4591: [PQ: 72057594037927937] WriteId KafkaTransactionWriteId{1, 0} Partitions.size=0 2025-12-23T18:48:54.859402Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4544: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-12-23T18:48:54.859435Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2025-12-23T18:48:54.859474Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4591: [PQ: 72057594037927937] WriteId KafkaTransactionWriteId{1, 0} Partitions.size=0 2025-12-23T18:48:54.859513Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4623: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2025-12-23T18:48:54.859555Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS_ACKS to DELETING 2025-12-23T18:48:54.859596Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3835: [PQ: 72057594037927937] Delete key for TxId 67890 2025-12-23T18:48:54.859710Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72057594037927937] Send 
TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:48:54.861851Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'topic' requestId: 2025-12-23T18:48:54.861902Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-12-23T18:48:54.861946Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2496: [PQ: 72057594037927937] GetOwnership request for the next Kafka transaction while previous is being deleted. Saving it till the complete delete of the previous tx.%01 2025-12-23T18:48:54.862005Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:48:54.862045Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state DELETING 2025-12-23T18:48:54.862081Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State DELETING 2025-12-23T18:48:54.862125Z node 6 :PQ_TX INFO: pq_impl.cpp:4605: [PQ: 72057594037927937] delete WriteId KafkaTransactionWriteId{1, 0} 2025-12-23T18:48:54.862179Z node 6 :PQ_TX INFO: pq_impl.cpp:4568: [PQ: 72057594037927937] delete TxId 67890 2025-12-23T18:48:54.862242Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'topic' requestId: 2025-12-23T18:48:54.862283Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-12-23T18:48:54.862336Z node 6 :PQ_TX INFO: pq_impl.cpp:2565: [PQ: 72057594037927937] partition {0, KafkaTransactionWriteId{1, 0}, 100001} for WriteId KafkaTransactionWriteId{1, 0} 2025-12-23T18:48:54.862436Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:48:54.864259Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:48:54.864809Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:54.865121Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:54.865370Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] bootstrapping {0, KafkaTransactionWriteId{1, 0}, 100001} [6:241:2142] 2025-12-23T18:48:54.866197Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitDiskStatusStep 2025-12-23T18:48:54.867281Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitMetaStep 2025-12-23T18:48:54.867660Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitInfoRangeStep 2025-12-23T18:48:54.867777Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From M0000100001 to M0000100002 2025-12-23T18:48:54.868037Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitDataRangeStep 2025-12-23T18:48:54.868114Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From D0000100001 to D0000100002 2025-12-23T18:48:54.868290Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitDataStep 2025-12-23T18:48:54.868335Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-12-23T18:48:54.868376Z node 6 :PERSQUEUE INFO: partition_init.cpp:1016: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:48:54.868419Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitMessageDeduplicatorStep 2025-12-23T18:48:54.868459Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TDeleteKeysStep 2025-12-23T18:48:54.868515Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:54.868556Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Initializing completed. 2025-12-23T18:48:54.868601Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] init complete for topic 'topic' partition {0, KafkaTransactionWriteId{1, 0}, 100001} generation 2 [6:241:2142] 2025-12-23T18:48:54.868651Z node 6 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] SYNC INIT topic topic partitition {0, KafkaTransactionWriteId{1, 0}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:48:54.868700Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:54.868733Z node 6 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process pending events. Count 0 2025-12-23T18:48:54.868769Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:48:54.868802Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:54.868833Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:54.868871Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:54.868902Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2025-12-23T18:48:54.868971Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:48:54.869119Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2025-12-23T18:48:54.869284Z node 6 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie -=[ 0wn3r ]=-|b8c4dafc-c375721d-18e37762-e07bc18c_0 generated for partition {0, KafkaTransactionWriteId{1, 0}, 100001} topic 'topic' owner -=[ 0wn3r ]=- 2025-12-23T18:48:54.869341Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:48:54.869382Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:48:54.869428Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:48:54.869465Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:54.869504Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:48:54.869558Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:48:54.869593Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Batch completed (1) 2025-12-23T18:48:54.869633Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2025-12-23T18:48:54.869763Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ReplyOwnerOk. 
Partition: {0, KafkaTransactionWriteId{1, 0}, 100001} 2025-12-23T18:48:54.869845Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 5 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> CompressExecutor::TestReorderedExecutor >> TPartitionTests::TestTxBatchInFederation >> TPQTest::TestUserInfoCompatibility [GOOD] >> TPQTest::TestWaitInOwners >> TPQTest::TestSourceIdDropByUserWrites >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestGetTimestamps >> TPQTabletTests::ReadQuoter_ExclusiveLock [GOOD] >> TPartitionTests::CommitOffsetRanges >> TTicketParserTest::NebiusAuthorization [GOOD] >> TTicketParserTest::NebiusAuthorizationModify >> TPQTabletTests::Huge_ProposeTransacton [GOOD] >> TPartitionTests::CorrectRange_Multiple_Transactions >> TPQTabletTests::In_Kafka_Txn_Only_Supportive_Partitions_That_Exceeded_Timeout_Should_Be_Deleted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::ReadQuoter_ExclusiveLock [GOOD] Test command err: 2025-12-23T18:48:51.011048Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:51.091369Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:48:51.095024Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:48:51.095342Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:51.095413Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:51.095485Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:48:51.095533Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:48:51.095599Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:51.095665Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:48:51.112998Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2025-12-23T18:48:51.113174Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:48:51.132556Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:48:51.135586Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:48:51.135711Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:51.136652Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:48:51.136819Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:51.137185Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:51.137621Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2025-12-23T18:48:51.138667Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:51.138724Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2025-12-23T18:48:51.138789Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:190:2142] 2025-12-23T18:48:51.138859Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:48:51.138918Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:51.139471Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:48:51.139566Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:51.139620Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:51.139689Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:48:51.139775Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:51.139823Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:51.139895Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-12-23T18:48:51.139939Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:48:51.139973Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:48:51.140008Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:48:51.140060Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:51.140286Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:51.140370Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:48:51.140547Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:48:51.140772Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:51.143551Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:51.143644Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:48:51.143692Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:51.143748Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:51.143796Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:51.143835Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:51.143869Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:51.143929Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:51.144263Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:197:2203], now have 1 active actors on pipe 2025-12-23T18:48:51.144924Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:200:2205], now have 1 active actors on pipe 2025-12-23T18:48:51.145772Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3135: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 181 RawX2: 4294969490 } TxId: 2 Data { Immediate: false } 2025-12-23T18:48:51.145845Z node 1 :PQ_TX WARN: pq_impl.cpp:3232: [PQ: 72057594037927937] TxId 2 empty list of operations 2025-12-23T18:48:51.145919Z node 1 :PQ_TX INFO: pq_impl.cpp:4750: [PQ: 72057594037927937] TxId: 2 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-12-23T18:48:51.507111Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:51.549627Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:48:51.552902Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:48:51.553165Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:51.553205Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:51.553238Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:48:51.553271Z node 2 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:48:51.553315Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:51.553364Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:48:51.571573Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [2:182:2195], now have 1 active actors on pipe 2025-12-23T18:48:51.571683Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:48:51.571871Z node 2 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicNam ... persist 2025-12-23T18:48:56.811282Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:56.811414Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.811449Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:56.811481Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.811508Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:56.832049Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:56.832119Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.832150Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:56.832181Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.832209Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:56.852777Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:56.852837Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.852884Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:56.852950Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.852979Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:56.873468Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:56.873512Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.873532Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:56.873558Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.873592Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:56.896430Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:56.896489Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.896517Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:56.896546Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.896572Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:56.927584Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:56.927661Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.927704Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:56.927737Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.927768Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:56.948339Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:56.948404Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.948452Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:56.948492Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.948522Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:56.969173Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:56.969230Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.969259Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:56.969290Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.969317Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:56.989911Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:56.989974Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.990004Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:56.990054Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:56.990091Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:57.000422Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.021284Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:57.021347Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:57.021378Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:57.021411Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:57.021438Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:57.043150Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:57.043219Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:57.043248Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:57.043279Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:57.043306Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:57.065278Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:57.065336Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:57.065364Z node 6 
:PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:57.065406Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:57.065435Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:57.086234Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:57.086298Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:57.086327Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:57.086367Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:57.086396Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:57.106998Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:57.107068Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:57.107097Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:57.107130Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:57.107155Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:57.117468Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.171181Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:57.171243Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:57.171271Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:57.171300Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:57.171325Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 >> TPartitionTests::CommitOffsetRanges [GOOD] >> TPartitionTests::ConflictingActsInSeveralBatches >> TTicketParserTest::AuthorizationWithUserAccount [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD] >> 
TTicketParserTest::BulkAuthorizationUnavailable >> TPQTabletTests::In_Kafka_Txn_Only_Supportive_Partitions_That_Exceeded_Timeout_Should_Be_Deleted [GOOD] >> TPQTest::TestGetTimestamps [GOOD] >> TPQTest::TestMaxTimeLagRewind >> TPartitionTests::CorrectRange_Rollback >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-clusteradmin >> KqpLimits::TooBigQuery+useSink [GOOD] >> KqpLimits::TooBigQuery-useSink >> TPartitionTests::CorrectRange_Rollback [GOOD] >> TPartitionTests::DataTxCalcPredicateOk >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::In_Kafka_Txn_Only_Supportive_Partitions_That_Exceeded_Timeout_Should_Be_Deleted [GOOD] Test command err: 2025-12-23T18:48:51.306266Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:51.384824Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:48:51.388938Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:48:51.389329Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:51.389414Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:51.389456Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:48:51.389508Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:48:51.389563Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:51.389644Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:48:51.414283Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2025-12-23T18:48:51.414553Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:48:51.435312Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:48:51.438618Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new 
config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:48:51.438921Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:51.440600Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:48:51.440751Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:51.440838Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:51.441382Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:51.441819Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2025-12-23T18:48:51.443439Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:51.443515Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2025-12-23T18:48:51.443572Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:190:2142] 2025-12-23T18:48:51.443641Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:48:51.443713Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:51.444272Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:48:51.444328Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:51.444365Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:51.444432Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:48:51.444470Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:51.444576Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:51.444681Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-12-23T18:48:51.444722Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:48:51.444765Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:48:51.444823Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:48:51.444895Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:51.445098Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:51.445169Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:48:51.445335Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:51.445644Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:192:2142] 2025-12-23T18:48:51.446402Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:51.446464Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:1:Initializer] Initializing completed. 2025-12-23T18:48:51.446500Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:192:2142] 2025-12-23T18:48:51.446547Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:48:51.446595Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:51.447037Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2025-12-23T18:48:51.447092Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:48:51.447122Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:51.447161Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:48:51.447186Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:51.447218Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:51.447261Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-12-23T18:48:51.447294Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-12-23T18:48:51.447331Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:48:51.447354Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2025-12-23T18:48:51.447386Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2025-12-23T18:48:51.447565Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:51.447621Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:48:51.447880Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:48:51.448151Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:51.448374Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:48:51.448503Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2025-12-23T18:48:51.452790Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdl ... :48:58.407219Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2025-12-23T18:48:58.407844Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1704: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Add new write blob: topic 'topic' partition {0, KafkaTransactionWriteId{2, 0}, 100001} compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 D0000100001_00000000000000000000_00000_0000000001_00000? size 64 WTime 1800332 2025-12-23T18:48:58.408106Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:48:58.408253Z node 6 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 100001 offset 0 partNo 0 count 1 size 64 2025-12-23T18:48:58.411902Z node 6 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 100001 offset 0 count 1 size 64 actorID [6:138:2142] 2025-12-23T18:48:58.412047Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:58.412117Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 11 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:48:58.412193Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] TPartition::ReplyWrite. Partition: {0, KafkaTransactionWriteId{2, 0}, 100001} 2025-12-23T18:48:58.412272Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Answering for message sourceid: '2', Topic: 'topic', Partition: {0, KafkaTransactionWriteId{2, 0}, 100001}, SeqNo: 0, partNo: 0, Offset: 0 is stored on disk 2025-12-23T18:48:58.412474Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:48:58.412527Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.412565Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:58.412612Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.412650Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2025-12-23T18:48:58.412703Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] No data for blobs compaction 2025-12-23T18:48:58.412814Z node 6 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 100001 offset 0 partno 0 count 1 parts 0 suffix '63' size 64 2025-12-23T18:48:58.412892Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1198: [PQ: 72057594037927937] Topic 'topic' counters. 
CacheSize 128 CachedBlobs 2 2025-12-23T18:48:58.412952Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 123 2025-12-23T18:48:58.413693Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3052: [PQ: 72057594037927937] Transaction for Kafka producer {Id: 1, Epoch: 0} is expired 2025-12-23T18:48:58.413757Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5270: [PQ: 72057594037927937] send TEvPQ::TEvDeletePartition to partition {0, KafkaTransactionWriteId{1, 0}, 100000} 2025-12-23T18:48:58.414221Z node 6 :PERSQUEUE DEBUG: partition.cpp:4402: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Handle TEvPQ::TEvDeletePartition 2025-12-23T18:48:58.414890Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:48:58.414952Z node 6 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from D0000100000(+) to D0000100001(-) 2025-12-23T18:48:58.423145Z node 6 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 0 count 1 actorID [6:138:2142] 2025-12-23T18:48:58.423298Z node 6 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 100000 offset 0 partno 0 count 1 parts 0 suffix '63' size 64 2025-12-23T18:48:58.423382Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:58.423554Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] No data for blobs compaction 2025-12-23T18:48:58.423610Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Process user action and tx events 2025-12-23T18:48:58.423667Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.423714Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:58.423756Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.423799Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Try persist 2025-12-23T18:48:58.423850Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] No data for blobs compaction 2025-12-23T18:48:58.424314Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1198: [PQ: 72057594037927937] Topic 'topic' counters. 
CacheSize 64 CachedBlobs 1 2025-12-23T18:48:58.424447Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5192: [PQ: 72057594037927937] Handle TEvPQ::TEvDeletePartitionDone {0, KafkaTransactionWriteId{1, 0}, 100000} 2025-12-23T18:48:58.424505Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:5186: [PQ: 72057594037927937] DeletePartition {0, KafkaTransactionWriteId{1, 0}, 100000} 2025-12-23T18:48:58.424553Z node 6 :PQ_TX INFO: pq_impl.cpp:4605: [PQ: 72057594037927937] delete WriteId KafkaTransactionWriteId{1, 0} 2025-12-23T18:48:58.424634Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:48:58.430196Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:48:58.451237Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.482532Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:58.482617Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.482662Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:58.482725Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.482787Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:58.604602Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:58.874871Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:48:58.874946Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.874979Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:58.875015Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.875048Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2025-12-23T18:48:58.896575Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] No data for blobs compaction 2025-12-23T18:48:58.927311Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.953319Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:58.953386Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2025-12-23T18:48:58.953421Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:58.953465Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.953496Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:58.953573Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.964095Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx events 2025-12-23T18:48:58.964160Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.964196Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:58.964233Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.964284Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2025-12-23T18:48:58.974594Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestToHex [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert [GOOD] Test command err: Trying to start YDB, gRPC: 16692, MsgBus: 9978 2025-12-23T18:48:09.237672Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127113435815271:2235];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:09.237738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040df/r3tmp/tmp9XXmoZ/pdisk_1.dat 2025-12-23T18:48:09.496150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:09.527725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-12-23T18:48:09.527827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:09.581312Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:09.582870Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127113435815063:2081] 1766515689226003 != 1766515689226006 2025-12-23T18:48:09.589080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16692, node 1 2025-12-23T18:48:09.678811Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:09.699391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:09.699413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:09.699421Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:09.699511Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9978 TClient is connected to server localhost:9978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515689638 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:10.280042Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:10.283148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:10.289340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:12.579752Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127126320717842:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:12.579907Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:12.580307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127126320717854:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:12.580355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127126320717855:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:12.580473Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:12.584837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:12.600974Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127126320717858:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:48:12.663237Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127126320717909:2538] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:12.962331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:13.067176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.176226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:14.235307Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127113435815271:2235];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:14.235387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:15.712588Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715669; 2025-12-23T18:48:15.738405Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [1:7587127139205627941:2965], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [1:7587127139205627678:2965]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[1:7587127139205627941:2965].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-12-23T18:48:15.739202Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587127139205627934:2965], SessionActorId: [1:7587127139205627678:2965], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7587127139205627678:2965]. 2025-12-23T18:48:15.739397Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [1:7587127139205627934:2965], SessionActorId: [1:7587127139205627678:2965], StateRollback: unknown message 278003713 2025-12-23T18:48:15.739410Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [1:7587127139205627934:2965], SessionActorId: [1:7587127139205627678:2965], StateRollback: unknown message 278003713 2025-12-23T18:48:15.739420Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [1:7587127139205627934:2965], SessionActorId: [1:7587127139205627678:2965], StateRollback: unknown message 278003713 2025-12-23T18:48:15.739430Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [1:7587127139205627934:2965], SessionActorId: [1:7587127139205627678:2965], StateRollback: unknown message 278003713 2025-12-23T18:48:15.739504Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=ODYxNDE1ZTMtNWE3ZDM5MWUtODYyOGM0MTktZTlhMmNiOWU=, ActorId: [1:7587127139205627678:2965], ActorState: ExecuteState, TraceId: 01kd68mr2f9vdwtb3aaaq3vndd, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7587127139205628248:2965] from: [1:7587127139205627934:2965] 2025-12-23T18:48:15.739595Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:7587127139205628248:2965] TxId: 281474976715669. Ctx: { TraceId: 01kd68mr2f9vdwtb3aaaq3vndd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODYxNDE1ZTMtNWE3ZDM5MWUtODYyOGM0MTktZTlhMmNiOWU=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-12-23T18:48:15.739717Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2862: SelfId: [1:7587127139205627934:2965], SessionActorId: [1:7587127139205627678:2965], StateRollback: unknown message 278003713 2025-12-23T18:48:15.739963Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=ODYxNDE1ZTMtNWE3ZDM5MWUtODYyOGM0MTktZTlhMmNiOWU=, ActorId: [1:7587127139205627678:2965], ActorState: ExecuteState, TraceId: 01kd68mr2f9vdwtb3aaaq3vndd, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "Ope ... :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.345417Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.345436Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.347249Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.347307Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.347324Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.355024Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.355098Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.355115Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.356877Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.356931Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.356946Z node 4 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.364208Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.364278Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.364295Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.373496Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.373578Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.373596Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.383376Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.383453Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.383471Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038059;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.392619Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.392695Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.392714Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.402306Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
2025-12-23T18:48:54.402389Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.402406Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.411863Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.411944Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.411985Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.421307Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.421385Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.421404Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.430553Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038055;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.430650Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038055;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.430671Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038055;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.440805Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.440881Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:54.440899Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:55.515630Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:48:55.515664Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:56.523651Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976715668;tx_id=281474976715668;commit_tx_id=281474976715668;commit_lock_id=281474976715665;fline=manager.cpp:80;broken_lock_id=281474976715666; 2025-12-23T18:48:56.578795Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2081: SelfId: [4:7587127315256623619:4625], TxId: 281474976715669, task: 1. Ctx: { TraceId : 01kd68nzzh4ra18tpdrdd338r9. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=M2U4MjAzNmUtYWM0MDAyNWQtY2MxZjkxYy05YWM0ZTk5NQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } 2025-12-23T18:48:56.578929Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [4:7587127315256623619:4625], TxId: 281474976715669, task: 1. Ctx: { TraceId : 01kd68nzzh4ra18tpdrdd338r9. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=M2U4MjAzNmUtYWM0MDAyNWQtY2MxZjkxYy05YWM0ZTk5NQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 }. 2025-12-23T18:48:56.579564Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=M2U4MjAzNmUtYWM0MDAyNWQtY2MxZjkxYy05YWM0ZTk5NQ==, ActorId: [4:7587127306666687053:3581], ActorState: ExecuteState, TraceId: 01kd68nzzh4ra18tpdrdd338r9, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TPartitionTests::TestTxBatchInFederation [GOOD] >> TTicketParserTest::NebiusAuthorizationModify [GOOD] >> TPQRBDescribes::PartitionLocations [GOOD] >> TPQTabletTests::DropTablet >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] >> TPartitionTests::TestBatchingWithChangeConfig >> TTicketParserTest::AuthenticationRetryError [GOOD] >> TTicketParserTest::AuthenticationRetryErrorImmediately >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches >> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::AuthorizationUnavailable >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test >> TPQTabletTests::DropTablet [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryError [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately >> TPQTabletTests::Cancel_Tx >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] >> TPartitionScaleManagerGraphCmpTest::ExtraRootPartitionsInTargetTopic [GOOD] >> TPartitionScaleManagerGraphCmpTest::SplittedTargetTopic [GOOD] >> TPartitionTests::Batching >> KqpSinkMvcc::OlapMultiSinks [GOOD] >> TPQTabletTests::Cancel_Tx [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] >> TTicketParserTest::BulkAuthorizationModify >> TPQTabletTests::Config_TEvTxCommit_After_Restart >> TPartitionTests::Batching [GOOD] >> TPQTabletTests::PQTablet_Send_RS_With_Abort >> TPartitionTests::ChangeConfig >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] Test command err: 2025-12-23T18:48:33.861577Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127216172228764:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:33.861758Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:33.922968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032d5/r3tmp/tmpfVe7wk/pdisk_1.dat 2025-12-23T18:48:34.248955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:34.249090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:34.292827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-12-23T18:48:34.337960Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:34.380694Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3931, node 1 2025-12-23T18:48:34.519128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:34.635402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:34.635430Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:34.635436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:34.635506Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:34.875145Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:23266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515714446 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:35.211479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:35.224660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:48:35.265133Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-12-23T18:48:35.265243Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c993a2011d0] Connect to grpc://localhost:11972 2025-12-23T18:48:35.268487Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c993a2011d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-12-23T18:48:35.309963Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c993a2011d0] Status 14 Service Unavailable 2025-12-23T18:48:35.315028Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:35.315067Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-12-23T18:48:35.315211Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c993a2011d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-12-23T18:48:35.318927Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c993a2011d0] Status 1 CANCELLED 2025-12-23T18:48:35.319443Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2025-12-23T18:48:37.591918Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127236132607706:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:37.591973Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:37.605413Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032d5/r3tmp/tmpjvsZ53/pdisk_1.dat 2025-12-23T18:48:37.699154Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:37.699229Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:37.699675Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:37.701187Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127236132607680:2081] 1766515717590715 != 1766515717590718 2025-12-23T18:48:37.727895Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:37.734823Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 25814, node 2 2025-12-23T18:48:37.781640Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:37.781673Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:37.781680Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:37.781764Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515717785 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:37.983042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:37.989571Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-12-23T18:48:37.989644Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c993a2b1dd0] Connect to grpc://localhost:5184 2025-12-23T18:48:37.991280Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c993a2b1dd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-12-23T18:48:38.006652Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c993a2b1dd0] Status 14 Service Unavailable 2025-12-23T18:48:38.006867Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-12-23T18:48:38.006920Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:38.006993Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-12-23T18:48:38.007330Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c993a2b1dd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-12-23T18:48:38.009518Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c993a2b1dd0] Status 14 Servic ... 
icket_parser_impl.h:537: Ticket **** (6968D2E8) asking for AccessServiceAuthorization( something.write) 2025-12-23T18:48:54.723974Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c993a245cd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "**** (6968D2E8)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "service1" } } NebiusAccessService::Authorize response results { key: 0 value { account { service_account { id: "service1" } } impersonation_info { } } } 0: "OK" 2025-12-23T18:48:54.726285Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c993a245cd0] Response AuthorizeResponse { results { key: 0 value { account { service_account { id: "service1" } } impersonation_info { } } } } 2025-12-23T18:48:54.726460Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (6968D2E8) () has now valid token of service1@as 2025-12-23T18:48:54.726941Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (7A38211C) asking for AccessServiceAuthorization( something.write) 2025-12-23T18:48:54.727122Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c993a245cd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "**** (7A38211C)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "service2" } } NebiusAccessService::Authorize response results { key: 0 value { account { service_account { id: "service2" } } impersonation_info { chain { account { service_account { id: "srv" } } } } } } 0: "OK" 2025-12-23T18:48:54.729170Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c993a245cd0] Response AuthorizeResponse { results { key: 0 value { account { service_account { id: "service2" } } impersonation_info { chain { account { service_account { id: "srv" } } } } } } } 2025-12-23T18:48:54.729365Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (7A38211C) () has now valid token of service2@as 2025-12-23T18:48:54.729882Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8853A21F) asking for AccessServiceAuthorization( something.write) 2025-12-23T18:48:54.730068Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c993a245cd0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "**** (8853A21F)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "service3" } } NebiusAccessService::Authorize response results { key: 0 value { account { service_account { id: "service3" } } impersonation_info { chain { account { service_account { id: "srv" } } account { service_account { id: "one_more_service" } } } chain { account { service_account { id: "srv" } } account { service_account { id: "srv2" } } account { user_account { id: "user1" } } account { service_account { id: "srv3" } } } } } } 0: "OK" 2025-12-23T18:48:54.731851Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c993a245cd0] 
Response AuthorizeResponse { results { key: 0 value { account { service_account { id: "service3" } } impersonation_info { chain { account { service_account { id: "srv" } } account { service_account { id: "one_more_service" } } } chain { account { service_account { id: "srv" } } account { service_account { id: "srv2" } } account { user_account { id: "user1" } } account { service_account { id: "srv3" } } } } } } } 2025-12-23T18:48:54.732049Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8853A21F) () has now valid token of service3@as 2025-12-23T18:48:58.015430Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587127323642726817:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:58.018070Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032d5/r3tmp/tmpZrQH3V/pdisk_1.dat 2025-12-23T18:48:58.037294Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:58.118982Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:58.162995Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:58.163087Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:58.170446Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17875, node 5 2025-12-23T18:48:58.213800Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:58.213826Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:58.213839Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:58.213928Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:58.242650Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9422 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515738218 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:58.431670Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:58.438507Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:58.444500Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-12-23T18:48:58.444562Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c993a2279d0] Connect to grpc://localhost:5583 2025-12-23T18:48:58.445716Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c993a2279d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2025-12-23T18:48:58.459125Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c993a2279d0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2025-12-23T18:48:58.459387Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:48:58.460866Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-12-23T18:48:58.461142Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c993a2279d0] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: 
"aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2025-12-23T18:48:58.467102Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c993a2279d0] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2025-12-23T18:48:58.468125Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit >> TPQTabletTests::All_New_Partitions_In_Another_Tablet >> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] Test command err: 2025-12-23T18:48:35.736497Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127227625781487:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:35.736592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032d1/r3tmp/tmp82Sozt/pdisk_1.dat 2025-12-23T18:48:36.059775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:36.059887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:36.101848Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:36.102198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:36.150779Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127227625781456:2081] 1766515715734653 != 1766515715734656 2025-12-23T18:48:36.165710Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1229, node 1 2025-12-23T18:48:36.220087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:36.220112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:36.220119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:36.220254Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:36.390316Z 
node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21911 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515716168 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:36.532642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:36.539735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:36.572436Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2025-12-23T18:48:36.572479Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2025-12-23T18:48:36.573056Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root/Db1, login state is not available yet, deffer token (eyJh****N7-w (88F91849)) 2025-12-23T18:48:36.746879Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:38.747044Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****N7-w (88F91849) () has now permanent error message 'Login state is not available' 2025-12-23T18:48:38.747136Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:2352: Finish waiting for login providers for 1 databases: /Root/Db1, 2025-12-23T18:48:39.356241Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127244248752311:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:39.356311Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032d1/r3tmp/tmpzFKouQ/pdisk_1.dat 2025-12-23T18:48:39.398829Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:39.497481Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:39.497541Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:39.497635Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:39.499138Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127244248752277:2081] 1766515719354710 != 1766515719354713 2025-12-23T18:48:39.510191Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20170, node 2 2025-12-23T18:48:39.588293Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:39.588324Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:39.588350Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:39.588444Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:39.644345Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27022 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515719591 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:39.783244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:39.828495Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:48:39.828814Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-12-23T18:48:39.828837Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:48:39.828993Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2025-12-23T18:48:39.829017Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Token is not in correct format 2025-12-23T18:48:39.829044Z node 2 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (5DAB89DE): Token is not in correct format 2025-12-23T18:48:43.052600Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127262018075986:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:43.052681Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032d1/r3tmp/tmpMftiLF/pdisk_1.dat 2025-12-23T18:48:43.097299Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:43.202003Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:43.202104Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:43.215068Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587127262018075941:2081] 1766515723037246 != 1766515723037249 2025-12-23T18:48:43.215213Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:43.217153Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26779, node 3 2025-12-23T18:48:43.390866Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:43.395458Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable confi ... st: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515734550 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:48:54.787098Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:54.792289Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:48:54.796082Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2025-12-23T18:48:54.796136Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cdd6942b1d0] Connect to grpc://localhost:1834 2025-12-23T18:48:54.799327Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd6942b1d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-12-23T18:48:54.814247Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdd6942b1d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:48:54.814470Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-12-23T18:48:54.816137Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cdd6942ae50] Connect to grpc://localhost:64979 2025-12-23T18:48:54.817081Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd6942ae50] Request GetUserAccountRequest { user_account_id: "user1" } 2025-12-23T18:48:54.825577Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdd6942ae50] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-12-23T18:48:54.825956Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2025-12-23T18:48:54.826632Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.write) 2025-12-23T18:48:54.826805Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd6942b1d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-12-23T18:48:54.829268Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdd6942b1d0] Response BulkAuthorizeResponse { results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-12-23T18:48:54.829498Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.write access denied for subject "" 2025-12-23T18:48:54.829547Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-12-23T18:48:54.830287Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-12-23T18:48:54.830497Z 
node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd6942b1d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-12-23T18:48:54.832313Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdd6942b1d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:48:54.832476Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-12-23T18:48:54.832699Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2025-12-23T18:48:58.428205Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587127324957758951:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:58.428293Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032d1/r3tmp/tmpCFaCS7/pdisk_1.dat 2025-12-23T18:48:58.461902Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:58.526881Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7587127324957758924:2081] 1766515738427298 != 1766515738427301 2025-12-23T18:48:58.547401Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:58.552719Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:58.552868Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:58.571819Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20991, node 5 2025-12-23T18:48:58.672438Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:58.672462Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:58.672473Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:58.672583Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:58.673366Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23115 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515738638 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:58.930331Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:58.939191Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2025-12-23T18:48:58.939233Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cdd69496150] Connect to grpc://localhost:9831 2025-12-23T18:48:58.939967Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd69496150] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2025-12-23T18:48:58.946582Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdd69496150] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2025-12-23T18:48:58.946797Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2025-12-23T18:48:58.946823Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2025-12-23T18:48:58.946844Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2025-12-23T18:48:58.946863Z node 5 :TICKET_PARSER TRACE: 
ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2025-12-23T18:48:58.946880Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-12-23T18:48:58.947021Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cdd694964d0] Connect to grpc://localhost:25908 2025-12-23T18:48:58.947772Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd694964d0] Request GetUserAccountRequest { user_account_id: "user1" } 2025-12-23T18:48:58.958777Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdd694964d0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-12-23T18:48:58.959062Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0c >> TPartitionTests::ChangeConfig [GOOD] >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] >> TPartitionTests::After_TEvGetWriteInfoError_Comes_TEvTxCalcPredicateResult >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0c [GOOD] >> TPartitionTests::After_TEvGetWriteInfoError_Comes_TEvTxCalcPredicateResult [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0a >> TSchemeShardServerLess::StorageBillingLabels [GOOD] >> TPartitionTests::BlobKeyFilfer [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit_After_Abort >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit_After_Abort [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort_After_Commit >> TPQTest::TestWaitInOwners [GOOD] >> TPQTest::TestWritePQCompact >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0a [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 26862, MsgBus: 4956 2025-12-23T18:47:55.417370Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127056054357804:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:55.419583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040fb/r3tmp/tmpw2Q37z/pdisk_1.dat 2025-12-23T18:47:55.663377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:55.694232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:55.694375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:55.758825Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127056054357767:2081] 1766515675377911 != 1766515675377914 2025-12-23T18:47:55.767697Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:55.773132Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26862, node 1 2025-12-23T18:47:55.915276Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:55.915299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:55.915309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:55.915368Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:55.953960Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4956 2025-12-23T18:47:56.426914Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:4956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515675834 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:56.593523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:56.605128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:58.803001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127068939260542:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.803106Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.803395Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127068939260554:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.803472Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127068939260555:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.803578Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.808101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:58.826793Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127068939260558:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:47:58.925646Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127068939260609:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:59.317740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:47:59.482402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:47:59.482830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:47:59.483133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:47:59.483272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:47:59.483359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:47:59.483491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:47:59.483586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:47:59.483674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:47:59.483762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:47:59.483857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:47:59.483979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:47:59.484085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:47:59.484175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7587127073234228053:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:47:59.518019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587127073234228052:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:47:59.518122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587127073234228052:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:47:59.518345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587127073234228052:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:47:59.518458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7587 ... 
10662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.574499Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.574802Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.574867Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.574884Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.585186Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.585254Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.585253Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.585276Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.585304Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.585323Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.595831Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.595830Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.595891Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.595899Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.595914Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.595919Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.606465Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.606546Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.606569Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.606954Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.607010Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.607034Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.616410Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.616481Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.616504Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038070;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.616562Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.616628Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.616648Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.627059Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.627125Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.627140Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.627162Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.627174Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.627192Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.637959Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.638035Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.638058Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.638717Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.638781Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.638797Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:48:58.672503Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd68nkhcf5qv0b8nsb4577dx", SessionId: ydb://session/3?node_id=4&id=MWFkNDNlMzYtYTM5ZTllZjAtOTgzYTdkYTctNTA1ZjBmYjY=, Slow query, duration: 11.092273s, status: STATUS_CODE_UNSPECIFIED, user: 
UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::BlobKeyFilfer [GOOD] Test command err: 2025-12-23T18:49:02.612355Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:02.680382Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:02.680499Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:02.680571Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:02.680659Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:49:02.698879Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:183:2196] 2025-12-23T18:49:02.700003Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:183:2196] Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:49:02.727264Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } CmdWrite { Key: 
"i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:49:02.770260Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:02.792181Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: set offset in already dead session session-id-1 actual is session-id-2 2025-12-23T18:49:02.792269Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: set offset in already dead session session-id-3 actual is session-id-2 2025-12-23T18:49:02.792490Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:49:02.803513Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:03.247557Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:03.294608Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:03.294663Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:03.294720Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:03.294793Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:49:03.317811Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:184:2196] 2025-12-23T18:49:03.320812Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-12-23T18:49:03.000000Z 2025-12-23T18:49:03.321043Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:184:2196] 2025-12-23T18:49:03.331970Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:03.362954Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:03.383861Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:03.414871Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:03.445893Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:03.466619Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:03.518195Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:03.590168Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send change config Wait cmd write (initial) Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\000\030\000\"\tsession-1(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\230\262\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:49:03.675079Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\230\262\254\344\26438\001" StorageChannel: INLINE } Wait commit 1 done Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003Sclient-2" IncludeFrom: true To: "c0000000003Sclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: 
"c0000000003wclient-2|0000000000000000" IncludeFrom: true To: "c0000000003wclient-2|FFFFFFFFFFFFFFFF" IncludeTo: true } } CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\014\n\010client-1@\000\252\002\014\n\010client-3@\007" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\230\262\254\344\26438\001" StorageChannel: INLINE } Wait config changed 2025-12-23T18:49:03.738936Z node 2 :PERSQUEUE WARN: partition.cpp:2983: [72057594037927937][Partition][3][StateIdle] Partition 3 Unknown consumer 'client-2' 2025-12-23T18:49:04.022409Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:04.069395Z node 3 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:04.069453Z node 3 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:04.069497Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:04.069562Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:49:04.082573Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [3:183:2196] 2025-12-23T18:49:04.083456Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:183:2196] |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3a >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort_After_Commit [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Tx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBillingLabels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:47:47.034396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:47:47.034508Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.034584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:47:47.034633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:47:47.034777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:47:47.034811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:47:47.034901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.034983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:47:47.035957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:47.037669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:47:47.133677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:47:47.133748Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:47.146530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:47:47.146769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:47:47.149537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:47:47.166042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:47:47.167466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:47:47.170258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:47.173205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:47.181253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.181460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:47:47.190698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:47.190814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-12-23T18:47:47.191018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:47:47.191097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:47:47.191217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:47:47.191354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:47:47.359393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.360459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.360576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.360689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.360757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.360823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.360908Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.360981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.361061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.361129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.361189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.361278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.361375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.361468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.361557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-12-23T18:48:34.739048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-12-23T18:48:38.784304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:48:38.784470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:129: TTxServerlessStorageBilling: too soon call, wait until current period ends, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], now: 1970-01-01T00:02:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, next retry at: 1970-01-01T00:03:00.000000Z 2025-12-23T18:48:38.784547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:48:38.881777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7071: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-12-23T18:48:38.881929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:48:38.882002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-12-23T18:48:39.007094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7071: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-12-23T18:48:39.007242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-12-23T18:48:39.007333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-12-23T18:48:39.054242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7071: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-12-23T18:48:39.054368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-12-23T18:48:39.054439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-12-23T18:48:39.101781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 
cpuUsage 0.0021 2025-12-23T18:48:39.143759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2025-12-23T18:48:39.143988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2025-12-23T18:48:39.144078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2025-12-23T18:48:39.144200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-12-23T18:48:39.154675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-12-23T18:48:43.283623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0021 2025-12-23T18:48:43.331293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2025-12-23T18:48:43.331492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2025-12-23T18:48:43.331559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2025-12-23T18:48:43.331673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-12-23T18:48:43.346548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-12-23T18:48:47.631118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.002 2025-12-23T18:48:47.672540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2025-12-23T18:48:47.672774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2025-12-23T18:48:47.672848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2025-12-23T18:48:47.672972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 
72075186233409552: SplitByLoadNotEnabledForTable 2025-12-23T18:48:47.683480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-12-23T18:48:51.842430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.002 2025-12-23T18:48:51.883688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2025-12-23T18:48:51.883892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2025-12-23T18:48:51.883980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2025-12-23T18:48:51.884096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-12-23T18:48:51.894644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-12-23T18:48:55.987389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.002 2025-12-23T18:48:56.030935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2025-12-23T18:48:56.031110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2025-12-23T18:48:56.031175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2025-12-23T18:48:56.031256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-12-23T18:48:56.042664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-12-23T18:49:00.195144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0019 2025-12-23T18:49:00.236289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2025-12-23T18:49:00.236486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row 
count 1 2025-12-23T18:49:00.236565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2025-12-23T18:49:00.236671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2025-12-23T18:49:00.247172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2025-12-23T18:49:03.994190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:49:03.994580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":120,"quantity":59,"finish":179,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":41},"id":"72057594046678944-39-120-179-41","cloud_id":"CLOUD_ID_VAL","source_wt":180,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","labels":{"Category":"Table","k":"v"},"folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], now: 1970-01-01T00:03:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:02:00.000000Z--1970-01-01T00:02:59.000000Z, next retry at: 1970-01-01T00:04:00.000000Z 2025-12-23T18:49:03.997897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete ... blocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering cookie 0 ... 
waiting for metering (done) |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches [GOOD] >> TPartitionTests::ConflictingCommitsInSeveralBatches >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> TPartitionTests::DataTxCalcPredicateOk [GOOD] >> PQCountersLabeled::PartitionKeyCompaction >> TPQTest::TestPQPartialRead >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Tx [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3a [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Step ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket: 500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 2000 elems count: 1008 Bucket: 5000 elems count: 2976 2025-12-23T18:48:50.236143Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127288398507685:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:50.236237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:50.274120Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127291231356676:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:50.274173Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:50.280351Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003481/r3tmp/tmpeQSy95/pdisk_1.dat 2025-12-23T18:48:50.291336Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:48:50.472470Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:50.474318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:50.510270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:50.510367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:50.511183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:50.511233Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:50.517522Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:48:50.531844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-12-23T18:48:50.532107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:50.550266Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14517, node 1 2025-12-23T18:48:50.669044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/003481/r3tmp/yandexLaI5l7.tmp 2025-12-23T18:48:50.669089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/003481/r3tmp/yandexLaI5l7.tmp 2025-12-23T18:48:50.669333Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/003481/r3tmp/yandexLaI5l7.tmp 2025-12-23T18:48:50.669468Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:50.675959Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:50.706483Z INFO: TTestServer started on Port 27037 GrpcPort 14517 2025-12-23T18:48:50.708772Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27037 PQClient connected to localhost:14517 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515730644 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:50.971145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:48:51.004611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 
2025-12-23T18:48:51.253128Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:51.290927Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:53.508473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127301283410983:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:53.508593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127301283410959:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:53.508880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:53.509405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127301283410990:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:53.509501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:53.513410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:53.515690Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127304116258949:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:53.515712Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127304116258957:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:53.515781Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:53.516000Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127304116258964:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:53.516067Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:53.531698Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127301283410989:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:48:53.533729Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127304116258966:2176] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:48:53.534557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-12-23T18:48:53.534853Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587127304116258963:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:48:53.606981Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127304116258992:2182] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:53.626646Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127301283411089:3082] txid# 281474976710663, issues: { message: "Check failed: path: \'/Ro ... WAIT_RS 2025-12-23T18:49:03.717144Z node 6 :PQ_TX INFO: pq_impl.cpp:3962: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-12-23T18:49:03.717201Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4468: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-12-23T18:49:03.717428Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-12-23T18:49:03.717474Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] Responses received from the partitions 0/2 2025-12-23T18:49:03.717557Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-12-23T18:49:03.717607Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:03.717646Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:03.717686Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:03.717747Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2025-12-23T18:49:03.718015Z node 6 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2025-12-23T18:49:03.718054Z node 6 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2025-12-23T18:49:03.718084Z node 6 :PERSQUEUE DEBUG: partition.cpp:3831: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user drop done 2025-12-23T18:49:03.718124Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2025-12-23T18:49:03.718172Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:03.718218Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:03.718266Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:03.718676Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][1][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-12-23T18:49:03.718710Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:49:03.718754Z node 6 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:03.718780Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:03.718814Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2025-12-23T18:49:03.718916Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][1][StateIdle] Schedule reply tx done 67890 2025-12-23T18:49:03.718952Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:03.718981Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2025-12-23T18:49:03.719010Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2025-12-23T18:49:03.719198Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:49:03.719273Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:49:03.722476Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:03.722787Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:49:03.723487Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:03.723527Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:03.723550Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:03.723570Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:03.723591Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:03.723612Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:03.723638Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:49:03.723855Z node 6 :PQ_TX INFO: pq_impl.cpp:3483: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2025-12-23T18:49:03.723906Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-12-23T18:49:03.723940Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-12-23T18:49:03.723979Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4310: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-12-23T18:49:03.724024Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] Responses received from the partitions 1/2 2025-12-23T18:49:03.724066Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4331: [PQ: 72057594037927937] TxId 67890 status has 
not changed 2025-12-23T18:49:03.724147Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:03.724284Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:49:03.724358Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:03.724388Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:49:03.724413Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:03.724436Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:03.724462Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:03.724485Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2025-12-23T18:49:03.724515Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2025-12-23T18:49:03.724641Z node 6 :PQ_TX INFO: pq_impl.cpp:3483: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 1 2025-12-23T18:49:03.724671Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-12-23T18:49:03.724702Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-12-23T18:49:03.724730Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4310: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-12-23T18:49:03.724758Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] Responses received from the partitions 2/2 2025-12-23T18:49:03.724795Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4196: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-12-23T18:49:03.724849Z node 6 :PQ_TX INFO: pq_impl.cpp:4504: [PQ: 72057594037927937] Complete TxId 67890 2025-12-23T18:49:03.725077Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 } Consumers { Name: "client-3" Generation: 2 } 2025-12-23T18:49:03.725135Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:03.725204Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-12-23T18:49:03.725274Z node 6 :PQ_TX DEBUG: 
pq_impl.cpp:3981: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-12-23T18:49:03.725324Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:49:03.725364Z node 6 :PQ_TX INFO: pq_impl.cpp:4535: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-12-23T18:49:03.725409Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-12-23T18:49:03.725444Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2025-12-23T18:49:03.725474Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4544: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-12-23T18:49:03.725505Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2025-12-23T18:49:03.725542Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4623: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2025-12-23T18:49:03.725590Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS_ACKS to DELETING 2025-12-23T18:49:03.725646Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3835: [PQ: 72057594037927937] Delete key for TxId 67890 2025-12-23T18:49:03.725723Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:49:03.727661Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:49:03.727721Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state DELETING 2025-12-23T18:49:03.727772Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State DELETING 2025-12-23T18:49:03.727815Z node 6 :PQ_TX INFO: pq_impl.cpp:4568: [PQ: 72057594037927937] delete TxId 67890 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::DataTxCalcPredicateError >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4c >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Step [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAccessKeySignatureUnsupported >> TPartitionTests::TestBatchingWithChangeConfig [GOOD] >> TPartitionTests::TestBatchingWithProposeConfig >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] >> BasicUsage::BrokenCredentialsProvider [GOOD] >> BasicUsage::CreateTopicWithCustomName >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4c [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood [GOOD] >> TTicketParserTest::LoginEmptyTicketBad >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4a >> TTicketParserTest::AuthorizationUnavailable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Step [GOOD] Test command err: 2025-12-23T18:49:03.460615Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:03.517464Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 
2025-12-23T18:49:03.521059Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:49:03.521379Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:03.521455Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:03.521497Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:49:03.521553Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:49:03.521633Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:03.521711Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:49:03.553119Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:210:2214], now have 1 active actors on pipe 2025-12-23T18:49:03.553320Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:49:03.573145Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:49:03.576392Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:49:03.576497Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:03.577248Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:49:03.577373Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:49:03.577692Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:49:03.578090Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:218:2142] 2025-12-23T18:49:03.578885Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:49:03.578936Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2025-12-23T18:49:03.578982Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:218:2142] 2025-12-23T18:49:03.579026Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:49:03.579069Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:49:03.579510Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:49:03.579546Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:03.579574Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:03.579631Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:03.579664Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:03.579713Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:03.579786Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-12-23T18:49:03.579836Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:49:03.579871Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:03.579900Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:03.579974Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:03.580158Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:49:03.580247Z node 1 :PERSQUEUE DEBUG: 
partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:49:03.580400Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:49:03.580556Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:49:03.582855Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:03.582969Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:03.583038Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:03.583127Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:03.583173Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:03.583212Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:03.583245Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:03.583288Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:49:03.583660Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:225:2222], now have 1 active actors on pipe 2025-12-23T18:49:03.584319Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:228:2224], now have 1 active actors on pipe 2025-12-23T18:49:03.585245Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3135: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 181 RawX2: 4294969490 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-12-23T18:49:03.585326Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3321: [PQ: 72057594037927937] distributed transaction 2025-12-23T18:49:03.585438Z node 1 :PQ_TX INFO: pq_impl.cpp:3645: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-12-23T18:49:03.585492Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-12-23T18:49:03.585530Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-12-23T18:49:03.585581Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3934: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-12-23T18:49:03.585644Z node 1 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from UNKNOWN to PREPARING 2025-12-23T18:49:03.585698Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3813: [PQ: 72057594037927937] Persist state PREPARED for TxId 67890 2025-12-23T18:49:03.585834Z node 1 :PQ_TX DEBUG: transaction.cpp:414: [TxId: 67890] Save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 
CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 181 RawX2: 4294969490 } Partitions { } 2025-12-23T18:49:03.585923Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:49:03.588465Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:49:03.588525Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-12-23T18:49:03.588568Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-12-23T18:49:03.588623Z node 1 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from PREPARING to PREPARED 2025-12-23T18:49:03.592589Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3347: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 181 RawX2: 4294969490 } } St ... 12-23T18:49:05.770152Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state PREPARED 2025-12-23T18:49:05.770197Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State PREPARED 2025-12-23T18:49:05.770240Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from PREPARED to PLANNING 2025-12-23T18:49:05.770284Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from PLANNING to PLANNED 2025-12-23T18:49:05.770326Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4420: [PQ: 72057594037927937] TxQueue.size 1 2025-12-23T18:49:05.770371Z node 6 :PQ_TX INFO: pq_impl.cpp:649: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-12-23T18:49:05.770435Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-12-23T18:49:05.770483Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3740: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2025-12-23T18:49:05.770563Z node 6 :PERSQUEUE DEBUG: partition.cpp:1381: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-12-23T18:49:05.770621Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:05.770667Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:49:05.770755Z node 6 :PQ_TX DEBUG: partition.cpp:3048: [Partition][0][StateIdle] TxId 67890 affect consumer user 2025-12-23T18:49:05.770816Z node 6 :PQ_TX DEBUG: partition.cpp:1708: [Partition][0][StateIdle] The long answer to TEvTxCalcPredicate. TxId: 67890 2025-12-23T18:49:05.770878Z node 6 :PQ_TX DEBUG: partition.cpp:1711: [Partition][0][StateIdle] Send TEvTxCalcPredicateResult. 
TxId: 67890 2025-12-23T18:49:05.770947Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:05.770986Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:05.771029Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:05.771080Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:05.771309Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3437: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2025-12-23T18:49:05.771355Z node 6 :PQ_TX DEBUG: transaction.cpp:244: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-12-23T18:49:05.771397Z node 6 :PQ_TX DEBUG: transaction.cpp:301: [TxId: 67890] Partition responses 1/1 2025-12-23T18:49:05.771439Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-12-23T18:49:05.771481Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-12-23T18:49:05.771528Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4310: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-12-23T18:49:05.771585Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4431: [PQ: 72057594037927937] Responses received from the partitions 1/1 2025-12-23T18:49:05.771631Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-12-23T18:49:05.771684Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-12-23T18:49:05.771739Z node 6 :PQ_TX INFO: pq_impl.cpp:3962: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-12-23T18:49:05.771786Z node 6 :PQ_TX INFO: pq_impl.cpp:3972: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 tx 67890 2025-12-23T18:49:05.771903Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4468: [PQ: 72057594037927937] HaveParticipantsDecision 0 2025-12-23T18:49:05.772001Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:49:05.773380Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2766: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-12-23T18:49:05.773435Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2771: [PQ: 72057594037927937] Connected to tablet 22222 2025-12-23T18:49:05.774805Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [6:238:2231], now have 1 active actors on pipe 2025-12-23T18:49:05.775271Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [6:239:2232], now have 1 active actors on pipe 2025-12-23T18:49:05.775340Z node 6 :PERSQUEUE DEBUG: pqtablet_mock.cpp:72: Connected to tablet 72057594037927937 from tablet 22222 2025-12-23T18:49:05.775478Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3362: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\002" Seqno: 0 2025-12-23T18:49:05.775528Z node 6 :PQ_TX INFO: pq_impl.cpp:3372: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet tx 67890 tabletProducer 22222 2025-12-23T18:49:05.775569Z node 6 :PQ_TX DEBUG: transaction.cpp:308: [TxId: 67890] Handle TEvReadSet 67890 2025-12-23T18:49:05.775613Z node 6 :PQ_TX DEBUG: transaction.cpp:325: [TxId: 67890] Predicates 1/1 2025-12-23T18:49:05.775660Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2025-12-23T18:49:05.775701Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2025-12-23T18:49:05.775746Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4310: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2025-12-23T18:49:05.775790Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4468: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-12-23T18:49:05.775870Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-12-23T18:49:05.775918Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] Responses received from the partitions 0/1 2025-12-23T18:49:05.776186Z node 6 :PERSQUEUE DEBUG: partition.cpp:1485: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxRollback Step 100, TxId 67890 2025-12-23T18:49:05.776235Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2025-12-23T18:49:05.776581Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:05.776658Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:05.776705Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:05.776743Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:05.776787Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:05.776848Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2025-12-23T18:49:05.776918Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2025-12-23T18:49:05.776965Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:05.777004Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:05.777048Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:05.777204Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:49:05.777326Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:49:05.779654Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:05.779756Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:05.779798Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:05.779830Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:05.779858Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:05.779890Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:05.779918Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:05.779952Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:49:05.780018Z node 6 :PQ_TX INFO: pq_impl.cpp:3483: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2025-12-23T18:49:05.780072Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-12-23T18:49:05.780116Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-12-23T18:49:05.780163Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4310: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-12-23T18:49:05.780223Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] Responses received from the partitions 1/1 2025-12-23T18:49:05.780269Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4196: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-12-23T18:49:05.780313Z node 6 :PQ_TX INFO: pq_impl.cpp:4504: [PQ: 72057594037927937] Complete TxId 67890 2025-12-23T18:49:05.780358Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-12-23T18:49:05.780426Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3981: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-12-23T18:49:05.780485Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:49:05.780535Z node 6 :PQ_TX INFO: pq_impl.cpp:4535: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-12-23T18:49:05.780579Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-12-23T18:49:05.780627Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/1 2025-12-23T18:49:05.780662Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4544: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-12-23T18:49:05.780697Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/1 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD] >> 
TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4a [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> KqpLocksTricky::TestSnapshotWithDependentReads+UseSink [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-clusteradmin >> TPartitionTests::SetOffset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] Test command err: 2025-12-23T18:48:33.864073Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127218656935977:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:33.864120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032e2/r3tmp/tmprVg9S4/pdisk_1.dat 2025-12-23T18:48:34.213430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:34.219397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:34.219507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:34.266550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:34.366525Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:34.418861Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:34.513596Z node 1 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2025-12-23T18:48:34.516557Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2025-12-23T18:48:34.516622Z node 1 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2025-12-23T18:48:34.516633Z node 1 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2025-12-23T18:48:34.516646Z node 1 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2025-12-23T18:48:34.516669Z node 1 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2025-12-23T18:48:34.516727Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d0580700750] Connect to grpc://localhost:9146 2025-12-23T18:48:34.520861Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0580700750] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: 
"something.read" } } result_filter: ALL_FAILED } 2025-12-23T18:48:34.570336Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:2377: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# ydb-****ount (05D5F592) 2025-12-23T18:48:34.602985Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0580700750] Status 16 Unauthenticated service 2025-12-23T18:48:34.606895Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Unauthenticated service" retryable: 1 2025-12-23T18:48:34.606959Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Unauthenticated service' 2025-12-23T18:48:34.606995Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2025-12-23T18:48:34.607054Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2025-12-23T18:48:34.611126Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0580700750] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-12-23T18:48:34.618968Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d0580700750] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:48:34.619180Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:48:37.528207Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127236085358082:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:37.528325Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032e2/r3tmp/tmpaekvi7/pdisk_1.dat 2025-12-23T18:48:37.552035Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:37.630960Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:37.644545Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127236085358056:2081] 1766515717525720 != 1766515717525723 2025-12-23T18:48:37.669583Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:37.669685Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:37.677039Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:37.709305Z node 2 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2025-12-23T18:48:37.709514Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) 
asking for AccessServiceBulkAuthorization( something.read) 2025-12-23T18:48:37.709562Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2025-12-23T18:48:37.709572Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2025-12-23T18:48:37.709582Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2025-12-23T18:48:37.709589Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2025-12-23T18:48:37.709621Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d05807217d0] Connect to grpc://localhost:1279 2025-12-23T18:48:37.710946Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d05807217d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-12-23T18:48:37.716317Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:2377: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# ydb-****ount (05D5F592) 2025-12-23T18:48:37.734271Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d05807217d0] Status 16 Unauthenticated service 2025-12-23T18:48:37.734607Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Unauthenticated service" retryable: 1 2025-12-23T18:48:37.734666Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Unauthenticated service' 2025-12-23T18:48:37.734699Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2025-12-23T18:48:37.734782Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2025-12-23T18:48:37.735070Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d05807217d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-12-23T18:48:37.743649Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d05807217d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:48:37.746906Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:48:37.750550Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:38.532181Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2025-12-23T18:48:38.545651Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:39.532716Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2025-12-23T18:48:40.533187Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2025-12-23T18:48:40.533236Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:70: Refresh token for 
provider# token-for-access-service 2025-12-23T18:48:40.533320Z node 2 :TOKEN_MANAGER TRACE: vm_metadata_token_provider_handler.cpp:25: Handle send request to vm metaservice 2025-12-23T18:48:40.533835Z node 2 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2025-12-23T18:48:40.534105Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2025-12-23T18:48:40.534175Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (9D42FAED) asking for AccessServiceBulkAuthorization( something.read) 2025-12-23T18:48:40.534213Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2025-12-23T18:48:40.534227Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2025-12-23T18:48:40.534240Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2025-12-23T18:48:40.534249Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2025-12-23T18:48:40.534510Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d05807217d0] Request BulkAuthorizeRequest { iam_token: "**** (9D42FAED)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-12-23T18:48:40.535493Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:2377: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# new-****ount (82D66F55) 2025-12-23T18:48:40.540016Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d05807217d0] Status 16 Unauthenticated service 2025-12-23T18:48:40.540466Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (9D42FAED) permission something.read now has a retryable error "Unauthenticated service" retryable: 1 2025-12-23T18:48:40.540496Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h: ... FIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:49.036460Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:49.036564Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:49.181297Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23162 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515729041 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:49.233805Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:49.240195Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-12-23T18:48:49.240277Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d0580807d50] Connect to grpc://localhost:4006 2025-12-23T18:48:49.241611Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0580807d50] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-12-23T18:48:49.248902Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0580807d50] Status 14 Service Unavailable 2025-12-23T18:48:49.249070Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:49.249116Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-12-23T18:48:49.249305Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0580807d50] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-12-23T18:48:49.250995Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0580807d50] Status 14 Service Unavailable 2025-12-23T18:48:49.251102Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:49.863383Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-12-23T18:48:49.863434Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-12-23T18:48:49.863653Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0580807d50] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-12-23T18:48:49.866470Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0580807d50] Status 14 Service Unavailable 2025-12-23T18:48:49.866765Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service 
Unavailable' 2025-12-23T18:48:49.870922Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:50.863815Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-12-23T18:48:50.863856Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-12-23T18:48:50.864019Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0580807d50] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-12-23T18:48:50.867554Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0580807d50] Status 14 Service Unavailable 2025-12-23T18:48:50.867701Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:52.867724Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-12-23T18:48:52.867763Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-12-23T18:48:52.867941Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0580807d50] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-12-23T18:48:52.873606Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d0580807d50] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:48:52.873940Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-12-23T18:48:53.858022Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587127282755249026:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:53.858130Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:49:02.023126Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587127343258849508:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:02.023167Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032e2/r3tmp/tmpro2zhK/pdisk_1.dat 2025-12-23T18:49:02.037205Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:49:02.100826Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:02.143638Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:02.143842Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:02.147046Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 8725, node 6 2025-12-23T18:49:02.188410Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:49:02.188433Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:49:02.188445Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:49:02.188502Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:49:02.204968Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:31589 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515742194 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-12-23T18:49:02.416543Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:49:02.423928Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-12-23T18:49:02.424002Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d05807be8d0] Connect to grpc://localhost:12281 2025-12-23T18:49:02.424977Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d05807be8d0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-12-23T18:49:02.434353Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d05807be8d0] Status 14 Service Unavailable 2025-12-23T18:49:02.434476Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-12-23T18:49:02.434506Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-12-23T18:49:02.434685Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d05807be8d0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-12-23T18:49:02.436457Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d05807be8d0] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:49:02.436792Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-12-23T18:49:03.029974Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4a [GOOD] Test command err: 2025-12-23T18:49:03.221795Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:03.295788Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:49:03.301745Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:49:03.302108Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:03.302192Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:03.302234Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:49:03.302289Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:49:03.302356Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:03.302427Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:49:03.332792Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:210:2214], now have 1 active actors on pipe 2025-12-23T18:49:03.332937Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:49:03.349147Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:49:03.351913Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:49:03.352024Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:03.352838Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:49:03.352964Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:49:03.353286Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:49:03.353644Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:218:2142] 2025-12-23T18:49:03.354756Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:49:03.354809Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2025-12-23T18:49:03.354869Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:218:2142] 2025-12-23T18:49:03.354915Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:49:03.354961Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:49:03.355401Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:49:03.355430Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:03.355461Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:03.355513Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:03.355538Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:03.355584Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:03.355648Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-12-23T18:49:03.355698Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:49:03.355732Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:03.355761Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:03.355805Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:03.355959Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:49:03.356063Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:49:03.356187Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:49:03.356338Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:49:03.358415Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:03.358507Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:03.358550Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:03.358616Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:03.358664Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:03.358703Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:03.358735Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:03.358790Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:49:03.359070Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:225:2222], now have 1 active actors on pipe 2025-12-23T18:49:03.359548Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:228:2224], now have 1 active actors on pipe 2025-12-23T18:49:03.360448Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3135: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 181 RawX2: 4294969490 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-12-23T18:49:03.360544Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3321: [PQ: 72057594037927937] distributed transaction 2025-12-23T18:49:03.360621Z node 1 :PQ_TX INFO: pq_impl.cpp:3645: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-12-23T18:49:03.360664Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-12-23T18:49:03.360714Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-12-23T18:49:03.360753Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3934: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-12-23T18:49:03.360798Z node 1 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from UNKNOWN to PREPARING 2025-12-23T18:49:03.360837Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3813: [PQ: 72057594037927937] Persist state PREPARED for TxId 67890 2025-12-23T18:49:03.360948Z node 1 :PQ_TX DEBUG: transaction.cpp:414: [TxId: 67890] Save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 181 RawX2: 
4294969490 } Partitions { } 2025-12-23T18:49:03.361014Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:49:03.363222Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:49:03.363284Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-12-23T18:49:03.363332Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-12-23T18:49:03.363395Z node 1 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from PREPARING to PREPARED 2025-12-23T18:49:03.367405Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3347: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 181 RawX2: 4294969490 } } St ... partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:06.736915Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:06.736950Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:06.736979Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:06.757638Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:06.768283Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:06.768356Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:06.768391Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:06.768430Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:06.768474Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:06.790618Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:06.790685Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:06.790716Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:06.790772Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:06.790799Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:06.803077Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:06.823906Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 
disallowed 0 2025-12-23T18:49:06.834501Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:06.834569Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:06.834611Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:06.834649Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:06.834680Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:06.835216Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [6:380:2325], now have 1 active actors on pipe 2025-12-23T18:49:06.835602Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3362: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22226 TabletDest: 72057594037927937 TabletProducer: 22226 ReadSet: "\010\002" Seqno: 0 2025-12-23T18:49:06.835656Z node 6 :PQ_TX INFO: pq_impl.cpp:3372: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet tx 67890 tabletProducer 22226 2025-12-23T18:49:06.835694Z node 6 :PQ_TX DEBUG: transaction.cpp:308: [TxId: 67890] Handle TEvReadSet 67890 2025-12-23T18:49:06.835743Z node 6 :PQ_TX DEBUG: transaction.cpp:325: [TxId: 67890] Predicates 5/5 2025-12-23T18:49:06.835806Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2025-12-23T18:49:06.835849Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2025-12-23T18:49:06.835891Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4310: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2025-12-23T18:49:06.835932Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4468: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-12-23T18:49:06.836019Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-12-23T18:49:06.836066Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] Responses received from the partitions 0/1 2025-12-23T18:49:06.836152Z node 6 :PERSQUEUE DEBUG: partition.cpp:1485: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxRollback Step 100, TxId 67890 2025-12-23T18:49:06.836196Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:06.836232Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:06.836272Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:06.836333Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2025-12-23T18:49:06.836405Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2025-12-23T18:49:06.836447Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:06.836488Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: 
[72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:06.836528Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:06.836730Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:49:06.839491Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:06.839627Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:06.839685Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:06.839726Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:06.839764Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:06.839809Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:06.839846Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:06.839893Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:49:06.839975Z node 6 :PQ_TX INFO: pq_impl.cpp:3483: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2025-12-23T18:49:06.840040Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-12-23T18:49:06.840083Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-12-23T18:49:06.840126Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4310: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-12-23T18:49:06.840173Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4495: [PQ: 72057594037927937] Responses received from the partitions 1/1 2025-12-23T18:49:06.840217Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4196: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-12-23T18:49:06.840256Z node 6 :PQ_TX INFO: pq_impl.cpp:4504: [PQ: 72057594037927937] Complete TxId 67890 2025-12-23T18:49:06.840301Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-12-23T18:49:06.840376Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3981: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-12-23T18:49:06.840435Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22226 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:49:06.840480Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22225 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:49:06.840520Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 
72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:49:06.840560Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22223 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:49:06.840598Z node 6 :PQ_TX INFO: pq_impl.cpp:3983: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22224 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-12-23T18:49:06.840633Z node 6 :PQ_TX INFO: pq_impl.cpp:4535: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-12-23T18:49:06.840678Z node 6 :PQ_TX INFO: pq_impl.cpp:4264: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-12-23T18:49:06.840728Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2025-12-23T18:49:06.840760Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4544: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-12-23T18:49:06.840796Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2025-12-23T18:49:06.840931Z node 6 :PQ_TX INFO: pq_impl.cpp:3408: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSetAck Step: 100 TxId: 67890 TabletSource: 22226 TabletDest: 72057594037927937 TabletConsumer: 72057594037927937 Flags: 0 Seqno: 0 2025-12-23T18:49:06.840969Z node 6 :PQ_TX DEBUG: transaction.cpp:344: [TxId: 67890] Handle TEvReadSetAck txId 67890 2025-12-23T18:49:06.841012Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4318: [PQ: 72057594037927937] Try execute txs with state WAIT_RS_ACKS 2025-12-23T18:49:06.841059Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4363: [PQ: 72057594037927937] TxId 67890, State WAIT_RS_ACKS 2025-12-23T18:49:06.841101Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2025-12-23T18:49:06.841135Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4544: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-12-23T18:49:06.841174Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD] Test command err: 2025-12-23T18:48:34.368891Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127221282120572:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:34.380528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032d2/r3tmp/tmpMwsjTK/pdisk_1.dat 2025-12-23T18:48:34.734968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:34.747945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:34.748056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:34.853914Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127221282120537:2081] 1766515714287965 != 1766515714287968 2025-12-23T18:48:34.870012Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:34.875678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:34.906388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 4862, node 1 2025-12-23T18:48:34.979519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:34.979562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:34.979572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:34.979686Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515714929 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:35.236290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:35.259337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:35.263219Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-12-23T18:48:35.266224Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d57cbc00050] Connect to grpc://localhost:18359 2025-12-23T18:48:35.269471Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbc00050] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:48:35.302992Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d57cbc00050] Status 14 Service Unavailable 2025-12-23T18:48:35.303176Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-12-23T18:48:35.303219Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:35.303271Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-12-23T18:48:35.303544Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbc00050] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:48:35.308449Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d57cbc00050] Status 14 Service Unavailable 2025-12-23T18:48:35.308825Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-12-23T18:48:35.308853Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:35.382915Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:36.420166Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-12-23T18:48:36.420205Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-12-23T18:48:36.420431Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbc00050] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:48:36.424170Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d57cbc00050] Status 14 Service Unavailable 2025-12-23T18:48:36.424403Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-12-23T18:48:36.426900Z node 1 
:TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:38.420874Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-12-23T18:48:38.420936Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-12-23T18:48:38.421281Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbc00050] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:48:38.423882Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d57cbc00050] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:48:38.424037Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2025-12-23T18:48:38.424156Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-12-23T18:48:39.342870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127221282120572:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:39.342956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:47.910459Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127278444445974:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:47.910713Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032d2/r3tmp/tmpHpibqf/pdisk_1.dat 2025-12-23T18:48:47.933629Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:47.996508Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:48.034159Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:48.034234Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:48.037577Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31539, node 2 2025-12-23T18:48:48.079441Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:48.079463Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:48.079471Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-12-23T18:48:48.079558Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:48.195738Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVe ... :491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-12-23T18:48:59.047248Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(somewhere.sleep) 2025-12-23T18:48:59.047275Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.list) 2025-12-23T18:48:59.047301Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-12-23T18:48:59.047329Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.eat) 2025-12-23T18:48:59.047384Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d57cbd606d0] Connect to grpc://localhost:7442 2025-12-23T18:48:59.048374Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbd606d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:48:59.051888Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbd606d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:48:59.052176Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbd606d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.list" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:48:59.052376Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbd606d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:48:59.052556Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbd606d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.eat" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:48:59.058246Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d57cbd606d0] Status 16 Access Denied 2025-12-23T18:48:59.058408Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d57cbd606d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 
2025-12-23T18:48:59.058413Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.list now has a permanent error "Access Denied" retryable:0 2025-12-23T18:48:59.058508Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-12-23T18:48:59.059048Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d57cbd606d0] Status 16 Access Denied 2025-12-23T18:48:59.059148Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission somewhere.sleep now has a permanent error "Access Denied" retryable:0 2025-12-23T18:48:59.059732Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d57cbd606d0] Status 16 Access Denied 2025-12-23T18:48:59.059737Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d57cbd606d0] Status 16 Access Denied 2025-12-23T18:48:59.060040Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-12-23T18:48:59.060093Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.eat now has a permanent error "Access Denied" retryable:0 2025-12-23T18:48:59.060113Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-12-23T18:48:59.060244Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d57cbd60dd0] Connect to grpc://localhost:1670 2025-12-23T18:48:59.061040Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbd60dd0] Request GetUserAccountRequest { user_account_id: "user1" } 2025-12-23T18:48:59.067465Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d57cbd60dd0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-12-23T18:48:59.067849Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2025-12-23T18:49:02.178624Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587127341440334985:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:02.178687Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032d2/r3tmp/tmp79JclO/pdisk_1.dat 2025-12-23T18:49:02.210149Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:49:02.322733Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:02.322865Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:02.329629Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:02.339418Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7610, node 6 2025-12-23T18:49:02.367978Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:49:02.387492Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:49:02.387521Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:49:02.387532Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:49:02.387633Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515742390 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:49:02.624919Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:49:02.633955Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-12-23T18:49:02.634047Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-12-23T18:49:02.634101Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d57cbd39450] Connect to grpc://localhost:18788 2025-12-23T18:49:02.635162Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbd39450] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:49:02.640163Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbd39450] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:49:02.644175Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d57cbd39450] Status 14 Service Unavailable 2025-12-23T18:49:02.644426Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d57cbd39450] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:49:02.644507Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2025-12-23T18:49:02.644586Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-12-23T18:49:02.644616Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-12-23T18:49:02.644647Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-12-23T18:49:02.644707Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-12-23T18:49:02.644933Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbd39450] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:49:02.645484Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d57cbd39450] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-12-23T18:49:02.649427Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d57cbd39450] Status 1 CANCELLED 2025-12-23T18:49:02.649429Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d57cbd39450] Status 1 CANCELLED 2025-12-23T18:49:02.649588Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1488: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" 2025-12-23T18:49:02.649758Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.write now has a permanent error "CANCELLED" retryable:1 2025-12-23T18:49:02.649800Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 
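
The TTicketParserTest::AuthorizationUnavailable output above shows the ticket parser classifying gRPC status 14 (Service Unavailable) and status 1 (CANCELLED) as retryable, status 16 (Access Denied) as permanent, and re-issuing the Authorize request on refresh until a valid subject ("user1@as") comes back. The sketch below is a minimal, standalone illustration of that classify-and-retry pattern only; it is not the actual TTicketParser code, and the names AuthResult, IsRetryable, and RetryAuthorize are hypothetical.

// Illustrative sketch, not YDB source: mirrors the retry behaviour visible in
// the log above, where gRPC status 14 (UNAVAILABLE) and 1 (CANCELLED) are
// retried and status 16 ("Access Denied") is treated as a permanent error.
#include <chrono>
#include <functional>
#include <optional>
#include <string>
#include <thread>

struct AuthResult {
    int grpcStatus = 0;   // numeric gRPC status code, 0 == OK
    std::string subject;  // filled on success, e.g. "user1@as"
};

// Statuses the log retries: UNAVAILABLE (14) and CANCELLED (1).
bool IsRetryable(int grpcStatus) {
    return grpcStatus == 14 || grpcStatus == 1;
}

using AuthorizeFn = std::function<AuthResult()>;

// Retry the authorization call with simple exponential backoff, giving up on a
// permanent error or after maxAttempts tries.
std::optional<std::string> RetryAuthorize(const AuthorizeFn& call, int maxAttempts = 4) {
    auto delay = std::chrono::milliseconds(100);
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        AuthResult res = call();
        if (res.grpcStatus == 0) {
            return res.subject;              // valid subject obtained
        }
        if (!IsRetryable(res.grpcStatus)) {
            return std::nullopt;             // permanent error, e.g. Access Denied
        }
        std::this_thread::sleep_for(delay);  // retryable: back off and try again
        delay *= 2;
    }
    return std::nullopt;                     // still failing after all retries
}

int main() {
    int calls = 0;
    // Stub that fails twice with UNAVAILABLE and then succeeds, roughly
    // matching the sequence seen in the log above.
    AuthorizeFn stub = [&calls]() {
        ++calls;
        return calls < 3 ? AuthResult{14, ""} : AuthResult{0, "user1@as"};
    };
    auto subject = RetryAuthorize(stub);
    return subject ? 0 : 1;
}
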
|96.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TPartitionTests::SetOffset [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD] Test command err: 2025-12-23T18:48:33.862186Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127218057905224:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:33.862228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f9/r3tmp/tmp6iCJW9/pdisk_1.dat 2025-12-23T18:48:34.223352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:34.223494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:34.248206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:34.315984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:34.352185Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29109, node 1 2025-12-23T18:48:34.547372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:34.634256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:34.634283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:34.634288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:34.634371Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:34.869915Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:27673 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515714425 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:35.185523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:35.190783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:48:35.193364Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-12-23T18:48:35.193431Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d63e36003d0] Connect to grpc://localhost:14563 2025-12-23T18:48:35.197058Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d63e36003d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-12-23T18:48:35.223824Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d63e36003d0] Status 14 Service Unavailable 2025-12-23T18:48:35.224020Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-12-23T18:48:35.224092Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:35.224226Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-12-23T18:48:35.224516Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d63e36003d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-12-23T18:48:35.235037Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d63e36003d0] Status 14 Service Unavailable 2025-12-23T18:48:35.235193Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission 
something.read now has a retryable error "Service Unavailable" retryable: 1 2025-12-23T18:48:35.235233Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:35.893559Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-12-23T18:48:35.893672Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-12-23T18:48:35.893960Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d63e36003d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-12-23T18:48:35.900092Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d63e36003d0] Status 14 Service Unavailable 2025-12-23T18:48:35.900372Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-12-23T18:48:35.900410Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:36.893940Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-12-23T18:48:36.894065Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-12-23T18:48:36.894358Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d63e36003d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-12-23T18:48:36.899865Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d63e36003d0] Status 14 Service Unavailable 2025-12-23T18:48:36.900517Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-12-23T18:48:36.900548Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:38.863736Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127218057905224:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:38.863834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:38.898930Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-12-23T18:48:38.899029Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-12-23T18:48:38.899334Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d63e36003d0] Request BulkAuthorizeRequest { signature { access_key_id: 
"AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-12-23T18:48:38.902827Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d63e36003d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:48:38.903114Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-12-23T18:48:47.883315Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127275865780391:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:47.883377Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f9/r3tmp/tmpsNkjGi/pdisk_1.dat 2025-12-23T18:48:47.896532Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:47.970888Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:47.995799Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:47.995871Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:47.999287Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16146, node 2 2025-12-23T18:48:48.042633Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:48.042656Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025 ... 
tence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:59.172289Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:59.173781Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7587127329379067372:2081] 1766515739079614 != 1766515739079617 2025-12-23T18:48:59.203884Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:59.203949Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:59.206409Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14017, node 5 2025-12-23T18:48:59.249260Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:59.249286Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:59.249294Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:59.249383Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:59.384541Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515739254 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:59.473770Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:59.482553Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-12-23T18:48:59.482608Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d63e3677e50] Connect to grpc://localhost:25761 2025-12-23T18:48:59.483703Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d63e3677e50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-12-23T18:48:59.495023Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d63e3677e50] Status 14 Service Unavailable 2025-12-23T18:48:59.495170Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-12-23T18:48:59.495225Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-12-23T18:48:59.495274Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:59.495374Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-12-23T18:48:59.495692Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d63e3677e50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-12-23T18:48:59.498596Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d63e3677e50] Status 1 CANCELLED 2025-12-23T18:48:59.498713Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2025-12-23T18:48:59.498757Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2025-12-23T18:48:59.498781Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2025-12-23T18:49:02.893067Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587127340463332148:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:02.893170Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f9/r3tmp/tmpeCFiVH/pdisk_1.dat 2025-12-23T18:49:02.909186Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:49:02.996479Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:02.998371Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7587127340463332125:2081] 1766515742892456 != 1766515742892459 2025-12-23T18:49:03.031681Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:03.031782Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:03.040644Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12835, node 6 2025-12-23T18:49:03.082379Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:49:03.082396Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:49:03.082401Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:49:03.082476Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:49:03.152497Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18048 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515743090 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:49:03.326562Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:49:03.334606Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2025-12-23T18:49:03.334680Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d63e35fcf50] Connect to grpc://localhost:23536 2025-12-23T18:49:03.335806Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d63e35fcf50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-12-23T18:49:03.346154Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d63e35fcf50] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:49:03.346405Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:49:03.347100Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-12-23T18:49:03.347365Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d63e35fcf50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-12-23T18:49:03.349162Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d63e35fcf50] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:49:03.349360Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TPartitionTests::OldPlanStep >> TPartitionTests::DataTxCalcPredicateError [GOOD] >> TLocksFatTest::PointSetNotBreak >> TPartitionTests::DataTxCalcPredicateOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestSnapshotWithDependentReads+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31892, MsgBus: 22053 ... waiting for SysViewsRoster update finished 2025-12-23T18:48:19.459467Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:48:19.572689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:48:19.582109Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:48:19.582399Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:48:19.582502Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:48:19.872055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:19.872194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:19.949200Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515696574661 != 1766515696574665 2025-12-23T18:48:19.961558Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:20.015433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:20.177309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040d1/r3tmp/tmprSSwXi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31892, node 1 2025-12-23T18:48:20.553752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:20.553824Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:20.553872Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:20.554275Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:20.566618Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22053 TClient is connected to server localhost:22053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:48:21.072151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:21.084098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:21.107129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:21.369822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:21.769997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.087660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:22.837526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1908:3515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:22.837995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:22.838704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1982:3534], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:22.838834Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:22.866377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:23.057375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:23.342686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:23.618917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:23.874843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:24.140594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:24.425866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:24.753884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:25.110668Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4180], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:25.110917Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:25.111611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2803:4184], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:25.111730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2806:4187], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:25.111785Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_worklo ... 025-12-23T18:48:58.037136Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1766515733917324 != 1766515733917328 2025-12-23T18:48:58.042665Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:58.088626Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:58.190574Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040d1/r3tmp/tmpGq3bhs/pdisk_1.dat TServer::EnableGrpc on GrpcPort 63405, node 4 2025-12-23T18:48:58.563668Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:58.563724Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:58.563756Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:58.563940Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:58.575267Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:15271 TClient is connected to server localhost:15271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:59.019446Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:59.084691Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:59.342786Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:59.716460Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:00.020093Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:00.541922Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1904:3511], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:00.542113Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:00.543476Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1977:3530], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:00.543624Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:00.576880Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:00.796803Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:01.088860Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:01.339620Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:01.579556Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:01.854440Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:02.111472Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:02.426023Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:02.825123Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2790:4171], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:02.825354Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:02.825755Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2794:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:02.825814Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:02.825872Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2797:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:02.831191Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:49:02.995831Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:2799:4180], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:49:03.058688Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:2859:4221] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:49:06.324607Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=OTAxMDAwN2YtYmNlZjhhNWUtMmQ2MjQyNWItNmMxOWQ2ZA==, ActorId: [4:3154:4454], ActorState: ExecuteState, TraceId: 01kd68p894d67x0wfrf716dh29, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken, status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KeyValue2`" issue_code: 2001 severity: 1 } |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] >> TPQTabletTests::UpdateConfig_1 >> TPartitionTests::ConflictingCommitFails >> TPartitionTests::OldPlanStep [GOOD] >> TPQTabletTests::UpdateConfig_1 [GOOD] >> TPartitionTests::IncorrectRange >> TPQTabletTests::UpdateConfig_2 >> TLocksTest::BrokenLockUpdate >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> TPQTabletTests::UpdateConfig_2 [GOOD] >> TPartitionTests::IncorrectRange [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] >> TPartitionTests::ReserveSubDomainOutOfSpace >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-clusteradmin >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message >> TTicketParserTest::LoginEmptyTicketBad [GOOD] >> TLocksTest::Range_BrokenLock0 >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients >> TLocksFatTest::PointSetBreak >> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD] >> TPartitionTests::NonConflictingCommitsBatch >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients [GOOD] >> TFlatTest::WriteMergeAndRead >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders >> TFlatTest::SplitInvalidPath >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> TPQTest::DirectReadBadSessionOrPipe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] Test command err: 2025-12-23T18:48:33.863976Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127216621979957:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:33.864031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f6/r3tmp/tmpheEkY8/pdisk_1.dat 
2025-12-23T18:48:34.287127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:34.287226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:34.305491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:34.332132Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:34.361372Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3872, node 1 2025-12-23T18:48:34.508080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:34.634365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:34.634391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:34.634397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:34.634458Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:34.874384Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21026 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515714460 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:35.165027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:35.173276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:48:35.231947Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 2025-12-23T18:48:35.247206Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:48:35.247260Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:48:35.251223Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****OtoA (3B8626CF) () has now retryable error message 'Security state is empty' 2025-12-23T18:48:35.251485Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:48:35.251508Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:48:35.251744Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****OtoA (3B8626CF) () has now retryable error message 'Security state is empty' 2025-12-23T18:48:35.251767Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-12-23T18:48:35.251782Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-12-23T18:48:35.251808Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket eyJh****OtoA (3B8626CF): Security state is empty 2025-12-23T18:48:37.879606Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****OtoA (3B8626CF) 2025-12-23T18:48:37.879896Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:48:37.879917Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:48:37.880167Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****OtoA (3B8626CF) () has now retryable error message 'Security state is empty' 2025-12-23T18:48:37.880183Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-12-23T18:48:38.257154Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:48:38.870437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127216621979957:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:38.870533Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:40.880773Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****OtoA (3B8626CF) 2025-12-23T18:48:40.881042Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:48:40.881067Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:48:40.881965Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****OtoA (3B8626CF) () 
has now valid token of user1 2025-12-23T18:48:40.882131Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2025-12-23T18:48:43.887076Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****OtoA (3B8626CF) 2025-12-23T18:48:43.887499Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****OtoA (3B8626CF) () has now valid token of user1 2025-12-23T18:48:46.049112Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127271390142649:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:46.049483Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f6/r3tmp/tmpOM5pac/pdisk_1.dat 2025-12-23T18:48:46.082983Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:46.142510Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:46.184913Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:46.185000Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:46.189683Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5952, node 2 2025-12-23T18:48:46.251273Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:46.251301Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:46.251309Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:46.251388Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:46.284426Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26655 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515726234 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:46.432857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:46.438044Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-12-23T18:48:46.438144Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ca4f50153d0] Connect to grpc://localhost:24951 2025-12-23T18:48:46.441059Z node 2 :GRPC_CLIENT DEB ... T_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-12-23T18:48:51.431331Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca4f50b08d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-12-23T18:48:51.433569Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ca4f50b08d0] Status 14 Service Unavailable 2025-12-23T18:48:51.433866Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-12-23T18:48:53.431893Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) 2025-12-23T18:48:53.431921Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-12-23T18:48:53.432057Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca4f50b08d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-12-23T18:48:53.433705Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ca4f50b08d0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-12-23T18:48:53.433891Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:48:54.430852Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587127284375866704:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:54.430954Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:49:02.384560Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587127340368529835:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:02.385180Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f6/r3tmp/tmprUyffT/pdisk_1.dat 2025-12-23T18:49:02.404001Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:49:02.492408Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:02.494880Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587127340368529712:2081] 1766515742371978 != 1766515742371981 2025-12-23T18:49:02.514360Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:02.514422Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:02.517160Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65070, node 4 2025-12-23T18:49:02.555992Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:49:02.556022Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:49:02.556031Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:49:02.556133Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:49:02.585896Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29657 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515742565 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:49:02.757912Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:02.765448Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-12-23T18:49:02.765503Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ca4f50b4ed0] Connect to grpc://localhost:1393 2025-12-23T18:49:02.766265Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca4f50b4ed0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-12-23T18:49:02.773311Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7ca4f50b4ed0] Status 14 Service Unavailable 2025-12-23T18:49:02.773417Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-12-23T18:49:02.773458Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-12-23T18:49:02.773589Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca4f50b4ed0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-12-23T18:49:02.775176Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ca4f50b4ed0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-12-23T18:49:02.775344Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:49:03.388835Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:06.008983Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587127359806049265:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:06.017796Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f6/r3tmp/tmpWs7EMD/pdisk_1.dat 2025-12-23T18:49:06.042079Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:49:06.107919Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:06.114835Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7587127355511081860:2081] 1766515746003385 != 1766515746003388 2025-12-23T18:49:06.148742Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:06.148842Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:06.155963Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5869, node 5 2025-12-23T18:49:06.187322Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:49:06.187345Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:49:06.187363Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:49:06.187452Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:49:06.249720Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515746198 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:49:06.421036Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:49:06.425359Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:49:06.428101Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:979: Ticket AKIA****MPLE (B3EDC139): Access key signature is not supported |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test >> TObjectStorageListingTest::MaxKeysAndSharding >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] >> TPartitionTests::DifferentWriteTxBatchingOptions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::LoginEmptyTicketBad [GOOD] Test command err: 2025-12-23T18:48:33.861882Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127217184827115:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:33.861935Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f7/r3tmp/tmpMfLdQ7/pdisk_1.dat 2025-12-23T18:48:34.281627Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:34.309964Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127217184827064:2081] 1766515713857247 != 1766515713857250 2025-12-23T18:48:34.329126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:34.329255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:34.335141Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:34.367509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2113, node 1 2025-12-23T18:48:34.533124Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:34.632425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:34.632497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:34.632504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:34.632597Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:34.874437Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515714418 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:35.135914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:35.147170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:35.366948Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:48:35.379942Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:48:35.379979Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:48:35.382188Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****i8vQ (7672B06A) () has now valid token of user1 2025-12-23T18:48:35.382229Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2025-12-23T18:48:37.673401Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127235509534601:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:37.678881Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f7/r3tmp/tmpX0xFUV/pdisk_1.dat 2025-12-23T18:48:37.696658Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:37.801028Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:37.813291Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127235509534576:2081] 1766515717671990 != 1766515717671993 2025-12-23T18:48:37.825317Z node 2 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:37.825396Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:37.827730Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25734, node 2 2025-12-23T18:48:37.931481Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:37.931506Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:37.931511Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:37.931585Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:37.948256Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515717876 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:38.168182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:48:38.346952Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:48:38.355253Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:48:38.355296Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:48:38.356036Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****-T8A (D8CEEE73) () has now valid token of user1 2025-12-23T18:48:38.356051Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2025-12-23T18:48:41.060941Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127252039403325:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.061001Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f7/r3tmp/tmpEwfW97/pdisk_1.dat 2025-12-23T18:48:41.155914Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:41.158676Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:41.160458Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587127252039403294:2081] 1766515721059383 != 1766515721059386 2025-12-23T18:48:41.196362Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:41.196453Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:41.200270Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3674, node 3 2025-12-23T18:48:41.243823Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:41.243850Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:41.243857Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:41.243951Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:41.310238Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tabl ... 
r /Root/Db2 keys 1 2025-12-23T18:48:43.663355Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:1557: Handle deferred tokens for database: /Root/Db2 2025-12-23T18:48:43.663564Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db2, token db /Root/Db2, DomainLoginOnly 0 2025-12-23T18:48:43.663580Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db2, /Root 2025-12-23T18:48:43.664269Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****KGdg (1DA8398A) () has now valid token of user1 2025-12-23T18:48:43.664285Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root/Db2, A4 success 2025-12-23T18:48:44.932819Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587127264676172981:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:44.932868Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f7/r3tmp/tmpQ636mi/pdisk_1.dat 2025-12-23T18:48:44.959268Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:45.098476Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:45.098579Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:45.118891Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587127264676172954:2081] 1766515724931823 != 1766515724931826 2025-12-23T18:48:45.121853Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:45.142415Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:45.142968Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 30994, node 4 2025-12-23T18:48:45.211609Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:45.211638Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:45.211652Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:45.211732Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10674 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515725198 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:45.444122Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:45.453957Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:48:45.630100Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:48:45.634012Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:45.635049Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-12-23T18:48:45.635092Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2025-12-23T18:48:45.635894Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****a14A (8071C1D8) () has now valid token of user1 2025-12-23T18:48:45.635907Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2025-12-23T18:48:45.636443Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:48:45.937292Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:49.933179Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587127264676172981:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:49.933296Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:48:49.938781Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****a14A (8071C1D8) 2025-12-23T18:48:49.939288Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****a14A (8071C1D8) () has now valid token of user1 2025-12-23T18:48:54.942912Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****a14A 
(8071C1D8) 2025-12-23T18:48:54.943331Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****a14A (8071C1D8) () has now valid token of user1 2025-12-23T18:48:55.637099Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:48:58.947406Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****a14A (8071C1D8) 2025-12-23T18:48:58.947736Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****a14A (8071C1D8) () has now valid token of user1 2025-12-23T18:49:00.020561Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:49:00.020594Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:03.949684Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****a14A (8071C1D8) 2025-12-23T18:49:03.950109Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****a14A (8071C1D8) () has now valid token of user1 2025-12-23T18:49:06.339387Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587127358852896184:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:06.339460Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032f7/r3tmp/tmpYFT0SO/pdisk_1.dat 2025-12-23T18:49:06.372735Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:49:06.446497Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:06.480701Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:06.480798Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:06.494972Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2239, node 5 2025-12-23T18:49:06.540487Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:49:06.548899Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:49:06.548928Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:49:06.548936Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:49:06.549031Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13709 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515746541 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:49:06.824586Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:06.890348Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2025-12-23T18:49:06.907937Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:987: Ticket **** (00000000): Ticket is empty |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message [GOOD] Test command err: 2025-12-23T18:48:48.040141Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:48.112849Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:48.112923Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:48.112997Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:48.113060Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:48:48.131080Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-12-23T18:48:48.131324Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:48.131724Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:182:2195] 2025-12-23T18:48:48.132666Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request Got KV request Got KV request Got KV request 2025-12-23T18:48:48.132887Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-12-23T18:48:48.133091Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: 
[Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-12-23T18:48:48.133193Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000000 to m0000000001 Got KV request 2025-12-23T18:48:48.133301Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-12-23T18:48:48.133361Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000000 to d0000000001 Got KV request 2025-12-23T18:48:48.133562Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:523: key[0]: d0000000000_00000000000000000000_00000_0000002000_00000 2025-12-23T18:48:48.133628Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:531: add key d0000000000_00000000000000000000_00000_0000002000_00000 2025-12-23T18:48:48.133742Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:629: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] check key d0000000000_00000000000000000000_00000_0000002000_00000 2025-12-23T18:48:48.133834Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:659: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 2000 size 684 so 0 eo 2000 d0000000000_00000000000000000000_00000_0000002000_00000 2025-12-23T18:48:48.134003Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-12-23T18:48:48.134048Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-12-23T18:48:48.134100Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-12-23T18:48:48.000000Z 2025-12-23T18:48:48.134147Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMessageDeduplicatorStep 2025-12-23T18:48:48.134233Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000000|0000000000000000 to e0000000001 Got KV request 2025-12-23T18:48:48.134381Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TDeleteKeysStep 2025-12-23T18:48:48.134416Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:48.134454Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-12-23T18:48:48.134506Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:182:2195] 2025-12-23T18:48:48.134556Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 2000 Head Offset 2000 PartNo 0 PackedSize 0 count 0 nextOffset 2000 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000002000_00000 size 684 2025-12-23T18:48:48.134632Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:48.134684Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:48:48.134717Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:48.134777Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.134808Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:48.134877Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.134926Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:48.135009Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:48:48.135204Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:48.156090Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:48.187131Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:48.187213Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.187258Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:48.187304Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.187351Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:48.197648Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:48.218616Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:48.218675Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.218705Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:48.218776Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.218822Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:48.218905Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:48.229428Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:48.250282Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 
2025-12-23T18:48:48.250390Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.250425Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:48.250456Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.250491Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:48.271024Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:48.281572Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:48.281636Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.281657Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:48.281685Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.281714Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:48.302270Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:48.302334Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.302362Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:48.302393Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.302419Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:48.312675Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:48.333564Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:48.333631Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:48.333675Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:48.333732Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, Pen ... 
dingWrites: 0 2025-12-23T18:49:10.198041Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:10.198091Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait immediate tx complete 2 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 2025-12-23T18:49:10.198313Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2025-12-23T18:49:10.198375Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:10.198452Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:10.198488Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:10.198534Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2025-12-23T18:49:10.198636Z node 5 :PERSQUEUE DEBUG: partition.cpp:3831: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 drop done 2025-12-23T18:49:10.198680Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 3 2025-12-23T18:49:10.198719Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:10.198774Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:10.198819Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 2025-12-23T18:49:10.199367Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:10.210528Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:10.210712Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2025-12-23T18:49:10.210917Z node 5 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:49:10.211009Z node 5 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'Root/PQ/rt3.dc1--account--topic' partition 0 error: cannot finish read request. 
Consumer client-1 is gone from partition 2025-12-23T18:49:10.211149Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:10.211198Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:10.211249Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2025-12-23T18:49:10.211316Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:10.211353Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:10.211402Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2025-12-23T18:49:10.211454Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-12-23T18:49:10.211506Z node 5 :PERSQUEUE DEBUG: partition.cpp:3941: [72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-12-23T18:49:10.211547Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:10.211582Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:10.211623Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:10.211809Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got KV request Got KV request 2025-12-23T18:49:10.212844Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:49:10.212902Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:976: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 send read request for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 Got KV request Got KV request Got KV request Got batch complete: 1 Got KV request Got KV request Got KV request Got KV request 2025-12-23T18:49:10.213125Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 4 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset 5 partno 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 5 2025-12-23T18:49:10.213348Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 4 added 1 blobs, size 0 count 45 last offset 0, current partition end offset: 50 2025-12-23T18:49:10.213386Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 4. Send blob request. 
Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 4 2025-12-23T18:49:10.213537Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:10.213632Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:10.213676Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:10.213707Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:10.213732Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:10.213783Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:10.213812Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:10.213845Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 2025-12-23T18:49:10.539287Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:10.587434Z node 6 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:10.587491Z node 6 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:10.587533Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:10.587582Z node 6 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:49:10.602682Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{1, {2, 3}, 4}][StateInit] bootstrapping {1, {2, 3}, 4} [6:181:2194] 2025-12-23T18:49:10.603611Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{1, {2, 3}, 4}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {1, {2, 3}, 4} generation 0 [6:181:2194] 2025-12-23T18:49:10.624894Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:10.666175Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:10.687167Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:10.697725Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:10.739587Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:10.781147Z node 6 
:TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:10.801978Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:10.938153Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:10.959206Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:11.177882Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:11.220840Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:11.429668Z node 6 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][{1, {2, 3}, 4}][StateIdle] Got error: The transaction is completed Got cmd write: CmdDeleteRange { Range { From: "M0000000004" IncludeFrom: true To: "M0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "D0000000004" IncludeFrom: true To: "D0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "X0000000004" IncludeFrom: true To: "X0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "J0000000004" IncludeFrom: true To: "J0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "K0000000004" IncludeFrom: true To: "K0000000005" IncludeTo: false } } |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |96.0%| [TA] $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {RESULT} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPartitionTests::UserActCount [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace >> KqpLimits::CancelAfterRwTx-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] Test command err: 2025-12-23T18:48:41.490812Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.490856Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.490893Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.491556Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.505370Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.505610Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.505921Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-12-23T18:48:41.506508Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.506628Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:48:41.506706Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.506764Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-12-23T18:48:41.507570Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.507609Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.507682Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.508281Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.508881Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.508994Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.509159Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.509518Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.509656Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:48:41.509761Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.509803Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-12-23T18:48:41.510769Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.510801Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.510830Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.511206Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.511918Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.512058Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.512250Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.512966Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.513159Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:48:41.513263Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.513300Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-12-23T18:48:41.514155Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.514176Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.514223Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.514585Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.515260Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.515420Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.515673Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.519256Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.519863Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:48:41.519971Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.520018Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-12-23T18:48:41.521006Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.521030Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.521053Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.521411Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.522091Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.522268Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.522463Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.522891Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.523019Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:48:41.523186Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.523255Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-12-23T18:48:41.523918Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.523945Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.523982Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.524288Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-12-23T18:48:41.524940Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.525094Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.525300Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.525694Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.525836Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:48:41.525946Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.526008Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-12-23T18:48:41.526927Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.526976Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.526996Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.527321Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.528004Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.528114Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.528299Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.529019Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.529252Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:48:41.529354Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:48:41.529407Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-12-23T18:48:41.530271Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.530293Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.530324Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.530636Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:48:41.531244Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:48:41.531381Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.531596Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:48:41.533056Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.533452Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:48:41.533544Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-12-23T18:48:41.533617Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-12-23T18:48:41.539829Z :ReadSession INFO: Random seed for debugging is 1766515721539797 2025-12-23T18:48:41.803650Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127251612400699:2089];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.805172Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:41.886538Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00583c/r3tmp/tmph68t9z/pdisk_1.dat 2025-12-23T18:48:41.933120Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:48:42.150106Z node 1 ... 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.156981Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:09.257300Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:09.257332Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.257344Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:09.257363Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.257375Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:09.357626Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:09.357660Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.357671Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:09.357689Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.357709Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:09.458289Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:09.458339Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.458352Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:09.458379Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.458391Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:09.558318Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:09.558367Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.558380Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:09.558404Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.558416Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:09.659592Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:09.659622Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.659635Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:09.659650Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.659660Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:09.760664Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:09.760694Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.760714Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:09.760730Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.760739Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:09.860874Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:09.860925Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.860936Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:09.860963Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.860973Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:09.963014Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:09.963047Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.963057Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:09.963071Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:09.963080Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:09.991714Z node 1 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_1_1_5079616241312249343_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset 3 2025-12-23T18:49:10.063194Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:10.063224Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:10.063237Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:10.063263Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:10.063274Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:10.078806Z :INFO: [/Root] [/Root] [44f2483c-536b6153-bb94fb7-2438c38e] Closing read session. Close timeout: 0.000000s 2025-12-23T18:49:10.078895Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-12-23T18:49:10.078954Z :INFO: [/Root] [/Root] [44f2483c-536b6153-bb94fb7-2438c38e] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16474 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:49:10.079070Z :NOTICE: [/Root] [/Root] [44f2483c-536b6153-bb94fb7-2438c38e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T18:49:10.079116Z :DEBUG: [/Root] [/Root] [44f2483c-536b6153-bb94fb7-2438c38e] [dc1] Abort session to cluster 2025-12-23T18:49:10.083000Z :NOTICE: [/Root] [/Root] [44f2483c-536b6153-bb94fb7-2438c38e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:49:10.084151Z node 1 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_1_1_5079616241312249343_v1 grpc read done: success# 0, data# { } 2025-12-23T18:49:10.084194Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_1_1_5079616241312249343_v1 grpc read failed 2025-12-23T18:49:10.084242Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_1_1_5079616241312249343_v1 grpc closed 2025-12-23T18:49:10.084294Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_1_1_5079616241312249343_v1 is DEAD 2025-12-23T18:49:10.086846Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [1:7587127303152010664:2467] disconnected. 2025-12-23T18:49:10.086874Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [1:7587127303152010664:2467] disconnected; active server actors: 1 2025-12-23T18:49:10.087136Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_5079616241312249343_v1 2025-12-23T18:49:10.087197Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [1:7587127303152010667:2470] destroyed 2025-12-23T18:49:10.087257Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_1_1_5079616241312249343_v1 2025-12-23T18:49:10.093902Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [1:7587127303152010664:2467] client user disconnected session shared/user_1_1_5079616241312249343_v1 2025-12-23T18:49:10.166860Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:10.166898Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:10.166910Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:10.166928Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:10.166947Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:10.267453Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:10.267491Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:10.267504Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:10.267531Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:10.267543Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |96.0%| [TM] {BAZEL_UPLOAD} 
ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> TPQTest::DirectReadBadSessionOrPipe [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::DirectReadBadSessionOrPipe [GOOD] Test command err: 2025-12-23T18:49:08.962187Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:09.030689Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:49:09.034842Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:49:09.035125Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:09.035190Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:09.035228Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:49:09.035275Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:49:09.035346Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:09.035408Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:49:09.052780Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2025-12-23T18:49:09.052955Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:49:09.071643Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:49:09.074628Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } 
AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:49:09.074785Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:09.076414Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:49:09.076563Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:49:09.076659Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:49:09.077196Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:49:09.077640Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2025-12-23T18:49:09.078600Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:49:09.078645Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2025-12-23T18:49:09.078706Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:190:2142] 2025-12-23T18:49:09.078791Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:49:09.078884Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:49:09.079411Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:49:09.079456Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:09.079493Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:09.079555Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:09.079589Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:09.079627Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:09.079717Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-12-23T18:49:09.079754Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:49:09.079794Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:09.079853Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:09.079913Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:09.080180Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:49:09.080244Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:49:09.080402Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:49:09.080614Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:192:2142] 2025-12-23T18:49:09.081353Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:49:09.081391Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:1:Initializer] Initializing completed. 2025-12-23T18:49:09.081422Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:192:2142] 2025-12-23T18:49:09.081457Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:49:09.081506Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:49:09.081871Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2025-12-23T18:49:09.081904Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:49:09.081947Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:09.081983Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:09.082010Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:09.082037Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:09.082079Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-12-23T18:49:09.082117Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-12-23T18:49:09.082151Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:09.082175Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2025-12-23T18:49:09.082205Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2025-12-23T18:49:09.082349Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:49:09.082419Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:49:09.082611Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:49:09.082840Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:49:09.083012Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T18:49:09.083124Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2025-12-23T18:49:09.086816Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdl ... 
w have 1 active actors on pipe 2025-12-23T18:49:12.992279Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:49:12.992306Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-12-23T18:49:12.992337Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with empty session id 2025-12-23T18:49:12.992391Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1252: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 6, Error Read prepare request with empty session id 2025-12-23T18:49:12.992412Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with empty session id Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "bad-session" Offset: 0 Count: 1 Bytes: 99999 DirectReadId: 1 PartitionSessionId: 1 } PipeClient { RawX1: 216 RawX2: 34359740587 } Cookie: 123 } via pipe: [8:181:2194] 2025-12-23T18:49:12.992687Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [8:227:2227], now have 1 active actors on pipe 2025-12-23T18:49:12.992775Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:49:12.992799Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-12-23T18:49:12.992829Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id bad-session 2025-12-23T18:49:12.992889Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1252: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 7, Error Read prepare request with unknown(old?) session id bad-session 2025-12-23T18:49:12.992921Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) 
session id bad-session 2025-12-23T18:49:12.993197Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [8:230:2229], now have 1 active actors on pipe 2025-12-23T18:49:12.993280Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:49:12.993316Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-12-23T18:49:12.993369Z node 8 :PERSQUEUE INFO: pq_impl.cpp:1701: [PQ: 72057594037927937] Got cmd delete session: ClientId: "user2" SessionId: "session2" 2025-12-23T18:49:12.993405Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72057594037927937] Destroy direct read session session2 2025-12-23T18:49:12.993471Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:12.993494Z node 8 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:12.993520Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:12.993541Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:12.993564Z node 8 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:12.993610Z node 8 :PERSQUEUE DEBUG: partition.cpp:3880: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user2 session is set to 0 (startOffset 0) session session2 2025-12-23T18:49:12.993630Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:12.993649Z node 8 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:12.993673Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:12.993809Z node 8 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:49:12.995719Z node 8 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:12.995808Z node 8 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:12.995849Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:12.995880Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:12.995908Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:12.995937Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:12.995963Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:12.996001Z node 8 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:49:12.996094Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "session2" Offset: 0 Count: 1 Bytes: 99999 DirectReadId: 1 PartitionSessionId: 1 } PipeClient { RawX1: 216 RawX2: 34359740587 } Cookie: 123 } via pipe: [8:181:2194] 2025-12-23T18:49:12.996460Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [8:236:2234], now have 1 active actors on pipe 2025-12-23T18:49:12.996577Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:49:12.996611Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-12-23T18:49:12.996651Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id session2 2025-12-23T18:49:12.996723Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1252: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 9, Error Read prepare request with unknown(old?) session id session2 2025-12-23T18:49:12.996752Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) 
session id session2 Publish read Send publish read request: Partition: 0 PipeClient { RawX1: 216 RawX2: 34359740587 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2025-12-23T18:49:12.997131Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [8:239:2236], now have 1 active actors on pipe 2025-12-23T18:49:12.997186Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:49:12.997221Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-12-23T18:49:12.997309Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 216 RawX2: 34359740587 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2025-12-23T18:49:12.997370Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1252: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 10, Error Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 216 RawX2: 34359740587 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2025-12-23T18:49:12.997404Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 216 RawX2: 34359740587 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } Got direct read response: Status: 128 ErrorReason: "Read prepare request with unknown(old?) session id session2Partition: 0\nPipeClient {\n RawX1: 216\n RawX2: 34359740587\n}\nCookie: 123\nCmdPublishRead {\n SessionKey {\n SessionId: \"session2\"\n PartitionSessionId: 1\n }\n DirectReadId: 1\n}\n" ErrorCode: BAD_REQUEST PartitionResponse { Cookie: 123 } Forget read Send forget read request: Partition: 0 PipeClient { RawX1: 216 RawX2: 34359740587 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2025-12-23T18:49:12.997759Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [8:241:2238], now have 1 active actors on pipe 2025-12-23T18:49:12.997867Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:49:12.997900Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-12-23T18:49:12.997989Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 216 RawX2: 34359740587 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2025-12-23T18:49:12.998056Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1252: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 11, Error Read prepare request with unknown(old?) 
session id session2Partition: 0 PipeClient { RawX1: 216 RawX2: 34359740587 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2025-12-23T18:49:12.998087Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 216 RawX2: 34359740587 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } Got direct read response: Status: 128 ErrorReason: "Read prepare request with unknown(old?) session id session2Partition: 0\nPipeClient {\n RawX1: 216\n RawX2: 34359740587\n}\nCookie: 123\nCmdForgetRead {\n SessionKey {\n SessionId: \"session2\"\n PartitionSessionId: 1\n }\n DirectReadId: 1\n}\n" ErrorCode: BAD_REQUEST PartitionResponse { Cookie: 123 } |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::NonConflictingCommitsBatch [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-clusteradmin >> TFlatTest::WriteMergeAndRead [GOOD] >> TFlatTest::WriteSplitAndRead >> TPartitionTests::GetUsedStorage >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] >> TFlatTest::LargeDatashardReplyDistributed >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> PQCountersLabeled::PartitionKeyCompaction [GOOD] >> PQCountersLabeled::PartitionBlobCompactionCounters >> TFlatTest::SplitInvalidPath [GOOD] >> TFlatTest::SplitThenMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRwTx-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 19065, MsgBus: 12751 2025-12-23T18:44:54.615044Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126278408049986:2145];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:54.615648Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033f3/r3tmp/tmpc2YVII/pdisk_1.dat 2025-12-23T18:44:54.862851Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:54.884129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:54.884254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:54.887715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:44:54.996042Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:54.996348Z node 1 :CONFIGS_DISPATCHER 
ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126278408049870:2081] 1766515494604357 != 1766515494604360 TServer::EnableGrpc on GrpcPort 19065, node 1 2025-12-23T18:44:55.043728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:44:55.059046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:55.059105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:55.059147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:55.059236Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12751 TClient is connected to server localhost:12751 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515495059 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:55.543148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:55.563990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:55.681447Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:57.720718Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126291292953004:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.720717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126291292952996:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.720840Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.721189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126291292953011:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.721281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:44:57.725610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:44:57.740302Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587126291292953010:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:44:57.821918Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587126291292953063:2762] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:44:58.200593Z node 1 :KQP_COMPUTE WARN: log.cpp:841: fline=kqp_compute_actor_factory.cpp:34;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=2;memory=1048576; 2025-12-23T18:44:58.200630Z node 1 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976710661, task: 2. [Mem] memory 1048576 NOT granted 2025-12-23T18:44:58.214298Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [1:7587126295587920403:2362], TxId: 281474976710661, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68epqnb5366amhr4n2p2zr. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YzNmNjA4OTQtMWVjNDZiYjEtZGE5ZGNjZDgtZDM4OTYzNGY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-ou4bsyjxme, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-12-23T18:44:58.198061Z }, code: 2029 }. 2025-12-23T18:44:58.214903Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [1:7587126295587920401:2361], TxId: 281474976710661, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68epqnb5366amhr4n2p2zr. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YzNmNjA4OTQtMWVjNDZiYjEtZGE5ZGNjZDgtZDM4OTYzNGY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7587126295587920390:2342], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-12-23T18:44:58.220096Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=YzNmNjA4OTQtMWVjNDZiYjEtZGE5ZGNjZDgtZDM4OTYzNGY=, ActorId: [1:7587126291292952965:2342], ActorState: ExecuteState, TraceId: 01kd68epqnb5366amhr4n2p2zr, Create QueryResponse for error on request, msg: , status: OVERLOADED, issues: { message: "Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-ou4bsyjxme, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-12-23T18:44:58.198061Z }\n" issue_code: 2029 severity: 1 }
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-ou4bsyjxme, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-12-23T18:44:58.198061Z } , code: 2029 Trying to start YDB, gRPC: 1947, MsgBus: 5175 2025-12-23T18:44:58.899646Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587126294860435041:2061];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:58.899758Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033f3/r3tmp/tmpcZcpXA/pdisk_1.dat 2025-12-23T18:44:58.916167Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:58.988411Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:58.990059Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:58.990114Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:58.990406Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587126294860435021:2081] 1766515498898666 != 176 ... us: CANCELLED, reason: {
: Error: Terminate execution } 2025-12-23T18:48:35.463371Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [5:7587127225959244856:5290], TxId: 281474976716004, task: 3. Ctx: { TraceId : 01kd68naz3dq3kpb6m8fwgtm2t. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7587127225959244846:2527], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-12-23T18:48:35.463582Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [5:7587127225959244857:5291], TxId: 281474976716004, task: 4. Ctx: { CheckpointId : . TraceId : 01kd68naz3dq3kpb6m8fwgtm2t. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7587127225959244846:2527], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-12-23T18:48:35.463808Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [5:7587127225959244858:5292], TxId: 281474976716004, task: 5. Ctx: { CheckpointId : . TraceId : 01kd68naz3dq3kpb6m8fwgtm2t. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7587127225959244846:2527], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-12-23T18:48:35.464000Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [5:7587127225959244859:5293], TxId: 281474976716004, task: 6. Ctx: { CheckpointId : . TraceId : 01kd68naz3dq3kpb6m8fwgtm2t. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7587127225959244846:2527], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-12-23T18:48:35.464216Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [5:7587127225959244860:5294], TxId: 281474976716004, task: 7. Ctx: { CheckpointId : . TraceId : 01kd68naz3dq3kpb6m8fwgtm2t. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7587127225959244846:2527], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-12-23T18:48:35.464416Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [5:7587127225959244861:5295], TxId: 281474976716004, task: 8. Ctx: { CheckpointId : . TraceId : 01kd68naz3dq3kpb6m8fwgtm2t. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7587127225959244846:2527], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-12-23T18:48:35.464886Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [5:7587127225959244854:5288], TxId: 281474976716004, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68naz3dq3kpb6m8fwgtm2t. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7587127225959244846:2527], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-12-23T18:48:35.465271Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037890 Cancelled read: {[5:7587127225959244879:5288], 0} 2025-12-23T18:48:35.465384Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, ActorId: [5:7587126886656818945:2527], ActorState: ExecuteState, TraceId: 01kd68naz3dq3kpb6m8fwgtm2t, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 414ms" severity: 1 }{ message: "Cancelling after 417ms during execution" severity: 1 } 2025-12-23T18:48:35.896100Z node 5 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [5:7587127225959244895:2527] TxId: 281474976716006. Ctx: { TraceId: 01kd68nbcr2tkn7nr155rq0qg7, Database: /Root, SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, PoolId: default, IsStreamingQuery: 0}. CANCELLED: [ {
: Error: Request canceled after 415ms } {
: Error: Cancelling after 415ms during execution } ] 2025-12-23T18:48:35.896523Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, ActorId: [5:7587126886656818945:2527], ActorState: ExecuteState, TraceId: 01kd68nbcr2tkn7nr155rq0qg7, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 415ms" severity: 1 }{ message: "Cancelling after 415ms during execution" severity: 1 } 2025-12-23T18:48:36.744545Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, ActorId: [5:7587126886656818945:2527], ActorState: ExecuteState, TraceId: 01kd68nc77d13k104pxjey82e4, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 417ms" severity: 1 }{ message: "Cancelling after 416ms during compilation" severity: 1 } 2025-12-23T18:48:40.934017Z node 5 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [5:7587127247434082029:2527] TxId: 281474976716030. Ctx: { TraceId: 01kd68ng9s6v55y2h2zg6gx36h, Database: /Root, SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, PoolId: default, IsStreamingQuery: 0}. CANCELLED: [ {
: Error: Request canceled after 428ms } {
: Error: Cancelling after 427ms during execution } ] 2025-12-23T18:48:40.934175Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [5:7587127247434082044:5482], TxId: 281474976716030, task: 9. Ctx: { TraceId : 01kd68ng9s6v55y2h2zg6gx36h. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7587127247434082029:2527], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-12-23T18:48:40.971518Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [5:7587127247434082043:5481], TxId: 281474976716030, task: 8. Ctx: { CheckpointId : . TraceId : 01kd68ng9s6v55y2h2zg6gx36h. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7587127247434082029:2527], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-12-23T18:48:40.972652Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, ActorId: [5:7587126886656818945:2527], ActorState: ExecuteState, TraceId: 01kd68ng9s6v55y2h2zg6gx36h, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 428ms" severity: 1 }{ message: "Cancelling after 427ms during execution" severity: 1 } 2025-12-23T18:48:44.047581Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, ActorId: [5:7587126886656818945:2527], ActorState: ExecuteState, TraceId: 01kd68nkaw23vhq6qtkt76qvpz, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 435ms" severity: 1 }{ message: "Cancelling after 433ms during compilation" severity: 1 } 2025-12-23T18:48:45.526143Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, ActorId: [5:7587126886656818945:2527], ActorState: ExecuteState, TraceId: 01kd68nmry42h91xv5hb4axrz4, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 439ms" severity: 1 }{ message: "Cancelling after 439ms during compilation" severity: 1 } 2025-12-23T18:48:46.463437Z node 5 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [5:7587127273203886478:2527] TxId: 281474976716054. Ctx: { TraceId: 01kd68nnp49k1xcr31vfewtdka, Database: /Root, SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, PoolId: default, IsStreamingQuery: 0}. CANCELLED: [ {
: Error: Request canceled after 442ms } {
: Error: Cancelling after 442ms during execution } ] 2025-12-23T18:48:46.463892Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, ActorId: [5:7587126886656818945:2527], ActorState: ExecuteState, TraceId: 01kd68nnp49k1xcr31vfewtdka, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 442ms" severity: 1 }{ message: "Cancelling after 442ms during execution" severity: 1 } 2025-12-23T18:48:47.779312Z node 5 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [5:7587127277498853937:2527] TxId: 281474976716061. Ctx: { TraceId: 01kd68npxn2rgzzqymskj7zpm7, Database: /Root, SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, PoolId: default, IsStreamingQuery: 0}. CANCELLED: [ {
: Error: Request canceled after 445ms } {
: Error: Cancelling after 493ms during execution } ] 2025-12-23T18:48:47.779688Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, ActorId: [5:7587126886656818945:2527], ActorState: ExecuteState, TraceId: 01kd68npxn2rgzzqymskj7zpm7, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 445ms" severity: 1 }{ message: "Cancelling after 493ms during execution" severity: 1 } 2025-12-23T18:48:50.419346Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, ActorId: [5:7587126886656818945:2527], ActorState: ExecuteState, TraceId: 01kd68nshf4akh0rcphzmzngzx, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 451ms" severity: 1 }{ message: "Cancelling after 450ms during compilation" severity: 1 } 2025-12-23T18:48:50.941032Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=NzQ4MThlZDYtNzdmMTlkNjgtNWNhMGEyMGUtZDFjNjU4ODg=, ActorId: [5:7587126886656818945:2527], ActorState: ExecuteState, TraceId: 01kd68nt1qa77pmh0jjrdye9d9, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 453ms" severity: 1 }{ message: "Cancelling after 452ms during compilation" severity: 1 } |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TPartitionTests::GetUsedStorage [GOOD] >> TPartitionTests::ConflictingCommitFails [GOOD] >> TFlatTest::SelectRangeReverseItemsLimit >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] >> TPartitionTests::ConflictingCommitProccesAfterRollback >> TPQTest::TestPartitionTotalQuota [GOOD] >> TPQTest::TestPartitionPerConsumerQuota >> TQuotaTracker::TestBigMessages [GOOD] >> TFlatTest::SplitEmptyAndWrite >> TLocksFatTest::PointSetNotBreak [GOOD] >> TLocksFatTest::PointSetRemove ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TQuotaTracker::TestBigMessages [GOOD] Test command err: 2025-12-23T18:48:44.856629Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:44.924320Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:44.924375Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:44.924423Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:44.924477Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:48:44.942808Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:183:2196] 2025-12-23T18:48:44.943640Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:183:2196] Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: 
"m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:48:44.973531Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:45.007726Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:48:45.018949Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:45.040000Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:45.040515Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:48:45.051737Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:45.075242Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:48:45.097270Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:45.109271Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: 
"e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:48:45.135307Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:48:45.146888Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:45.171616Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:48:45.187921Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:45.210473Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:48:45.236924Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: 
"\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:48:45.259084Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:48:45.323284Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:48:45.346963Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:45.371259Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2025-12-23T18:48:45.382556Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:45.405550Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Ra ... ccount--topic:1:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:49:14.596748Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Initializing completed. 
2025-12-23T18:49:14.596783Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [4:183:2196] 2025-12-23T18:49:14.596825Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:49:14.596870Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:49:14.597131Z node 4 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2025-12-23T18:49:14.597164Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:49:14.597198Z node 4 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ChangeConfig]) 2025-12-23T18:49:14.597239Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:14.597268Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:14.597306Z node 4 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ChangeConfig]) 2025-12-23T18:49:14.597355Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:14.597383Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2025-12-23T18:49:14.597415Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2025-12-23T18:49:14.597608Z node 4 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:49:14.597660Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:49:14.597758Z node 4 :PERSQUEUE INFO: partition.cpp:4338: [72057594037927937][Partition][1][StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2025-12-23T18:49:14.597833Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|6599fc07-cf57ec6f-49f1a95e-f8087ef9_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-12-23T18:49:14.597879Z node 4 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][1][StateIdle] Writing. 
Can't process user action and tx events 2025-12-23T18:49:14.598026Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction Send disk status response with cookie: 0 2025-12-23T18:49:14.598138Z node 4 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:14.598233Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:49:14.598298Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:14.598337Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:49:14.598366Z node 4 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:49:14.598400Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:14.598426Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:14.598459Z node 4 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:49:14.598499Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:14.598526Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2025-12-23T18:49:14.598558Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2025-12-23T18:49:14.598606Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2025-12-23T18:49:14.598648Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2025-12-23T18:49:14.603053Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][1][StateIdle] Received TPartition::TEvWrite 2025-12-23T18:49:14.603138Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1785: [72057594037927937][Partition][1][StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 24. Cookie: 1 2025-12-23T18:49:14.603212Z node 4 :PERSQUEUE DEBUG: partition.cpp:4259: [72057594037927937][Partition][1][StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2025-12-23T18:49:14.603259Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:49:14.603294Z node 4 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2025-12-23T18:49:14.603335Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:14.603363Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:14.603399Z node 4 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2025-12-23T18:49:14.603498Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2025-12-23T18:49:14.604405Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 120 count 1 nextOffset 101 batches 1 2025-12-23T18:49:14.604471Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:14.604504Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2025-12-23T18:49:14.604541Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2025-12-23T18:49:14.604947Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1704: [72057594037927937][Partition][1][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000? size 106 WTime 128 2025-12-23T18:49:14.605241Z node 4 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:14.629687Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:14.664366Z node 4 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][1][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:14.664437Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][1][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2025-12-23T18:49:14.664500Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 24 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:14.664551Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyWrite. 
Partition: 1 2025-12-23T18:49:14.664617Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][1][StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk 2025-12-23T18:49:14.664813Z node 4 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 100 ReadingTimestamp 0 rrg 0 2025-12-23T18:49:14.664849Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:49:14.664880Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:14.664909Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:14.664943Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:14.664971Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2025-12-23T18:49:14.665016Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2025-12-23T18:49:14.665143Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][1][StateIdle] Received TPartition::TEvWrite 2025-12-23T18:49:14.675402Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:14.696220Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:14.706774Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:14.754953Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:14.790979Z node 4 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'Root/PQ/rt3.dc1--account--topic' partition 1 error: database size exceeded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2025-12-23T18:49:07.513806Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:07.587259Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:07.587339Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:07.587412Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:07.587509Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:49:07.607758Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] 
bootstrapping 0 [1:183:2196] 2025-12-23T18:49:07.609657Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-12-23T18:49:07.000000Z 2025-12-23T18:49:07.609890Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:183:2196] 2025-12-23T18:49:07.630969Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:07.672225Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:07.693052Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:07.703621Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:07.744866Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:07.786284Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:07.817379Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\270\321\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:49:07.975068Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\270\321\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:49:07.999070Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" 
StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\270\321\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:49:08.042660Z node 1 :PERSQUEUE WARN: partition.cpp:3781: [72057594037927937][Partition][0][StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\270\321\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:49:08.534784Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:08.578081Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:08.578137Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:08.578180Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:08.578245Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:49:08.594222Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:184:2196] 2025-12-23T18:49:08.595977Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-12-23T18:49:08.000000Z 2025-12-23T18:49:08.596249Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:184:2196] 2025-12-23T18:49:08.606691Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:08.637587Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:08.658237Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:08.689152Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:08.720073Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:08.740825Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:08.792271Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:08.864242Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:09.321497Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:09.383451Z node 3 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:09.383493Z node 3 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:09.383525Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:09.383560Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:49:09.394376Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [3:183:2196] 2025-12-23T18:49:09.396047Z node 3 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-12-23T18:49:09.000000Z 2025-12-23T18:49:09.396260Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:183:2196] 2025-12-23T18:49:09.417340Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:09.458871Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:09.481032Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:09.491599Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:09.535393Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:09.578612Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:09.614361Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\210\341\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:49:09.766971Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:09.779443Z node 3 :PERSQUEUE WARN: partition.cpp:3014: [7205 ... 
foResponse 2025-12-23T18:49:13.762772Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:13.762861Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:49:13.762940Z node 5 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:13.762989Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:49:13.763052Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:49:13.763087Z node 5 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:13.763118Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2025-12-23T18:49:13.763152Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 6, PendingWrites: 0 2025-12-23T18:49:13.763192Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:13.763235Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 6, PendingWrites: 0 2025-12-23T18:49:13.763274Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Wait kv request Wait kv request 2025-12-23T18:49:13.763697Z node 5 :PERSQUEUE DEBUG: partition.cpp:1485: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxRollback Step 1, TxId 0 2025-12-23T18:49:13.763744Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:13.763796Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2025-12-23T18:49:13.763839Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:13.763887Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2025-12-23T18:49:13.763931Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:13.763974Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:13.774362Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:13.774449Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
2025-12-23T18:49:13.774538Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:13.774592Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:13.774649Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 6, PendingWrites: 0 2025-12-23T18:49:13.774704Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:13.774780Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2025-12-23T18:49:13.774846Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 0 2025-12-23T18:49:13.774891Z node 5 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:13.774963Z node 5 :PERSQUEUE DEBUG: partition.cpp:3880: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 5 (startOffset 0) session session-client-0 2025-12-23T18:49:13.775024Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2025-12-23T18:49:13.775064Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 3 2025-12-23T18:49:13.775105Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 3 2025-12-23T18:49:13.775180Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2025-12-23T18:49:13.775216Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 4 2025-12-23T18:49:13.775240Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 4 2025-12-23T18:49:13.775266Z node 5 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:49:13.775301Z node 5 :PERSQUEUE DEBUG: partition.cpp:3880: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 10 (startOffset 0) session session-client-0 2025-12-23T18:49:13.775328Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2025-12-23T18:49:13.797333Z node 5 :PERSQUEUE DEBUG: partition.cpp:3941: [72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2025-12-23T18:49:13.797655Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 6 2025-12-23T18:49:13.797834Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] 
Batch completed (6) 2025-12-23T18:49:13.798040Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:13.799090Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 6 Got KV request Got KV request Wait tx rollback for tx 0 2025-12-23T18:49:13.799731Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:13.820875Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:13.821148Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2025-12-23T18:49:13.821574Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:13.821752Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:13.821875Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:13.821970Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:13.822116Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:13.822251Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:13.822372Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait tx committed for tx 3 Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2025-12-23T18:49:14.276406Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:14.352359Z node 6 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:14.352420Z node 6 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:14.352504Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:14.352557Z node 6 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:49:14.372798Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] bootstrapping {2, {0, 10}, 100001} [6:181:2194] 2025-12-23T18:49:14.374733Z node 6 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-12-23T18:49:14.000000Z 2025-12-23T18:49:14.374842Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [6:181:2194] 2025-12-23T18:49:14.396203Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:14.438626Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:14.459889Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:14.470905Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:14.513313Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:14.555481Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:14.576179Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TFlatTest::SelectRangeReverse |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 26897, MsgBus: 4818 2025-12-23T18:47:55.635657Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127052785854955:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:55.635827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:47:55.703377Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040ef/r3tmp/tmpeE5a9B/pdisk_1.dat 2025-12-23T18:47:56.004415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:56.004564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:56.058324Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:56.063924Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 26897, node 1 2025-12-23T18:47:56.128556Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2025-12-23T18:47:56.134853Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127052785854921:2081] 1766515675586670 != 1766515675586673 2025-12-23T18:47:56.236292Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:56.238361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:56.238373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:56.238380Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:56.238493Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4818 2025-12-23T18:47:56.664652Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:4818 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515676128 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:56.829197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:56.839510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:58.985021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127065670757678:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.985166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.985936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127065670757706:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.985947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127065670757705:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.985996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:58.990553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:59.005530Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127065670757709:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:47:59.103510Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127069965725056:2535] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:59.435761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:47:59.594613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127069965725313:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:47:59.595053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127069965725313:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:47:59.595255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127069965725313:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:47:59.595373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127069965725313:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:47:59.595447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587127069965725224:2336];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:47:59.595484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127069965725313:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:47:59.595502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587127069965725224:2336];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:47:59.595594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127069965725313:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:47:59.595652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587127069965725224:2336];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:47:59.595710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7587127069965725313:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:47:59.595788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587127069965725224:2336];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:47:59.595824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127069965725313:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:47:59.595878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587127069965725224:2336];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:47:59.595958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7587127069965725313:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:47:59.595982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587127069965725224:2336];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:47:59.596084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7587127069965725224:2336];tablet_id=72075186224037895;process=TTxInitSche ... 
=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.060677Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.071556Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.071646Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.071700Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.081933Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.082017Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.082038Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.090393Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.090462Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.090483Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.101473Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.101552Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.101576Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038049;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.111911Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.111984Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.112007Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.120404Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.120479Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.120501Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.130768Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.130857Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.130879Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.144906Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.144989Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.145014Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.145681Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.145726Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.145746Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.156967Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.157044Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.157065Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.158626Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.158699Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.158720Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2025-12-23T18:49:08.217783Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd68nwgw73c8jg5ys4dekn89", SessionId: ydb://session/3?node_id=4&id=OGM0MzJmMzMtNzliYmY2MGYtZmZmYzA2YzItMmE3MWRjNGU=, Slow query, duration: 11.504903s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2025-12-23T18:49:11.593356Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2081: SelfId: [4:7587127379996944894:4631], TxId: 281474976715669, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68peggbre5yxqb0mwxc5t6. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmMzYmIxMDctZGZlNTJjZC1kZmQxNjYzOS1lZTBhNzllMg==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } 2025-12-23T18:49:11.593491Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [4:7587127379996944894:4631], TxId: 281474976715669, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68peggbre5yxqb0mwxc5t6. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NmMzYmIxMDctZGZlNTJjZC1kZmQxNjYzOS1lZTBhNzllMg==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 }. 2025-12-23T18:49:11.594113Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=4&id=NmMzYmIxMDctZGZlNTJjZC1kZmQxNjYzOS1lZTBhNzllMg==, ActorId: [4:7587127367112041796:3646], ActorState: ExecuteState, TraceId: 01kd68peggbre5yxqb0mwxc5t6, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TLocksTest::Range_IncorrectNullDot1 >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] >> TSchemeShardServerLess::StorageBilling [GOOD] >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> TFlatTest::ShardFreezeRejectBadProtobuf >> TFlatTest::WriteSplitAndRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] Test command err: 2025-12-23T18:48:57.881301Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.959402Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:57.959481Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:57.959550Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:57.959627Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:48:57.978905Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:183:2196] 2025-12-23T18:48:57.980832Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-12-23T18:48:57.000000Z 2025-12-23T18:48:57.981065Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:183:2196] 2025-12-23T18:48:58.002173Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.043573Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.064459Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.076012Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.121178Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.162443Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.197237Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\250\203\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:48:58.351805Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\250\203\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:48:58.374272Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\004\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\250\203\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:48:58.743315Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates 
for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.788520Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:58.788582Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:58.788663Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:58.788725Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:48:58.801682Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-12-23T18:48:58.801886Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:58.802128Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:183:2195] 2025-12-23T18:48:58.802815Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request Got KV request Got KV request 2025-12-23T18:48:58.802963Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-12-23T18:48:58.803092Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-12-23T18:48:58.803190Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000000 to m0000000001 Got KV request 2025-12-23T18:48:58.803294Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-12-23T18:48:58.803345Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000000 to d0000000001 Got KV request 2025-12-23T18:48:58.803489Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:523: key[0]: d0000000000_00000000000000000000_00000_0000000001_00000 2025-12-23T18:48:58.803541Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:531: add key d0000000000_00000000000000000000_00000_0000000001_00000 2025-12-23T18:48:58.803616Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:629: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] check key d0000000000_00000000000000000000_00000_0000000001_00000 2025-12-23T18:48:58.803708Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:659: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-12-23T18:48:58.803805Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-12-23T18:48:58.803863Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-12-23T18:48:58.803902Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-12-23T18:48:58.000000Z 2025-12-23T18:48:58.803935Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMessageDeduplicatorStep 2025-12-23T18:48:58.804010Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000000|0000000000000000 to e0000000001 Got KV request 2025-12-23T18:48:58.804140Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TDeleteKeysStep 2025-12-23T18:48:58.804173Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:58.804205Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-12-23T18:48:58.804239Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:183:2195] 2025-12-23T18:48:58.804281Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-12-23T18:48:58.804321Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:58.804355Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:48:58.804381Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:58.804410Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.804440Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:58.804470Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.804495Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:58.804571Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:48:58.804697Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:58.815054Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.846200Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.856748Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:58.856829Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:58.856882Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:58.856938Z node 2 :PERSQUEUE DEBUG: partition ... artition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2025-12-23T18:49:16.745508Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:16.745564Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:16.745607Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:16.745651Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:16.745712Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2025-12-23T18:49:16.745773Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 1 2025-12-23T18:49:16.745824Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 1 2025-12-23T18:49:16.745875Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:16.745915Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:16.745963Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:16.746195Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 1 Got KV request Got KV request Got KV request Got KV request Got KV request Wait tx committed for tx 1 2025-12-23T18:49:16.747361Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:16.747484Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:16.747536Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user 
action and tx events 2025-12-23T18:49:16.747581Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.747624Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:16.747665Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.747729Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:16.747785Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait for no tx committed 2025-12-23T18:49:16.768954Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:16.769031Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.769077Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:16.769131Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.769176Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:16.779562Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:16.810681Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:16.810783Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.810836Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:16.810887Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.810920Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:16.831776Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:16.831852Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.831887Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:16.831927Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.831961Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:16.852611Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:16.852689Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.852727Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:16.852768Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.852801Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:16.873766Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:16.873846Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.873886Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:16.873929Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.873968Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:16.894643Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:16.894734Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.894793Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:16.894847Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.894886Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:16.920112Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:16.920184Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.920218Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:16.920257Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.920290Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:16.942050Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:16.942123Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.942159Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:16.942200Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.942233Z node 6 
:PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:16.965536Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:16.965614Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.965651Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:16.965692Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.965727Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:16.989310Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:16.989394Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.989430Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:16.989473Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:16.989508Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:17.010766Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:17.010845Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:17.010884Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:17.010924Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:17.010959Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:47:47.034404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:47:47.034532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.034598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:47:47.034648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:47:47.034702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:47:47.034735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:47:47.034846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:47.034970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:47:47.035978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:47.037842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:47:47.134029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:47:47.134095Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:47.147299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:47:47.147539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:47:47.149563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:47:47.166685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:47:47.167241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:47:47.170496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:47.173006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:47.181967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.182140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:47:47.189176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:47.189256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:47.189424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:47:47.189472Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:47:47.189608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:47:47.189772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:47:47.355611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.357770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.357918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:47:47.358919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... eBoard Send, to populator: [1:1123:3037], at schemeshard: 72075186233409549, txId: 107, path id: 32 2025-12-23T18:48:55.041057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-12-23T18:48:55.041120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2025-12-23T18:48:55.041218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2025-12-23T18:48:55.041260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2025-12-23T18:48:55.041352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 129 -> 240 2025-12-23T18:48:55.042147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409549, cookie: 107 2025-12-23T18:48:55.042285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409549, cookie: 107 2025-12-23T18:48:55.042345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-12-23T18:48:55.042384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 12 2025-12-23T18:48:55.042428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 6 2025-12-23T18:48:55.044120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-12-23T18:48:55.044213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-12-23T18:48:55.044258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-12-23T18:48:55.044289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at 
schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], version: 18446744073709551615 2025-12-23T18:48:55.044320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 4 2025-12-23T18:48:55.044399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-12-23T18:48:55.047457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-12-23T18:48:55.047531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2025-12-23T18:48:55.047944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2025-12-23T18:48:55.048133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2025-12-23T18:48:55.048177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-12-23T18:48:55.048221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2025-12-23T18:48:55.048256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-12-23T18:48:55.048294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-12-23T18:48:55.048382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1309:3188] message: TxId: 107 2025-12-23T18:48:55.048474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-12-23T18:48:55.048521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2025-12-23T18:48:55.048559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 107:0 2025-12-23T18:48:55.048667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 2 2025-12-23T18:48:55.050212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-12-23T18:48:55.052328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-12-23T18:48:55.053618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-12-23T18:48:55.053672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:3501:5332] TestWaitNotification: OK eventTxId 107 2025-12-23T18:48:55.074199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at 
schemeshard: 72075186233409549, message: Source { RawX1: 1279 RawX2: 4294970462 } TabletId: 72075186233409552 State: 4 2025-12-23T18:48:55.074333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2025-12-23T18:48:55.076629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72075186233409549 2025-12-23T18:48:55.076769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2025-12-23T18:48:55.077387Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409552 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409552 2025-12-23T18:48:55.077629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2025-12-23T18:48:55.077969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 1 2025-12-23T18:48:55.081999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2025-12-23T18:48:55.082086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 32], at schemeshard: 72075186233409549 2025-12-23T18:48:55.082211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2025-12-23T18:48:55.085087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72075186233409549:4 2025-12-23T18:48:55.085184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2025-12-23T18:48:55.086539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2025-12-23T18:48:55.197186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7071: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-12-23T18:48:55.197302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-12-23T18:48:55.197380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-12-23T18:48:55.197540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7071: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-12-23T18:48:55.197593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 
2025-12-23T18:48:55.197630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-12-23T18:48:55.197747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7071: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-12-23T18:48:55.197803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-12-23T18:48:55.197837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-12-23T18:48:55.258650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:48:55.258803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:83: TTxServerlessStorageBilling: nothing to bill, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], next retry at: 2020-09-18T18:06:00.000000Z 2025-12-23T18:48:55.258893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:49:16.809388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:49:16.809487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:83: TTxServerlessStorageBilling: nothing to bill, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], next retry at: 2020-09-18T18:07:00.000000Z 2025-12-23T18:49:16.809584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TFlatTest::SplitThenMerge [GOOD] >> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys >> TFlatTest::CopyCopiedTableAndRead >> TFlatTest::SplitEmptyAndWrite [GOOD] >> TFlatTest::SplitBoundaryRead >> TLocksTest::CK_GoodLock >> TPQTest::TestAlreadyWritten [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication |96.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::SelectRangeReverse [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys |96.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] Test command err: 2025-12-23T18:49:10.962619Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127374972791540:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:10.962996Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057d8/r3tmp/tmpCKIfik/pdisk_1.dat 2025-12-23T18:49:11.203378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:11.203508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:11.240567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:11.248774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:11.258030Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:11.447906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:27164 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515751315 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:11.510475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:49:11.535435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:11.713487Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.009s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-12-23T18:49:11.727227Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.012s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-12-23T18:49:11.749636Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-12-23T18:49:11.755395Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-12-23T18:49:11.777640Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:49:11.778954Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:49:11.779008Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:49:11.781958Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:49:11.783627Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:49:11.783674Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:49:11.784191Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 8r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2025-12-23T18:49:11.785141Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T18:49:11.785163Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-12-23T18:49:11.787104Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:49:11.788498Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:49:11.788553Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:49:11.791357Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:49:11.791713Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=Done, 4 blobs 9r (max 9), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 
pk, lobs 4 +0, (4073 2983 5183)b }, ecr=1.000 2025-12-23T18:49:11.792727Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T18:49:11.792753Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-12-23T18:49:11.792892Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:49:11.792943Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515751637 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name:... (TRUNCATED) 2025-12-23T18:49:11.804891Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:49:11.807132Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2025-12-23T18:49:11.807282Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:49:11.808735Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2025-12-23T18:49:11.808871Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:49:11.809505Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2025-12-23T18:49:11.810387Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2025-12-23T18:49:11.810492Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:49:11.811518Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2025-12-23T18:49:11.812400Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2025-12-23T18:49:11.812509Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:49:11.812998Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2025-12-23T18:49:11.813759Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2025-12-23T18:49:11.813870Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: 
TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:49:11.814310Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2025-12-23T18:49:11.815155Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2025-12-23T18:49:11.815249Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:49:11.815656Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2025-12-23T18:49:11.816372Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2025-12-23T18:49:11.816949Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:49:11.817431Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2025-12-23T18:49:11.818310Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2025-12-23T18:49:11.818416Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:49:11.818887Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2025-12-23T18: ... 6644480 2025-12-23T18:49:15.039735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:15.039778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:15.039788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:15.042993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587127393275885695 RawX2: 4503608217307444 } TabletId: 72075186224037890 State: 4 2025-12-23T18:49:15.043054Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:49:15.043211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587127393275885695 RawX2: 4503608217307444 } TabletId: 72075186224037890 State: 4 2025-12-23T18:49:15.043240Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:49:15.046926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-12-23T18:49:15.047198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 
2025-12-23T18:49:15.047441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:15.047461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:15.047518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:15.047534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:15.047602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-12-23T18:49:15.048580Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2025-12-23T18:49:15.048677Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-12-23T18:49:15.049859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:49:15.050036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-12-23T18:49:15.055252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2025-12-23T18:49:15.055285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-12-23T18:49:15.055349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:49:15.055362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-12-23T18:49:15.055397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:49:15.055822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:49:15.055984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2025-12-23T18:49:15.056137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:49:15.056758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:49:15.056779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-12-23T18:49:15.056799Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:49:15.100456Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-12-23T18:49:15.100493Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-12-23T18:49:15.100515Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-12-23T18:49:15.100573Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7587127393275885823:2816], serverId# [2:7587127393275885826:2819], sessionId# [0:0:0] 2025-12-23T18:49:15.100597Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-12-23T18:49:15.100613Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037892, clientId# [2:7587127393275885824:2817], serverId# [2:7587127393275885827:2820], sessionId# [0:0:0] 2025-12-23T18:49:15.100642Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-12-23T18:49:15.100652Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-12-23T18:49:15.100661Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-12-23T18:49:15.100677Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7587127393275885822:2815], serverId# [2:7587127393275885825:2818], sessionId# [0:0:0] 2025-12-23T18:49:15.100874Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-12-23T18:49:15.100968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587127393275885325 RawX2: 4503608217307373 } TabletId: 72075186224037889 State: 4 2025-12-23T18:49:15.101017Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:49:15.101211Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2025-12-23T18:49:15.101280Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2025-12-23T18:49:15.101503Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-12-23T18:49:15.101523Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-12-23T18:49:15.101711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:15.101745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:15.103009Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2025-12-23T18:49:15.103066Z node 2 
:TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2025-12-23T18:49:15.103762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-12-23T18:49:15.103931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2025-12-23T18:49:15.104063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:49:15.104073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2025-12-23T18:49:15.104103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:49:15.104632Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2025-12-23T18:49:15.104687Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2025-12-23T18:49:15.105787Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-12-23T18:49:15.105814Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-12-23T18:49:15.105839Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7587127393275885470:2595], serverId# [2:7587127393275885471:2596], sessionId# [0:0:0] 2025-12-23T18:49:15.105964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2025-12-23T18:49:15.105991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-12-23T18:49:15.106044Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:49:15.106211Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-12-23T18:49:15.106765Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2025-12-23T18:49:15.106817Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReply >> BasicUsage::CreateTopicWithCustomName [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> BasicUsage::RecreateObserver ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2025-12-23T18:49:11.278143Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127382163136131:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:11.278392Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057d6/r3tmp/tmpthCJFr/pdisk_1.dat 2025-12-23T18:49:11.487792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:11.505991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:11.506095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:11.559054Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127382163136029:2081] 1766515751274196 != 1766515751274199 2025-12-23T18:49:11.572359Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:11.580097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:11.722880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:4194 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515751630 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:11.875320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:11.880545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:49:11.901448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusNameConflict Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), tableStr: /dc-1/Dir1, tableId: , opId: 281474976715659:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir1" SourceTabletId: 100500 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 42 } } } } 2025-12-23T18:49:11.907033Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127382163136835:2496] txid# 281474976715659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2025-12-23T18:49:14.659537Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127392540490830:2168];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:14.659710Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057d6/r3tmp/tmpUqXrS2/pdisk_1.dat 2025-12-23T18:49:14.783712Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:14.814656Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:14.822818Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127392540490679:2081] 1766515754647602 != 1766515754647605 2025-12-23T18:49:14.829200Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:14.829280Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:14.857867Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20364 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:49:15.078849Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515754906 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:15.085266Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:15.113468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:15.280968Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-12-23T18:49:15.286031Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-12-23T18:49:15.311805Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-12-23T18:49:15.316324Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515755221 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name:... (TRUNCATED) 2025-12-23T18:49:15.350534Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:49:15.352457Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2025-12-23T18:49:15.352617Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:49:15.353822Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2025-12-23T18:49:15.353945Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:49:15.354518Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037888 restored its data 2025-12-23T18:49:15.355563Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2025-12-23T18:49:15.355687Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:49:15.356149Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037889 restored its data 2025-12-23T18:49:15.356782Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2025-12-23T18:49:15.356877Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:49:15.357290Z node 2 :TX_DATASHARD DEBUG: data ... 
txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-12-23T18:49:15.943618Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2025-12-23T18:49:15.943647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-12-23T18:49:15.943663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710693:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2025-12-23T18:49:15.943680Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710693:0 129 -> 240 2025-12-23T18:49:15.944031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2025-12-23T18:49:15.944148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2025-12-23T18:49:15.944589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2025-12-23T18:49:15.944662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2025-12-23T18:49:15.944755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710693:0, at schemeshard: 72057594046644480 2025-12-23T18:49:15.944761Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710693 datashard 72075186224037889 state PreOffline 2025-12-23T18:49:15.944788Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 281474976710693:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:49:15.944804Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-12-23T18:49:15.944951Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710693 datashard 72075186224037894 state PreOffline 2025-12-23T18:49:15.944972Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-12-23T18:49:15.945093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2025-12-23T18:49:15.945226Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2025-12-23T18:49:15.945238Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710693 ready parts: 1/1 2025-12-23T18:49:15.945274Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2025-12-23T18:49:15.945288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 
281474976710693 ready parts: 1/1 2025-12-23T18:49:15.945303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710693, ready parts: 1/1, is published: true 2025-12-23T18:49:15.945346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7587127396835459680:2415] message: TxId: 281474976710693 2025-12-23T18:49:15.945361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710693 ready parts: 1/1 2025-12-23T18:49:15.945375Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710693:0 2025-12-23T18:49:15.945385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710693:0 2025-12-23T18:49:15.945471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:49:15.946243Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-12-23T18:49:15.946321Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037894 Initiating switch from PreOffline to Offline state TClient::Ls request: /dc-1/Dir/TableOld 2025-12-23T18:49:15.947576Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: TClient::Ls response: 2025-12-23T18:49:15.947641Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-12-23T18:49:15.948793Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:49:15.948872Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:49:15.949085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587127396835459454 RawX2: 4503608217307466 } TabletId: 72075186224037894 State: 4 2025-12-23T18:49:15.949145Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:49:15.949307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587127396835458820 RawX2: 4503608217307373 } TabletId: 72075186224037889 State: 4 2025-12-23T18:49:15.949317Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:49:15.949484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 
2025-12-23T18:49:15.949502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-12-23T18:49:15.949676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:15.949711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:15.950045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-12-23T18:49:15.950265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2025-12-23T18:49:15.950499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2025-12-23T18:49:15.950511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-12-23T18:49:15.950697Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037894 state Offline 2025-12-23T18:49:15.950723Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037894 reason = ReasonStop 2025-12-23T18:49:15.950776Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:7587127396835459558:2988], serverId# [2:7587127396835459560:2990], sessionId# [0:0:0] 2025-12-23T18:49:15.950790Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-12-23T18:49:15.951077Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-12-23T18:49:15.952605Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037894 2025-12-23T18:49:15.952678Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037894 2025-12-23T18:49:15.955424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-12-23T18:49:15.955631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2025-12-23T18:49:15.955818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:49:15.955834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2025-12-23T18:49:15.955871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId 
[OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:49:15.956235Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-12-23T18:49:15.957688Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-12-23T18:49:15.957908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2025-12-23T18:49:15.957933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-12-23T18:49:15.957960Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:49:15.958893Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2025-12-23T18:49:15.958983Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TPQTest::TestPQPartialRead [GOOD] >> TPQTest::TestPQRead >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-system >> TLocksTest::Range_BrokenLockMax >> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD] >> TFlatTest::SelectRangeSkipNullKeys >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test >> TFlatTest::Init >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-system >> PQCountersLabeled::PartitionBlobCompactionCounters [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyTableAndAddFollowers >> TFlatTest::SplitBoundaryRead [GOOD] >> TFlatTest::AutoSplitBySize >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWritePQBigMessage >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] >> TFlatTest::MiniKQLRanges >> TPQTest::TestMaxTimeLagRewind [GOOD] >> TPQTest::TestManyConsumers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] Test command err: 2025-12-23T18:49:14.953053Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127391447486885:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:14.953124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057d3/r3tmp/tmpmiLzD3/pdisk_1.dat 2025-12-23T18:49:15.234904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:15.237682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:15.237780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:15.324314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:15.329881Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127391447486642:2081] 1766515754903666 != 1766515754903669 2025-12-23T18:49:15.336359Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:15.421844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:2165 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515755389 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:15.636081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:15.646904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:15.663356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:15.954988Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057d3/r3tmp/tmpVACEzo/pdisk_1.dat 2025-12-23T18:49:18.446178Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127411293424835:2097];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:18.465364Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:18.493227Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:18.587465Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:18.602962Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:18.603055Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:18.610478Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:18.660750Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:14655 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515758651 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-12-23T18:49:18.798605Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:49:18.829821Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TLocksFatTest::PointSetRemove [GOOD] >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler >> ReadSessionImplTest::CommonHandler [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] Test command err: 2025-12-23T18:49:15.385928Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127397683883875:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:15.385982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057d2/r3tmp/tmpcb3gQd/pdisk_1.dat 2025-12-23T18:49:15.655448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:15.655617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:15.655714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:15.676885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:15.744226Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:15.765994Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127397683883847:2081] 1766515755382173 != 1766515755382176 2025-12-23T18:49:15.876722Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:27833 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515755781 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:16.067829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:16.075163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:16.095179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:16.286524Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-12-23T18:49:16.301442Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.018s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-12-23T18:49:16.329400Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-12-23T18:49:16.339074Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515756208 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name:... (TRUNCATED) 2025-12-23T18:49:16.399046Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 
2025-12-23T18:49:16.466974Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.22, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-12-23T18:49:16.467178Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-12-23T18:49:16.467340Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-12-23T18:49:16.467574Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-12-23T18:49:16.468881Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 3} end=Done, 4 blobs 2r (max 2), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1907 1533 0)b }, ecr=1.000 2025-12-23T18:49:16.475539Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.33, eph 3} end=Done, 4 blobs 8r (max 8), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515756208 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name:... (TRUNCATED) waiting... 
TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-12-23T18:49:16.616360Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-12-23T18:49:16.621969Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-12-23T18:49:16.622227Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-12-23T18:49:16.622253Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-12-23T18:49:16.622266Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-12-23T18:49:18.855201Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127410731552807:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:18.855567Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057d2/r3tmp/tmpzw5ZGm/pdisk_1.dat 2025-12-23T18:49:18.896234Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:19.000186Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:19.002919Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:19.003000Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:19.027961Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:19.166857Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:9401 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515759085 Pare ... 
81474976715678 TabletId: 72075186224037891 2025-12-23T18:49:19.643449Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037891 2025-12-23T18:49:19.643811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-12-23T18:49:19.645453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037890 2025-12-23T18:49:19.645497Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037890 2025-12-23T18:49:19.645521Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715678:0 3 -> 131 2025-12-23T18:49:19.645945Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-12-23T18:49:19.646084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-12-23T18:49:19.646126Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:49:19.646159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:353: TSplitMerge TTransferData operationId# 281474976715678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976715678:0 at tablet 72057594046644480 2025-12-23T18:49:19.646454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2025-12-23T18:49:19.646551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037888 2025-12-23T18:49:19.649384Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-12-23T18:49:19.649724Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-12-23T18:49:19.649983Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-12-23T18:49:19.650179Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-12-23T18:49:19.650428Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph -9223372036854775808} end=Done, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 
2025-12-23T18:49:19.655649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-12-23T18:49:19.655710Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-12-23T18:49:19.655994Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715678:0 131 -> 132 2025-12-23T18:49:19.656087Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2025-12-23T18:49:19.656498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-12-23T18:49:19.656595Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:49:19.656615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715678, path id: [OwnerId: 72057594046644480, LocalPathId: 39] 2025-12-23T18:49:19.656809Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:49:19.656823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:7587127415026520575:2245], at schemeshard: 72057594046644480, txId: 281474976715678, path id: 39 2025-12-23T18:49:19.656850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-12-23T18:49:19.656872Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:49:19.656895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:468: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976715678 at tablet 72057594046644480 2025-12-23T18:49:19.658884Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-12-23T18:49:19.658982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2025-12-23T18:49:19.658991Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715678 2025-12-23T18:49:19.659006Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715678, pathId: [OwnerId: 72057594046644480, 
LocalPathId: 39], version: 4 2025-12-23T18:49:19.659025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 7 2025-12-23T18:49:19.659095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 0/1, is published: true 2025-12-23T18:49:19.659228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2025-12-23T18:49:19.659531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715678 2025-12-23T18:49:19.663484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2025-12-23T18:49:19.663516Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-12-23T18:49:19.663564Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715678:0 progress is 1/1 2025-12-23T18:49:19.663578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-12-23T18:49:19.663595Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715678:0 progress is 1/1 2025-12-23T18:49:19.663604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-12-23T18:49:19.663620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 1/1, is published: true 2025-12-23T18:49:19.663658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7587127415026521246:2348] message: TxId: 281474976715678 2025-12-23T18:49:19.663678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2025-12-23T18:49:19.663689Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715678:0 2025-12-23T18:49:19.663698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976715678:0 2025-12-23T18:49:19.663801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2025-12-23T18:49:19.664100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2025-12-23T18:49:19.664120Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715678:0 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: 
Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766515759372 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name:... (TRUNCATED) |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] Test command err: 2025-12-23T18:49:16.028279Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127403168304050:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:16.028685Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057ca/r3tmp/tmp8ei3xi/pdisk_1.dat 2025-12-23T18:49:16.303132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:16.303262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:16.359623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:16.368950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:16.414754Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:1193 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: 2025-12-23T18:49:16.669191Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515756439 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:16.679658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:16.702012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:17.033554Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:19.284588Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127415535204044:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:19.284969Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:19.319598Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057ca/r3tmp/tmpbwcWYS/pdisk_1.dat 2025-12-23T18:49:19.423315Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:19.425541Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127415535204004:2081] 1766515759281570 != 1766515759281573 2025-12-23T18:49:19.425781Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:19.467731Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:19.467826Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:19.476110Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:19.630405Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:13334 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515759519 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:49:19.642760Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:19.650330Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:19.666797Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:49:19.672388Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyDistributed [GOOD] >> TFlatTest::LargeDatashardReplyRW >> TLocksFatTest::LocksLimit [GOOD] >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPartitionWriteQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] Test command err: 2025-12-23T18:48:58.234299Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.320030Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:58.320101Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:58.320161Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:58.320236Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:48:58.339104Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:183:2196] 2025-12-23T18:48:58.340942Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-12-23T18:48:58.000000Z 2025-12-23T18:48:58.341162Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:183:2196] 2025-12-23T18:48:58.362950Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.407454Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.431191Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.442979Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.484639Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.526295Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.557355Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\220\213\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:48:58.719875Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\220\213\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:48:58.742226Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\220\213\254\344\26438\001" StorageChannel: INLINE } 
2025-12-23T18:48:58.787136Z node 1 :PERSQUEUE WARN: partition.cpp:3024: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (gap) Offset 1 Begin 0 2025-12-23T18:48:58.787250Z node 1 :PERSQUEUE WARN: partition.cpp:3024: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (gap) Offset 1 Begin 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\220\213\254\344\26438\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\220\213\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:48:59.265119Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:59.307988Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:59.308044Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:59.308087Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:59.308157Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:48:59.325029Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:184:2196] 2025-12-23T18:48:59.326761Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-12-23T18:48:59.000000Z 2025-12-23T18:48:59.327018Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:184:2196] 2025-12-23T18:48:59.337558Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:59.368701Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:59.389551Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:59.420566Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:59.451550Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:59.472341Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:59.523818Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:59.596149Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\370\222\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:48:59.680891Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\370\222\254\344\26438\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\370\222\254\344\26438\001" StorageChannel: INLINE } 2025-12-23T18:49:00.071116Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:00.121262Z node 3 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] 
doesn't have tx info 2025-12-23T18:49:00.121334Z node 3 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:00.121378Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:00.121425Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:49:00.136134Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-12-23T18:49:00.136350Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:49:00.136618Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [3:182:2195] 2025-12-23T18:49:00.137530Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request Go ... action and tx events 2025-12-23T18:49:24.550353Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:49:24.550402Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 2, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:24.550447Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:24.550485Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 2, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:24.550518Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:24.550576Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:49:24.550811Z node 6 :PERSQUEUE DEBUG: partition.cpp:1672: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-12-23T18:49:24.550873Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:24.550924Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2025-12-23T18:49:24.550981Z node 6 :PERSQUEUE DEBUG: partition.cpp:1616: [72057594037927937][Partition][0][StateIdle] TxId 26 affect SourceId src1 2025-12-23T18:49:24.551065Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2025-12-23T18:49:24.551098Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:24.551134Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:24.551187Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:24.551220Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 
2025-12-23T18:49:24.551287Z node 6 :PERSQUEUE DEBUG: partition.cpp:1672: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-12-23T18:49:24.551313Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:24.551337Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2025-12-23T18:49:24.551361Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:24.551381Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:24.551419Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:24.551441Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Wait for no tx committed 2025-12-23T18:49:24.551567Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 26 2025-12-23T18:49:24.551606Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:24.551662Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2025-12-23T18:49:24.551703Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:24.551742Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:24.551792Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2025-12-23T18:49:24.551839Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 26 2025-12-23T18:49:24.551904Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2025-12-23T18:49:24.551955Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 26 2025-12-23T18:49:24.552002Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:24.552050Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:24.552089Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request 2025-12-23T18:49:24.575049Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:24.597995Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2025-12-23T18:49:24.612088Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:24.633640Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:24.659010Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:24.683256Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:24.707026Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:24.743031Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:24.767019Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:24.792044Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:24.818981Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events Send disk status response with cookie: 0 Wait tx committed for tx 26 2025-12-23T18:49:24.819280Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:24.819409Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2025-12-23T18:49:24.819463Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:24.819637Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2025-12-23T18:49:24.819722Z node 6 :PERSQUEUE DEBUG: partition.cpp:1608: [72057594037927937][Partition][0][StateIdle] TxId (empty maybe) affect SourceId src1 2025-12-23T18:49:24.819779Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:24.819826Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:24.819887Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2025-12-23T18:49:24.819933Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-12-23T18:49:24.820005Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2025-12-23T18:49:24.820062Z node 6 :PERSQUEUE DEBUG: partition.cpp:3941: 
[72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-12-23T18:49:24.820112Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:24.820154Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:24.820205Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:24.820443Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 28 2025-12-23T18:49:24.820694Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:24.841423Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2025-12-23T18:49:24.841514Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2025-12-23T18:49:24.841638Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2025-12-23T18:49:24.841698Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:24.841762Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:24.841817Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:24.841860Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:24.841893Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:49:24.841949Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 28 >> TFlatTest::LargeProxyReply [GOOD] >> TFlatTest::LargeProxyReplyRW |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-12-23T18:48:41.394785Z :SettingsValidation INFO: Random seed for debugging is 1766515721394761 2025-12-23T18:48:41.715841Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127250763718921:2145];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.718108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:41.807497Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127250585911362:2097];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.841519Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:48:41.855450Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:48:41.855689Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005846/r3tmp/tmpgxiuVT/pdisk_1.dat 2025-12-23T18:48:42.126057Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:42.202863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:42.323318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:42.324952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:42.331405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:42.331537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:42.351139Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:48:42.351292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:42.383317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:42.506952Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:42.560454Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:42.565848Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 26942, node 1 2025-12-23T18:48:42.775832Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:42.851497Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:42.855655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005846/r3tmp/yandexySFXxu.tmp 2025-12-23T18:48:42.855705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: 
will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005846/r3tmp/yandexySFXxu.tmp 2025-12-23T18:48:42.855944Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005846/r3tmp/yandexySFXxu.tmp 2025-12-23T18:48:42.856062Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:42.939489Z INFO: TTestServer started on Port 2396 GrpcPort 26942 TClient is connected to server localhost:2396 PQClient connected to localhost:26942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515722615 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:43.264797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-12-23T18:48:45.846284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127267943589348:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.846474Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.847741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127267943589361:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.847813Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127267943589362:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.847870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.852999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:45.885248Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127267943589365:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-12-23T18:48:46.152684Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127267943589457:2974] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:46.184642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:46.189159Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587127267765780867:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:48:46.189140Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587127272238556770:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:48:46.189828Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=ZjcyMGUyYmMtNDRkMjcyNzQtNzQxZTIzNGQtNGVmZDM2ZGM=, ActorId: [1:7587127267943589346:2329], ActorState: ExecuteState, TraceId: 01kd68nngm3cfka7a3v0pahcxc, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:48:46.191786Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:48:46.192131Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=YmM0Nzk3ZGEtNDU1YjI2YjgtYjhmODFlZmMtOTE4NWQwNTI=, ActorId: [2:7587127267765780815:2298], Act ... 20694Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_5_1_12246793505348760216_v1 grpc read failed 2025-12-23T18:49:20.320718Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_5_1_12246793505348760216_v1 grpc closed 2025-12-23T18:49:20.320768Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_5_1_12246793505348760216_v1 is DEAD 2025-12-23T18:49:20.322191Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [5:7587127409273744244:2482] disconnected. 
2025-12-23T18:49:20.322217Z node 5 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [5:7587127409273744244:2482] disconnected; active server actors: 1 2025-12-23T18:49:20.322236Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [5:7587127409273744244:2482] client user disconnected session shared/user_5_1_12246793505348760216_v1 2025-12-23T18:49:20.369894Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:20.369935Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:20.369951Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:20.369970Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:20.369984Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:20.470832Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:20.470867Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:20.470880Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:20.470897Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:20.470910Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:20.802833Z node 5 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1177: ActorId: [5:7587127417863679025:2514] TxId: 281474976715683. Ctx: { TraceId: 01kd68pqae5yxghztsexjcykwv, Database: /Root, SessionId: ydb://session/3?node_id=5&id=NWRmM2Y2ZjEtNDJjODg1YjEtNTRlNjM0Y2YtODZkNDU4MTY=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 6 2025-12-23T18:49:20.803580Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [5:7587127417863679029:2514], TxId: 281474976715683, task: 3. Ctx: { CheckpointId : . TraceId : 01kd68pqae5yxghztsexjcykwv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=NWRmM2Y2ZjEtNDJjODg1YjEtNTRlNjM0Y2YtODZkNDU4MTY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7587127417863679025:2514], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-12-23T18:49:22.288831Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.288869Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.288925Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:49:22.289259Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:49:22.289788Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:49:22.290007Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.290454Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-12-23T18:49:22.292157Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.292189Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.292252Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:49:22.292763Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:49:22.293189Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:49:22.293340Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.293589Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:49:22.294870Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:49:22.296942Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-12-23T18:49:22.297079Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-12-23T18:49:22.297560Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:49:22.297630Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-12-23T18:49:22.297661Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-12-23T18:49:22.297716Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 57 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-12-23T18:49:22.299986Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.300036Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.300075Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:49:22.300355Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:49:22.311014Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:49:22.311237Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.311552Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:49:22.312427Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.312626Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:49:22.312762Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:49:22.312828Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-12-23T18:49:22.312916Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 2025-12-23T18:49:22.314730Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.314784Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.314847Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:49:22.315171Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:49:22.315571Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:49:22.315706Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:22.316020Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-12-23T18:49:22.316740Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:49:22.317260Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-12-23T18:49:22.319527Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-12-23T18:49:22.319603Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:49:22.319663Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:49:22.319719Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-12-23T18:49:22.319874Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-12-23T18:49:22.319941Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-12-23T18:49:24.435022Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:24.435090Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:24.435134Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:49:24.435490Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-12-23T18:49:24.441822Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-12-23T18:49:24.442038Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:24.442998Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:49:24.443182Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-12-23T18:49:24.443280Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-12-23T18:49:24.443379Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> TFlatTest::CopyTableAndAddFollowers [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy |96.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD] Test command err: 2025-12-23T18:49:08.685365Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127366634350289:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:08.686052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057dd/r3tmp/tmpjClyqH/pdisk_1.dat 2025-12-23T18:49:08.954915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:08.979518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:08.979605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:09.006249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:09.068029Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127366634350268:2081] 1766515748683574 != 1766515748683577 2025-12-23T18:49:09.072264Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:09.158609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient 
is connected to server localhost:27517 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515749082 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-12-23T18:49:09.279945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:09.299794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:09.431367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:09.480486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:09.698634Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:13.684780Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127366634350289:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:13.684884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:49:15.718245Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127399357752928:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:15.719359Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:15.726047Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057dd/r3tmp/tmpte7CY4/pdisk_1.dat 2025-12-23T18:49:15.810243Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:15.813144Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127399357752892:2081] 1766515755715837 != 1766515755715840 2025-12-23T18:49:15.826849Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:15.856862Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:15.856941Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:15.862134Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25422 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515755907 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-12-23T18:49:16.035170Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:16.048334Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions waiting... 2025-12-23T18:49:16.054764Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:16.144878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:16.200781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:16.723706Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:19.734378Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127413639305315:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:19.735195Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057dd/r3tmp/tmp3UqMeK/pdisk_1.dat 2025-12-23T18:49:19.752307Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:19.821709Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:19.859704Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:19.859802Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:19.864024Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:19.976338Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:25336 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515759911 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:20.038489Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:49:20.059426Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:20.114564Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:20.164769Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:20.739199Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::MiniKQLRanges [GOOD] >> TFlatTest::MergeEmptyAndWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2025-12-23T18:49:17.773819Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127407860380976:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:17.773884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c7/r3tmp/tmpYHKfJ7/pdisk_1.dat 2025-12-23T18:49:18.072750Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:18.080492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:18.080632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:18.133169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:18.189005Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:18.267666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:26542 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515758196 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:18.424731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:18.432573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:49:18.437228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:18.583605Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127412155349144:2562] txid# 281474976710659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-12-23T18:49:18.588292Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127412155349157:2568] txid# 281474976710660, issues: { message: "Unexpected freeze state" severity: 1 } Error 128: Unexpected freeze state 2025-12-23T18:49:18.591754Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127412155349163:2573] txid# 281474976710661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-12-23T18:49:18.594291Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127412155349169:2578] txid# 281474976710662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-12-23T18:49:21.308626Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127422052094703:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:21.308692Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:21.332868Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c7/r3tmp/tmpqu6bWr/pdisk_1.dat 2025-12-23T18:49:21.414838Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127422052094677:2081] 1766515761307082 != 1766515761307085 2025-12-23T18:49:21.454853Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:21.459319Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:21.462042Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:21.462093Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:21.492280Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15578 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515761542 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:21.667343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:21.669601Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions waiting... 2025-12-23T18:49:21.688463Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
|96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBytesLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD] Test command err: 2025-12-23T18:49:10.675504Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127375874942968:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:10.675606Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057da/r3tmp/tmpmBiGOn/pdisk_1.dat 2025-12-23T18:49:10.900610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:10.910549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:10.910693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:10.977622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:10.986256Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:11.116252Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:27116 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515751056 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:11.244472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:49:11.266109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:49:11.397954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:11.439709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:11.683391Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:15.675511Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127375874942968:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:15.675760Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:49:17.640834Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127408451742978:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:17.640882Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057da/r3tmp/tmp8m7gA0/pdisk_1.dat 2025-12-23T18:49:17.674804Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:17.782213Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:17.782861Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127408451742954:2081] 1766515757639752 != 1766515757639755 2025-12-23T18:49:17.796153Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:17.796229Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:17.820922Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:17.888121Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions TClient is connected to server localhost:6786 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515757874 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:17.997955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:18.023282Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:18.086324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:18.132032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:21.389074Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127422383829438:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:21.389223Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057da/r3tmp/tmp8jiWaA/pdisk_1.dat 2025-12-23T18:49:21.405661Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:21.490717Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:21.501965Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:21.502054Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:21.528677Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:21.591424Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:18310 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515761584 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:21.741180Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:49:21.754173Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:21.758778Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:21.820614Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:21.877012Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:22.398869Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Listing >> TLocksFatTest::RangeSetBreak >> TLocksTest::BrokenLockUpdate [GOOD] >> TLocksTest::BrokenNullLock >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 >> TFlatTest::WriteSplitKillRead >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-system >> TFlatTest::SelectRangeForbidNullArgs2 >> TFlatTest::PathSorting >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] >> TObjectStorageListingTest::TestFilter >> TLocksTest::Range_CorrectNullDot >> TFlatTest::MergeEmptyAndWrite [GOOD] >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 >> CompressExecutor::TestExecutorMemUsage [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] Test command err: 2025-12-23T18:49:18.867152Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127410669441714:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:18.867225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c6/r3tmp/tmp31wWKf/pdisk_1.dat 
2025-12-23T18:49:19.202843Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:19.264444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:19.264743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:19.266494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:19.304962Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:19.306405Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127410669441691:2081] 1766515758866388 != 1766515758866391 2025-12-23T18:49:19.390958Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:9778 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515759337 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:19.538066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:19.543932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 
2025-12-23T18:49:19.579615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:19.762960Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-12-23T18:49:19.773801Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-12-23T18:49:19.805510Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.005s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-12-23T18:49:19.812009Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-12-23T18:49:19.878175Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:19.959404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:49:19.959658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:431: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-12-23T18:49:19.960107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2025-12-23T18:49:19.960141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2025-12-23T18:49:19.960154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:49:19.960171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976710676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2025-12-23T18:49:19.960197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2025-12-23T18:49:19.960288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2025-12-23T18:49:19.960409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2025-12-23T18:49:19.960548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:49:19.962596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2025-12-23T18:49:19.962649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2025-12-23T18:49:19.963320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2025-12-23T18:49:19.963516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-12-23T18:49:19.963686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:49:19.963700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T18:49:19.963810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2025-12-23T18:49:19.963899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:49:19.963913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7587127414964409517:2247], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 38 2025-12-23T18:49:19.963933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7587127414964409517:2247], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 40 2025-12-23T18:49:19.963966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-12-23T18:49:19.963994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-12-23T18:49:19.964297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-12-23T18:49:19.964421Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 2025-12-23T18:49:19.966963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-12-23T18:49:19.967047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-12-23T18:49:19.967064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, tx ... SHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:27.168377Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:27.168386Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:27.168737Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-12-23T18:49:27.168913Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-12-23T18:49:27.168927Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 41] was 2 2025-12-23T18:49:27.168936Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-12-23T18:49:27.168947Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-12-23T18:49:27.169054Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-12-23T18:49:27.169168Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-12-23T18:49:27.169288Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 41] was 1 2025-12-23T18:49:27.169389Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 
ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:49:27.169482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2025-12-23T18:49:27.169561Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:49:27.169575Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 41], at schemeshard: 72057594046644480 2025-12-23T18:49:27.169619Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2025-12-23T18:49:27.169789Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037893 reason = ReasonStop 2025-12-23T18:49:27.169980Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-12-23T18:49:27.169996Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-12-23T18:49:27.170690Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037893 2025-12-23T18:49:27.170787Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037893 2025-12-23T18:49:27.170867Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-12-23T18:49:27.171373Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-12-23T18:49:27.171378Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2025-12-23T18:49:27.171390Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-12-23T18:49:27.171410Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-12-23T18:49:27.171456Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-12-23T18:49:27.171610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2025-12-23T18:49:27.171758Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2025-12-23T18:49:27.171780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2025-12-23T18:49:27.171789Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-12-23T18:49:27.171847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 
2025-12-23T18:49:27.171969Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2025-12-23T18:49:27.172058Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:49:27.172068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-12-23T18:49:27.172100Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 2 candidates, at schemeshard: 72057594046644480 2025-12-23T18:49:27.172140Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:49:27.172154Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2025-12-23T18:49:27.172193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2025-12-23T18:49:27.172211Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2025-12-23T18:49:27.172226Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:49:27.172326Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2025-12-23T18:49:27.172374Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2025-12-23T18:49:27.173218Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2025-12-23T18:49:27.173230Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-12-23T18:49:27.173494Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:49:27.173506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-12-23T18:49:27.173530Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:49:27.175487Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [3:7587127443353091859:2775], serverId# [3:7587127443353091860:2776], sessionId# [0:0:0] 2025-12-23T18:49:27.178351Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2025-12-23T18:49:27.178435Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2025-12-23T18:49:27.179675Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037893 from 72075186224037891 is reset 2025-12-23T18:49:27.179702Z node 3 
:TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-12-23T18:49:27.179734Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-12-23T18:49:27.179759Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [3:7587127443353091589:2595], serverId# [3:7587127443353091590:2596], sessionId# [0:0:0] 2025-12-23T18:49:27.179974Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-12-23T18:49:27.180148Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-12-23T18:49:27.180305Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2025-12-23T18:49:27.180349Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2025-12-23T18:49:27.181744Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2025-12-23T18:49:27.181784Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2025-12-23T18:49:27.306717Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:27.460166Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-12-23T18:49:27.461583Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-12-23T18:49:27.462284Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-12-23T18:49:27.464970Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-12-23T18:49:27.465675Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-12-23T18:49:27.466300Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2025-12-23T18:49:23.313764Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127433400128126:2143];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:23.315469Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:23.341936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 
test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057bf/r3tmp/tmp1lHJSp/pdisk_1.dat 2025-12-23T18:49:23.594846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:23.598193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:23.598291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:23.605379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:23.704033Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:23.781386Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:25187 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515763726 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:23.986549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:24.003741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:24.019602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:24.025034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:26.782135Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127443201582903:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:26.782198Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:26.811524Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057bf/r3tmp/tmpg4xBxA/pdisk_1.dat 2025-12-23T18:49:26.970551Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:26.982939Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127443201582877:2081] 1766515766780795 != 1766515766780798 2025-12-23T18:49:26.986869Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:26.989563Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:26.989648Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:27.018093Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:27.145000Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:9826 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515767072 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-12-23T18:49:27.227918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:49:27.234593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:27.244561Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:27.251752Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:27.363796Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.007s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-12-23T18:49:27.369670Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-12-23T18:49:27.403271Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-12-23T18:49:27.437485Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515767345 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name:... 
(TRUNCATED) 2025-12-23T18:49:27.488063Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:49:27.489936Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2025-12-23T18:49:27.490129Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:49:27.491464Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2025-12-23T18:49:27.491603Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:49:27.492158Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037888 restored its data 2025-12-23T18:49:27.492946Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2025-12-23T18:49:27.493367Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:49:27.493881Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710676 at 72075186224037889 restored its data 2025-12-23T18:49:27.494592Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710676 released its data 2025-12-23T18:49:27.494699Z n ... sult, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976710687 Step: 1766515767940 OrderId: 281474976710687 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 911 } } CommitVersion { Step: 1766515767940 TxId: 281474976710687 } 2025-12-23T18:49:27.900587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710687, tablet: 72075186224037890, partId: 0 2025-12-23T18:49:27.900693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710687:0, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976710687 Step: 1766515767940 OrderId: 281474976710687 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 911 } } CommitVersion { Step: 1766515767940 TxId: 281474976710687 } 2025-12-23T18:49:27.900799Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976710687 Step: 1766515767940 OrderId: 281474976710687 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 911 } } CommitVersion { Step: 1766515767940 TxId: 281474976710687 } debug: NTableState::TProposedWaitParts operationId# 281474976710687:0 2025-12-23T18:49:27.900930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710687 2025-12-23T18:49:27.900963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046644480, cookie: 281474976710687 2025-12-23T18:49:27.900985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710687 2025-12-23T18:49:27.901042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710687:0, at schemeshard: 72057594046644480 2025-12-23T18:49:27.901164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7587127447496551321 RawX2: 4503608217307432 } Origin: 72075186224037890 State: 5 TxId: 281474976710687 Step: 0 Generation: 1 2025-12-23T18:49:27.901183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710687, tablet: 72075186224037890, partId: 0 2025-12-23T18:49:27.901288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710687:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7587127447496551321 RawX2: 4503608217307432 } Origin: 72075186224037890 State: 5 TxId: 281474976710687 Step: 0 Generation: 1 2025-12-23T18:49:27.901318Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 281474976710687:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 2025-12-23T18:49:27.901390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 281474976710687:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7587127447496551321 RawX2: 4503608217307432 } Origin: 72075186224037890 State: 5 TxId: 281474976710687 Step: 0 Generation: 1 2025-12-23T18:49:27.901434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710687:0, shardIdx: 72057594046644480:3, shard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-12-23T18:49:27.901453Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710687:0, at schemeshard: 72057594046644480 2025-12-23T18:49:27.901490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-12-23T18:49:27.901527Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710687:0 129 -> 240 2025-12-23T18:49:27.901947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710687:0, at schemeshard: 72057594046644480 2025-12-23T18:49:27.902016Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710687 datashard 72075186224037890 state PreOffline 2025-12-23T18:49:27.902029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710687:0, at schemeshard: 72057594046644480 2025-12-23T18:49:27.902052Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult 
from SS at 72075186224037890 2025-12-23T18:49:27.902069Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 281474976710687:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:49:27.902375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:49:27.902509Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710687:0 progress is 1/1 2025-12-23T18:49:27.902523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710687 ready parts: 1/1 2025-12-23T18:49:27.902545Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710687:0 progress is 1/1 2025-12-23T18:49:27.902556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710687 ready parts: 1/1 2025-12-23T18:49:27.902572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710687, ready parts: 1/1, is published: true 2025-12-23T18:49:27.902620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7587127447496551552:2381] message: TxId: 281474976710687 2025-12-23T18:49:27.902636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710687 ready parts: 1/1 2025-12-23T18:49:27.902653Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710687:0 2025-12-23T18:49:27.902667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710687:0 2025-12-23T18:49:27.902734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2025-12-23T18:49:27.903466Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-12-23T18:49:27.903525Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-12-23T18:49:27.905375Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:49:27.905820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587127447496551321 RawX2: 4503608217307432 } TabletId: 72075186224037890 State: 4 2025-12-23T18:49:27.905900Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 TClient::Ls request: /dc-1/Dir/TableOld 2025-12-23T18:49:27.906209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:27.906247Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:27.906414Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-12-23T18:49:27.907960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:49:27.908196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2025-12-23T18:49:27.908395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:49:27.908424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2025-12-23T18:49:27.908468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:49:27.908730Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-12-23T18:49:27.909252Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-12-23T18:49:27.909454Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2025-12-23T18:49:27.909546Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2025-12-23T18:49:27.909681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:49:27.909700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-12-23T18:49:27.909736Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::PathSorting [GOOD] >> TFlatTest::PartBloomFilter >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit >> TFlatTest::SelectRangeForbidNullArgs2 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWritePQ >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] Test command err: 2025-12-23T18:49:19.762286Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127415428304166:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:19.762933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c4/r3tmp/tmpgIKgYA/pdisk_1.dat 2025-12-23T18:49:19.999639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:19.999758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:20.034540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:20.055168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:20.073359Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:20.086516Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127415428304096:2081] 1766515759746852 != 1766515759746855 2025-12-23T18:49:20.271073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:12222 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515760135 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:20.353776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:20.359205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:20.371948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:20.770199Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:24.766946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127415428304166:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:24.767027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:49:25.302514Z node 1 :TX_PROXY ERROR: datareq.cpp:2703: Actor# [1:7587127436903144370:4331] txid# 281474976716010 MergeResult Result too large TDataReq marker# P18 2025-12-23T18:49:25.302578Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7587127436903144370:4331] txid# 281474976716010 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable 2025-12-23T18:49:26.028986Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127443711739511:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:26.031378Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c4/r3tmp/tmpWTsxGx/pdisk_1.dat 2025-12-23T18:49:26.092163Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:26.158977Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:26.193282Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:26.193358Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:26.197649Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:26.257280Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:61907 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515766246 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:26.419774Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:26.431491Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:26.455505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:26.466483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:27.032452Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:31.018942Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587127443711739511:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:31.019006Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; MiniKQLErrors: Query result size limit exceeded. 
(71692241 > 50331648) proxy error code: ExecResultUnavailable 2025-12-23T18:49:31.429809Z node 2 :TX_PROXY ERROR: datareq.cpp:2703: Actor# [2:7587127460891612435:4333] txid# 281474976711011 MergeResult Result too large TDataReq marker# P18 2025-12-23T18:49:31.429870Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7587127460891612435:4333] txid# 281474976711011 RESPONSE Status# ExecResultUnavailable marker# P13c |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitBySize [GOOD] >> TFlatTest::AutoMergeBySize >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] >> TFlatTest::ReadOnlyMode >> BasicUsage::CreateTopicWithAvailabilityPeriod [GOOD] >> BasicUsage::CreateTopicWithCustomMetricsLevel >> TLocksTest::Range_Pinhole >> TPQTest::TestManyConsumers [GOOD] >> TFlatTest::LargeDatashardReply [GOOD] >> TLocksFatTest::RangeSetBreak [GOOD] >> TLocksFatTest::RangeSetNotBreak >> TLocksTest::Range_IncorrectNullDot1 [GOOD] >> TLocksTest::Range_IncorrectNullDot2 >> TFlatTest::PartBloomFilter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] Test command err: 2025-12-23T18:49:27.170107Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127451250905001:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:27.171196Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057be/r3tmp/tmpZDEe0T/pdisk_1.dat 2025-12-23T18:49:27.459651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:27.459744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:27.544427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:27.545940Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127451250904976:2081] 1766515767157941 != 1766515767157944 2025-12-23T18:49:27.545995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:27.546885Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:18108 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:49:27.798177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515767604 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:27.866316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:27.874795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:27.891588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:28.180666Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:30.955982Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127464224163057:2176];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:30.956126Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:30.970615Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057be/r3tmp/tmprFI3UU/pdisk_1.dat 2025-12-23T18:49:31.061410Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:31.064949Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127464224162919:2081] 1766515770950649 != 1766515770950652 2025-12-23T18:49:31.078163Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:31.078260Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:31.090845Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:31.108269Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:31.288750Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:32367 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515771167 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-12-23T18:49:31.388409Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:49:31.395021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:31.412710Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] >> TFlatTest::ShardFreezeUnfreezeAlreadySet >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-system >> BasicUsage::RecreateObserver [GOOD] >> TFlatTest::LargeDatashardReplyRW [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: 2025-12-23T18:48:49.153441Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2025-12-23T18:48:49.208534Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:48:49.211726Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:48:49.211952Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:49.212007Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:49.212039Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:48:49.212076Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:48:49.212109Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:49.212157Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2025-12-23T18:48:49.227313Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2025-12-23T18:48:49.227423Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:48:49.242351Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 52428800 BurstSize: 52428800 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2025-12-23T18:48:49.245190Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 52428800 BurstSize: 52428800 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2025-12-23T18:48:49.245311Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:49.245982Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 
LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 52428800 BurstSize: 52428800 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2025-12-23T18:48:49.246100Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:49.246440Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:49.246859Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2025-12-23T18:48:49.248708Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:49.248763Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-12-23T18:48:49.248794Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2025-12-23T18:48:49.248830Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:48:49.248875Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:49.249966Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:49.251022Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:48:49.251089Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:49.251131Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:49.251204Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:49.251234Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2025-12-23T18:48:49.251258Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:49.251286Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:49.251352Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-12-23T18:48:49.251381Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:48:49.251403Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:49.251421Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-12-23T18:48:49.251448Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-12-23T18:48:49.251469Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2025-12-23T18:48:49.251498Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2025-12-23T18:48:49.251543Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:49.251715Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:49.251736Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:49.251770Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:48:49.251921Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:48:49.252060Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:49.253832Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:49.253933Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:48:49.253972Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:49.253995Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:49.254018Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:49.254044Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:49.254082Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:49.254112Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:49.254345Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:196:2203], now have 1 active actors on pipe 2025-12-23T18:48:49.266159Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:199:2205], now have 1 active actors on pipe 2025-12-23T18:48:49.266260Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:48:49.266305Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1-- ... 
Q: 72057594037927937] server connected, pipe [28:998:2988], now have 1 active actors on pipe 2025-12-23T18:49:33.731844Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:33.761290Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:33.832917Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [28:1001:2991], now have 1 active actors on pipe 2025-12-23T18:49:33.835100Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:33.860547Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:33.899759Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [28:1004:2994], now have 1 active actors on pipe 2025-12-23T18:49:33.901943Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:33.922931Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:33.970390Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [28:1007:2997], now have 1 active actors on pipe 2025-12-23T18:49:33.972632Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:33.992998Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.036478Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [28:1010:3000], now have 1 active actors on pipe 2025-12-23T18:49:34.038563Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.065952Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.135756Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [28:1013:3003], now have 1 active actors on pipe 2025-12-23T18:49:34.138117Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.161600Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.210428Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [28:1016:3006], now have 1 active actors on pipe 2025-12-23T18:49:34.212722Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.239384Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.281191Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [28:1019:3009], now have 1 active actors on pipe 2025-12-23T18:49:34.283555Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.304261Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.351115Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [28:1022:3012], now have 1 active actors on pipe 2025-12-23T18:49:34.353351Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.372508Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.416686Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [28:1025:3015], now have 1 active actors on pipe 2025-12-23T18:49:34.418901Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.438553Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.520316Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [28:1028:3018], now have 1 active actors on pipe 2025-12-23T18:49:34.522579Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.550043Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.590514Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [28:1031:3021], now have 1 active actors on pipe 2025-12-23T18:49:34.592796Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.612850Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic 
PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.662985Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [28:1034:3024], now have 1 active actors on pipe 2025-12-23T18:49:34.665123Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.696620Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:49:34.737642Z node 28 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [28:1037:3027] connected; active server actors: 1 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] Test command err: 2025-12-23T18:48:45.771861Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:132:2057] recipient: [1:130:2163] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:132:2057] recipient: [1:130:2163] Leader for TabletID 72057594037927937 is [1:136:2167] sender: [1:137:2057] recipient: [1:130:2163] 2025-12-23T18:48:45.874188Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:45.874267Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:45.874350Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:45.874443Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:178:2057] recipient: [1:176:2197] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:178:2057] recipient: [1:176:2197] Leader for TabletID 72057594037927938 is [1:182:2201] sender: [1:183:2057] recipient: [1:176:2197] Leader for TabletID 72057594037927937 is [1:136:2167] sender: [1:208:2057] recipient: [1:14:2061] 2025-12-23T18:48:45.898518Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:45.923008Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:206:2219] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: 
"rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:48:45.924151Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:214:2167] 2025-12-23T18:48:45.926903Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:214:2167] 2025-12-23T18:48:45.929173Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:215:2167] 2025-12-23T18:48:45.931136Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:215:2167] 2025-12-23T18:48:45.939474Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:48:45.939959Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c1cabffc-d1f38905-a8553b46-ee9bcb3c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:48:45.947340Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:48:45.947783Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6fe468a-755908d7-ee9b7acc-86675279_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:48:45.957281Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:48:45.957807Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d75298fe-3001c15-aafd86b8-89ac8cb2_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 2025-12-23T18:48:45.986505Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:46.043044Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:46.065446Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:46.089047Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:46.144282Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:46.165374Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:46.256110Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 
disallowed 0 2025-12-23T18:48:46.340625Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:46.399806Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:46.569449Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:46.600671Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:46.794828Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:46.981915Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:47.023234Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, 
"value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByCommitted" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TimeSinceLastReadMs" }, "value": 500 ... let_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:23.155664Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:23.366376Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:23.562168Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:23.655262Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:23.913827Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:24.162913Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:24.423778Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:24.479452Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:24.673489Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:25.067054Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:25.318883Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:25.556089Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for 
SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:25.713486Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:25.773380Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:26.075704Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:26.325218Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:26.553490Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:26.745418Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:26.853325Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:27.025316Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:27.295025Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:27.525418Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:27.787011Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:27.992965Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:28.070250Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:28.360962Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:28.636729Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:28.875790Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:29.119106Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:29.164644Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:29.212087Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][topic] pipe [6:432:2377] connected; 
active server actors: 1 2025-12-23T18:49:29.609749Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:29.911009Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:30.179340Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:30.458757Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:30.583967Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:30.736335Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:31.010875Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:31.298684Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:31.573332Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:31.801324Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:31.939780Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:32.225366Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:32.495048Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:32.761044Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:33.033758Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:33.107071Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:33.382721Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:33.671208Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:33.947443Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:34.278768Z node 6 
:TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:34.323007Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:34.503022Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:34.780498Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:35.029261Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:35.264598Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:35.425499Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:35.545668Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:35.827630Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:36.087008Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-system >> TObjectStorageListingTest::TestSkipShards [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TLocksTest::CK_GoodLock [GOOD] >> TLocksTest::CK_BrokenLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2025-12-23T18:49:21.486082Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127422339273865:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:21.497534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:21.516286Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c2/r3tmp/tmpXxasSb/pdisk_1.dat 2025-12-23T18:49:21.784949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:21.785054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:21.787522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:21.870874Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:21.874865Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127422339273825:2081] 1766515761464812 != 1766515761464815 2025-12-23T18:49:21.874968Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:21.884004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715680 2025-12-23T18:49:21.884068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715691 2025-12-23T18:49:21.884106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-12-23T18:49:21.884147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715683 2025-12-23T18:49:21.884170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715679 2025-12-23T18:49:21.884187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715690 2025-12-23T18:49:21.884246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715675 2025-12-23T18:49:21.884275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715686 2025-12-23T18:49:21.884295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715682 2025-12-23T18:49:21.884301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_queries_by_duration_one_minute' 2025-12-23T18:49:21.884329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715678 2025-12-23T18:49:21.884336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_partitions_one_hour' 2025-12-23T18:49:21.884350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715689 2025-12-23T18:49:21.884351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle 
TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/auth_groups' 2025-12-23T18:49:21.884364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_queries_by_cpu_time_one_minute' 2025-12-23T18:49:21.884372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715674 2025-12-23T18:49:21.884376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/query_sessions' 2025-12-23T18:49:21.884389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715685 2025-12-23T18:49:21.884390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/ds_storage_stats' 2025-12-23T18:49:21.884403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_partitions_by_tli_one_minute' 2025-12-23T18:49:21.884406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715681 2025-12-23T18:49:21.884424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/ds_groups' 2025-12-23T18:49:21.884440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/tables' 2025-12-23T18:49:21.884441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715692 2025-12-23T18:49:21.884461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/auth_owners' 2025-12-23T18:49:21.884477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/query_metrics_one_minute' 2025-12-23T18:49:21.884477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715688 2025-12-23T18:49:21.884508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/ds_storage_pools' 2025-12-23T18:49:21.884510Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715684 2025-12-23T18:49:21.884523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/partition_stats' 2025-12-23T18:49:21.884534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/pg_class' 2025-12-23T18:49:21.884550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_queries_by_duration_one_hour' 2025-12-23T18:49:21.884567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/resource_pools' 2025-12-23T18:49:21.884600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7587127422339274358:2253] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, create sys view '/dc-1/.sys/top_queries_by_request_units_one_minute' 2025-12-23T18:49:21.885745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:49:21.885784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715669, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-12-23T18:49:21.886051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715669, path id: [OwnerId: 72057594046644480, LocalPathId: 14] 2025-12-23T18:49:21.886191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:49:21.886206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7587127422339274343:2241], at schemeshard: 72057594046644480, txId: 281474976715669, path id: 2 2025-12-23T18:49:21.886226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7587127422339274343:2241], at schemeshard: 72057594046644480, txId: 281474976715669, path id: 14 2025-12-23T18:49:21.886252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:49:21.886258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715668, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-12-23T18:49:21.886316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715668, path id: 
[OwnerId: 72057594046644480, LocalPathId: 13] 2025-12-23T18:49:21.886386Z node 1 :FLAT_ ... 281474976710674 2025-12-23T18:49:23.135264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-12-23T18:49:23.135272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710674:0 2025-12-23T18:49:23.135276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710674:0 2025-12-23T18:49:23.135324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 42] was 2 2025-12-23T18:49:23.135578Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-12-23T18:49:23.135640Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037899 Initiating switch from PreOffline to Offline state 2025-12-23T18:49:23.137236Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2025-12-23T18:49:23.137519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587127426634242479 RawX2: 4503603922340114 } TabletId: 72075186224037899 State: 4 2025-12-23T18:49:23.137560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:49:23.137800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:23.137811Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 2025-12-23T18:49:23.137828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:23.137964Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 TxId_Deprecated: 0 TabletID: 72075186224037899 2025-12-23T18:49:23.137983Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2025-12-23T18:49:23.138030Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2025-12-23T18:49:23.138081Z node 1 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 2025-12-23T18:49:23.138139Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 2025-12-23T18:49:23.139322Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 
[1:7587127422339273962:2102] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7587127422339274167:2141] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2025-12-23T18:49:23.139422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2025-12-23T18:49:23.139531Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037899 reason = ReasonStop 2025-12-23T18:49:23.139890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 42] was 1 2025-12-23T18:49:23.140003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:49:23.140017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 42], at schemeshard: 72057594046644480 2025-12-23T18:49:23.140048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2025-12-23T18:49:23.140125Z node 1 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2025-12-23T18:49:23.140156Z node 1 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2025-12-23T18:49:23.140212Z node 1 :HIVE DEBUG: hive_impl.cpp:925: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2025-12-23T18:49:23.140450Z node 1 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2025-12-23T18:49:23.140486Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-12-23T18:49:23.140601Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037899 2025-12-23T18:49:23.140677Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037899 2025-12-23T18:49:23.140750Z node 1 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2025-12-23T18:49:23.140996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:12 2025-12-23T18:49:23.141010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2025-12-23T18:49:23.141032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:49:23.142074Z node 1 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} test_client.cpp: SetPath for PDisk # 
/home/runner/.ya/build/build_root/8ptz/0057c2/r3tmp/tmpVvosrS/pdisk_1.dat 2025-12-23T18:49:25.298533Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:25.298658Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:49:25.377739Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:25.377817Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:25.378158Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:25.378918Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127441537310035:2081] 1766515765136753 != 1766515765136756 2025-12-23T18:49:25.406979Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:25.592453Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:9804 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515765462 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:25.605108Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:25.610732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:25.622806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:26.221424Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:35.792095Z node 2 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037888, txid %281474976716360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-12-23T18:49:35.803221Z node 2 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [0:281474976716360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-12-23T18:49:35.804248Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976716360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-12-23T18:49:35.821932Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7587127484486989625:6109] txid# 281474976716360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] Test command err: 2025-12-23T18:49:29.266466Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127458694608247:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:29.266583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:29.293370Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057ba/r3tmp/tmpAxmh3F/pdisk_1.dat 2025-12-23T18:49:29.526283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:29.586217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:29.586327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:29.607117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:29.657784Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:29.725148Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:61046 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515769655 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:29.929595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:29.936845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:29.953185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:30.161630Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-12-23T18:49:30.174192Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-12-23T18:49:30.213098Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-12-23T18:49:30.248580Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-12-23T18:49:30.274512Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:30.308552Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 8r (max 9), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766515770096 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name:... 
(TRUNCATED) 2025-12-23T18:49:30.536718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976715680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:49:30.537053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715680:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2025-12-23T18:49:30.537311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:49:30.537350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2025-12-23T18:49:30.537398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2025-12-23T18:49:30.537738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2025-12-23T18:49:30.537764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976715680:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2025-12-23T18:49:30.538046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715680:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000d\000\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000d\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\310\000\000\000" ShardIdx: 4 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000\310\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 5 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2025-12-23T18:49:30.538091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:49:30.539538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715680, response: Status: StatusAccepted TxId: 281474976715680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 
2025-12-23T18:49:30.539677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-12-23T18:49:30.539813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715680:0, at schemeshard: 72057594046644480 2025-12-23T18:49:30.539850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-12-23T18:49:30.540168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-12-23T18:49:30.540264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-12-23T18:49:30.540332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } Objec ... 
__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:49:34.027580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587127474041920629 RawX2: 4503608217307469 } TabletId: 72075186224037893 State: 4 2025-12-23T18:49:34.027599Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:49:34.027689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:34.027714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:34.027763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:34.027774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:34.027820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587127474041920437 RawX2: 4503608217307443 } TabletId: 72075186224037892 State: 4 2025-12-23T18:49:34.027835Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:49:34.027933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5962: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7587127474041920633 RawX2: 4503608217307470 } TabletId: 72075186224037894 State: 4 2025-12-23T18:49:34.027950Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:49:34.028020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:34.028032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:34.028069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:34.028077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:34.028107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:34.028116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:6 hive 72057594037968897 at 
ss 72057594046644480 2025-12-23T18:49:34.028318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:34.028326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:34.028362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:34.028371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:34.029269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-12-23T18:49:34.029460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 7 2025-12-23T18:49:34.029599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-12-23T18:49:34.029710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2025-12-23T18:49:34.029777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:49:34.029860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2025-12-23T18:49:34.029925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-12-23T18:49:34.030007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2025-12-23T18:49:34.030071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-12-23T18:49:34.030163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:49:34.030258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-12-23T18:49:34.030352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2025-12-23T18:49:34.030417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-12-23T18:49:34.030515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2025-12-23T18:49:34.030601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:49:34.030611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2025-12-23T18:49:34.030642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:49:34.032213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2025-12-23T18:49:34.032256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-12-23T18:49:34.032287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:49:34.032300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-12-23T18:49:34.032324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:49:34.032332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-12-23T18:49:34.032346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2025-12-23T18:49:34.032353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-12-23T18:49:34.032375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2025-12-23T18:49:34.032384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-12-23T18:49:34.032404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2025-12-23T18:49:34.032410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-12-23T18:49:34.032425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2025-12-23T18:49:34.032462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-12-23T18:49:34.032503Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:49:34.033984Z node 2 :HIVE 
WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-12-23T18:49:34.034242Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-12-23T18:49:34.034263Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-12-23T18:49:34.034278Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-12-23T18:49:34.034289Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-12-23T18:49:34.034301Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-12-23T18:49:34.034313Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-12-23T18:49:34.064855Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::PartBloomFilter [GOOD] Test command err: 2025-12-23T18:49:29.635730Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127458307763504:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:29.635809Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b7/r3tmp/tmp1w3rHD/pdisk_1.dat 2025-12-23T18:49:29.912522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:29.912624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:29.971048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:29.976274Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:29.977055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:29.989023Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127458307763479:2081] 1766515769634838 != 1766515769634841 TClient is connected to server localhost:24154 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:49:30.191932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515770019 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:30.273138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:30.278997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... waiting... waiting... TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766515770320 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 18 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 18 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 16 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515770019 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "A" PathId: 43... 
(TRUNCATED) 2025-12-23T18:49:32.979741Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127469753139182:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:32.979866Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:32.989964Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b7/r3tmp/tmpacS3Qk/pdisk_1.dat 2025-12-23T18:49:33.076540Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127469753138944:2081] 1766515772958602 != 1766515772958605 2025-12-23T18:49:33.097327Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:33.102941Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:33.115833Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:33.115908Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:33.131339Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16331 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515773183 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-12-23T18:49:33.319474Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:49:33.330049Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:49:33.347799Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:33.400657Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:49:33.858556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710719:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) waiting... 2025-12-23T18:49:33.983319Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2025-12-23T18:47:42.557973Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1766515662557941 2025-12-23T18:47:42.817310Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127000258333818:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:42.841116Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:47:42.888792Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127000282005383:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:42.889988Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:47:42.893987Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056dd/r3tmp/tmppwfstD/pdisk_1.dat 2025-12-23T18:47:42.917312Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:47:43.136814Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:43.159935Z 
node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:43.193890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:43.197074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:43.201415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:43.201528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:43.271010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:43.276955Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:47:43.282649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:43.356115Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:43.358833Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 29838, node 1 2025-12-23T18:47:43.447766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:43.464756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/0056dd/r3tmp/yandexITTosM.tmp 2025-12-23T18:47:43.464782Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/0056dd/r3tmp/yandexITTosM.tmp 2025-12-23T18:47:43.464989Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/0056dd/r3tmp/yandexITTosM.tmp 2025-12-23T18:47:43.465119Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:43.508275Z INFO: TTestServer started on Port 18655 GrpcPort 29838 TClient is connected to server localhost:18655 PQClient connected to localhost:29838 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515663409 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:43.793545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:47:43.846224Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... waiting... 2025-12-23T18:47:43.911173Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:46.357399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127017438204320:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:46.357509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127017438204330:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:46.357619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:46.358157Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127017438204337:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:46.358382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:46.361339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:46.384670Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127017438204334:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-12-23T18:47:46.451867Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127017438204427:2993] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:47:46.695381Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587127017438204437:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:47:46.695451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:46.695912Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=NTI4YWVhYmQtY2E4YjE4ODMtNTczNjljNDQtNDZiY2RmODA=, ActorId: [1:7587127017438204317:2328], ActorState: ExecuteState, TraceId: 01kd68kvdk7zvenzdvpx3k58w7, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:47:46.698278Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:47:46.701779Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587127017461874904:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:47:46.703168Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=YWE2NDE2YTYtMWYxYWNiNDQtOGYwNGJlOTktZWIwNmNmYzE=, ActorId: [2:7587127017461874879:23 ... ption: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:49:35.168881Z :INFO: [/Root] [/Root] [56462782-b1c39e0c-c459923c-960eb40] Closing read session. Close timeout: 0.000000s 2025-12-23T18:49:35.168918Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-12-23T18:49:35.168952Z :INFO: [/Root] [/Root] [56462782-b1c39e0c-c459923c-960eb40] Counters: { Errors: 0 CurrentSessionLifetimeMs: 64 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:49:35.169000Z :NOTICE: [/Root] [/Root] [56462782-b1c39e0c-c459923c-960eb40] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:49:35.169078Z :INFO: [/Root] [/Root] [1c857062-7fab3ec-67b7ceea-cac12c22] Closing read session. Close timeout: 0.000000s 2025-12-23T18:49:35.169105Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-12-23T18:49:35.169131Z :INFO: [/Root] [/Root] [1c857062-7fab3ec-67b7ceea-cac12c22] Counters: { Errors: 0 CurrentSessionLifetimeMs: 69 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:49:35.169172Z :NOTICE: [/Root] [/Root] [1c857062-7fab3ec-67b7ceea-cac12c22] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:49:35.168671Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_417938432339924788_v1 grpc read done: success# 0, data# { } 2025-12-23T18:49:35.168700Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_3_1_417938432339924788_v1 grpc read failed 2025-12-23T18:49:35.168733Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_3_1_417938432339924788_v1 grpc closed 2025-12-23T18:49:35.168763Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_417938432339924788_v1 is DEAD 2025-12-23T18:49:35.170034Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127485489481427:2517] disconnected. 2025-12-23T18:49:35.170077Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127485489481427:2517] disconnected; active server actors: 1 2025-12-23T18:49:35.170116Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127485489481427:2517] client user disconnected session shared/user_3_1_417938432339924788_v1 2025-12-23T18:49:35.170194Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-12-23T18:49:35.170253Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. Sessions=2, Families=1, UnreadableFamilies=0 [], RequireBalancing=0 [] 2025-12-23T18:49:35.170280Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. 
familyCount=1, sessionCount=2, desiredFamilyCount=0, allowPlusOne=1 2025-12-23T18:49:35.170307Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000034s 2025-12-23T18:49:35.177307Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer shared/user session shared/user_3_2_5641152103992203693_v1 grpc read done: success# 0, data# { } 2025-12-23T18:49:35.177356Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer shared/user session shared/user_3_2_5641152103992203693_v1 grpc read failed 2025-12-23T18:49:35.177393Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 2 consumer shared/user session shared/user_3_2_5641152103992203693_v1 closed 2025-12-23T18:49:35.177472Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer shared/user session shared/user_3_2_5641152103992203693_v1 is DEAD 2025-12-23T18:49:35.177727Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer shared/user session shared/user_3_3_13036890540098456744_v1 grpc read done: success# 0, data# { } 2025-12-23T18:49:35.177754Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer shared/user session shared/user_3_3_13036890540098456744_v1 grpc read failed 2025-12-23T18:49:35.177778Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 3 consumer shared/user session shared/user_3_3_13036890540098456744_v1 closed 2025-12-23T18:49:35.177826Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer shared/user session shared/user_3_3_13036890540098456744_v1 is DEAD 2025-12-23T18:49:35.178082Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037892] Destroy direct read session shared/user_3_2_5641152103992203693_v1 2025-12-23T18:49:35.178132Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [3:7587127485489481435:2528] destroyed 2025-12-23T18:49:35.178182Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_2_5641152103992203693_v1 2025-12-23T18:49:35.179090Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127485489481431:2518] disconnected. 2025-12-23T18:49:35.179116Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127485489481431:2518] disconnected; active server actors: 1 2025-12-23T18:49:35.179139Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127485489481431:2518] client user disconnected session shared/user_3_2_5641152103992203693_v1 2025-12-23T18:49:35.179189Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-12-23T18:49:35.179217Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127485489481429:2521] disconnected. 
2025-12-23T18:49:35.179228Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127485489481429:2521] disconnected; active server actors: 1 2025-12-23T18:49:35.179240Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7587127485489481429:2521] client user disconnected session shared/user_3_3_13036890540098456744_v1 2025-12-23T18:49:35.247543Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:35.247567Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:35.247576Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:35.247589Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:35.247597Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:35.348680Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:35.348720Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:35.348734Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:35.348753Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:35.348765Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:35.448845Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:35.448879Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:35.448895Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:35.448939Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:35.448992Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:35.553939Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:35.553976Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:35.553990Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:35.554007Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:35.554020Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try 
persist 2025-12-23T18:49:35.654406Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:35.654447Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:35.654461Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:35.654482Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:35.654496Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:35.760753Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:35.760789Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:35.760802Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:35.760822Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:35.760834Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2025-12-23T18:49:29.586006Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127460013710699:2255];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:29.586086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b9/r3tmp/tmp5K5IMO/pdisk_1.dat 2025-12-23T18:49:29.926391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:29.926514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:29.929299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:29.937559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:30.083674Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:30.086896Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127460013710481:2081] 1766515769566501 != 1766515769566504 2025-12-23T18:49:30.160092Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:19754 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515770089 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:30.390278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:30.402258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:30.412475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:30.418807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:30.585055Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:33.236444Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127474577333116:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:33.237373Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b9/r3tmp/tmpRc80E7/pdisk_1.dat 2025-12-23T18:49:33.280013Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:33.373868Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127474577333071:2081] 1766515773225418 != 1766515773225421 2025-12-23T18:49:33.387685Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:33.399118Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:33.399222Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:33.403185Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:33.462074Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:5468 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515773449 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:33.664085Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:49:33.699964Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2025-12-23T18:49:14.331154Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127394265897050:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:14.332192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057d4/r3tmp/tmphyZU0d/pdisk_1.dat 2025-12-23T18:49:14.563770Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:14.583192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:14.591451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:14.644506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:14.684142Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:14.685185Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127394265896988:2081] 1766515754321403 != 1766515754321406 2025-12-23T18:49:14.802846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:1794 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515754731 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:49:15.055561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:15.062122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:15.072245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:15.081902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:15.338999Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:19.331302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127394265897050:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:19.331383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:49:24.575238Z node 1 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037889, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-12-23T18:49:24.595619Z node 1 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [1766515764006:281474976711360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-12-23T18:49:24.597917Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7587127432920609339:6141] txid# 281474976711360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2025-12-23T18:49:24.598002Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7587127432920609339:6141] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. 
(61442990 > 50331648) proxy error code: ExecResultUnavailable test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057d4/r3tmp/tmpIq2QVF/pdisk_1.dat 2025-12-23T18:49:25.407711Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:25.408430Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:49:25.557936Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:25.560285Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127441625063952:2081] 1766515765364946 != 1766515765364949 2025-12-23T18:49:25.589962Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:25.590046Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:25.611033Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:25.679019Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:29708 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515765658 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-12-23T18:49:25.812092Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:25.835054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:26.392594Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:36.433107Z node 2 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037888, txid %281474976711361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-12-23T18:49:36.446057Z node 2 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [0:281474976711361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-12-23T18:49:36.454248Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976711361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-12-23T18:49:36.454901Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7587127484574743556:6116] txid# 281474976711361 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable >> TFlatTest::ReadOnlyMode [GOOD] >> TFlatTest::RejectByIncomingReadSetSize |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage |96.1%| [TA] $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFlatTest::CopyTableAndRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] Test command err: 2025-12-23T18:49:30.155486Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127460783136967:2061];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:30.155547Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:30.224425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b6/r3tmp/tmpW9IsEK/pdisk_1.dat 2025-12-23T18:49:30.495268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:30.495391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:30.513196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:30.597469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:30.618963Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:30.620668Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127460783136947:2081] 1766515770153739 != 1766515770153742 TServer::EnableGrpc on GrpcPort 18931, node 1 2025-12-23T18:49:30.749193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:49:30.749218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:49:30.749230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:49:30.749328Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:49:30.794928Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:3555 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515770635 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:31.018757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:31.029166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:31.053454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:31.189487Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b6/r3tmp/tmpUFUuTB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6137, node 2 TClient is connected to server localhost:6503 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515774058 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... waiting... 
|96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [GOOD] >> TPartitionGraphTest::BuildGraph [GOOD] >> TPartitionScaleManagerGraphCmpTest::Equal [GOOD] >> TPartitionScaleManagerGraphCmpTest::ExtraRootPartitionsInSourceTopic [GOOD] >> TPartitionScaleManagerGraphCmpTest::EqualSplitted [GOOD] >> TFlatTest::SelectBigRangePerf >> TFlatTest::SelectRangeNullArgs3 >> TLocksTest::Range_BrokenLockMax [GOOD] >> TLocksTest::Range_CorrectDot >> TFlatTest::ShardFreezeUnfreezeAlreadySet [GOOD] >> TFlatTest::ShardFreezeUnfreeze >> TFlatTest::RejectByPerShardReadSize >> TLocksTest::GoodSameKeyLock >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test >> TLocksTest::CK_Range_BrokenLock >> TFlatTest::Mix_DML_DDL >> TObjectStorageListingTest::MaxKeysAndSharding [GOOD] >> TObjectStorageListingTest::SchemaChecks >> TLocksTest::NoLocksSet >> TFlatTest::CrossRW >> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD] >> TPQTest::TestCompactifiedWithRetention ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionScaleManagerGraphCmpTest::EqualSplitted [GOOD] Test command err: 2025-12-23T18:49:07.243311Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127364591409724:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:07.248004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:07.335382Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127361930139337:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:07.335443Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:07.336896Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00343f/r3tmp/tmpJpMjnq/pdisk_1.dat 2025-12-23T18:49:07.382296Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:49:07.565480Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:49:07.565530Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:49:07.609106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:07.609178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:07.618076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:07.618195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:07.641745Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:49:07.652832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:07.659191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:07.665047Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64531, node 1 2025-12-23T18:49:07.732709Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:49:07.777358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/00343f/r3tmp/yandexdH2rA5.tmp 2025-12-23T18:49:07.777383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/00343f/r3tmp/yandexdH2rA5.tmp 2025-12-23T18:49:07.777561Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/00343f/r3tmp/yandexdH2rA5.tmp 2025-12-23T18:49:07.777720Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:49:07.800968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:49:07.819597Z INFO: TTestServer started on Port 28696 GrpcPort 64531 TClient is connected to server localhost:28696 PQClient connected to localhost:64531 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515747738 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:49:08.111632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:08.183216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:08.278621Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:08.365878Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... waiting... 2025-12-23T18:49:10.686383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127377476312979:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:10.686508Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127377476312990:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:10.686662Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:10.687586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127377476312995:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:10.687690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:10.690904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:49:10.729257Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127377476312994:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:49:10.803923Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127377476313097:3054] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:49:11.052453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:11.059600Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587127374815041625:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:49:11.060078Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=ZWRlZDRjM2QtMjg4YThmYmMtYzJlMjIyYjMtYWFmODNlYzY=, ActorId: [2:7587127374815041592:2303], ActorState: ExecuteState, TraceId: 01kd68pdtf5thfw0kmt62r4vzx, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:49:11.062427Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:49:11.065399Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587127377476313107:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. ... schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:49:36.864321Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:49:37.361535Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710693:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (11131928866524144434, "Root", "00415F536F757263655F35", 1766515778074, 1766515778074, 0, 13); 2025-12-23T18:49:38.240947Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-12-23T18:49:38.240972Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-12-23T18:49:38.240979Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-12-23T18:49:38.241002Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__sm_chooser_actor.h:116: TPartitionChooser [3:7587127497527151112:4156] (SourceId=A_Source_5, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2025-12-23T18:49:38.241126Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [3:7587127497527151113:4156], Recipient [3:7587127480347280983:3604]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [3:7587127497527151112:4156] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-12-23T18:49:38.241225Z node 3 :PQ_PARTITION_CHOOSER TRACE: 
partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [3:7587127497527151112:4156], Recipient [3:7587127480347280983:3604]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_5" 2025-12-23T18:49:38.241299Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:139: StateOwnershipFast, received event# 271188558, Sender [3:7587127480347280983:3604], Recipient [3:7587127497527151112:4156]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-12-23T18:49:38.241332Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:88: TPartitionChooser [3:7587127497527151112:4156] (SourceId=A_Source_5, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_5 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-12-23T18:49:38.241390Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [3:7587127497527151112:4156], Recipient [3:7587127480347280983:3604]: NActors::TEvents::TEvPoison 2025-12-23T18:49:38.241671Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:101: StateInitTable, received event# 277020685, Sender [3:7587127433102638387:2069], Recipient [3:7587127497527151112:4156]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-12-23T18:49:38.241699Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:111: TPartitionChooser [3:7587127497527151112:4156] (SourceId=A_Source_5, PreferedPartition=(NULL)) StartKqpSession 2025-12-23T18:49:38.244580Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:132: StateCreateKqpSession, received event# 271646728, Sender [3:7587127433102638558:2227], Recipient [3:7587127497527151112:4156]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=3&id=OTA2ZWIyNzgtOTNkMTU4M2YtNmYxZDUwYjUtOWU2ZDExOGU=" NodeId: 3 } YdbStatus: SUCCESS ResourceExhausted: false 2025-12-23T18:49:38.244615Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:142: TPartitionChooser [3:7587127497527151112:4156] (SourceId=A_Source_5, PreferedPartition=(NULL)) Select from the table 2025-12-23T18:49:38.327042Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:49:38.327073Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:38.406670Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:163: StateSelect, received event# 271646721, Sender [3:7587127433102638558:2227], Recipient [3:7587127497527151112:4156]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=3&id=OTA2ZWIyNzgtOTNkMTU4M2YtNmYxZDUwYjUtOWU2ZDExOGU=" PreparedQuery: "27f87b1a-e6db6c96-8446080b-8e1a239a" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01kd68q8tx1nn0bk7ev6cg1ewq" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } 
items { uint64_value: 1766515778074 } items { uint64_value: 1766515778074 } items { uint64_value: 13 } } format: FORMAT_VALUE } QueryDiagnostics: "" } YdbStatus: SUCCESS C 2025-12-23T18:49:38.406879Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:151: TPartitionChooser [3:7587127497527151112:4156] (SourceId=A_Source_5, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2025-12-23T18:49:38.406897Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:209: TPartitionChooser [3:7587127497527151112:4156] (SourceId=A_Source_5, PreferedPartition=(NULL)) OnPartitionChosen 2025-12-23T18:49:38.407020Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [3:7587127497527151158:4156], Recipient [3:7587127480347280983:3604]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [3:7587127497527151112:4156] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-12-23T18:49:38.407075Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [3:7587127497527151112:4156], Recipient [3:7587127480347280983:3604]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2025-12-23T18:49:38.407138Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:240: StateCheckPartition, received event# 271188558, Sender [3:7587127480347280983:3604], Recipient [3:7587127497527151112:4156]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-12-23T18:49:38.407168Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:174: TPartitionChooser [3:7587127497527151112:4156] (SourceId=A_Source_5, PreferedPartition=(NULL)) Update the table 2025-12-23T18:49:38.407387Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [3:7587127497527151112:4156], Recipient [3:7587127480347280983:3604]: NActors::TEvents::TEvPoison Received TEvChooseResult: 1 2025-12-23T18:49:38.581287Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:212: StateUpdate, received event# 271646721, Sender [3:7587127433102638558:2227], Recipient [3:7587127497527151112:4156]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=3&id=OTA2ZWIyNzgtOTNkMTU4M2YtNmYxZDUwYjUtOWU2ZDExOGU=" PreparedQuery: "50b5eb3-3939110e-9fc4624e-de212179" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 101 2025-12-23T18:49:38.581333Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [3:7587127497527151112:4156] (SourceId=A_Source_5, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-12-23T18:49:38.581372Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7587127497527151112:4156] 
(SourceId=A_Source_5, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 2025-12-23T18:49:38.581395Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [3:7587127497527151112:4156] (SourceId=A_Source_5, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 11131928866524144434 AND Topic = "Root" AND ProducerId = "00415F536F757263655F35" 2025-12-23T18:49:39.218910Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1177: ActorId: [3:7587127501822118574:2677] TxId: 281474976710703. Ctx: { TraceId: 01kd68q9epdrda7sk8f0qd03d9, Database: /Root, SessionId: ydb://session/3?node_id=3&id=N2ZmMDVkOGEtYjkzMzlmOGMtY2I2Mjc2ZjEtYjIxY2E2NTA=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-12-23T18:49:39.219045Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [3:7587127501822118578:2677], TxId: 281474976710703, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68q9epdrda7sk8f0qd03d9. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=N2ZmMDVkOGEtYjkzMzlmOGMtY2I2Mjc2ZjEtYjIxY2E2NTA=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7587127501822118574:2677], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpBatchDelete::ManyPartitions_2 [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] >> TLocksTest::BrokenSameKeyLock >> TFlatTest::RejectByIncomingReadSetSize [GOOD] >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-anonymous >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit >> TFlatTest::SelectRangeNullArgs3 [GOOD] >> TFlatTest::SelectRangeNullArgs4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_2 [GOOD] Test command err: Trying to start YDB, gRPC: 22333, MsgBus: 8432 2025-12-23T18:47:17.242622Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126890051003005:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:17.259779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bed/r3tmp/tmpOtAkyu/pdisk_1.dat 2025-12-23T18:47:17.539282Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:17.560408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:17.560544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:17.634986Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:17.638868Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:17.645210Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126890051002901:2081] 1766515637231687 != 1766515637231690 TServer::EnableGrpc on GrpcPort 22333, node 1 2025-12-23T18:47:17.721247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:17.750616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:17.750636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:17.750650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:17.750780Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8432 TClient is connected to server localhost:8432 WaitRootIsUp 'Root'... 
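For reference, the three YQL statements that the PQ_PARTITION_CHOOSER trace above reports for its TTableHelper (partition_chooser_impl__table_helper.h:62-64) are reassembled below in readable form. The table path, column names, comparison operators, and parameter declarations are copied verbatim from the trace; only the line breaks and the `--` comments are added here.

--!syntax_v1
-- SelectQuery: look up an existing SourceId -> Partition mapping
DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8;
SELECT Partition, CreateTime, AccessTime, SeqNo
FROM `//Root/.metadata/TopicPartitionsMapping`
WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId;

--!syntax_v1
-- UpdateQuery: persist the chosen partition and sequence number for the SourceId
DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64;
DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64;
DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64;
UPSERT INTO `//Root/.metadata/TopicPartitionsMapping`
    (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo)
VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);

--!syntax_v1
-- UpdateAccessTimeQuery: refresh AccessTime for an already persisted mapping
DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64;
DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;
UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime
WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition;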
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515637705 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:18.237738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:18.242996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:18.253442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:18.261990Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:18.372568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:18.515251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:18.578859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:20.598172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126902935906671:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:20.598316Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:20.598723Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126902935906680:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:20.598835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:20.994842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.035684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.073880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.116036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.156573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.208904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.253827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.302988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:21.389840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126907230874845:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.389926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.389997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126907230874850:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.390596Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126907230874852:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.390656Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:21.394111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schem ... 62Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30968, node 12 2025-12-23T18:49:28.787625Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:49:28.787655Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:49:28.787667Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:49:28.787792Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:49:28.798513Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15681 2025-12-23T18:49:29.384014Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:15681 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515768689 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:49:29.639972Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:29.655318Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:29.751219Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:30.104938Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:30.218885Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:33.346900Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7587127452053923585:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:33.347010Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:49:34.256405Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127477823729208:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:34.256499Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:34.256963Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127477823729218:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:34.257032Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:34.373163Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:34.423215Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:34.482147Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:34.538137Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:34.589941Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:34.656735Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:34.715613Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:34.859179Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:35.002666Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127482118697393:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:35.002801Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:35.003443Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127482118697399:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:35.003506Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:35.003581Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127482118697398:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:35.010811Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:49:35.030843Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7587127482118697402:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:49:35.095817Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7587127482118697461:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:49:38.337918Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TPQTest::TestCompactifiedWithRetention [GOOD] >> TPQTest::TestChangeConfig >> TFlatTest::ShardFreezeUnfreeze [GOOD] >> TLocksFatTest::RangeSetNotBreak [GOOD] >> TFlatTest::CopyTableAndCompareColumnsSchema ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2025-12-23T18:49:35.153370Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127481904683230:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:35.154170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:35.182580Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b4/r3tmp/tmpvW0vJZ/pdisk_1.dat 2025-12-23T18:49:35.401165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:35.609368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:35.609488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:35.643894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:35.665485Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:49:35.665642Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127481904683207:2081] 1766515775147491 != 1766515775147494 2025-12-23T18:49:35.667865Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:6640 WaitRootIsUp 'dc-1'... 
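The repeated `Resource pool default not found or you don't have access permissions` warnings in the KqpBatchDelete trace above are followed, in the same trace, by TPoolCreatorActor scheduling a retry and then a `path exist, request accepts it` response for `/Root/.metadata/workload_manager/pools/default`; that is, the default pool is created lazily on first use and the NOT_FOUND warnings are transient. For orientation only, a pool can also be created explicitly; the sketch below is an assumption-laden illustration of that DDL (pool name and parameter values are made up, and this is not something the test itself runs), with ESchemeOpCreateResourcePool in the trace being the schemeshard-side operation behind it.

--!syntax_v1
-- Minimal sketch, assumed pool name and limits: explicitly create a
-- workload-manager resource pool instead of relying on lazy creation.
CREATE RESOURCE POOL test_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,
    QUEUE_SIZE = 100
);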
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515775752 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:35.994198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:36.124039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:49:36.124217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-12-23T18:49:36.124327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2025-12-23T18:49:36.124370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 0 2025-12-23T18:49:36.124391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976715658:0 type: TxMkDir target path: [OwnerId: 72057594046644480, LocalPathId: 38] source path: 2025-12-23T18:49:36.124438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:49:36.124626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-12-23T18:49:36.124658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:49:36.130460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 38, at schemeshard: 72057594046644480 2025-12-23T18:49:36.130661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 waiting... 2025-12-23T18:49:36.130910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:49:36.130936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:49:36.131103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T18:49:36.131242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:49:36.131266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7587127481904683892:2389], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-12-23T18:49:36.131293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7587127481904683892:2389], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 38 2025-12-23T18:49:36.131348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-12-23T18:49:36.131371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:63: MkDir::TPropose operationId# 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T18:49:36.131403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715658 ready parts: 1/1 2025-12-23T18:49:36.135709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:49:36.137585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-12-23T18:49:36.137676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-12-23T18:49:36.137694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-12-23T18:49:36.137712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-12-23T18:49:36.137729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2025-12-23T18:49:36.137965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-12-23T18:49:36.138019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-12-23T18:49:36.138031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-12-23T18:49:36.138051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 2 2025-12-23T18:49:36.138063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2025-12-23T18:49:36.138108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-12-23T18:49:36.138227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:49:36.138239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-12-23T18:49:36.138257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:49:36.139629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715658 msg type: 269090816 2025-12-23T18:49:36.139724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715658, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:49:36.143928Z nod ... 
er{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:49:36.524694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-12-23T18:49:36.524714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715661 2025-12-23T18:49:36.524729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 12 2025-12-23T18:49:36.524745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 5 2025-12-23T18:49:36.524845Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{224, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-12-23T18:49:36.524905Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:49:36.525025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-12-23T18:49:36.525064Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-12-23T18:49:36.525094Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:49:36.525143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715661 2025-12-23T18:49:36.525160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715661 2025-12-23T18:49:36.525192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715661, pathId: [OwnerId: 72057594046644480, LocalPathId: 40], version: 3 2025-12-23T18:49:36.525209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2025-12-23T18:49:36.525251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715661, subscribers: 1 2025-12-23T18:49:36.525262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: 
TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7587127486199651881:2290] 2025-12-23T18:49:36.525323Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{225, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-12-23T18:49:36.525343Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:49:36.526053Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:22:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:49:36.526086Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:22:1:24576:107:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:49:36.526110Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:49:36.526142Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:134:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:49:36.526250Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:23} commited cookie 1 for step 22 2025-12-23T18:49:36.526309Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2025-12-23T18:49:36.526489Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:49:36.526526Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:119:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:49:36.526563Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:49:36.526577Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2025-12-23T18:49:36.526587Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:18:1:24576:121:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-12-23T18:49:36.526593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-12-23T18:49:36.526625Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2025-12-23T18:49:36.526652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715661 2025-12-23T18:49:36.526834Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:406: TClient[72057594046644480] received poison pill [1:7587127486199651882:2290] 2025-12-23T18:49:36.526869Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594046644480] notify reset [1:7587127486199651882:2290] 2025-12-23T18:49:36.526878Z node 1 :PIPE_SERVER 
DEBUG: tablet_pipe_server.cpp:182: [72057594046644480] Got PeerClosed from# [1:7587127486199651882:2290] 2025-12-23T18:49:39.216529Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127498828470350:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:39.224573Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:39.241617Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b4/r3tmp/tmpLd1IMu/pdisk_1.dat 2025-12-23T18:49:39.339846Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:39.340981Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127498828470296:2081] 1766515779183470 != 1766515779183473 2025-12-23T18:49:39.382868Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:39.392085Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:39.392164Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:39.397790Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10150 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515779441 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:39.579247Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:49:39.590592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:49:39.596050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:39.632492Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:49:40.222924Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:42.936288Z node 2 :TX_PROXY ERROR: datareq.cpp:2829: Actor# [2:7587127511713373526:2803] txid# 281474976715700 FailProposedRequest: Transaction incoming read set size 1000086 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2025-12-23T18:49:42.936368Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7587127511713373526:2803] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c >> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::CrossRW [GOOD] >> TFlatTest::GetTabletCounters >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> TObjectStorageListingTest::SchemaChecks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] Test command err: 2025-12-23T18:49:36.987007Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127489917440452:2057];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:36.987097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:37.065181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b2/r3tmp/tmpjTSVmA/pdisk_1.dat 2025-12-23T18:49:37.403710Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:37.425434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:37.425548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:37.493493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:37.497557Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:37.499052Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127489917440436:2081] 1766515776985711 != 1766515776985714 2025-12-23T18:49:37.576293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:8342 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515777579 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-12-23T18:49:37.818247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:49:37.824758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:49:37.829658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:37.975966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) waiting... 2025-12-23T18:49:38.001074Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127494212408687:2595] txid# 281474976710660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-12-23T18:49:38.004369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) waiting... 
2025-12-23T18:49:38.024019Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127498507376025:2631] txid# 281474976710662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-12-23T18:49:38.031755Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:40.843799Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127504489267476:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:40.843827Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b2/r3tmp/tmp9j75BA/pdisk_1.dat 2025-12-23T18:49:40.902258Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:41.009436Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:41.011922Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:41.012002Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:41.025593Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:41.072841Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:11174 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515781093 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:41.243560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:49:41.255115Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:41.264528Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:41.319748Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) waiting... 2025-12-23T18:49:41.360483Z node 2 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710660: 2025-12-23T18:49:41.360623Z node 2 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [2:7587127508784235686:2594] txid# 281474976710660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-12-23T18:49:41.360718Z node 2 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [2:7587127508784235686:2594] txid# 281474976710660 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-12-23T18:49:41.360740Z node 2 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [2:7587127508784235686:2594] txid# 281474976710660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-12-23T18:49:41.364569Z node 2 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710661: 2025-12-23T18:49:41.364820Z node 2 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [2:7587127508784235694:2599] txid# 281474976710661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-12-23T18:49:41.364904Z node 2 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [2:7587127508784235694:2599] txid# 281474976710661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-12-23T18:49:41.364921Z node 2 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [2:7587127508784235694:2599] txid# 281474976710661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-12-23T18:49:41.374154Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) waiting... 
|96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test >> TPQTest::TestChangeConfig [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 2025-12-23T18:49:28.332617Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127451467867920:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:28.332669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057bb/r3tmp/tmpQur5Ud/pdisk_1.dat 2025-12-23T18:49:28.580506Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:28.603547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:28.603649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:28.685545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:28.700597Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:28.790973Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:17313 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515768759 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:28.987381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:49:29.009579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:29.161568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:29.217967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:29.340840Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:33.333078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127451467867920:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:33.333137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:49:36.338099Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127485822237515:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:36.339130Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057bb/r3tmp/tmpPSa0kr/pdisk_1.dat 2025-12-23T18:49:36.370920Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:36.428321Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:36.430836Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127485822237487:2081] 1766515776336411 != 1766515776336414 2025-12-23T18:49:36.473898Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:36.473980Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:36.478095Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:36.567376Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions TClient is connected to server localhost:14244 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515776522 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:36.646536Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:36.652538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:36.665147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:49:36.670505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:36.726160Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:36.774582Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:37.343000Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:41.339963Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587127485822237515:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:41.340073Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetRemove >> TFlatTest::CopyTableAndDropOriginal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command err: 2025-12-23T18:49:42.099723Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127514112314690:2262];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:42.099799Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a3/r3tmp/tmpIFMHj4/pdisk_1.dat 2025-12-23T18:49:42.398865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:42.474680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:42.474834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:42.509705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:42.568323Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127514112314451:2081] 1766515782038941 != 1766515782038944 2025-12-23T18:49:42.569446Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:42.598828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:8910 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515782598 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:42.875340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:42.886927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:49:42.891790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:43.100059Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:43.111887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) waiting... proxy error code: Unknown error:
: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 2025-12-23T18:49:43.154764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) waiting... 2025-12-23T18:49:43.191278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) waiting... 2025-12-23T18:49:43.217861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) waiting... proxy error code: Unknown error:
:5:24: Error: At function: AsList
:5:32: Error: At function: SetResult
:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestChangeConfig [GOOD] Test command err: 2025-12-23T18:48:44.482667Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2025-12-23T18:48:44.542115Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:48:44.546863Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:48:44.547229Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:44.547306Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:44.547348Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-12-23T18:48:44.547414Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4927: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-12-23T18:48:44.547486Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:44.547571Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:184:2057] recipient: [1:14:2061] 2025-12-23T18:48:44.566671Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:183:2196], now have 1 active actors on pipe 2025-12-23T18:48:44.566912Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:48:44.587503Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1470: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2025-12-23T18:48:44.590430Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 
20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2025-12-23T18:48:44.590593Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:44.591536Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2025-12-23T18:48:44.591706Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:44.592065Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:44.592473Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2025-12-23T18:48:44.595094Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:44.595157Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-12-23T18:48:44.595211Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2025-12-23T18:48:44.595265Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:48:44.595330Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:44.596795Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:44.598060Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:48:44.598112Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:44.598166Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:44.598248Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:44.598287Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2025-12-23T18:48:44.598322Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:44.598367Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:44.598448Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-12-23T18:48:44.598488Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:48:44.598522Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:44.598552Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-12-23T18:48:44.598587Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-12-23T18:48:44.598627Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2025-12-23T18:48:44.598662Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2025-12-23T18:48:44.598725Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:44.598986Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:44.599028Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:44.599089Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:48:44.599306Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:48:44.599529Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:44.601915Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:48:44.602028Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:48:44.602094Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:44.602134Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:44.602188Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:44.602227Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:48:44.602273Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:44.602328Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:48:44.602817Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:197:2204], now have 1 active actors on pipe 2025-12-23T18:48:44.613346Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:200:2206], now have 1 active actors on pipe 2025-12-23T18:48:44.613506Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-12-23T18:48:44.613557Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2647: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topi ... 
PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 45 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 44 } Consumers { Name: "bbb" Generation: 45 Important: true } Consumers { Name: "ccc" Generation: 45 Important: true } 2025-12-23T18:49:45.857707Z node 40 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:49:45.858388Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f82098d2-243460e6-605de4fe-cf767fed_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:49:45.865942Z node 40 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:49:45.866524Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f943095e-a5f56a57-d3c96afb-989b5afd_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:49:45.882969Z node 40 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 9 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:49:45.883563Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4501837d-f1fa17f3-c6891fa5-6ab2db86_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:49:46.308627Z node 41 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 41 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:107:2057] recipient: [41:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:107:2057] recipient: [41:105:2138] Leader for TabletID 72057594037927937 is [41:111:2142] sender: [41:112:2057] recipient: [41:105:2138] 2025-12-23T18:49:46.401769Z node 41 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:46.401836Z node 41 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:46.401888Z node 41 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:46.401949Z node 41 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [41:153:2057] recipient: [41:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [41:153:2057] recipient: [41:151:2172] Leader for TabletID 72057594037927938 is [41:157:2176] sender: 
[41:158:2057] recipient: [41:151:2172] Leader for TabletID 72057594037927937 is [41:111:2142] sender: [41:183:2057] recipient: [41:14:2061] 2025-12-23T18:49:46.425144Z node 41 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:46.427064Z node 41 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 46 actor [41:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 46 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 46 } Consumers { Name: "aaa" Generation: 46 Important: true } 2025-12-23T18:49:46.428318Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [41:189:2142] 2025-12-23T18:49:46.431414Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [41:189:2142] 2025-12-23T18:49:46.434859Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [41:190:2142] 2025-12-23T18:49:46.437281Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [41:190:2142] 2025-12-23T18:49:46.440165Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [41:191:2142] 2025-12-23T18:49:46.442603Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [41:191:2142] 2025-12-23T18:49:46.445554Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [41:192:2142] 2025-12-23T18:49:46.447927Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [41:192:2142] 2025-12-23T18:49:46.450768Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][4][StateInit] bootstrapping 4 [41:193:2142] 2025-12-23T18:49:46.453179Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][4][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [41:193:2142] 2025-12-23T18:49:46.468533Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:49:46.468937Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|574f098a-20a06da2-24ae8a58-a45bf034_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
2025-12-23T18:49:46.478485Z node 41 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:46.487210Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][5][StateInit] bootstrapping 5 [41:237:2142] 2025-12-23T18:49:46.489774Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][5][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 5 generation 2 [41:237:2142] 2025-12-23T18:49:46.495034Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][6][StateInit] bootstrapping 6 [41:238:2142] 2025-12-23T18:49:46.497510Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][6][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 6 generation 2 [41:238:2142] 2025-12-23T18:49:46.502020Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][7][StateInit] bootstrapping 7 [41:239:2142] 2025-12-23T18:49:46.504490Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][7][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 7 generation 2 [41:239:2142] 2025-12-23T18:49:46.510326Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][8][StateInit] bootstrapping 8 [41:240:2142] 2025-12-23T18:49:46.512722Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][8][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 8 generation 2 [41:240:2142] 2025-12-23T18:49:46.519762Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][9][StateInit] bootstrapping 9 [41:241:2142] 2025-12-23T18:49:46.522146Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][9][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 9 generation 2 [41:241:2142] 2025-12-23T18:49:46.556145Z node 41 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 47 actor [41:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 5 MaxSizeInPartition: 1048576 LifetimeSeconds: 86400 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 47 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 46 } Consumers { Name: "bbb" Generation: 47 Important: true } Consumers { Name: "ccc" Generation: 47 Important: true } 2025-12-23T18:49:46.558173Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 
'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:49:46.558575Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1bf638c6-56e8ff45-aefdca37-f0624faf_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:49:46.564547Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:49:46.564932Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f789eaae-f6fb0fcc-514dff68-54e57628_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:49:46.577314Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 9 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:49:46.577721Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1992947f-be4eefa9-33da4e3d-f8bc4cff_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD] >> TFlatTest::SelectRangeBothLimit [GOOD] >> TLocksTest::Range_BrokenLock2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD] Test command err: 2025-12-23T18:49:11.586141Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127379924408208:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:11.586471Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057d5/r3tmp/tmpEwOgtm/pdisk_1.dat 2025-12-23T18:49:11.805369Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:11.825952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:11.826085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:11.901902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:11.927927Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:11.934903Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127379924408179:2081] 1766515751582098 != 1766515751582101 TServer::EnableGrpc on GrpcPort 4690, node 1 2025-12-23T18:49:12.114732Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:49:12.356120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:49:12.356147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: 
will try to initialize from file: (empty maybe) 2025-12-23T18:49:12.356153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:49:12.356214Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:49:12.592958Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:20017 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515751987 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:12.823943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:12.848383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:16.586435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127379924408208:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:16.586498Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:49:26.756628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:49:26.756658Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:42.143821Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127511564299798:2145];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:42.144959Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:42.175780Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057d5/r3tmp/tmpPXBmeR/pdisk_1.dat 2025-12-23T18:49:42.299920Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:42.303429Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127511564299672:2081] 1766515782111608 != 1766515782111611 2025-12-23T18:49:42.342212Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:42.342312Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:42.348224Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31489, node 2 2025-12-23T18:49:42.517192Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:49:42.663877Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:49:42.663905Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:49:42.663912Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:49:42.663991Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29623 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515782395 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:42.940758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:42.969058Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:43.169334Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD] Test command err: 2025-12-23T18:49:39.893178Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127502009355525:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:39.902611Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b0/r3tmp/tmpkIz0A6/pdisk_1.dat 2025-12-23T18:49:40.181255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:40.181377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:40.188725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:40.262440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:40.342193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:3684 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: 2025-12-23T18:49:40.558836Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515780400 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:40.611785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:40.619216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:40.648301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:40.832851Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.013s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-12-23T18:49:40.871392Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.047s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-12-23T18:49:40.899788Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:40.912035Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-12-23T18:49:40.927680Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.009s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-12-23T18:49:41.092135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:49:41.092426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:431: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-12-23T18:49:41.092934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2025-12-23T18:49:41.092994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2025-12-23T18:49:41.093020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2025-12-23T18:49:41.093041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976710676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2025-12-23T18:49:41.093085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2025-12-23T18:49:41.093098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2025-12-23T18:49:41.093209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2025-12-23T18:49:41.093328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T18:49:41.093965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2025-12-23T18:49:41.094020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2025-12-23T18:49:41.094682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2025-12-23T18:49:41.094964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-12-23T18:49:41.095183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T18:49:41.095198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T18:49:41.095304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2025-12-23T18:49:41.095398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T18:49:41.095413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7587127506304323226:2255], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 38 2025-12-23T18:49:41.095430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7587127506304323226:2255], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 40 2025-12-23T18:49:41.095475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-12-23T18:49:41.095507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-12-23T18:49:41.095876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-12-23T18:49:41.096013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } 
BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } waiting... 2025-12-23T18:49:41.100259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-12-23T18:49:41.100361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-12-23T18:49:41.100381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-12-23T18:49:41.100398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathI ... 037891, state: Offline, at schemeshard: 72057594046644480 2025-12-23T18:49:44.606591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:44.606607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:44.606771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:44.606783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:44.606836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:44.606845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:44.606877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:44.606887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:44.606917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2025-12-23T18:49:44.606925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-12-23T18:49:44.607657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-12-23T18:49:44.607938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2025-12-23T18:49:44.608571Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2025-12-23T18:49:44.608589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-12-23T18:49:44.609860Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-12-23T18:49:44.609885Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-12-23T18:49:44.609897Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-12-23T18:49:44.609907Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-12-23T18:49:44.609919Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-12-23T18:49:44.609947Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-12-23T18:49:44.609983Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7587127521086432007:2588], serverId# [2:7587127521086432008:2589], sessionId# [0:0:0] 2025-12-23T18:49:44.610279Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-12-23T18:49:44.611596Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2025-12-23T18:49:44.611824Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-12-23T18:49:44.611971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-12-23T18:49:44.612204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2025-12-23T18:49:44.612365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:49:44.612485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2025-12-23T18:49:44.612585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-12-23T18:49:44.612702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-12-23T18:49:44.612834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2025-12-23T18:49:44.612923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 
0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-12-23T18:49:44.613033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-12-23T18:49:44.613048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2025-12-23T18:49:44.613095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2025-12-23T18:49:44.613115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2025-12-23T18:49:44.613132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:49:44.613154Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-12-23T18:49:44.615208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2025-12-23T18:49:44.615231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-12-23T18:49:44.615257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:49:44.615263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-12-23T18:49:44.615279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2025-12-23T18:49:44.615295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:49:44.615309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-12-23T18:49:44.615324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2025-12-23T18:49:44.615350Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-12-23T18:49:44.615387Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-12-23T18:49:44.616503Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-12-23T18:49:44.616545Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037891 from 72075186224037889 is reset 2025-12-23T18:49:44.616573Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7587127521086432017:2595], serverId# [2:7587127521086432018:2596], sessionId# [0:0:0] 2025-12-23T18:49:44.616592Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 
2025-12-23T18:49:44.616612Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7587127521086432281:2767], serverId# [2:7587127521086432283:2769], sessionId# [0:0:0] 2025-12-23T18:49:44.616626Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7587127521086432385:2844], serverId# [2:7587127521086432386:2845], sessionId# [0:0:0] 2025-12-23T18:49:44.617037Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2025-12-23T18:49:44.617113Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2025-12-23T18:49:44.617833Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-12-23T18:49:44.617880Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-12-23T18:49:44.618658Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2025-12-23T18:49:44.618717Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2025-12-23T18:49:44.620067Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2025-12-23T18:49:44.620107Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2025-12-23T18:49:44.752538Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Check that tablet 72075186224037889 was deleted 2025-12-23T18:49:44.903295Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037890 was deleted 2025-12-23T18:49:44.903681Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037891 was deleted 2025-12-23T18:49:44.904008Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) 2025-12-23T18:49:44.904444Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2025-12-23T18:49:40.627835Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127503360379985:2158];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:40.627987Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057af/r3tmp/tmpcumAxt/pdisk_1.dat 2025-12-23T18:49:40.838264Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:40.936531Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:40.936650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:40.943435Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:40.967563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:41.042838Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:17876 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515781016 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:41.314396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:41.323899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-12-23T18:49:41.348364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-12-23T18:49:41.356284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:41.631789Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; insert finished 11110 usec 10070 usec 9525 usec 10192 usec 10563 usec 9781 usec 9299 usec 9505 usec 8595 usec 8477 usec 2025-12-23T18:49:44.399882Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127523816676468:2153];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:44.399953Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:44.418691Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057af/r3tmp/tmpGGkV53/pdisk_1.dat 2025-12-23T18:49:44.510986Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:44.512055Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:44.512106Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:44.550790Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:44.687116Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:20793 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515784607 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:44.788234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:44.796569Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:49:44.813424Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:45.403614Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD] Test command err: 2025-12-23T18:49:40.698865Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127503163681437:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:40.699149Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057ae/r3tmp/tmplIwohH/pdisk_1.dat 2025-12-23T18:49:41.061276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:41.061439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:41.195201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:41.197087Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:41.219866Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:41.481108Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:16653 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515781282 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:49:41.606427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:41.630406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:41.649100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:49:41.654706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:41.737533Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:44.435185Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127521966800716:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:44.435282Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:44.450785Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057ae/r3tmp/tmpiT9FHC/pdisk_1.dat 2025-12-23T18:49:44.588951Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:44.599727Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:44.614884Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127521966800684:2081] 1766515784434088 != 1766515784434091 2025-12-23T18:49:44.640106Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:44.640206Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:44.645574Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27391 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:49:44.890843Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515784691 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:44.907714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:44.914999Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:44.938019Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
|96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-system >> TLocksTest::Range_CorrectNullDot [GOOD] >> TLocksTest::Range_EmptyKey >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-anonymous >> TFlatTest::ShardUnfreezeNonFrozen >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] >> ResultFormatter::Primitive [GOOD] >> ResultFormatter::Struct [GOOD] >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::Optional [GOOD] >> ResultFormatter::Pg >> ResultFormatter::StructTypeNameAsString [GOOD] >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] >> ResultFormatter::Pg [GOOD] >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2025-12-23T18:49:42.320651Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127511806996332:2203];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:42.320773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a1/r3tmp/tmp5FVOwW/pdisk_1.dat 2025-12-23T18:49:42.834863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:42.838390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:42.838529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:42.850598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:42.965640Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127511806996166:2081] 1766515782312841 != 1766515782312844 2025-12-23T18:49:42.971863Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:43.050367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:10892 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515782997 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:43.317009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:43.326422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:43.340415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:43.378405Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:46.359211Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127531611828774:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:46.359299Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a1/r3tmp/tmpkXkqcL/pdisk_1.dat 2025-12-23T18:49:46.490368Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:46.492912Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:46.492990Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:46.515605Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:46.532221Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:46.690513Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:30291 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515786595 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:46.812261Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:49:46.839427Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:46.846484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515786945 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name:... (TRUNCATED) |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> ResultFormatter::FormatEmptySchema [GOOD] >> ResultFormatter::FormatNonEmptySchema [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] >> TObjectStorageListingTest::ManyDeletes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] Test command err: 2025-12-23T18:48:41.473769Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1766515721473737 2025-12-23T18:48:41.723322Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127252759354223:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.723499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:41.796343Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:48:41.804635Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127250968121674:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.804694Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:41.835581Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:48:41.836138Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005845/r3tmp/tmp4IifIU/pdisk_1.dat 2025-12-23T18:48:42.037788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:42.102343Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:42.318699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:42.318852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:42.319076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:42.319166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:42.378210Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:48:42.381899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:42.382240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:42.421308Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:42.428607Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:42.430907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 19189, node 1 2025-12-23T18:48:42.697923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/8ptz/005845/r3tmp/yandexitjk8m.tmp 2025-12-23T18:48:42.698070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005845/r3tmp/yandexitjk8m.tmp 2025-12-23T18:48:42.702234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005845/r3tmp/yandexitjk8m.tmp 2025-12-23T18:48:42.702356Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:42.750478Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:42.834946Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:42.934932Z INFO: TTestServer started on Port 64969 GrpcPort 19189 TClient is connected to server localhost:64969 PQClient connected to localhost:19189 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515722489 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:43.249757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-12-23T18:48:45.712797Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127269939224703:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.712896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127269939224711:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.712992Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.712797Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127268147991177:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.712905Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127268147991185:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.713083Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.713418Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127268147991192:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.713504Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.714279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127269939224719:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.714347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.719586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:45.738902Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127268147991194:2132] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:48:45.742012Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127269939224717:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-12-23T18:48:45.748853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976720661, at schemeshard: 72057594046644480 2025-12-23T18:48:45.749207Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587127268147991191:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-12-23T18:48:45.833109Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127268147991223:2139] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:45.839192Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127269939224825:2982] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:46. ... 25-12-23T18:49:49.564477Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.564493Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:49:49.565952Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2025-12-23T18:49:49.570434Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:49.570476Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.570494Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:49.570511Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.570526Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:49.573406Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:207: new Alter topic request 2025-12-23T18:49:49.575095Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:49:49.587445Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:49:49.589375Z node 9 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037895][rt3.dc1--test-topic-1766515789] updating configuration. Deleted partitions []. 
Added partitions [] 2025-12-23T18:49:49.591411Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:49:49.607378Z node 10 :PERSQUEUE DEBUG: partition.cpp:1289: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1766515789647, TxId 281474976715676 2025-12-23T18:49:49.607421Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:49.607441Z node 10 :PERSQUEUE DEBUG: partition.cpp:2416: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2025-12-23T18:49:49.607472Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:49.607483Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:49.607499Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:49.607509Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:49:49.607731Z node 10 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1766515789647, TxId 281474976715676 2025-12-23T18:49:49.607754Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:49.607770Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2025-12-23T18:49:49.607785Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:49.607812Z node 10 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2025-12-23T18:49:49.607968Z node 10 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037894][Partition][0][StateIdle] Schedule reply tx done 281474976715676 2025-12-23T18:49:49.607986Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2025-12-23T18:49:49.608000Z node 10 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037894][Partition][0][StateIdle] Batch completed (1) 2025-12-23T18:49:49.608019Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:49:49.608197Z node 10 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2025-12-23T18:49:49.611561Z node 10 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T18:49:49.611826Z node 10 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037894][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:49:49.611917Z node 10 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T18:49:49.611941Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:49.611959Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.611975Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:49.611995Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.612008Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:49:49.612034Z node 10 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:49:49.612352Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72075186224037894] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--test-topic-1766515789" Version: 3 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "test-topic-1766515789" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--test-topic-1766515789" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037894 } MetricsLevel: 4 MonitoringProjectId: "" 2025-12-23T18:49:49.612388Z node 10 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:49.612498Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-12-23T18:49:49.617053Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1058: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-12-23T18:49:49.626841Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2025-12-23T18:49:49.664685Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:49.664719Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.664733Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:49.664749Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.664762Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:49:49.673211Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:49.673257Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.673285Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:49.673305Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.673323Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:49.766905Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:49.766944Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.766969Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:49.766988Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.767003Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T18:49:49.778872Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:49:49.778911Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.778929Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:49:49.778948Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:49:49.778964Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:49:50.180781Z node 9 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1177: ActorId: [9:7587127549509727471:2473] TxId: 281474976715677. Ctx: { TraceId: 01kd68qkxb407gapbay7tj7s91, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZTM2NGEzYWItNmVkYTY2NjEtZDE5ZWJmZmMtYzdmY2M1MDE=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 2025-12-23T18:49:50.180912Z node 9 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [9:7587127549509727476:2473], TxId: 281474976715677, task: 3. Ctx: { CheckpointId : . TraceId : 01kd68qkxb407gapbay7tj7s91. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=9&id=ZTM2NGEzYWItNmVkYTY2NjEtZDE5ZWJmZmMtYzdmY2M1MDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [9:7587127549509727471:2473], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |96.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-ordinaryuser >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme >> TLocksTest::Range_BrokenLock1 [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] Test command err: 2025-12-23T18:49:09.618507Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127373110790676:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:09.618899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057dc/r3tmp/tmplCHToA/pdisk_1.dat 2025-12-23T18:49:09.834814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:09.865563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:09.865685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:09.924520Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:09.940358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:10.069297Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:63135 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515749985 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:10.273280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:10.280329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:10.294953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:10.409930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:10.452542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:10.623831Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:12.898337Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127383601974933:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:12.898396Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057dc/r3tmp/tmpInjPdf/pdisk_1.dat 2025-12-23T18:49:12.912741Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:12.984637Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:12.994644Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127383601974907:2081] 1766515752897316 != 1766515752897319 2025-12-23T18:49:13.029123Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:13.029214Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:13.033901Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:13.117516Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:27796 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515753086 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:13.220192Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:49:13.238706Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:13.290326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:13.338870Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:16.506644Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127403344076470:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:16.508230Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057dc/r3tmp/tmpw5O4XS/pdisk_1.dat 2025-12-23T18:49:16.527393Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:16.609394Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:16.609708Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:16.633406Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587127403344076422:2081] 1766515756489291 != 1766515756489294 2025-12-23T18:49:16.640444Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:16.651797Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:16.692915Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:61412 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 ... dId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:38.232268Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:38.245641Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:38.263823Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:38.313009Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:49:38.343245Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:38.467624Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:42.891236Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587127513691846228:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:42.891336Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057dc/r3tmp/tmpxXZTyy/pdisk_1.dat 2025-12-23T18:49:42.978334Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:43.155158Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:43.155234Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:43.169783Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7587127513691846192:2081] 1766515782880704 != 1766515782880707 2025-12-23T18:49:43.194186Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:43.205178Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:43.206953Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:14055 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515783263 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:43.604276Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:43.619349Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:49:43.639566Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:49:43.646204Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:43.724103Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:43.794380Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:43.922753Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:48.094110Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587127539704810203:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:48.094218Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057dc/r3tmp/tmp7uws3m/pdisk_1.dat 2025-12-23T18:49:48.122920Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:48.253305Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:48.258960Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7587127539704810166:2081] 1766515788092119 != 1766515788092122 2025-12-23T18:49:48.280835Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:48.280939Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:48.294844Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:48.312125Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1925 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515788366 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:48.609479Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:48.645394Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:49:48.672685Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:48.731354Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:48.789905Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
>> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout >> TLocksTest::Range_Pinhole [GOOD] >> TLocksTest::SetBreakSetEraseBreak >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> ResultFormatter::Void [GOOD] >> ResultFormatter::VariantTuple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2025-12-23T18:49:27.843483Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127450275918595:2259];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:27.843669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057bd/r3tmp/tmpZrv9DN/pdisk_1.dat 2025-12-23T18:49:28.125853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:28.125937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:28.130126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:28.130881Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:28.245126Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22379, node 1 2025-12-23T18:49:28.319315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:49:28.319341Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:49:28.319350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:49:28.319431Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:49:28.421718Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:5133 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515768269 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:28.669309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:28.675773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:28.704009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:28.838980Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:32.840965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127450275918595:2259];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:32.841033Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:49:34.397265Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127480929945771:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:34.397425Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057bd/r3tmp/tmpL382k4/pdisk_1.dat 2025-12-23T18:49:34.422864Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:34.517198Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:34.537246Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:34.537328Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:34.562244Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29836, node 2 2025-12-23T18:49:34.701795Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:49:34.714332Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:49:34.714360Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:49:34.714368Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:49:34.714440Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10072 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515774618 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:34.923955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:34.935423Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:34.952149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:49:34.959789Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:35.372766Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; .2025-12-23T18:49:39.398125Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587127480929945771:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:39.398174Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; . 
2025-12-23T18:49:45.448621Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:49:45.449119Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037891 2025-12-23T18:49:45.449655Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037889 2025-12-23T18:49:45.449822Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037891 2025-12-23T18:49:45.450003Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037890 2025-12-23T18:49:45.450077Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037892 2025-12-23T18:49:45.450659Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037890 2025-12-23T18:49:45.450790Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976716500 at tablet 72075186224037892 2025-12-23T18:49:45.451267Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037891 2025-12-23T18:49:45.451268Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:49:45.451891Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2025-12-23T18:49:45.451957Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037892 2025-12-23T18:49:45.474166Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transact ... 
82Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 2025-12-23T18:49:52.889015Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:49:52.892478Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037890 2025-12-23T18:49:52.893352Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976716911 at tablet 72075186224037889 2025-12-23T18:49:52.893514Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976716911 at tablet 72075186224037890 2025-12-23T18:49:52.893727Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037891 2025-12-23T18:49:52.893807Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037892 2025-12-23T18:49:52.894531Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976716911 at tablet 72075186224037891 2025-12-23T18:49:52.894596Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976716911 at tablet 72075186224037892 2025-12-23T18:49:52.895140Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2025-12-23T18:49:52.895186Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:49:52.895730Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037891 2025-12-23T18:49:52.895798Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037892 2025-12-23T18:49:52.897229Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976716911 at step 1766515792944 at tablet 72075186224037890 { Transactions { TxId: 281474976716911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1766515792944 MediatorID: 72057594046382081 TabletID: 72075186224037890 } 2025-12-23T18:49:52.897237Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976716911 at step 1766515792944 at tablet 72075186224037889 { Transactions { TxId: 281474976716911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1766515792944 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-12-23T18:49:52.897253Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:49:52.897253Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:49:52.897343Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T18:49:52.897353Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-12-23T18:49:52.897358Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:49:52.897367Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 0 active planned 0 
immediate 0 planned 1 2025-12-23T18:49:52.897379Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1766515792944:281474976716911] in PlanQueue unit at 72075186224037889 2025-12-23T18:49:52.897383Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1766515792944:281474976716911] in PlanQueue unit at 72075186224037890 2025-12-23T18:49:52.897405Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037889 got data tx from cache 1766515792944:281474976716911 2025-12-23T18:49:52.897408Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037890 got data tx from cache 1766515792944:281474976716911 2025-12-23T18:49:52.898382Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:49:52.898667Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976716911 at step 1766515792944 at tablet 72075186224037892 { Transactions { TxId: 281474976716911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1766515792944 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2025-12-23T18:49:52.898681Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-12-23T18:49:52.898785Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2025-12-23T18:49:52.898798Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:49:52.898829Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1766515792944:281474976716911] in PlanQueue unit at 72075186224037892 2025-12-23T18:49:52.898855Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037892 got data tx from cache 1766515792944:281474976716911 2025-12-23T18:49:52.899748Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:49:52.899983Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976716911 at step 1766515792944 at tablet 72075186224037891 { Transactions { TxId: 281474976716911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1766515792944 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2025-12-23T18:49:52.899994Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-12-23T18:49:52.900061Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2025-12-23T18:49:52.900073Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:49:52.900085Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1766515792944:281474976716911] in PlanQueue unit at 72075186224037891 2025-12-23T18:49:52.900105Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037891 got data tx from cache 1766515792944:281474976716911 2025-12-23T18:49:52.900713Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:49:52.903554Z 
node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1766515792944} 2025-12-23T18:49:52.903603Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2025-12-23T18:49:52.903681Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1766515792944 : 281474976716911] from 72075186224037892 at tablet 72075186224037892 send result to client [2:7587127558239370370:5645], exec latency: 0 ms, propose latency: 4 ms 2025-12-23T18:49:52.903749Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-12-23T18:49:52.903969Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1766515792944} 2025-12-23T18:49:52.904005Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:49:52.904034Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1766515792944 : 281474976716911] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7587127558239370370:5645], exec latency: 0 ms, propose latency: 6 ms 2025-12-23T18:49:52.904053Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:49:52.904185Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1766515792944} 2025-12-23T18:49:52.904214Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2025-12-23T18:49:52.904239Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1766515792944 : 281474976716911] from 72075186224037891 at tablet 72075186224037891 send result to client [2:7587127558239370370:5645], exec latency: 0 ms, propose latency: 4 ms 2025-12-23T18:49:52.904256Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-12-23T18:49:52.905414Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:49:52.907112Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1766515792944} 2025-12-23T18:49:52.907153Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T18:49:52.907180Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1766515792944 : 281474976716911] from 72075186224037889 at tablet 72075186224037889 send result to client [2:7587127558239370370:5645], exec latency: 7 ms, propose latency: 9 ms 2025-12-23T18:49:52.907193Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:49:52.926545Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-12-23T18:49:52.926881Z node 2 :TX_DATASHARD DEBUG: 
datashard__object_storage_listing.cpp:374: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2025-12-23T18:49:52.927266Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-12-23T18:49:52.927362Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2025-12-23T18:49:52.927684Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-12-23T18:49:52.927944Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-12-23T18:49:55.291513Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-12-23T18:49:55.295173Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-12-23T18:49:55.300714Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:55.303807Z node 3 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-12-23T18:49:55.304918Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2025-12-23T18:49:55.306568Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-12-23T18:49:55.306611Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:55.306702Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-12-23T18:49:55.306870Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK 2025-12-23T18:49:55.306948Z node 3 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:55.307114Z node 3 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2025-12-23T18:49:55.307163Z node 3 
:STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2025-12-23T18:49:55.307280Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-12-23T18:49:55.307301Z node 3 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:55.307412Z node 4 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-12-23T18:49:55.307561Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-12-23T18:49:55.307629Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-12-23T18:49:55.307684Z node 4 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:55.307739Z node 4 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 4 2025-12-23T18:49:55.307784Z node 4 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:49:55.307860Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-12-23T18:49:55.307882Z node 4 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:55.308045Z node 2 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-12-23T18:49:55.318825Z node 4 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2025-12-23T18:49:55.318923Z node 4 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2025-12-23T18:49:55.318976Z node 3 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2025-12-23T18:49:55.319001Z node 3 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2025-12-23T18:49:55.329763Z node 2 :STATISTICS DEBUG: service_impl.cpp:403: Skip TEvKeepAliveTimeout 2025-12-23T18:49:55.329866Z node 1 :STATISTICS INFO: service_impl.cpp:418: Node 2 is unavailable 2025-12-23T18:49:55.329916Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2025-12-23T18:49:55.330044Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2025-12-23T18:49:55.330083Z node 1 :STATISTICS DEBUG: service_impl.cpp:395: Skip TEvKeepAliveTimeout 2025-12-23T18:49:55.330157Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2025-12-23T18:49:55.330193Z node 1 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2025-12-23T18:49:55.330297Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2025-12-23T18:49:55.330333Z node 1 :STATISTICS DEBUG: service_impl.cpp:430: Skip TEvAggregateKeepAlive |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> 
AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-12-23T18:49:55.609748Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-12-23T18:49:55.610547Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-12-23T18:49:55.610915Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:55.611153Z node 3 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-12-23T18:49:55.611283Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2025-12-23T18:49:55.611474Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-12-23T18:49:55.611496Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:55.611537Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-12-23T18:49:55.611619Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK 2025-12-23T18:49:55.611657Z node 3 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:55.611743Z node 3 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2025-12-23T18:49:55.611775Z node 3 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2025-12-23T18:49:55.611875Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-12-23T18:49:55.611898Z node 3 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:55.612027Z node 4 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-12-23T18:49:55.612117Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-12-23T18:49:55.612162Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-12-23T18:49:55.612195Z node 4 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:55.612235Z node 4 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 4 2025-12-23T18:49:55.612275Z node 4 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:49:55.612349Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-12-23T18:49:55.612371Z node 4 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:55.612469Z node 2 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-12-23T18:49:55.622881Z node 4 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from 
the current 0 2025-12-23T18:49:55.622968Z node 4 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2025-12-23T18:49:55.623021Z node 3 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2025-12-23T18:49:55.623043Z node 3 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2025-12-23T18:49:55.633786Z node 2 :STATISTICS DEBUG: service_impl.cpp:403: Skip TEvKeepAliveTimeout 2025-12-23T18:49:55.633883Z node 1 :STATISTICS INFO: service_impl.cpp:418: Node 2 is unavailable 2025-12-23T18:49:55.633922Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2025-12-23T18:49:55.634046Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2025-12-23T18:49:55.634085Z node 1 :STATISTICS DEBUG: service_impl.cpp:395: Skip TEvKeepAliveTimeout 2025-12-23T18:49:55.634156Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2025-12-23T18:49:55.634188Z node 1 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2025-12-23T18:49:55.634291Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2025-12-23T18:49:55.634324Z node 1 :STATISTICS DEBUG: service_impl.cpp:430: Skip TEvAggregateKeepAlive |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |96.1%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TLocksFatTest::RangeSetRemove [GOOD] >> TLocksFatTest::ShardLocks |96.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndDropCopy >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] >> AggregateStatistics::ShouldBePings |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] Test command err: 2025-12-23T18:49:10.616831Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127374405403755:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:10.618194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057db/r3tmp/tmpPHJi7A/pdisk_1.dat 2025-12-23T18:49:10.871883Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:10.883937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:10.884064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:10.958029Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:10.960387Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127374405403725:2081] 1766515750614540 != 1766515750614543 2025-12-23T18:49:10.964622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:11.157036Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:21156 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515751014 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:49:11.252566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:11.272924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:11.428803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:11.475152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:13.807454Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127387395881737:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:13.807525Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057db/r3tmp/tmp5wk5np/pdisk_1.dat 2025-12-23T18:49:13.825237Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:13.893573Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:13.903391Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127387395881712:2081] 1766515753806300 != 1766515753806303 2025-12-23T18:49:13.930407Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:13.930521Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:13.933089Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:14.041145Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:21130 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515753982 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:14.093735Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:14.116373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:14.172417Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:14.220493Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:17.448365Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127404824834112:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:17.448440Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057db/r3tmp/tmpJtdVFO/pdisk_1.dat 2025-12-23T18:49:17.466747Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:17.546337Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:17.563792Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:17.563870Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:17.574865Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:17.700329Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:25553 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515757629 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:17.756732Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation p ... TypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515779364 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:39.597872Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:49:39.605072Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:39.623605Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:39.632362Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:39.730454Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:39.792958Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:44.517563Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587127520319275703:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:44.517657Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057db/r3tmp/tmp8dlQNz/pdisk_1.dat 2025-12-23T18:49:44.603839Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:44.715992Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:44.717801Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7587127520319275677:2081] 1766515784515814 != 1766515784515817 2025-12-23T18:49:44.746060Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:44.746182Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:44.773550Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:44.788917Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64257 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515784838 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:45.141305Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:45.148386Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:45.158342Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:45.163787Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:45.242112Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:45.301817Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:45.326471Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:49:49.845043Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587127542881675348:2149];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:49.845144Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057db/r3tmp/tmp5Modu6/pdisk_1.dat 2025-12-23T18:49:49.954856Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:49.998285Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:49.998423Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:49.999100Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:50.032193Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:50.148695Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:14616 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515790088 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:50.314307Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:50.321265Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:49:50.335474Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:50.407182Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:50.474594Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> AggregateStatistics::ShouldBePings [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2025-12-23T18:49:57.265348Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-12-23T18:49:57.266179Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 3, status = OK 2025-12-23T18:49:57.266545Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:57.266688Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = OK 2025-12-23T18:49:57.266734Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:57.266849Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2025-12-23T18:49:57.267034Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5, status = OK 2025-12-23T18:49:57.267076Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:57.267123Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = OK 2025-12-23T18:49:57.267152Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:57.267202Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = 
ERROR 2025-12-23T18:49:57.267219Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:57.267252Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2, status = OK 2025-12-23T18:49:57.267297Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:57.267363Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 5 2025-12-23T18:49:57.267430Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 6, status = OK 2025-12-23T18:49:57.267460Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:57.267516Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2025-12-23T18:49:57.267575Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 5, status = ERROR 2025-12-23T18:49:57.267602Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:57.267630Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = OK 2025-12-23T18:49:57.267662Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:14:2061], path = { OwnerId: 3 LocalId: 3 } 2025-12-23T18:49:57.267714Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2025-12-23T18:49:57.267739Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:57.267766Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 7 2025-12-23T18:49:57.267818Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [0:0:0], tablet id = 7, status = ERROR 2025-12-23T18:49:57.267839Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:57.278185Z node 1 :STATISTICS DEBUG: service_impl.cpp:1034: Tablet 3 has already been processed 2025-12-23T18:49:57.278274Z node 1 :STATISTICS ERROR: service_impl.cpp:1038: No result was received from the tablet 4 2025-12-23T18:49:57.278314Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 4 is not local. 2025-12-23T18:49:57.278401Z node 1 :STATISTICS DEBUG: service_impl.cpp:1034: Tablet 5 has already been processed 2025-12-23T18:49:57.278431Z node 1 :STATISTICS DEBUG: service_impl.cpp:1034: Tablet 1 has already been processed 2025-12-23T18:49:57.278453Z node 1 :STATISTICS ERROR: service_impl.cpp:1038: No result was received from the tablet 2 2025-12-23T18:49:57.278493Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 2 is not local. 2025-12-23T18:49:57.278539Z node 1 :STATISTICS ERROR: service_impl.cpp:1038: No result was received from the tablet 6 2025-12-23T18:49:57.278557Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 6 is not local. 
2025-12-23T18:49:57.278608Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2025-12-23T18:49:57.278774Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2025-12-23T18:49:57.278824Z node 1 :STATISTICS DEBUG: service_impl.cpp:1027: Skip TEvStatisticsRequestTimeout 2025-12-23T18:49:57.278881Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 4, status = ERROR 2025-12-23T18:49:57.278918Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:57.278952Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 2, status = ERROR 2025-12-23T18:49:57.278968Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:57.278999Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-12-23T18:49:57.279018Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2025-12-23T18:49:57.252588Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-12-23T18:49:57.253077Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-12-23T18:49:57.365466Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2025-12-23T18:49:57.365583Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 2 2025-12-23T18:49:57.365617Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2025-12-23T18:49:57.366285Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2025-12-23T18:49:57.366344Z node 2 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:57.366392Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR 2025-12-23T18:49:57.366412Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:57.366513Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-12-23T18:49:57.366560Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] >> TFlatTest::RejectByPerShardReadSize [GOOD] >> TFlatTest::RejectByPerRequestSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2025-12-23T18:49:58.419599Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, 
Round: 1, current Round: 0 2025-12-23T18:49:58.419901Z node 1 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2 2025-12-23T18:49:58.419947Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 2 is not local. 2025-12-23T18:49:58.420057Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2025-12-23T18:49:58.421277Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR 2025-12-23T18:49:58.421351Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 1 is not local. 2025-12-23T18:49:58.421481Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-12-23T18:49:58.421503Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2025-12-23T18:49:58.421539Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR 2025-12-23T18:49:58.421559Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 4 is not local. 2025-12-23T18:49:58.421664Z node 1 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5 2025-12-23T18:49:58.421692Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 5 is not local. 2025-12-23T18:49:58.421741Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 6 2025-12-23T18:49:58.421807Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR 2025-12-23T18:49:58.421827Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 7 is not local. 2025-12-23T18:49:58.421876Z node 1 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8 2025-12-23T18:49:58.421917Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 8 is not local. 2025-12-23T18:49:58.421972Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2025-12-23T18:49:58.422088Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-12-23T18:49:58.422109Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable |96.1%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |96.1%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |96.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] >> StatsFormat::FullStat [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] Test command err: 2025-12-23T18:49:51.127199Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127553983760750:2057];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:51.127258Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:51.179933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00577e/r3tmp/tmpc6gCko/pdisk_1.dat 2025-12-23T18:49:51.470465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:51.470594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:51.472572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:51.560977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:51.562494Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:51.578928Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127553983760734:2081] 1766515791121621 != 1766515791121624 TClient is connected to server localhost:26982 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: 2025-12-23T18:49:51.770914Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515791635 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:51.806431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:51.824677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:49:51.835615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:51.976221Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127553983761648:2566] txid# 281474976710659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00577e/r3tmp/tmpEGr0Va/pdisk_1.dat 2025-12-23T18:49:54.528901Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:54.528990Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:49:54.617911Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:54.626917Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127565753293728:2081] 1766515794480879 != 1766515794480882 2025-12-23T18:49:54.632551Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:54.632654Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:54.666013Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:54.782337Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:8270 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515794722 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:54.846646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:49:54.854895Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:54.910180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) waiting... 2025-12-23T18:49:54.922050Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127565753294679:2599] txid# 281474976710660, issues: { message: "Table is frozen. Only unfreeze alter is allowed" severity: 1 } Error 128: Table is frozen. Only unfreeze alter is allowed 2025-12-23T18:49:54.923844Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) waiting... 2025-12-23T18:49:54.934152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) waiting... 
>> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-ordinaryuser |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::FullStat [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test >> TFlatTest::CopyTableAndDropCopy [GOOD] |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest >> FormatTimes::DurationMs [GOOD] >> FormatTimes::ParseDuration [GOOD] >> TLocksTest::CK_BrokenLock [GOOD] >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::GoodSameShardLock |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationMs [GOOD] |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::ParseDuration [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-anonymous >> TLocksTest::CK_Range_BrokenLock [GOOD] >> TLocksTest::CK_Range_BrokenLockInf |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] >> TLocksTest::NoLocksSet [GOOD] >> TLocksTest::MultipleLocks >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] >> TPQTest::TestPartitionWriteQuota [GOOD] >> TPQTest::TestPartitionedBlobFails ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD] Test command err: 2025-12-23T18:49:16.873454Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127402725659015:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:16.874242Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c9/r3tmp/tmpgDQtQ2/pdisk_1.dat 2025-12-23T18:49:17.114103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:17.114225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:17.166350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:17.185207Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:17.185448Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to 
check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:17.201839Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127402725658977:2081] 1766515756862255 != 1766515756862258 TClient is connected to server localhost:9874 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:49:17.384723Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515757223 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:17.454847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:17.459957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:17.475787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:17.622394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:17.692944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:20.322304Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127417552026265:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:20.322344Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c9/r3tmp/tmpzRmNAw/pdisk_1.dat 2025-12-23T18:49:20.358162Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:20.449275Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:20.449384Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:20.458858Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127417552026238:2081] 1766515760320843 != 1766515760320846 2025-12-23T18:49:20.463890Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:20.485396Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:20.652856Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:9009 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515760548 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:20.721346Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:20.728026Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:49:20.746416Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:20.850550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:20.917329Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:24.256025Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127436282484422:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:24.256101Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c9/r3tmp/tmpnRSXy5/pdisk_1.dat 2025-12-23T18:49:24.275578Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:24.357788Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:24.386723Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:24.386849Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:24.397778Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:24.552391Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:9442 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 2 ... 
s Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515786287 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:46.468398Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:46.487014Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:46.499345Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:46.504918Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:46.642045Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:46.765506Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:50.700894Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587127548516827821:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:50.700999Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c9/r3tmp/tmpMaP991/pdisk_1.dat 2025-12-23T18:49:50.761010Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:50.850295Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:50.867356Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:50.867458Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:50.900857Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:51.018874Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:19187 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515790956 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:51.196285Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:51.203555Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:49:51.213905Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:51.220035Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:49:51.299549Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:51.362419Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:55.595985Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587127568932773757:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:55.596061Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c9/r3tmp/tmpMiuqIi/pdisk_1.dat 2025-12-23T18:49:55.619171Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:55.711069Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:55.713608Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7587127568932773719:2081] 1766515795595012 != 1766515795595015 2025-12-23T18:49:55.731856Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:55.731976Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:55.763070Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:55.903369Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:6243 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515795814 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-12-23T18:49:56.087143Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:56.108594Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:56.200268Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:56.268995Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
|96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TLocksTest::BrokenSameKeyLock [GOOD] >> TLocksTest::BrokenSameShardLock >> Config::IncludeScope [GOOD] |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:49:59.906712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:49:59.906843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:49:59.906872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:49:59.906900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:49:59.906926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:49:59.906950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:49:59.906995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:49:59.907053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:49:59.907883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:49:59.908116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:49:59.982825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:49:59.982883Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:59.988328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:49:59.988495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:49:59.988594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:49:59.990865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: 
TTxUpgradeSchema.Complete 2025-12-23T18:49:59.992057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:49:59.994514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:49:59.997712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:50:00.008322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:00.026985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:50:00.054383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:00.054512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:00.054803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:50:00.054901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:00.055066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:50:00.056665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:50:00.259115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.260436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.260608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.260696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.260768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.260835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.260899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.261018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.261095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.261185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.261273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:50:00.261381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.261493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.261576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.261644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
shard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:50:01.371877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:50:01.371984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:50:01.372042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:50:01.372085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2025-12-23T18:50:01.372127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:50:01.372216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:50:01.375074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:50:01.387727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1170 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:50:01.387793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-12-23T18:50:01.387960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1170 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:50:01.388097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1170 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:50:01.389135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { 
RawX1: 725 RawX2: 4294970003 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:50:01.389182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-12-23T18:50:01.389342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 725 RawX2: 4294970003 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:50:01.389397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:50:01.389481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 725 RawX2: 4294970003 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:50:01.389561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:01.389603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:01.389659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:50:01.389695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:50:01.392812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:01.393183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:01.393487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:01.393536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:50:01.393635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:50:01.393671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:50:01.393706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:50:01.393741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:50:01.393772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:50:01.393833Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:777:2744] message: TxId: 102 2025-12-23T18:50:01.393878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:50:01.393929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:50:01.393963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:50:01.394103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:50:01.396241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:50:01.396294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:861:2820] TestWaitNotification: OK eventTxId 102 2025-12-23T18:50:01.398857Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:50:01.399140Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 341us result status StatusSuccess 2025-12-23T18:50:01.399724Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:49:59.876588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:49:59.876705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:49:59.876758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:49:59.876793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:49:59.876832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:49:59.876862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:49:59.876930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:49:59.877013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:49:59.877919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:49:59.879847Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:49:59.957802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:49:59.957851Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:59.971744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:49:59.971907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:49:59.972106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:49:59.991915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:49:59.992148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:49:59.994704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:49:59.997865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:50:00.006602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:00.026985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:50:00.054507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:00.054626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:00.054936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:50:00.055013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:00.055079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:50:00.056554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:50:00.243017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.246193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: 
EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.246449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.246540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.247302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.247423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.247503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.247865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.248024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.248425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.248539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.248659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.248769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.249094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:00.249207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:50:01.413442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:50:01.413524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:50:01.413554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:50:01.413588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2025-12-23T18:50:01.413623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:50:01.413722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:50:01.420849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:50:01.433962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1137 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:50:01.434057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-12-23T18:50:01.434203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1137 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:50:01.434326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1137 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:50:01.435268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { 
RawX1: 739 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:50:01.435326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-12-23T18:50:01.435485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 739 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:50:01.435546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:50:01.435642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 739 RawX2: 4294970016 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:50:01.435723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:01.435767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:01.435816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:50:01.435891Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:50:01.439235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:01.439682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:01.439998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:01.440039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:50:01.440150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:50:01.440190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:50:01.440229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:50:01.440316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:50:01.440355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:50:01.440426Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:791:2757] message: TxId: 102 2025-12-23T18:50:01.440474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:50:01.440513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:50:01.440551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:50:01.440736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:50:01.442785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:50:01.442871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:922:2844] TestWaitNotification: OK eventTxId 102 2025-12-23T18:50:01.443523Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:50:01.443809Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 295us result status StatusSuccess 2025-12-23T18:50:01.444312Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2025-12-23T18:49:45.595299Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127528185453694:2132];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:45.595665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00579d/r3tmp/tmp7fkz2V/pdisk_1.dat 2025-12-23T18:49:45.859415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:45.859507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:45.863724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:45.928585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:45.970609Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127528185453602:2081] 1766515785588584 != 1766515785588587 2025-12-23T18:49:45.975091Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:1268 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:49:46.155251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515785993 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:46.251759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:46.259489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:46.277853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515786378 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_1_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1_Copy" PathId: 40 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1766515786476 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapsh... (TRUNCATED) 2025-12-23T18:49:46.464570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2" PathId: 41 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1766515786539 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" ... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_2_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2_Copy" PathId: 42 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1766515786581 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames... (TRUNCATED) 2025-12-23T18:49:46.563435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:46.601844Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... TClient::Ls request: /dc-1/Dir/Table_3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3" PathId: 43 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1766515786651 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "c... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_3_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3_Copy" PathId: 44 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710664 CreateStep: 1766515786693 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { ... (TRUNCATED) 2025-12-23T18:49:46.687684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4" PathId: 45 SchemeshardI ... 
ts is done, operation id: 281474976715686:0 2025-12-23T18:49:58.193139Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-12-23T18:49:58.193140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976715686:0 2025-12-23T18:49:58.193241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 42] was 4 2025-12-23T18:49:58.197716Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:7587127582986959818:3206], serverId# [2:7587127582986959819:3207], sessionId# [0:0:0] 2025-12-23T18:49:58.197818Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-12-23T18:49:58.199074Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-12-23T18:49:58.199124Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-12-23T18:49:58.202118Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037895, clientId# [2:7587127582986959837:3215], serverId# [2:7587127582986959838:3216], sessionId# [0:0:0] 2025-12-23T18:49:58.202220Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-12-23T18:49:58.203536Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-12-23T18:49:58.203590Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-12-23T18:49:58.206580Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-12-23T18:49:58.207706Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:58.207966Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-12-23T18:49:58.208063Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-12-23T18:49:58.210973Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-12-23T18:49:58.217976Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-12-23T18:49:58.218048Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-12-23T18:49:58.223704Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-12-23T18:49:58.225064Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-12-23T18:49:58.225111Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-12-23T18:49:58.226501Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put 
Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-12-23T18:49:58.227105Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T18:49:58.227147Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-12-23T18:49:58.238251Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-12-23T18:49:58.240074Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-12-23T18:49:58.240128Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-12-23T18:49:58.241882Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-12-23T18:49:58.242444Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T18:49:58.242493Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-12-23T18:49:58.244477Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-12-23T18:49:58.245762Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-12-23T18:49:58.245816Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-12-23T18:49:58.248908Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-12-23T18:49:58.250117Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-12-23T18:49:58.250168Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-12-23T18:49:58.253060Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-12-23T18:49:58.254247Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-12-23T18:49:58.254297Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-12-23T18:49:58.257226Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-12-23T18:49:58.258553Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-12-23T18:49:58.258614Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-12-23T18:49:58.263564Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-12-23T18:49:58.267495Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 
2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-12-23T18:49:58.267852Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-12-23T18:49:58.267948Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-12-23T18:49:58.268599Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T18:49:58.268621Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-12-23T18:49:58.272752Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-12-23T18:49:58.276326Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-12-23T18:49:58.279187Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-12-23T18:49:58.279288Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-12-23T18:49:58.279945Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T18:49:58.279968Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-12-23T18:49:58.286848Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-12-23T18:49:58.289945Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-12-23T18:49:58.290042Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-12-23T18:49:58.296021Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-12-23T18:49:58.303056Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-12-23T18:49:58.303145Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-12-23T18:49:58.312073Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2025-12-23T18:49:58.314709Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2025-12-23T18:49:58.314786Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-12-23T18:49:58.321872Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2025-12-23T18:49:58.324198Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2025-12-23T18:49:58.324269Z node 2 :TX_DATASHARD 
DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 Check that tablet 72075186224037892 was deleted Check that tablet 72075186224037893 was deleted 2025-12-23T18:49:58.324987Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) 2025-12-23T18:49:58.325505Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-12-23T18:49:58.325894Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-12-23T18:49:58.326243Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-12-23T18:49:58.326587Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-12-23T18:49:58.327000Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/compute/common/ut/unittest >> Config::IncludeScope [GOOD] |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/compute/common/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] Test command err: 2025-12-23T18:49:18.991773Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127411380895245:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:18.992535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c5/r3tmp/tmpAHJLFe/pdisk_1.dat 2025-12-23T18:49:19.254923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:19.277652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:19.277777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:19.295296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:19.391847Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:19.457445Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:3729 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515759442 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:19.681997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:19.710692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:19.843493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:19.895418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:20.020455Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:22.443737Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127426192145252:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:22.443775Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:22.470386Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c5/r3tmp/tmpzqqXYl/pdisk_1.dat 2025-12-23T18:49:22.576774Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:22.585620Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127426192145229:2081] 1766515762443339 != 1766515762443342 2025-12-23T18:49:22.610850Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:22.640770Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:22.640880Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:22.645934Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:22.790829Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:3994 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515762697 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:49:22.811874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:22.816584Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:22.828572Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:22.902021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:22.949390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:26.367088Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127444529678576:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:26.367154Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:26.376506Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c5/r3tmp/tmp9yp3vV/pdisk_1.dat 2025-12-23T18:49:26.471595Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:26.475211Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:26.475279Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:26.476990Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:26.498755Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10262 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 ... chemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515787575 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:47.738086Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:47.754411Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:47.759167Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:47.861096Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:47.915159Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:51.648869Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587127551585045770:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:51.648921Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c5/r3tmp/tmp6MBBkH/pdisk_1.dat 2025-12-23T18:49:51.667428Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:51.756941Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:51.758545Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7587127551585045745:2081] 1766515791647886 != 1766515791647889 2025-12-23T18:49:51.789795Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:51.789899Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:51.805492Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:51.826065Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:20714 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515791859 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:52.167335Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
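Every node bootstrap in this log reports the same Hive VolatileState progression: Unknown -> Disconnected -> Connecting -> Connected. The snippet below is only a reader's aid that replays that observed progression as a trivial state machine; the enum and function names are invented for illustration and are not the actual YDB types.

#include <iostream>

// Illustrative model of the node VolatileState progression that the HIVE
// warnings in this log report for every freshly started node:
//   Unknown -> Disconnected -> Connecting -> Connected
// The names below are hypothetical and only mirror the logged state labels.
enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

const char* ToString(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return "Unknown";
        case EVolatileState::Disconnected: return "Disconnected";
        case EVolatileState::Connecting:   return "Connecting";
        case EVolatileState::Connected:    return "Connected";
    }
    return "?";
}

// Advance one step along the progression seen in the log.
EVolatileState Next(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return EVolatileState::Disconnected;
        case EVolatileState::Disconnected: return EVolatileState::Connecting;
        case EVolatileState::Connecting:   return EVolatileState::Connected;
        case EVolatileState::Connected:    return EVolatileState::Connected;
    }
    return s; // unreachable for valid enum values
}

int main() {
    EVolatileState s = EVolatileState::Unknown;
    while (s != EVolatileState::Connected) {
        const EVolatileState next = Next(s);
        std::cout << "VolatileState: " << ToString(s) << " -> " << ToString(next) << "\n";
        s = next;
    }
    return 0;
}

Running it prints the same three transitions that each HIVE warning block records before the node is usable.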
2025-12-23T18:49:52.188270Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:52.255073Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:52.339684Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:56.197268Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587127575341379631:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:56.197486Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c5/r3tmp/tmp7J9PCo/pdisk_1.dat 2025-12-23T18:49:56.212882Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:56.329265Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7587127575341379591:2081] 1766515796195066 != 1766515796195069 2025-12-23T18:49:56.344801Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:56.348508Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:56.348614Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:56.353580Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:56.425378Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:32549 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515796437 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-12-23T18:49:56.705223Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:56.725288Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:56.732192Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:56.816010Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:56.884337Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] |96.2%| [TA] $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-dbadmin >> TLocksTest::SetBreakSetEraseBreak [GOOD] >> TLocksTest::Range_CorrectDot [GOOD] |96.2%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] >> TLocksFatTest::ShardLocks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: 2025-12-23T18:49:41.744752Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127511333976893:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:41.744793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a8/r3tmp/tmpyttM7E/pdisk_1.dat 2025-12-23T18:49:42.054660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:42.105837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:42.105977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:42.194948Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127511333976870:2081] 1766515781742332 != 1766515781742335 2025-12-23T18:49:42.196871Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:42.207996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:42.258283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:11015 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515782262 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:42.581681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:42.594991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:42.615623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:42.814636Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:46.746643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587127511333976893:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:46.746751Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:49:50.296217Z node 1 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:133: Transaction read size 51002373 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2025-12-23T18:49:50.296366Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002373 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2025-12-23T18:49:50.296996Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7587127549988684405:3126] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2025-12-23T18:49:50.921303Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127549476821820:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:50.921363Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a8/r3tmp/tmpnrx4P1/pdisk_1.dat 2025-12-23T18:49:50.981303Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:51.071051Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:51.071140Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:51.073683Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:51.075127Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127549476821782:2081] 1766515790919838 != 1766515790919841 2025-12-23T18:49:51.105690Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:51.174834Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:4588 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515791166 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:51.339798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:51.350967Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:51.360095Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:51.364526Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:51.930179Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:55.921583Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587127549476821820:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:55.921644Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:49:58.455456Z node 2 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:133: Transaction read size 51002341 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2025-12-23T18:49:58.455581Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002341 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2025-12-23T18:49:58.455865Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7587127583836562016:3125] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2025-12-23T18:49:59.103710Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127587179125001:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:59.103774Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a8/r3tmp/tmpMvdvJG/pdisk_1.dat 2025-12-23T18:49:59.134494Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:59.216452Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:59.218307Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587127587179124973:2081] 1766515799102911 != 1766515799102914 2025-12-23T18:49:59.262818Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:59.262905Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:59.270111Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:59.405370Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:29714 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515799314 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:59.440905Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:59.446276Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
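The TFlatTest::RejectByPerRequestSize output around this point shows two layers of read-size enforcement: the datashard itself rejects a proposal whose read size exceeds the configured limit (the READ_SIZE_EXECEEDED / BAD_REQUEST errors logged for nodes 1 and 2 above), and the transaction proxy aborts the whole request once the total read size reported by all prepared shards exceeds the per-request limit (the ExecError for node 3 that follows below). The C++ sketch below only models that two-level check for readability; the type and function names are invented, and only the numeric values are taken from the log.

#include <cstdint>
#include <iostream>
#include <vector>

// Hypothetical model of the two-level read-size check visible in the
// RejectByPerRequestSize log: per shard at propose time, per request at the
// proxy after every shard has reported its prepared read size.
struct TShardProposal {
    uint64_t TabletId;
    uint64_t ReadSize; // bytes this shard would have to read for the tx
};

enum class EStatus { Ok, BadRequest, ExecError };

// Analogous to the datashard's "Transaction read size ... exceeds limit ..."
// error followed by a BAD_REQUEST proposal result.
EStatus CheckShard(const TShardProposal& shard, uint64_t perShardLimit) {
    if (shard.ReadSize > perShardLimit) {
        std::cerr << "Transaction read size " << shard.ReadSize
                  << " exceeds limit " << perShardLimit
                  << " at tablet " << shard.TabletId << "\n";
        return EStatus::BadRequest;
    }
    return EStatus::Ok;
}

// Analogous to the proxy's "Transaction total read size ... exceeded limit ..."
// failure followed by the ExecError response.
EStatus CheckRequest(const std::vector<TShardProposal>& shards,
                     uint64_t perShardLimit, uint64_t perRequestLimit) {
    uint64_t total = 0;
    for (const auto& shard : shards) {
        if (CheckShard(shard, perShardLimit) != EStatus::Ok) {
            return EStatus::BadRequest;
        }
        total += shard.ReadSize;
    }
    if (total > perRequestLimit) {
        std::cerr << "Transaction total read size " << total
                  << " exceeded limit " << perRequestLimit << "\n";
        return EStatus::ExecError;
    }
    return EStatus::Ok;
}

int main() {
    // Shard read sizes from the node 3 run below; the per-shard limit here is a
    // made-up value large enough for both shards to pass, so only the total
    // per-request limit of 10000 (the value used by the test) fires.
    const std::vector<TShardProposal> shards = {
        {72075186224037888ULL, 17000887},
        {72075186224037889ULL, 9000479},
    };
    const EStatus status = CheckRequest(shards, /*perShardLimit=*/20000000, /*perRequestLimit=*/10000);
    std::cout << (status == EStatus::ExecError ? "ExecError" : "Ok") << "\n";
    return 0;
}

With the logged sizes (17000887 + 9000479 = 26001366) the total exceeds 10000, so the sketch prints ExecError, matching the proxy's response in the node 3 trace below.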
2025-12-23T18:49:59.459477Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:49:59.468247Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:00.131777Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:02.674179Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:7587127587179125206:2104] Handle TEvProposeTransaction 2025-12-23T18:50:02.674218Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:7587127587179125206:2104] TxId# 281474976715700 ProcessProposeTransaction 2025-12-23T18:50:02.674267Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:272: actor# [3:7587127587179125206:2104] Cookie# 0 userReqId# "" txid# 281474976715700 SEND to# [3:7587127600064028167:2800] DataReq marker# P0 2025-12-23T18:50:02.674342Z node 3 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# [3:7587127600064028167:2800] Cookie# 0 txid# 281474976715700 HANDLE TDataReq marker# P1 2025-12-23T18:50:02.675387Z node 3 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [3:7587127600064028167:2800] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-12-23T18:50:02.675415Z node 3 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [3:7587127600064028167:2800] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-12-23T18:50:02.675457Z node 3 :TX_PROXY DEBUG: datareq.cpp:1453: Actor# [3:7587127600064028167:2800] txid# 281474976715700 SEND to# [3:7587127587179125245:2112] TSchemeCache with 2 scheme entries. 
DataReq marker# P2 2025-12-23T18:50:02.675617Z node 3 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [3:7587127600064028167:2800] txid# 281474976715700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-12-23T18:50:02.677325Z node 3 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [3:7587127600064028167:2800] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-12-23T18:50:02.677675Z node 3 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [3:7587127600064028167:2800] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-12-23T18:50:02.678321Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:50:02.679914Z node 3 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037888 2025-12-23T18:50:02.680336Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-12-23T18:50:02.681580Z node 3 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037889 2025-12-23T18:50:02.684939Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:50:02.685044Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T18:50:02.685321Z node 3 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [3:7587127600064028167:2800] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000887 out readset size 0 marker# P6 2025-12-23T18:50:02.685368Z node 3 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [3:7587127600064028167:2800] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000479 out readset size 0 marker# P6 2025-12-23T18:50:02.685467Z node 3 :TX_PROXY ERROR: datareq.cpp:2829: Actor# [3:7587127600064028167:2800] txid# 281474976715700 FailProposedRequest: Transaction total read size 26001366 exceeded limit 10000 Status# ExecError 2025-12-23T18:50:02.685541Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7587127600064028167:2800] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c 2025-12-23T18:50:02.686276Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 281474976715700 2025-12-23T18:50:02.686356Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976715700 2025-12-23T18:50:02.689344Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 281474976715700 2025-12-23T18:50:02.689414Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976715700 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is 
[0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:50:02.758303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:50:02.758404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:02.758445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:50:02.758488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:50:02.758528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:50:02.758566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:50:02.758642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:02.758732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:50:02.759776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:50:02.760127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:50:02.851404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:50:02.851475Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:02.858957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:50:02.859230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:50:02.859375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:50:02.862822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:50:02.863053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:50:02.863830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:02.864105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2025-12-23T18:50:02.866187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:02.866396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:50:02.867620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:02.867698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:02.867846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:50:02.867899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:02.867943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:50:02.868077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:50:03.027425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.028567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.028706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.028781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.028856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.028923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.028992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.029122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.029206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.029304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.029374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.029454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.029568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.029649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:03.029719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... FO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:50:04.266959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:50:04.267005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:50:04.267041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2025-12-23T18:50:04.267085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2025-12-23T18:50:04.267858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:50:04.267962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:50:04.267992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:50:04.268016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2025-12-23T18:50:04.268042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 40] was 4 2025-12-23T18:50:04.268101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-12-23T18:50:04.269097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 250 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1254 } } CommitVersion { Step: 250 TxId: 103 } 2025-12-23T18:50:04.269139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-12-23T18:50:04.269271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 250 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1254 } } CommitVersion { Step: 250 TxId: 103 } 2025-12-23T18:50:04.269396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 250 OrderId: 103 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1254 } } CommitVersion { Step: 250 TxId: 103 } debug: NTableState::TProposedWaitParts operationId# 103:0 2025-12-23T18:50:04.270063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 959 RawX2: 4294970200 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-12-23T18:50:04.270121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-12-23T18:50:04.270271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 959 RawX2: 4294970200 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-12-23T18:50:04.270331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:50:04.270422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 959 RawX2: 4294970200 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-12-23T18:50:04.270511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 
72057594046678944 2025-12-23T18:50:04.270549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:50:04.270601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-12-23T18:50:04.270639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2025-12-23T18:50:04.273059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:50:04.275340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:50:04.275451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:50:04.275543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:50:04.275849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:50:04.275894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:50:04.276012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:50:04.276055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:50:04.276101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:50:04.276127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:50:04.276161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-12-23T18:50:04.276232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:818:2785] message: TxId: 103 2025-12-23T18:50:04.276292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:50:04.276353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:50:04.276385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:50:04.276497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2025-12-23T18:50:04.278227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:50:04.278276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter 
[1:987:2924] TestWaitNotification: OK eventTxId 103 W0000 00:00:1766515804.278933 1467388 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 104 2025-12-23T18:50:04.282115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:50:04.282545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:50:04.282679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } }, at schemeshard: 72057594046678944 2025-12-23T18:50:04.283354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2025-12-23T18:50:04.288401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:50:04.288647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: 2025-12-23T18:49:35.469623Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127485529006121:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:35.471131Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b3/r3tmp/tmp4qAA1c/pdisk_1.dat 
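The TSchemeShardTTLTests::ShouldCheckQuotas output above ends with a CREATE TABLE request whose TTL SysSettings specify RunInterval: 1799999999 and which is rejected with StatusSchemeError: "TTL run interval cannot be less than limit: 1800". The sketch below reproduces that comparison under the assumption, inferred only from the logged numbers, that the run interval is expressed in microseconds and the limit in seconds; the function and constant names are invented for illustration and are not the YDB schemeshard API.

#include <cstdint>
#include <iostream>
#include <string>

// Hypothetical sketch of the check behind "TTL run interval cannot be less
// than limit: 1800". The microseconds-vs-seconds interpretation is an
// assumption based on the logged values, not taken from the YDB sources.
constexpr uint64_t MinTtlRunIntervalSeconds = 1800;

bool ValidateTtlRunInterval(uint64_t runIntervalUs, std::string& reason) {
    const uint64_t minUs = MinTtlRunIntervalSeconds * 1000000ULL;
    if (runIntervalUs < minUs) {
        reason = "TTL run interval cannot be less than limit: " +
                 std::to_string(MinTtlRunIntervalSeconds);
        return false;
    }
    return true;
}

int main() {
    std::string reason;
    // RunInterval value from the rejected request: one microsecond short of 1800 s.
    if (!ValidateTtlRunInterval(1799999999ULL, reason)) {
        std::cout << "StatusSchemeError: " << reason << "\n"; // matches the logged rejection
    }
    return 0;
}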
2025-12-23T18:49:35.720371Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:35.731133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:35.731241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:35.734054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:35.800410Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127485529006089:2081] 1766515775466925 != 1766515775466928 2025-12-23T18:49:35.816557Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:35.907042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:14831 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515775871 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:36.061387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:36.067930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:36.090567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:36.102386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:36.238299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:49:36.291177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:36.478356Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:39.214138Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127501298096367:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:39.214668Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:39.231621Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b3/r3tmp/tmpt5tS8N/pdisk_1.dat 2025-12-23T18:49:39.326890Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127501298096294:2081] 1766515779187582 != 1766515779187585 2025-12-23T18:49:39.342810Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:39.343276Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:39.345634Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:39.345688Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:39.371390Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:39.502364Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:26329 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515779427 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:39.584484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:39.595493Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:39.605813Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:39.611192Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:39.694306Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:39.745933Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:43.031851Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127515508591707:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:43.032621Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:43.099373Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b3/r3tmp/tmpXGil7w/pdisk_1.dat 2025-12-23T18:49:43.195614Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:43.334315Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:43.334402Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:43.341762Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:43.396632Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:1655 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls respo ... d: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515791313 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:51.488575Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:51.495386Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:51.511412Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:51.584162Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:51.634683Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:55.315248Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587127569572892066:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:55.315296Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b3/r3tmp/tmpDtQpv0/pdisk_1.dat 2025-12-23T18:49:55.340063Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:55.449089Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:55.474729Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:55.474858Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:55.476355Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:55.533705Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:29592 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515795520 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:49:55.723470Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:55.749880Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:55.821523Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:55.877520Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:59.597404Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7587127588754062155:2182];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:59.597472Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:59.609444Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b3/r3tmp/tmp61T8XQ/pdisk_1.dat 2025-12-23T18:49:59.718003Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:59.722890Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7587127588754062010:2081] 1766515799591295 != 1766515799591298 2025-12-23T18:49:59.734909Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:59.735489Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:59.735584Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:59.759991Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17061 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:50:00.014859Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515799811 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:00.017635Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:00.026601Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:50:00.043095Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:00.119193Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:00.173739Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
|96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectDot [GOOD] Test command err: 2025-12-23T18:49:21.178770Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127424617704434:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:21.178950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c3/r3tmp/tmpYwCeUt/pdisk_1.dat 2025-12-23T18:49:21.438262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:21.492000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:21.492094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:21.582720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:21.587752Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:21.736970Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:12316 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515761647 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:21.948528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:21.958905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:49:21.975062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:49:21.984682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:49:22.146663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:22.188886Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:22.207571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:24.930522Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127435666910448:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:24.930583Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c3/r3tmp/tmp0aP6lo/pdisk_1.dat 2025-12-23T18:49:24.958699Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:25.037637Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:25.044767Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127435666910424:2081] 1766515764930050 != 1766515764930053 2025-12-23T18:49:25.083925Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:25.084028Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:25.090717Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:25.223010Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:29469 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515765133 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:25.298645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:25.307377Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:49:25.324970Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:25.417395Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:25.478516Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:28.493258Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127452178561066:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:28.494181Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c3/r3tmp/tmp3BrvoK/pdisk_1.dat 2025-12-23T18:49:28.518015Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:28.640472Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:28.640546Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:28.645533Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:28.678219Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:28.812908Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:13786 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: tru ... de_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15775 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515790221 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:49:50.391412Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:50.412922Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:50.490223Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:50.533733Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:49:50.610484Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:54.414925Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587127565444715930:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:54.416236Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c3/r3tmp/tmp4Wku0t/pdisk_1.dat 2025-12-23T18:49:54.447524Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:54.538537Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:54.554110Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:54.554208Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:54.574620Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:54.679977Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:1770 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515794680 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:54.837608Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... 2025-12-23T18:49:54.858674Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:54.920412Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:55.025301Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:59.036892Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587127585202159273:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:59.036978Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c3/r3tmp/tmpH5KDt8/pdisk_1.dat 2025-12-23T18:49:59.053123Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:59.144815Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:59.146681Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7587127585202159246:2081] 1766515799035758 != 1766515799035761 2025-12-23T18:49:59.162783Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:59.162920Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:59.192281Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:59.224573Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:18992 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515799251 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:59.457455Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:49:59.479706Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:59.568354Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:59.661947Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] Test command err: 2025-12-23T18:49:47.661600Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127534616030386:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:47.661662Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:47.759640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005796/r3tmp/tmpRFZG7m/pdisk_1.dat 2025-12-23T18:49:48.043042Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:48.046988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:48.047102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:48.114189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:48.136545Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:48.279887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:8682 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515788177 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:48.450806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:48.466871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:48.472145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:48.597984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:48.651985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:48.653210Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 
2025-12-23T18:49:52.615516Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127557856295657:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:52.615780Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005796/r3tmp/tmpuP4TRF/pdisk_1.dat 2025-12-23T18:49:52.630329Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:52.692306Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:52.694001Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127557856295551:2081] 1766515792611897 != 1766515792611900 2025-12-23T18:49:52.718831Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:52.718901Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:52.728814Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:52.880645Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:63997 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515792776 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:52.890293Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:49:52.908604Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:52.960860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:53.008268Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:53.621031Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:56.983766Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127573382852931:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:56.985341Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005796/r3tmp/tmpyCn6l7/pdisk_1.dat 2025-12-23T18:49:57.018515Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:57.116992Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:57.133716Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:57.133793Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:57.140349Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:57.238844Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:27503 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515797186 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-12-23T18:49:57.349555Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:49:57.359013Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:57.376441Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:57.448708Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:57.554673Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:50:00.865695Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587127590002451134:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:00.865742Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005796/r3tmp/tmpaR5Lkt/pdisk_1.dat 2025-12-23T18:50:00.883450Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:00.957908Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:00.960557Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587127590002451108:2081] 1766515800865149 != 1766515800865152 2025-12-23T18:50:00.995496Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:00.995605Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:01.007015Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:01.080275Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:7659 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515801057 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:01.197863Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... 
2025-12-23T18:50:01.222964Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:01.276644Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:01.332335Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 >> KqpProxy::CalcPeerStats [GOOD] >> KqpProxy::CreatesScriptExecutionsTable >> TableCreation::TableCreationWithAcl |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi >> TKeyValueTest::TestConcatWorks >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-anonymous >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> 
TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-dbadmin >> TLocksTest::MultipleLocks [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TPQTest::TestPQRead [GOOD] >> TPQTest::TestPQReadAhead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:50:09.135812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:50:09.135881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:09.135936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:50:09.135973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:50:09.136002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:50:09.136028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:50:09.136077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:09.136140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:50:09.136816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:50:09.137028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:50:09.220146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:50:09.220201Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:09.226382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:50:09.226654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:50:09.226829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-12-23T18:50:09.232679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:50:09.232905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:50:09.233614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:09.233863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:50:09.236162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:09.236363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:50:09.237502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:09.237566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:09.237712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:50:09.237755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:09.237788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:50:09.237905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:50:09.377952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.378841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.378964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.379029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.379090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.379150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.379209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.379378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.379451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.379532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.379613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.379725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.379846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.379930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:09.379994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ode 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:2 129 -> 240 2025-12-23T18:50:10.263571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 725 RawX2: 4294970003 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:50:10.263619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-12-23T18:50:10.263749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 725 RawX2: 4294970003 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:50:10.263794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:50:10.263859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 725 RawX2: 4294970003 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:50:10.263905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:10.263953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:10.263997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:50:10.264036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2025-12-23T18:50:10.265284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:50:10.265346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:50:10.267105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:50:10.267168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:50:10.267260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-12-23T18:50:10.268668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:10.268751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-12-23T18:50:10.268818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:10.269104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-12-23T18:50:10.269144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:2 ProgressState 2025-12-23T18:50:10.269234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2025-12-23T18:50:10.269260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-12-23T18:50:10.269290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2025-12-23T18:50:10.269331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-12-23T18:50:10.269369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-12-23T18:50:10.269559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:10.269582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:50:10.269622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2025-12-23T18:50:10.269638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-12-23T18:50:10.269665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2025-12-23T18:50:10.269686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-12-23T18:50:10.269702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-12-23T18:50:10.269755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:777:2744] message: TxId: 101 2025-12-23T18:50:10.269793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-12-23T18:50:10.269827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:50:10.269864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:50:10.269975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:50:10.270011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2025-12-23T18:50:10.270026Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:1 2025-12-23T18:50:10.270049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:50:10.270062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2025-12-23T18:50:10.270087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:2 2025-12-23T18:50:10.270117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2025-12-23T18:50:10.272422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:50:10.272480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:778:2745] TestWaitNotification: OK eventTxId 101 2025-12-23T18:50:10.273009Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:50:10.273311Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 299us result status StatusSuccess 2025-12-23T18:50:10.274027Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] >> test_auditlog.py::test_single_dml_query_logged[delete] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TableCreation::MultipleTablesCreation |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:50:09.979585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:50:09.979671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:09.979722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2025-12-23T18:50:09.979760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:50:09.979799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:50:09.979842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:50:09.979918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:09.979990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:50:09.980907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:50:09.981221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:50:10.077714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:50:10.077774Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:10.084037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:50:10.084245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:50:10.084376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:50:10.087736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:50:10.087960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:50:10.088729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:10.088975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:50:10.090940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:10.091132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:50:10.092294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:10.092366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:10.092515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:50:10.092572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:10.092631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:50:10.092785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:50:10.253535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.254597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.254725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.254825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.254906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.254969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.255041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.255157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.255248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.255311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.255367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.255477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.255593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.255693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:10.255769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 8:50:11.299991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:11.300057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:50:11.300871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:50:11.300978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:50:11.301015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:50:11.301059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2025-12-23T18:50:11.301102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:50:11.301190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:50:11.303493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1211 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:50:11.303544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:50:11.303681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1211 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:50:11.303789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 
72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1211 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2025-12-23T18:50:11.307475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:50:11.307535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:50:11.307693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:50:11.307768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:50:11.307894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:50:11.307967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:11.308007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:11.308067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:50:11.308115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:50:11.309725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:50:11.311333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:11.311465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:11.311774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:11.311826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:50:11.311924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:50:11.311965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:50:11.312003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:50:11.312033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:50:11.312095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:50:11.312170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2715] message: TxId: 102 2025-12-23T18:50:11.312226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:50:11.312303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:50:11.312341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:50:11.312469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:50:11.315025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:50:11.315076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:796:2766] TestWaitNotification: OK eventTxId 102 2025-12-23T18:50:11.315642Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:50:11.315928Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 307us result status StatusSuccess 2025-12-23T18:50:11.316477Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-ordinaryuser |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD] Test command err: 2025-12-23T18:49:42.177122Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127515655097836:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:42.182785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:42.213465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a2/r3tmp/tmp37r5mW/pdisk_1.dat 2025-12-23T18:49:42.578861Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:42.582458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:42.584023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:42.646383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:42.717342Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:42.722955Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127515655097732:2081] 1766515782159529 != 1766515782159532 2025-12-23T18:49:42.837056Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:14209 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515782759 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:43.066117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:43.072471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:43.098211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:43.203908Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:43.303277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:43.393894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:46.167203Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127528973923318:2162];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:46.167287Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a2/r3tmp/tmprs9yY1/pdisk_1.dat 2025-12-23T18:49:46.204338Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:46.302318Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:46.325719Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:46.325816Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:46.348579Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:46.419973Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:25594 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515786399 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:46.572251Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:46.579390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:49:46.606345Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:46.694672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:49:46.747651Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:50.136158Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127549192940622:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:50.136331Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a2/r3tmp/tmpjklusI/pdisk_1.dat 2025-12-23T18:49:50.182846Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:50.261561Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:50.261644Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:50.265613Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:50.278940Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587127549192940596:2081] 1766515790132345 != 1766515790132348 2025-12-23T18:49:50.283792Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:50.389708Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:8578 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSub ... 
ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515798012 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:58.173622Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:58.182951Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:58.194034Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:58.199367Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:58.239893Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:49:58.272736Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:58.341020Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a2/r3tmp/tmpkTstTX/pdisk_1.dat 2025-12-23T18:50:01.883123Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7587127594404472540:2262];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:01.884289Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:50:01.900914Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:01.998937Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7587127594404472311:2081] 1766515801856731 != 1766515801856734 2025-12-23T18:50:02.013193Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:02.026007Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:02.026116Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:02.030217Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:02.074347Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:6601 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515802079 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:02.263988Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:50:02.287707Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:02.368182Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:02.425081Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a2/r3tmp/tmpDjiwtf/pdisk_1.dat 2025-12-23T18:50:06.220163Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:06.220900Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:50:06.299719Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:06.301065Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7587127617622205941:2081] 1766515806183187 != 1766515806183190 2025-12-23T18:50:06.313381Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:06.313480Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:06.339472Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:06.466653Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:19291 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515806391 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:06.572691Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:06.580477Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:50:06.595929Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:50:06.601282Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:06.660618Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:06.713073Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
|96.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TLocksTest::Range_EmptyKey [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-clusteradmin |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] Test command err: 2025-12-23T18:49:05.999709Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127354921461025:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:06.000259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:06.059839Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00344c/r3tmp/tmp3pfpwQ/pdisk_1.dat 2025-12-23T18:49:06.070977Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:49:06.225459Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:49:06.225625Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:49:06.226711Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:49:06.308063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:06.308194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:06.309294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:06.309352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:06.347072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:06.358914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:06.375303Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:49:06.376705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:06.403173Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:49:06.409173Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 17823, node 1 2025-12-23T18:49:06.489768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/00344c/r3tmp/yandex8vaevs.tmp 2025-12-23T18:49:06.489804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/00344c/r3tmp/yandex8vaevs.tmp 2025-12-23T18:49:06.490006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/00344c/r3tmp/yandex8vaevs.tmp 2025-12-23T18:49:06.490123Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:49:06.540798Z INFO: TTestServer started on Port 6534 GrpcPort 17823 TClient is connected to server localhost:6534 PQClient connected to localhost:17823 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515746429 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:49:06.824181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:06.899745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:07.021909Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:07.063892Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... waiting... 
2025-12-23T18:49:09.225137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127372101331583:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:09.225263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:09.225506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127372101331595:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:09.225554Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127372101331596:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:09.225807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:49:09.230503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:49:09.253103Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127372101331599:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:49:09.591738Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127372101331688:3040] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:49:09.619578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:09.629247Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587127372101331701:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:49:09.628136Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587127373889192811:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:49:09.630796Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=ODM1NTI3MzQtYmEwYTdmNGItOTRmNTNkZTgtYTU4ZDEyZTQ=, ActorId: [2:7587127373889192786:2304], ActorState: ExecuteState, TraceId: 01kd68pchy2snfy0ph2h5k1arq, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:49:09.632044Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:49:09.629680Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YzA0YWZkMjItYWI0YTA0MDE ... 
files were not loaded 2025-12-23T18:49:58.377691Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:58.377785Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:58.379161Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:58.379241Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:58.384348Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:58.408559Z node 7 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-12-23T18:49:58.408728Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:58.410036Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 27612, node 7 2025-12-23T18:49:58.517407Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/00344c/r3tmp/yandexRlk0F0.tmp 2025-12-23T18:49:58.517433Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/00344c/r3tmp/yandexRlk0F0.tmp 2025-12-23T18:49:58.517587Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/00344c/r3tmp/yandexRlk0F0.tmp 2025-12-23T18:49:58.517682Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:49:58.556407Z INFO: TTestServer started on Port 20987 GrpcPort 27612 2025-12-23T18:49:58.558248Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20987 PQClient connected to localhost:27612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515798474 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:49:58.769682Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:49:58.799537Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:58.853315Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... waiting... 2025-12-23T18:49:59.170592Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:59.205414Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:02.359437Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7587127601113125448:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:02.359531Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:02.360693Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7587127601113125459:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:02.360705Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7587127601113125462:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:02.360762Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:02.367065Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:50:02.403277Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7587127601113125465:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-12-23T18:50:02.432866Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:02.496924Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7587127601113125615:3158] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:50:02.543650Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [7:7587127601113125625:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:50:02.547380Z node 7 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=7&id=NTAyOGU5MGEtODdiNGUwODUtNTIyOThlMDktZDM0MzFmYWQ=, ActorId: [7:7587127601113125446:2334], ActorState: ExecuteState, TraceId: 01kd68r07m9b4xhtc7w1h61xjc, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:50:02.547861Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:50:02.565493Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:02.741452Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-12-23T18:50:03.164177Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7587127583933254772:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:03.164249Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [7:7587127605408093237:3406] === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2025-12-23T18:50:10.568983Z node 7 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [7:7587127635472864603:3568] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2025-12-23T18:50:10.569030Z node 7 :PQ_PARTITION_CHOOSER INFO: partition_chooser_impl__abstract_chooser_actor.h:312: TPartitionChooser [7:7587127635472864603:3568] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD] Test command err: 2025-12-23T18:49:30.539302Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127460762221388:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:30.539803Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b5/r3tmp/tmp3u7ROK/pdisk_1.dat 2025-12-23T18:49:30.802873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:30.837514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:30.837612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:30.904476Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127460762221286:2081] 1766515770526275 != 1766515770526278 2025-12-23T18:49:30.904959Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:30.911144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:31.018898Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:62334 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515770964 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:31.186227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:31.195163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:31.210328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:31.404346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:31.468107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:31.545938Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:34.068473Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127481448131926:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:34.068909Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b5/r3tmp/tmpPz9Bto/pdisk_1.dat 2025-12-23T18:49:34.104300Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:34.189020Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:34.194392Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127481448131823:2081] 1766515774064038 != 1766515774064041 2025-12-23T18:49:34.204847Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:34.204924Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:34.207170Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:34.402869Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:9628 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515774296 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:34.444107Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:49:34.453057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:34.479187Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:34.494532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:34.562356Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:34.620004Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:37.887400Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127490672673166:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:37.887567Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:37.901674Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b5/r3tmp/tmpFUdQ8G/pdisk_1.dat 2025-12-23T18:49:38.029721Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:38.043013Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:38.065475Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:38.065573Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:38.085869Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14660 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 AC ... Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515799405 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:59.605054Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:59.615819Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:59.629487Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:59.643552Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:59.715559Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:59.784658Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:50:03.816755Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587127605624024672:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:03.816833Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b5/r3tmp/tmpqpRwgV/pdisk_1.dat 2025-12-23T18:50:03.862034Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:03.972719Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:03.994672Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:03.994916Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:04.046855Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:04.049242Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:28601 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515804095 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:04.289665Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:50:04.315884Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:50:04.396538Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:50:04.466375Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:08.329980Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587127625063682279:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:08.330050Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057b5/r3tmp/tmpra8hve/pdisk_1.dat 2025-12-23T18:50:08.360236Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:08.471478Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7587127625063682252:2081] 1766515808328876 != 1766515808328879 2025-12-23T18:50:08.471836Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:08.475658Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:08.475778Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:08.502386Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:08.562878Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:15220 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515808568 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:08.788670Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:08.796056Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:50:08.819138Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:50:08.831376Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:08.911316Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:08.977044Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
|96.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] >> KqpProxy::CreatesScriptExecutionsTable [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag >> TableCreation::TableCreationWithAcl [GOOD] >> TableCreation::UpdateTableWithAclModification |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:50:15.263389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:50:15.263482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:15.263520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:50:15.263572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:50:15.263608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:50:15.263643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:50:15.263706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:15.263771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:50:15.264650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:50:15.265000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:50:15.355636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot 
subscribe to console configs 2025-12-23T18:50:15.355708Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:15.361630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:50:15.361853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:50:15.361988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:50:15.365174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:50:15.365392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:50:15.366226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:15.366484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:50:15.368552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:15.368767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:50:15.370021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:15.370087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:15.370248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:50:15.370298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:15.370350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:50:15.370530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:50:15.517327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.518362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.518500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.518572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.518636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.518708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.518794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.518929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.519010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.519093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.519159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.519252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.519379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.519456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.519520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:16.203136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:50:16.203356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:50:16.203402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:50:16.203436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:50:16.203474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:50:16.205426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:50:16.205489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:50:16.205525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:50:16.207256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:50:16.207326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:50:16.207383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:50:16.207462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:50:16.207590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:50:16.209186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:50:16.209336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:50:16.209677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:16.209813Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:50:16.209858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:50:16.210160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:50:16.210226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:50:16.210384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:50:16.210457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:50:16.213519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:16.213562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:16.213793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:16.213839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:50:16.214086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:50:16.214129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:50:16.214236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:50:16.214267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:50:16.214323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:50:16.214355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:50:16.214394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:50:16.214440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:50:16.214474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 
2025-12-23T18:50:16.214501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:50:16.214583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:50:16.214639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:50:16.214676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:50:16.215289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:50:16.215402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:50:16.215447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:50:16.215486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:50:16.215542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:50:16.215637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:50:16.218529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:50:16.219073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:50:16.220361Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:671:2661] Bootstrap 2025-12-23T18:50:16.221497Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:671:2661] Become StateWork (SchemeCache [1:676:2666]) 2025-12-23T18:50:16.224417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:50:16.224771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:16.224881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: 
"TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2025-12-23T18:50:16.225374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2025-12-23T18:50:16.226437Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:671:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:50:16.229408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:50:16.229572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-12-23T18:50:16.229837Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] Test command err: 2025-12-23T18:48:45.386733Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.010283s 2025-12-23T18:48:45.415495Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127266951120943:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:45.415625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:45.486471Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:48:45.543955Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:48:45.550735Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127270119225309:2152];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:45.550883Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:45.563651Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:48:45.564229Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003494/r3tmp/tmpzWyUpI/pdisk_1.dat 2025-12-23T18:48:45.746672Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:45.778465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:45.906011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:45.906143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:45.906383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:45.906440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:45.912813Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:48:45.912974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:45.914602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:46.004282Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:46.006955Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:46.013531Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 26547, node 1 2025-12-23T18:48:46.355828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/003494/r3tmp/yandexQ2W0RA.tmp 2025-12-23T18:48:46.355848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/003494/r3tmp/yandexQ2W0RA.tmp 2025-12-23T18:48:46.358442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/003494/r3tmp/yandexQ2W0RA.tmp 2025-12-23T18:48:46.358546Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:46.420741Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:46.560549Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:46.653786Z INFO: TTestServer started on Port 27630 GrpcPort 26547 TClient is connected to server localhost:27630 PQClient connected to localhost:26547 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515726073 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:46.948829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:48:46.993472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:48:48.884287Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127283004127503:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:48.884400Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:48.884706Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127283004127515:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:48.884714Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127283004127516:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:48.884755Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:48.893456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:48.914889Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587127283004127519:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-12-23T18:48:49.019149Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127287299094842:2182] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:49.437997Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587127279836024254:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:48:49.438024Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587127287299094856:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:48:49.438599Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=M2ZkNWM0ZGYtNDVjY2ZmNzEtYWExZDg2MWYtYjk1MjU5Nw==, ActorId: [2:7587127283004127501:2304], ActorState: ExecuteState, TraceId: 01kd68nrfh5zc5gy2bf418pywx, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:48:49.442211Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find ... p:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:01.263193Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:01.263275Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:01.265579Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-12-23T18:50:01.265675Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:01.267541Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24282, node 11 2025-12-23T18:50:01.372660Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/003494/r3tmp/yandexhIKSu8.tmp 2025-12-23T18:50:01.372688Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/003494/r3tmp/yandexhIKSu8.tmp 2025-12-23T18:50:01.372799Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/003494/r3tmp/yandexhIKSu8.tmp 2025-12-23T18:50:01.372884Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:50:01.389859Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:50:01.409250Z INFO: TTestServer started on Port 62106 GrpcPort 24282 2025-12-23T18:50:01.422497Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:62106 PQClient connected to localhost:24282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515801358 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:50:01.665867Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:50:01.713792Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:50:02.013973Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:02.056079Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:05.891673Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587127612509721457:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:05.891793Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587127612509721465:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:05.891859Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:05.892732Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7587127612509721472:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:05.892817Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:05.897184Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:50:05.918923Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7587127612509721471:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:50:05.943245Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:05.985610Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7587127612509721651:3097] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:50:05.997274Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7587127591034883685:2153];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:05.997342Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:50:06.026501Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [11:7587127612509721666:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:50:06.026995Z node 11 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=11&id=YmE1NDRjYWEtNjJmYmZjNC0xYjM1NTIyOS05NjQ4NjZkZA==, ActorId: [11:7587127612509721454:2330], ActorState: ExecuteState, TraceId: 01kd68r3p2bpcst3802v4cy2yz, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:50:06.027467Z node 11 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:50:06.028093Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:06.061707Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7587127596747439510:2271];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:06.061803Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:50:06.182372Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [11:7587127616804689326:3380] === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2025-12-23T18:50:13.687926Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [11:7587127646869460803:3613] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2025-12-23T18:50:13.687970Z node 11 :PQ_PARTITION_CHOOSER INFO: partition_chooser_impl__abstract_chooser_actor.h:312: TPartitionChooser [11:7587127646869460803:3613] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId >> KqpProxy::InvalidSessionID |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TFlatTest::AutoMergeBySize [GOOD] >> TFlatTest::AutoSplitMergeQueue >> KqpBatchDelete::ManyPartitions_3 [GOOD] |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] Test command err: 2025-12-23T18:48:41.518329Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1766515721518306 2025-12-23T18:48:41.811836Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127253140791796:2089];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.816086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:41.843503Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127251469103200:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.843545Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:41.850649Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:48:41.876212Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : 
Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005839/r3tmp/tmpVgwuHd/pdisk_1.dat 2025-12-23T18:48:42.063772Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:42.170824Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:42.205465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:42.205620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:42.208165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:42.208247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:42.247542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:42.248705Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:48:42.249335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:42.301932Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:42.360510Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 23430, node 1 2025-12-23T18:48:42.417376Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:42.699147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005839/r3tmp/yandexGgiPpo.tmp 2025-12-23T18:48:42.699192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005839/r3tmp/yandexGgiPpo.tmp 2025-12-23T18:48:42.701811Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005839/r3tmp/yandexGgiPpo.tmp 2025-12-23T18:48:42.701927Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:42.826977Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:42.854799Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:42.934788Z INFO: TTestServer started on Port 16158 GrpcPort 23430 TClient is connected to server localhost:16158 PQClient connected to localhost:23430 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515722433 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:43.246153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:48:43.300465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... waiting... 2025-12-23T18:48:45.497069Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127268648972707:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.497549Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127268648972699:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.497612Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.497858Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127268648972716:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.497910Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.497110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127270320662266:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.497173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127270320662277:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.497219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.497674Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127270320662282:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.497744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.505623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:45.515174Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127268648972718:2132] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:48:45.535581Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127270320662281:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-12-23T18:48:45.535909Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587127268648972715:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-12-23T18:48:45.605652Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127268648972745:2138] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:45.638254Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587127270320662375:2984] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:46.067304Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587127268648972752:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3 ... session cookie 1 consumer shared/user session shared/user_11_1_12243636005158711523_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2025-12-23T18:50:15.684316Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_11_1_12243636005158711523_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 4c26cdff-9e1a5b20-ab75e30-7a7f5e05 has messages 1 2025-12-23T18:50:15.684407Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_11_1_12243636005158711523_v1 read done: guid# 4c26cdff-9e1a5b20-ab75e30-7a7f5e05, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 549 2025-12-23T18:50:15.684435Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_11_1_12243636005158711523_v1 response to read: guid# 4c26cdff-9e1a5b20-ab75e30-7a7f5e05 2025-12-23T18:50:15.684589Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_11_1_12243636005158711523_v1 Process answer. Aval parts: 0 2025-12-23T18:50:15.685541Z :DEBUG: [/Root] [/Root] [8e0d728a-c689e493-c902ecb5-a83e5ada] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:50:15.685695Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_12243636005158711523_v1 grpc read done: success# 1, data# { read { } } 2025-12-23T18:50:15.685782Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (0-3) 2025-12-23T18:50:15.685815Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_11_1_12243636005158711523_v1 got read request: guid# cc56f12-50ae7a46-89f55b49-8d6f20d3 2025-12-23T18:50:15.685990Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-12-23T18:50:15.686044Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-12-23T18:50:15.686072Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-12-23T18:50:15.686093Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 1} (3-3) 2025-12-23T18:50:15.686141Z :DEBUG: [/Root] [/Root] [8e0d728a-c689e493-c902ecb5-a83e5ada] [null] The application data is transferred to the client. Number of messages 4, size 14 bytes 2025-12-23T18:50:15.686291Z :INFO: [/Root] [/Root] [8e0d728a-c689e493-c902ecb5-a83e5ada] Closing read session. Close timeout: 0.000000s 2025-12-23T18:50:15.686327Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:test-topic:0:1:3:0 2025-12-23T18:50:15.686359Z :INFO: [/Root] [/Root] [8e0d728a-c689e493-c902ecb5-a83e5ada] Counters: { Errors: 0 CurrentSessionLifetimeMs: 35 BytesRead: 14 MessagesRead: 4 BytesReadCompressed: 74 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:50:15.686436Z :NOTICE: [/Root] [/Root] [8e0d728a-c689e493-c902ecb5-a83e5ada] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T18:50:15.686473Z :DEBUG: [/Root] [/Root] [8e0d728a-c689e493-c902ecb5-a83e5ada] [null] Abort session to cluster 2025-12-23T18:50:15.686889Z :NOTICE: [/Root] [/Root] [8e0d728a-c689e493-c902ecb5-a83e5ada] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:50:15.687087Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6cda85c7-fd24f38c-be2c955a-e57d00fe_0] Write session: close. Timeout = 0 ms 2025-12-23T18:50:15.687121Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6cda85c7-fd24f38c-be2c955a-e57d00fe_0] Write session will now close 2025-12-23T18:50:15.687165Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6cda85c7-fd24f38c-be2c955a-e57d00fe_0] Write session: aborting 2025-12-23T18:50:15.687921Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_12243636005158711523_v1 grpc read done: success# 0, data# { } 2025-12-23T18:50:15.687943Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_11_1_12243636005158711523_v1 grpc read failed 2025-12-23T18:50:15.687966Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_11_1_12243636005158711523_v1 grpc closed 2025-12-23T18:50:15.687993Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_11_1_12243636005158711523_v1 is DEAD 2025-12-23T18:50:15.689202Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6cda85c7-fd24f38c-be2c955a-e57d00fe_0] Write session: gracefully shut down, all writes complete 2025-12-23T18:50:15.689249Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|6cda85c7-fd24f38c-be2c955a-e57d00fe_0] Write session: destroy 2025-12-23T18:50:15.689201Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037892] Destroy direct read session shared/user_11_1_12243636005158711523_v1 2025-12-23T18:50:15.689253Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [11:7587127654959783247:2538] destroyed 2025-12-23T18:50:15.689299Z node 12 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_11_1_12243636005158711523_v1 2025-12-23T18:50:15.689456Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [11:7587127654959783244:2535] disconnected. 
2025-12-23T18:50:15.689485Z node 11 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [11:7587127654959783244:2535] disconnected; active server actors: 1 2025-12-23T18:50:15.689502Z node 11 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [11:7587127654959783244:2535] client user disconnected session shared/user_11_1_12243636005158711523_v1 2025-12-23T18:50:15.692111Z node 11 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group-id|6cda85c7-fd24f38c-be2c955a-e57d00fe_0 grpc read done: success: 0 data: 2025-12-23T18:50:15.692140Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: test-message-group-id|6cda85c7-fd24f38c-be2c955a-e57d00fe_0 grpc read failed 2025-12-23T18:50:15.692425Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 2 sessionId: test-message-group-id|6cda85c7-fd24f38c-be2c955a-e57d00fe_0 2025-12-23T18:50:15.692460Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group-id|6cda85c7-fd24f38c-be2c955a-e57d00fe_0 is DEAD 2025-12-23T18:50:15.692853Z node 11 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:50:15.693386Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [11:7587127629189979168:2472] destroyed 2025-12-23T18:50:15.693435Z node 12 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2025-12-23T18:50:15.693467Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:50:15.693485Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:15.693500Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:50:15.693518Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:15.693532Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:50:15.783373Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:50:15.783412Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:15.783430Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:50:15.783455Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:15.783475Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:50:15.884048Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:50:15.884085Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:15.884102Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:50:15.884125Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:15.884141Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:50:15.984280Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:50:15.984332Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:15.984350Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:50:15.984372Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:15.984390Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:50:16.130723Z node 11 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1177: ActorId: [11:7587127659254750562:2529] TxId: 281474976710681. Ctx: { TraceId: 01kd68rd5w27va608hybdf5ddb, Database: /Root, SessionId: ydb://session/3?node_id=11&id=YmNmMTU0YWItYzY2NzE0MmMtNDIzYjJkYzAtYzQ2NDVkYTE=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 12 2025-12-23T18:50:16.130986Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [11:7587127659254750569:2529], TxId: 281474976710681, task: 3. Ctx: { CheckpointId : . TraceId : 01kd68rd5w27va608hybdf5ddb. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=11&id=YmNmMTU0YWItYzY2NzE0MmMtNDIzYjJkYzAtYzQ2NDVkYTE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [11:7587127659254750562:2529], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |96.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_3 [GOOD] Test command err: Trying to start YDB, gRPC: 23288, MsgBus: 29819 2025-12-23T18:47:25.129636Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126924399665366:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:25.131017Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005be0/r3tmp/tmpdmktt1/pdisk_1.dat 2025-12-23T18:47:25.368484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:25.461812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:25.461958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:25.463704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:25.476401Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:25.494814Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126924399665336:2081] 1766515645127150 != 1766515645127153 TServer::EnableGrpc on GrpcPort 23288, node 1 2025-12-23T18:47:25.599486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:25.599502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:25.599512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:25.599583Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:25.622654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29819 TClient is connected to server localhost:29819 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515645552 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:26.082685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:26.093173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:26.140961Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:26.205598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:26.364696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:26.446257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:28.377837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126937284569104:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.377941Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.378337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126937284569114:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.378382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:28.700466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.738533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.775908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.816260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.850978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.909885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.945654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:28.990337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:29.080239Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126941579537283:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.080348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.080650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126941579537288:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.080688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126941579537289:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.080715Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:29.085717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:29.100240Z node 1 ... .711620Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:50:02.711658Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:50:02.711670Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:50:02.711797Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30727 2025-12-23T18:50:03.148934Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:50:03.356866Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:30727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515802646 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:50:03.582689Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:03.592212Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:50:03.613303Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:50:03.703431Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:04.041330Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:04.155472Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:07.348676Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7587127600528370668:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:07.348805Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:50:08.328407Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127626298176274:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:08.328570Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:08.328961Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127626298176284:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:08.329032Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:08.410301Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:08.468822Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:08.529039Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:08.583460Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:08.633243Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:08.692280Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:08.754451Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:08.851870Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:08.993324Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127626298177173:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:08.993486Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:08.993553Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127626298177178:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:08.993866Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127626298177180:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:08.993945Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:08.999602Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:50:09.015133Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7587127626298177181:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:50:09.118567Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7587127630593144529:3783] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:50:11.941589Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-dbadmin >> TableCreation::MultipleTablesCreation [GOOD] >> TableCreation::ConcurrentUpdateTable |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpProxy::InvalidSessionID [GOOD] >> KqpProxy::LoadedMetadataAfterCompilationTimeout |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TableCreation::UpdateTableWithAclRollback |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] >> KqpProxy::DatabasesCacheForServerless |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TPQTest::TestSourceIdDropByUserWrites [GOOD] 
>> TPQTest::TestSourceIdDropBySourceIdCount >> test_canonical_records.py::test_restart_pdisk >> TLocksTest::BrokenSameShardLock [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TLocksTest::CK_Range_BrokenLockInf [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TLocksTest::GoodSameShardLock [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpBatchUpdate::ManyPartitions_2 [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] Test command err: 2025-12-23T18:49:43.241270Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127520047790781:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:43.250105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a0/r3tmp/tmp6Lc6lw/pdisk_1.dat 2025-12-23T18:49:43.525582Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:43.557061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:43.557166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:43.629925Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:43.636268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:43.779387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:19714 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515783683 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:43.957994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:43.966106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:43.985633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:44.136481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:44.186171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:44.263512Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:47.012059Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:49:47.012225Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127529816713124:2193];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:47.013103Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a0/r3tmp/tmpwAS6D2/pdisk_1.dat 2025-12-23T18:49:47.094719Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:47.188160Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:47.188925Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127529816712963:2081] 1766515786952286 != 1766515786952289 2025-12-23T18:49:47.189693Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:47.189759Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:47.237930Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:47.292878Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:15652 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515787295 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:49:47.496634Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:47.510907Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:47.527472Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:49:47.531502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:47.599811Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:47.692788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:50.754175Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127549051225869:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:50.754219Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:50.765951Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a0/r3tmp/tmpyANT6V/pdisk_1.dat 2025-12-23T18:49:50.833669Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:50.862622Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:50.862692Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:50.867431Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:50.988686Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:15492 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSu ... server localhost:11107 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515811018 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:11.247159Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-12-23T18:50:11.273751Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:11.349638Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:11.405339Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:15.281921Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587127654270617955:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:15.282020Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a0/r3tmp/tmpbJ7Ygh/pdisk_1.dat 2025-12-23T18:50:15.397048Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:15.407002Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7587127654270617930:2081] 1766515815280727 != 1766515815280730 2025-12-23T18:50:15.448344Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:15.455110Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:15.455215Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:15.488642Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:15.652409Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:13224 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515815547 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:15.737822Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:50:15.760869Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:15.889806Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:15.955581Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:50:20.210799Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587127678362962627:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:20.210875Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a0/r3tmp/tmpNaEAjx/pdisk_1.dat 2025-12-23T18:50:20.226959Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:20.363400Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:20.363496Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:20.363996Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:20.383172Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:20.424966Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:24782 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515820461 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:20.673143Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:20.679547Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:50:20.694450Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:50:20.700149Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:20.764941Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:20.811720Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD] Test command err: 2025-12-23T18:49:41.971867Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127510509118244:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:41.971968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a4/r3tmp/tmpgFIPY9/pdisk_1.dat 2025-12-23T18:49:42.342850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:42.382834Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:42.382934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:42.445637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:42.493701Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:42.634196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:27030 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515782549 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:42.861560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:42.875341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:42.898965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:42.908755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:43.000545Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:43.143059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:49:43.235175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:46.009619Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127531871442623:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:46.009674Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a4/r3tmp/tmpHuBzA2/pdisk_1.dat 2025-12-23T18:49:46.043140Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:46.124422Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:46.124507Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:46.138898Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127531871442598:2081] 1766515786007984 != 1766515786007987 2025-12-23T18:49:46.152999Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:46.164257Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:46.262319Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:4785 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515786217 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:49:46.407817Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:46.420863Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:46.433189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:49:46.437393Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:46.527139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:46.590277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:49.743837Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127542857911591:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:49.743901Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:49.755474Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:49:49.846007Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a4/r3tmp/tmpCG5A5a/pdisk_1.dat 2025-12-23T18:49:49.944451Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:49.944530Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:49.944712Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:49.945980Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587127542857911566:2081] 1766515789742880 != 1766515789742883 2025-12-23T18:49:49.980141Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:50.043343Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:2455 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusC ... reate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:10.933589Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:10.940951Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:50:10.960793Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:50:10.968369Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:50:11.071203Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:11.142962Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:15.227594Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587127655201718893:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:15.227678Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a4/r3tmp/tmpLig9nS/pdisk_1.dat 2025-12-23T18:50:15.249394Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:15.362852Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:15.366319Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7587127655201718866:2081] 1766515815225235 != 1766515815225238 2025-12-23T18:50:15.408557Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:15.408668Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:15.420938Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:15.497779Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:25456 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515815470 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:15.756264Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:15.769977Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:50:15.782479Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:50:15.788652Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:15.874888Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:15.933827Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:50:20.125773Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587127677545076402:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:20.125865Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a4/r3tmp/tmp0KoeAR/pdisk_1.dat 2025-12-23T18:50:20.151404Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:20.262415Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:20.262521Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:20.271121Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:20.276749Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7587127677545076376:2081] 1766515820124650 != 1766515820124653 2025-12-23T18:50:20.301545Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:20.330811Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:23599 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515820349 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:20.636383Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:20.642896Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:50:20.656423Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T18:50:20.663129Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:20.729735Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:20.787556Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD] Test command err: 2025-12-23T18:49:41.851176Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127509014667066:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:41.851906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:49:41.868963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a7/r3tmp/tmplAtG6O/pdisk_1.dat 2025-12-23T18:49:42.199634Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:42.233604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:42.233698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:42.245428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:42.285121Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:42.286257Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127509014666962:2081] 1766515781839891 != 1766515781839894 2025-12-23T18:49:42.355662Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:11334 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515782290 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:42.598484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:42.604150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:42.620681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:42.782289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:42.834311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:42.858901Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:45.425615Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127525743633557:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:45.425676Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a7/r3tmp/tmppMUkDD/pdisk_1.dat 2025-12-23T18:49:45.499065Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:45.592635Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:45.592692Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:45.596939Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:45.624519Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:45.786096Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:3678 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515785692 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:45.832551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:45.840269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:49:45.852002Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:49:45.857926Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:49:45.928605Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:49:45.985313Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:49.437447Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127543600006433:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:49.437511Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a7/r3tmp/tmpuIO7mx/pdisk_1.dat 2025-12-23T18:49:49.474806Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:49.562880Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:49.562965Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:49.564268Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:49.567042Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587127543600006406:2081] 1766515789436536 != 1766515789436539 2025-12-23T18:49:49.590226Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:49.746240Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:7493 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinishe ... 
ot@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515811018 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:11.274108Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:11.295251Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:50:11.321690Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:11.399395Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:11.517237Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:50:15.582022Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587127653662608730:2083];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:15.582725Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a7/r3tmp/tmprCgAyX/pdisk_1.dat 2025-12-23T18:50:15.608920Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:15.725785Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:15.742964Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7587127653662608673:2081] 1766515815578932 != 1766515815578935 2025-12-23T18:50:15.765504Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:15.765623Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:15.781534Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:15.856850Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:31835 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515815827 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:16.077966Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:16.084041Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:50:16.106560Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:16.181183Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:16.242261Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:20.398508Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587127675016842509:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:20.398596Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057a7/r3tmp/tmpN7O9hM/pdisk_1.dat 2025-12-23T18:50:20.425032Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:20.541756Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:20.541878Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:20.542272Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7587127675016842471:2081] 1766515820395452 != 1766515820395455 2025-12-23T18:50:20.549944Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:20.570017Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:20.690553Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:15305 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515820657 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:20.816159Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:20.822194Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:50:20.840688Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:20.929135Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:20.996790Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
>> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-clusteradmin >> TableCreation::UpdateTableWithAclModification [GOOD] >> TableCreation::UpdateTableAcl |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> test_canonical_records.py::test_topic >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TableCreation::ConcurrentUpdateTable [GOOD] >> TableCreation::CreateOldTable |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD] >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime [GOOD] |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_2 [GOOD] Test command err: Trying to start YDB, gRPC: 2426, MsgBus: 27298 2025-12-23T18:47:32.285619Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126955633931620:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:32.285787Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bd3/r3tmp/tmpY5IsWP/pdisk_1.dat 2025-12-23T18:47:32.540112Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:32.603420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:32.603516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:32.653535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:32.723145Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:32.723545Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126955633931507:2081] 1766515652276850 != 1766515652276853 TServer::EnableGrpc on GrpcPort 2426, node 1 2025-12-23T18:47:32.803045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:32.955546Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:32.955568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:32.955583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:32.955662Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27298 2025-12-23T18:47:33.294975Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:27298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515652727 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:33.520171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:33.525351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:33.540359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:33.709975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:33.853708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:33.925163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:35.839913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126968518835272:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.840056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.840459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126968518835282:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.840539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.215902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.256689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.297182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.344579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.387145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.449714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.511574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.599413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:36.694713Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126972813803454:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.694802Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.694874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126972813803459:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.695323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126972813803462:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.695374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:36.699964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sche ... 437Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4064, node 12 2025-12-23T18:50:11.408877Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:50:11.408927Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:50:11.408942Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:50:11.409072Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:50:11.418909Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22502 2025-12-23T18:50:12.096054Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22502 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515811382 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:50:12.224689Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:12.247804Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:50:12.356051Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:12.705434Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:12.823909Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:16.072073Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7587127639222133065:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:16.072186Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:50:17.152450Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127664991938632:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:17.152606Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:17.153047Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127664991938642:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:17.153118Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:17.293207Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:17.351541Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:17.435320Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:17.489073Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:17.543933Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:17.593407Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:17.649810Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:17.752035Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:17.887329Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127664991939540:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:17.887494Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:17.887952Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127664991939545:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:17.888031Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127664991939546:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:17.888533Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:17.894237Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:50:17.910167Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7587127664991939549:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:50:17.995789Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7587127664991939600:3792] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:50:20.767020Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false >> TableCreation::UpdateTableWithAclRollback [GOOD] >> TestScriptExecutionsUtils::TestRetryPolicyItem [GOOD] >> TestScriptExecutionsUtils::TestRetryLimiter [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] 2025-12-23T18:50:16.885615Z node 1 :KEYVALUE ERROR: keyvalue_state.cpp:3029: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] 2025-12-23T18:50:27.120412Z node 2 :KEYVALUE ERROR: keyvalue_state.cpp:3029: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::TimeSeriesAvg16 [GOOD] >> TFlatMetrics::TimeSeriesAVG [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAVG [GOOD] |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TestScriptExecutionsUtils::TestRetryLimiter [GOOD] Test command err: 2025-12-23T18:50:21.683738Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127683158796451:2183];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:21.683849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00600d/r3tmp/tmpsy6tsb/pdisk_1.dat 2025-12-23T18:50:22.002854Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127683158796305:2081] 1766515821654533 != 1766515821654536 2025-12-23T18:50:22.015968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:22.016074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:22.058832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:22.078128Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:12405 TServer::EnableGrpc on GrpcPort 29899, node 1 2025-12-23T18:50:22.334622Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:50:22.334668Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:50:22.334687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:50:22.334873Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515822162 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T18:50:22.483297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:22.691961Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:24.672356Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:50:24.675829Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:50:24.675884Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:50:24.675901Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:50:24.678046Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7587127696043699021:2487] Owner: [1:7587127696043699020:2486]. Describe result: PathErrorUnknown 2025-12-23T18:50:24.678071Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7587127696043699021:2487] Owner: [1:7587127696043699020:2486]. Creating table 2025-12-23T18:50:24.678112Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7587127696043699021:2487] Owner: [1:7587127696043699020:2486]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2025-12-23T18:50:24.678247Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7587127696043699022:2488] Owner: [1:7587127696043699020:2486]. Describe result: PathErrorUnknown 2025-12-23T18:50:24.678253Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7587127696043699022:2488] Owner: [1:7587127696043699020:2486]. Creating table 2025-12-23T18:50:24.678269Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7587127696043699022:2488] Owner: [1:7587127696043699020:2486]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2025-12-23T18:50:24.684092Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7587127696043699023:2489] Owner: [1:7587127696043699020:2486]. Describe result: PathErrorUnknown 2025-12-23T18:50:24.684142Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7587127696043699023:2489] Owner: [1:7587127696043699020:2486]. Creating table 2025-12-23T18:50:24.684185Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7587127696043699023:2489] Owner: [1:7587127696043699020:2486]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2025-12-23T18:50:24.686116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:24.688271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:24.689949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:24.696016Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7587127696043699021:2487] Owner: [1:7587127696043699020:2486]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2025-12-23T18:50:24.696043Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7587127696043699022:2488] Owner: [1:7587127696043699020:2486]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2025-12-23T18:50:24.696083Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7587127696043699022:2488] Owner: [1:7587127696043699020:2486]. Subscribe on create table tx: 281474976715659 2025-12-23T18:50:24.696084Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7587127696043699021:2487] Owner: [1:7587127696043699020:2486]. Subscribe on create table tx: 281474976715658 2025-12-23T18:50:24.696162Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7587127696043699023:2489] Owner: [1:7587127696043699020:2486]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2025-12-23T18:50:24.696174Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7587127696043699023:2489] Owner: [1:7587127696043699020:2486]. Subscribe on create table tx: 281474976715660 2025-12-23T18:50:24.699656Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7587127696043699021:2487] Owner: [1:7587127696043699020:2486]. Subscribe on tx: 281474976715658 registered 2025-12-23T18:50:24.699681Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7587127696043699022:2488] Owner: [1:7587127696043699020:2486]. Subscribe on tx: 281474976715659 registered 2025-12-23T18:50:24.699716Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7587127696043699023:2489] Owner: [1:7587127696043699020:2486]. 
Subscribe on tx: 281474976715660 registered 2025-12-23T18:50:24.814508Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7587127696043699021:2487] Owner: [1:7587127696043699020:2486]. Request: create. Transaction completed: 281474976715658. Doublechecking... 2025-12-23T18:50:24.850863Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7587127696043699022:2488] Owner: [1:7587127696043699020:2486]. Request: create. Transaction completed: 281474976715659. Doublechecking... 2025-12-23T18:50:24.855394Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7587127696043699023:2489] Owner: [1:7587127696043699020:2486]. Request: create. Transaction completed: 281474976715660. Doublechecking... 2025-12-23T18:50:24.894759Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7587127696043699021:2487] Owner: [1:7587127696043699020:2486]. Table already exists, number of columns: 33, has SecurityObject: true 2025-12-23T18:50:24.894877Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7587127696043699021:2487] Owner: [1:7587127696043699020:2486]. Column diff is empty, finishing 2025-12-23T18:50:24.895849Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7587127696043699021:2487] Owner: [1:7587127696043699020:2486]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2025-12-23T18:50:24.896927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:50:24.897875Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7587127696043699021:2487] Owner: [1:7587127696043699020:2486]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976715661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2025-12-23T18:50:24.897894Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_executions updater. SelfId: [1:7587127696043699021:2487] Owner: [1:7587127696043699020:2486]. Succe ... DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [1:7587127708928601801:2918], ActorId: [1:7587127708928601802:2919], TraceId: ExecutionId: d92b79b8-b14d4da1-fe4d9b55-ead976c4, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2025-12-23T18:50:27.250974Z node 1 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [1:7587127708928601800:2917], ActorId: [1:7587127708928601801:2918], TraceId: ExecutionId: d92b79b8-b14d4da1-fe4d9b55-ead976c4, RequestDatabase: /dc-1, Got response [1:7587127708928601802:2919] SUCCESS 2025-12-23T18:50:27.251021Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [1:7587127708928601799:2916] ActorId: [1:7587127708928601800:2917] Database: /dc-1 ExecutionId: d92b79b8-b14d4da1-fe4d9b55-ead976c4. 
Extracted script execution operation [1:7587127708928601802:2919], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [1:7587127696043699294:2669], LeaseGeneration: 0 2025-12-23T18:50:27.251041Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [1:7587127708928601799:2916] ActorId: [1:7587127708928601800:2917] Database: /dc-1 ExecutionId: d92b79b8-b14d4da1-fe4d9b55-ead976c4. Reply success 2025-12-23T18:50:27.251675Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=NjU2MDVkY2YtMTZjZWM1ZGYtYzcxY2M1MzctNDNhYzhjMmE=, workerId: [1:7587127708928601804:2494], local sessions count: 0 2025-12-23T18:50:27.279302Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd68rrje7zkb83s8bxdbdp1f", Request has 18444977557882.272344s seconds to be completed 2025-12-23T18:50:27.281560Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd68rrje7zkb83s8bxdbdp1f", Created new session, sessionId: ydb://session/3?node_id=1&id=OTg2YjI2YjItNDczNmIwYzgtZmNjMDk3NjQtMzgwMzA0ZjI=, workerId: [1:7587127708928601838:2508], database: /dc-1, longSession: 1, local sessions count: 1 2025-12-23T18:50:27.281743Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd68rrje7zkb83s8bxdbdp1f 2025-12-23T18:50:27.294389Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kd68rrjx5767ydbqnhfdkt16, Database: /dc-1, SessionId: ydb://session/3?node_id=1&id=OTg2YjI2YjItNDczNmIwYzgtZmNjMDk3NjQtMzgwMzA0ZjI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 38, targetId: [1:7587127708928601838:2508] 2025-12-23T18:50:27.294432Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 38 timeout: 600.000000s actor id: [1:7587127708928601842:2928] 2025-12-23T18:50:27.311759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:50:27.320250Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd68rrjx5767ydbqnhfdkt16", Forwarded response to sender actor, requestId: 38, sender: [1:7587127708928601840:2509], selfId: [1:7587127683158796539:2260], source: [1:7587127708928601838:2508] --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:50:27.328082Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [1:7587127708928601897:2980] Owner: [1:7587127708928601896:2979]. Describe result: PathErrorUnknown 2025-12-23T18:50:27.328177Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [1:7587127708928601897:2980] Owner: [1:7587127708928601896:2979]. Creating table 2025-12-23T18:50:27.328227Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [1:7587127708928601897:2980] Owner: [1:7587127708928601896:2979]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2025-12-23T18:50:27.330410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:27.331941Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [1:7587127708928601897:2980] Owner: [1:7587127708928601896:2979]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715685 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2025-12-23T18:50:27.331976Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [1:7587127708928601897:2980] Owner: [1:7587127708928601896:2979]. Subscribe on create table tx: 281474976715685 2025-12-23T18:50:27.334393Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [1:7587127708928601897:2980] Owner: [1:7587127708928601896:2979]. Subscribe on tx: 281474976715685 registered 2025-12-23T18:50:27.364281Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [1:7587127708928601897:2980] Owner: [1:7587127708928601896:2979]. Request: create. Transaction completed: 281474976715685. Doublechecking... 2025-12-23T18:50:27.436737Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [1:7587127708928601897:2980] Owner: [1:7587127708928601896:2979]. Table already exists, number of columns: 3, has SecurityObject: true 2025-12-23T18:50:27.436768Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [1:7587127708928601897:2980] Owner: [1:7587127708928601896:2979]. Column diff is empty, finishing 2025-12-23T18:50:27.460550Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd68rrr49kn1yre6f6e57ssd", Request has 18444977557882.091098s seconds to be completed 2025-12-23T18:50:27.462707Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd68rrr49kn1yre6f6e57ssd", Created new session, sessionId: ydb://session/3?node_id=1&id=NzA4OGUyMjctZjY5NjdhYjctYWI2MDVlNWMtZDQwODQyZg==, workerId: [1:7587127708928601981:2518], database: /dc-1, longSession: 1, local sessions count: 2 2025-12-23T18:50:27.462940Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd68rrr49kn1yre6f6e57ssd 2025-12-23T18:50:27.481345Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [1:7587127708928601987:3039] Owner: [1:7587127708928601986:3038]. Table already exists, number of columns: 3, has SecurityObject: true 2025-12-23T18:50:27.481397Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:444: Table test_table updater. SelfId: [1:7587127708928601987:3039] Owner: [1:7587127708928601986:3038]. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2025-12-23T18:50:27.481488Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [1:7587127708928601987:3039] Owner: [1:7587127708928601986:3038]. 
Created ESchemeOpAlterTable transaction for path: /dc-1/test/test_table 2025-12-23T18:50:27.483376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:50:27.484640Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [1:7587127708928601987:3039] Owner: [1:7587127708928601986:3038]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715686 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2025-12-23T18:50:27.484681Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [1:7587127708928601987:3039] Owner: [1:7587127708928601986:3038]. Subscribe on create table tx: 281474976715686 2025-12-23T18:50:27.485775Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [1:7587127708928601987:3039] Owner: [1:7587127708928601986:3038]. Subscribe on tx: 281474976715686 registered 2025-12-23T18:50:27.485858Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=NzA4OGUyMjctZjY5NjdhYjctYWI2MDVlNWMtZDQwODQyZg==, workerId: [1:7587127708928601981:2518], local sessions count: 1 2025-12-23T18:50:27.495255Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [1:7587127708928601987:3039] Owner: [1:7587127708928601986:3038]. Request: alter. Transaction completed: 281474976715686. Doublechecking... 2025-12-23T18:50:27.563232Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [1:7587127708928601987:3039] Owner: [1:7587127708928601986:3038]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:50:27.563276Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [1:7587127708928601987:3039] Owner: [1:7587127708928601986:3038]. Column diff is empty, finishing 2025-12-23T18:50:27.563343Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [1:7587127708928601987:3039] Owner: [1:7587127708928601986:3038]. Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2025-12-23T18:50:27.564163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:50:27.567025Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [1:7587127708928601987:3039] Owner: [1:7587127708928601986:3038]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976715687 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2025-12-23T18:50:27.567049Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [1:7587127708928601987:3039] Owner: [1:7587127708928601986:3038]. 
Successful alter request: ExecComplete 2025-12-23T18:50:27.590167Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd68rrw59teqgh15yf2b8ney", Request has 18444977557881.961484s seconds to be completed 2025-12-23T18:50:27.591809Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd68rrw59teqgh15yf2b8ney", Created new session, sessionId: ydb://session/3?node_id=1&id=YzVjZGIyMzktNjlhYjA3Ni02YThlMGM0Ny1lNmExNTY5Yw==, workerId: [1:7587127708928602028:2525], database: /dc-1, longSession: 1, local sessions count: 2 2025-12-23T18:50:27.591948Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd68rrw59teqgh15yf2b8ney 2025-12-23T18:50:27.613747Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=YzVjZGIyMzktNjlhYjA3Ni02YThlMGM0Ny1lNmExNTY5Yw==, workerId: [1:7587127708928602028:2525], local sessions count: 1 2025-12-23T18:50:27.614395Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=OTg2YjI2YjItNDczNmIwYzgtZmNjMDk3NjQtMzgwMzA0ZjI=, workerId: [1:7587127708928601838:2508], local sessions count: 0 >> TLocksTest::Range_BrokenLock3 [GOOD] |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> TTabletPipeTest::TestRewriteSameNode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:50:29.092009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:50:29.092090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:29.092124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:50:29.092159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:50:29.092193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:50:29.092235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:50:29.092303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:29.092412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:50:29.093464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:50:29.093716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:50:29.170703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:50:29.170763Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:29.180461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:50:29.180701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:50:29.180832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:50:29.184332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:50:29.184494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:50:29.185164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:29.185388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:50:29.187249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:29.187453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:50:29.188540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:29.188586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:29.188693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:50:29.188737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:29.188767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:50:29.188882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:50:29.323076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.324145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.324276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.324350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.324424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.324488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.324556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.324701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.324809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.324889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.324950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.325030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.325165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.325247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.325321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:50:30.210623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:50:30.211887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:50:30.211979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:50:30.212007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:50:30.212046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:50:30.212077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:50:30.212153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-12-23T18:50:30.213444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1522 } } CommitVersion { Step: 5000038 TxId: 101 } 2025-12-23T18:50:30.213486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:50:30.213617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1522 } } CommitVersion { Step: 5000038 TxId: 101 } 2025-12-23T18:50:30.213762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1522 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2025-12-23T18:50:30.215488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 
2025-12-23T18:50:30.215537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:50:30.215663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:50:30.215707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:50:30.215814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:50:30.215893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:30.215934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:30.215973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:50:30.216012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2025-12-23T18:50:30.217438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:50:30.217754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:50:30.218682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:30.218811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:30.219026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:30.219060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:50:30.219154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:50:30.219188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:50:30.219222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:50:30.219266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:50:30.219309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-12-23T18:50:30.219363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2715] message: TxId: 101 2025-12-23T18:50:30.219403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:50:30.219430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:50:30.219478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:50:30.219606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:50:30.222343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:50:30.222389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:738:2716] TestWaitNotification: OK eventTxId 101 2025-12-23T18:50:30.223060Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:50:30.223310Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 256us result status StatusSuccess 2025-12-23T18:50:30.223873Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL >> test_canonical_records.py::test_execute_minikql ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:109:2141] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:109:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:121:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [1:123:2150] sender: [1:125:2057] recipient: [1:109:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:160:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:162:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:165:2057] recipient: [1:105:2139] Leader for TabletID 9437185 is [1:123:2150] sender: [1:166:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:123:2150] sender: [1:169:2057] recipient: [1:168:2179] Leader for TabletID 9437185 is [1:170:2180] sender: [1:171:2057] recipient: [1:168:2179] Leader for TabletID 9437185 is [1:170:2180] sender: [1:200:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:120:2148] sender: [1:203:2057] recipient: [1:104:2138] Leader for TabletID 9437184 is [1:120:2148] sender: [1:206:2057] recipient: [1:205:2203] Leader for TabletID 9437184 is [1:207:2204] sender: [1:208:2057] recipient: [1:205:2203] Leader for TabletID 9437184 is [1:207:2204] sender: [1:236:2057] recipient: [1:14:2061] |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2025-12-23T18:49:48.907802Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127540372843515:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:48.907891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005783/r3tmp/tmpr7znHy/pdisk_1.dat 2025-12-23T18:49:49.177239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:49.177364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:49.182162Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:49.197624Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:49.214283Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127540372843491:2081] 1766515788906195 != 1766515788906198 2025-12-23T18:49:49.231781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:49.427478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:15195 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515789283 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:49.554544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:49.567764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:49:49.583614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:49.710094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:49.773175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:49.923593Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:49:52.411475Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127554603198106:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:52.415728Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005783/r3tmp/tmpujJjTC/pdisk_1.dat 2025-12-23T18:49:52.465241Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:52.577404Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:52.579708Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587127554603198051:2081] 1766515792386526 != 1766515792386529 2025-12-23T18:49:52.597965Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:52.598049Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:52.601423Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:52.654917Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:30642 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515792650 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:52.759900Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:49:52.767164Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:49:52.786047Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:52.849217Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:49:52.896344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:49:55.986447Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127568550896755:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:55.986517Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005783/r3tmp/tmpz65SCq/pdisk_1.dat 2025-12-23T18:49:56.003075Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:56.074179Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7587127568550896732:2081] 1766515795985991 != 1766515795985994 2025-12-23T18:49:56.078546Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:56.111902Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:56.111993Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:56.117040Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:56.205436Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:31924 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty V ... thId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515816835 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:17.042025Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:50:17.049503Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:50:17.068137Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:17.152003Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:17.220977Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:21.293408Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7587127682103973436:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:21.294029Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005783/r3tmp/tmpdPAvrp/pdisk_1.dat 2025-12-23T18:50:21.314629Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:21.417935Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:21.418045Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:21.418474Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:21.465269Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:21.506911Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:29692 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515821518 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:21.741998Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:50:21.765210Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:50:21.847196Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:21.966854Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:50:26.012972Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587127701966400617:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:26.013087Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005783/r3tmp/tmpU3ektA/pdisk_1.dat 2025-12-23T18:50:26.052382Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:26.157721Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:26.168883Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7587127697671433283:2081] 1766515826011452 != 1766515826011455 2025-12-23T18:50:26.173252Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:26.173363Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:26.203528Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:26.273158Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:15168 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515826257 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:26.442453Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:26.448377Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:50:26.458398Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-12-23T18:50:26.464099Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:26.537920Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:26.604062Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] >> KqpProxy::LoadedMetadataAfterCompilationTimeout [GOOD] >> KqpProxy::NoLocalSessionExecution |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:50:32.337435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:50:32.337535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:32.337585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:50:32.337637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:50:32.337680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:50:32.337714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:50:32.337790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:32.337864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:50:32.338947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:50:32.339223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:50:32.426982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:50:32.427048Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:32.433070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:50:32.433275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:50:32.433406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:50:32.436169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:50:32.436342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:50:32.437036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:32.437277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:50:32.440091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:32.440328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:50:32.441523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:32.441595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-12-23T18:50:32.441762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:50:32.441806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:32.441845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:50:32.442005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:50:32.579248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.580306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.580430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.580504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.580576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.580644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.580711Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.580835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.580917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.580998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.581066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.581157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.581294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.581385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.581454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 8:50:33.160430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:50:33.160471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:50:33.162342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:50:33.162402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:50:33.162444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:50:33.164056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:50:33.164115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:50:33.164170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:50:33.164248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:50:33.164377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:50:33.165844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:50:33.165979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:50:33.166259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:33.166361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep 
Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:50:33.166421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:50:33.166722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:50:33.166816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:50:33.166969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:50:33.167038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:50:33.168764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:33.168813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:33.168994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:33.169036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:50:33.169280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:50:33.169329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:50:33.169417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:50:33.169456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:50:33.169523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:50:33.169559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:50:33.169597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:50:33.169656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:50:33.169704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:50:33.169733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for 
txid 1:0 2025-12-23T18:50:33.169790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:50:33.169837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:50:33.169877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:50:33.170404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:50:33.170515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:50:33.170563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:50:33.170614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:50:33.170657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:50:33.170782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:50:33.173396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:50:33.173889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1766515833.175148 1474646 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-12-23T18:50:33.175706Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:671:2661] Bootstrap 2025-12-23T18:50:33.176833Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:671:2661] Become StateWork (SchemeCache [1:676:2666]) 2025-12-23T18:50:33.179750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:50:33.180150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: 
TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:33.180280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } }, at schemeshard: 72057594046678944 2025-12-23T18:50:33.180788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1766515833 seconds (20445 days, 56 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2025-12-23T18:50:33.181789Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:671:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:50:33.184598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1766515833 seconds (20445 days, 56 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:50:33.184822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1766515833 seconds (20445 days, 56 years). The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-12-23T18:50:33.185229Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:47:43.401048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:47:43.401137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:43.401202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:47:43.401253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:47:43.401295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:47:43.401322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:47:43.401374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:43.401429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:47:43.402217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:43.402492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:47:43.529548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:47:43.529633Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:43.530451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:43.546022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:47:43.546193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:47:43.546338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:47:43.554147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:47:43.554440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:47:43.555207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:43.555553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:43.559340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:43.559514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:47:43.560785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:43.560841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:43.560979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:47:43.561024Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:47:43.561065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:47:43.561222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:47:43.567801Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-12-23T18:47:43.697414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:47:43.697634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:43.697831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:47:43.697893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:47:43.698128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:47:43.698181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:43.700762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:43.700946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:47:43.701172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:43.701234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:47:43.701277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:47:43.701317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:47:43.703354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:43.703404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:47:43.703463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:47:43.705026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:43.705073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:43.705121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:47:43.705171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:47:43.708579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:47:43.710264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:47:43.710390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:47:43.711388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:43.711507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:47:43.711568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:47:43.711795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:47:43.711837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:47:43.711983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:47:43.712029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:43.713803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard ... as 4 2025-12-23T18:50:31.394075Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 190, publications: 4, subscribers: 0 2025-12-23T18:50:31.394106Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 137], 6 2025-12-23T18:50:31.394130Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 138], 6 2025-12-23T18:50:31.394149Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 139], 5 2025-12-23T18:50:31.394166Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 140], 2 2025-12-23T18:50:31.395884Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:50:31.395969Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:50:31.396015Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 190 2025-12-23T18:50:31.396045Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 137], version: 6 2025-12-23T18:50:31.396078Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 137] was 2 2025-12-23T18:50:31.397662Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:50:31.397736Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:50:31.397769Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 190 2025-12-23T18:50:31.397796Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 138], version: 6 2025-12-23T18:50:31.397826Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 
138] was 2 2025-12-23T18:50:31.399362Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:50:31.399461Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:50:31.399502Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 190 2025-12-23T18:50:31.399536Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2025-12-23T18:50:31.399576Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2025-12-23T18:50:31.401145Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:50:31.401218Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2025-12-23T18:50:31.401246Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 190 2025-12-23T18:50:31.401272Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 2 2025-12-23T18:50:31.401302Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 3 2025-12-23T18:50:31.401366Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 190, subscribers: 0 2025-12-23T18:50:31.404315Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2025-12-23T18:50:31.404455Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2025-12-23T18:50:31.406233Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2025-12-23T18:50:31.406325Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 TestModificationResult got TxId: 190, wait until txId: 190 TestWaitNotification wait txId: 190 2025-12-23T18:50:31.407437Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- 
TTxNotificationSubscriber for txId 190: send EvNotifyTxCompletion 2025-12-23T18:50:31.407474Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 190 2025-12-23T18:50:31.408864Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 190, at schemeshard: 72057594046678944 2025-12-23T18:50:31.408990Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2025-12-23T18:50:31.409033Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [32:5314:6865] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2025-12-23T18:50:31.410159Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2025-12-23T18:50:31.410196Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2025-12-23T18:50:31.410270Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2025-12-23T18:50:31.410292Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2025-12-23T18:50:31.410337Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2025-12-23T18:50:31.410357Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2025-12-23T18:50:31.410416Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2025-12-23T18:50:31.410436Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2025-12-23T18:50:31.410501Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2025-12-23T18:50:31.410520Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2025-12-23T18:50:31.412410Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2025-12-23T18:50:31.412584Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2025-12-23T18:50:31.412626Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [32:5317:6868] 2025-12-23T18:50:31.412978Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2025-12-23T18:50:31.413160Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2025-12-23T18:50:31.413186Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [32:5317:6868] 2025-12-23T18:50:31.413284Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 2025-12-23T18:50:31.413354Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 72057594046678944 2025-12-23T18:50:31.413422Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2025-12-23T18:50:31.413443Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [32:5317:6868] 2025-12-23T18:50:31.413482Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2025-12-23T18:50:31.413501Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [32:5317:6868] 2025-12-23T18:50:31.413547Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2025-12-23T18:50:31.413669Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2025-12-23T18:50:31.413690Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [32:5317:6868] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:50:32.288835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:50:32.288926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:32.288968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:50:32.289008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:50:32.289049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:50:32.289107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:50:32.289176Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:32.289247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:50:32.290223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:50:32.290504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:50:32.383733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:50:32.383826Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:32.399912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:50:32.400135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:50:32.400240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:50:32.407550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:50:32.407748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:50:32.408368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:32.408562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:50:32.411457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:32.411678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:50:32.412928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:32.412999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:32.413152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:50:32.413202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:32.413247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:50:32.413389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:50:32.570137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.571209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.571393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.571465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.571530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.571592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.571653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.571799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.571903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.571989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.572050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.572130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.572242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.572317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:32.572382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
46678944, txId: 102, path id: 38 2025-12-23T18:50:33.560620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:33.560697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:50:33.561325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:50:33.561468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:50:33.561518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:50:33.561565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2025-12-23T18:50:33.561613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:50:33.561718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:50:33.567748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:50:33.580279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1149 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:50:33.580345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:50:33.580537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1149 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:50:33.580683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1149 } } CommitVersion { Step: 5000039 TxId: 102 } debug: 
NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:50:33.581891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 719 RawX2: 4294970001 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:50:33.581960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:50:33.582110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 719 RawX2: 4294970001 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:50:33.582184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:50:33.582321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 719 RawX2: 4294970001 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:50:33.582406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:33.582473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:33.582523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:50:33.582566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:50:33.585428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:33.585965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:33.586117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:50:33.586185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:50:33.586309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:50:33.586349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:50:33.586413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:50:33.586450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:50:33.586488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:50:33.586554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:747:2725] message: TxId: 102 2025-12-23T18:50:33.586606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:50:33.586658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:50:33.586703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:50:33.586884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:50:33.588935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:50:33.588991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:805:2775] TestWaitNotification: OK eventTxId 102 2025-12-23T18:50:33.589605Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:50:33.589868Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 270us result status StatusSuccess 2025-12-23T18:50:33.590415Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpProxy::DatabasesCacheForServerless [GOOD] >> TableCreation::CreateOldTable [GOOD] >> TableCreation::UpdateTableAcl [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::UpdateTableAcl [GOOD] Test command err: 2025-12-23T18:50:07.952741Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127622710459877:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:07.952798Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00600c/r3tmp/tmpzey0JB/pdisk_1.dat 2025-12-23T18:50:08.362160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:08.362312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:08.397510Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:08.462836Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:8034 TServer::EnableGrpc on GrpcPort 24075, node 1 2025-12-23T18:50:08.927600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:50:08.927732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:50:08.927742Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:50:08.927847Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:50:08.967627Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515808568 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:09.389710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:10.958222Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:50:10.985306Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:50:10.985377Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:50:10.985399Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:50:10.988504Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7587127635595362577:2493] Owner: [1:7587127635595362573:2490]. Describe result: PathErrorUnknown 2025-12-23T18:50:10.988518Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7587127635595362577:2493] Owner: [1:7587127635595362573:2490]. Creating table 2025-12-23T18:50:10.988533Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7587127635595362575:2491] Owner: [1:7587127635595362573:2490]. Describe result: PathErrorUnknown 2025-12-23T18:50:10.988542Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. 
SelfId: [1:7587127635595362575:2491] Owner: [1:7587127635595362573:2490]. Creating table 2025-12-23T18:50:10.988563Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7587127635595362577:2493] Owner: [1:7587127635595362573:2490]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2025-12-23T18:50:10.988586Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7587127635595362575:2491] Owner: [1:7587127635595362573:2490]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2025-12-23T18:50:10.988715Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7587127635595362576:2492] Owner: [1:7587127635595362573:2490]. Describe result: PathErrorUnknown 2025-12-23T18:50:10.988722Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7587127635595362576:2492] Owner: [1:7587127635595362573:2490]. Creating table 2025-12-23T18:50:10.988742Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7587127635595362576:2492] Owner: [1:7587127635595362573:2490]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2025-12-23T18:50:11.000223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:11.002487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:11.006324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:11.013648Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7587127635595362576:2492] Owner: [1:7587127635595362573:2490]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2025-12-23T18:50:11.013736Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7587127635595362576:2492] Owner: [1:7587127635595362573:2490]. Subscribe on create table tx: 281474976710660 2025-12-23T18:50:11.014031Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7587127635595362577:2493] Owner: [1:7587127635595362573:2490]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2025-12-23T18:50:11.014046Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7587127635595362577:2493] Owner: [1:7587127635595362573:2490]. 
Subscribe on create table tx: 281474976710658 2025-12-23T18:50:11.015438Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7587127635595362575:2491] Owner: [1:7587127635595362573:2490]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2025-12-23T18:50:11.015474Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7587127635595362575:2491] Owner: [1:7587127635595362573:2490]. Subscribe on create table tx: 281474976710659 2025-12-23T18:50:11.016272Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7587127635595362576:2492] Owner: [1:7587127635595362573:2490]. Subscribe on tx: 281474976710660 registered 2025-12-23T18:50:11.016331Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7587127635595362575:2491] Owner: [1:7587127635595362573:2490]. Subscribe on tx: 281474976710659 registered 2025-12-23T18:50:11.018209Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7587127635595362577:2493] Owner: [1:7587127635595362573:2490]. Subscribe on tx: 281474976710658 registered 2025-12-23T18:50:11.192139Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7587127635595362576:2492] Owner: [1:7587127635595362573:2490]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-12-23T18:50:11.235850Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7587127635595362577:2493] Owner: [1:7587127635595362573:2490]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-12-23T18:50:11.244966Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7587127635595362575:2491] Owner: [1:7587127635595362573:2490]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-12-23T18:50:11.289498Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7587127635595362576:2492] Owner: [1:7587127635595362573:2490]. Table already exists, number of columns: 6, has SecurityObject: true 2025-12-23T18:50:11.289542Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7587127635595362576:2492] Owner: [1:7587127635595362573:2490]. Column diff is empty, finishing 2025-12-23T18:50:11.290511Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7587127635595362576:2492] Owner: [1:7587127635595362573:2490]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2025-12-23T18:50:11.291457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:50:11.292981Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7587127635595362576:2492] Owner: [1:7587127635595362573:2490]. 
TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2025-12-23T18:50:11.292995Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_execution_leases updater. SelfId: [1:7587127635595362576:2492] Owner: [1:7587127635595362573:2490]. Successful alter request: ExecComplete 2025-12-23T18:50:11.299436Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:75871276355953 ... (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-12-23T18:50:34.977616Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZGRhZWMxODMtNzk0NGJlY2QtNjU0ODViMDUtZjcwNGUxZjA=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 40, targetId: [3:7587127738977181411:2514] 2025-12-23T18:50:34.977669Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 40 timeout: 300.000000s actor id: [3:7587127738977181413:2948] 2025-12-23T18:50:34.989549Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 40, sender: [3:7587127738977181412:2515], selfId: [3:7587127708912408768:2252], source: [3:7587127738977181411:2514] 2025-12-23T18:50:34.989911Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587127738977181408:2946], ActorId: [3:7587127738977181409:2947], TraceId: ExecutionId: e5c84d32-917b701c-1ac82c12-aaa879e9, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ZGRhZWMxODMtNzk0NGJlY2QtNjU0ODViMDUtZjcwNGUxZjA=, TxId: 2025-12-23T18:50:34.990571Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587127738977181408:2946], ActorId: [3:7587127738977181409:2947], TraceId: ExecutionId: e5c84d32-917b701c-1ac82c12-aaa879e9, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ZGRhZWMxODMtNzk0NGJlY2QtNjU0ODViMDUtZjcwNGUxZjA=, TxId: 2025-12-23T18:50:34.990613Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587127738977181408:2946], ActorId: [3:7587127738977181409:2947], TraceId: ExecutionId: e5c84d32-917b701c-1ac82c12-aaa879e9, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2025-12-23T18:50:34.990725Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587127738977181407:2945], ActorId: [3:7587127738977181408:2946], TraceId: ExecutionId: e5c84d32-917b701c-1ac82c12-aaa879e9, RequestDatabase: /dc-1, Got response [3:7587127738977181409:2947] SUCCESS 2025-12-23T18:50:34.990848Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7587127738977181406:2944] ActorId: [3:7587127738977181407:2945] Database: /dc-1 ExecutionId: e5c84d32-917b701c-1ac82c12-aaa879e9. 
Extracted script execution operation [3:7587127738977181409:2947], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7587127726092278831:2669], LeaseGeneration: 0 2025-12-23T18:50:34.990915Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7587127738977181406:2944] ActorId: [3:7587127738977181407:2945] Database: /dc-1 ExecutionId: e5c84d32-917b701c-1ac82c12-aaa879e9. Reply success 2025-12-23T18:50:34.991215Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZGRhZWMxODMtNzk0NGJlY2QtNjU0ODViMDUtZjcwNGUxZjA=, workerId: [3:7587127738977181411:2514], local sessions count: 0 2025-12-23T18:50:35.014620Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd68s0465x01c3mz457nw1vp", Request has 18444977557874.537026s seconds to be completed 2025-12-23T18:50:35.017258Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd68s0465x01c3mz457nw1vp", Created new session, sessionId: ydb://session/3?node_id=3&id=MjA4M2ViMjMtYWJmZmE1NjEtMTNkZWIzYTgtNWYzODgxNjA=, workerId: [3:7587127743272148742:2528], database: /dc-1, longSession: 1, local sessions count: 1 2025-12-23T18:50:35.017482Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd68s0465x01c3mz457nw1vp 2025-12-23T18:50:35.026045Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kd68s04h68t0t7zta2e2sn18, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=MjA4M2ViMjMtYWJmZmE1NjEtMTNkZWIzYTgtNWYzODgxNjA=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 42, targetId: [3:7587127743272148742:2528] 2025-12-23T18:50:35.026095Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 42 timeout: 600.000000s actor id: [3:7587127743272148746:2957] 2025-12-23T18:50:35.040292Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:50:35.045093Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd68s04h68t0t7zta2e2sn18", Forwarded response to sender actor, requestId: 42, sender: [3:7587127743272148744:2529], selfId: [3:7587127708912408768:2252], source: [3:7587127743272148742:2528] --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:50:35.053649Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7587127743272148772:2980] Owner: [3:7587127743272148770:2978]. Describe result: PathErrorUnknown 2025-12-23T18:50:35.053672Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7587127743272148772:2980] Owner: [3:7587127743272148770:2978]. Creating table 2025-12-23T18:50:35.053698Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7587127743272148772:2980] Owner: [3:7587127743272148770:2978]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2025-12-23T18:50:35.058631Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:35.059856Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7587127743272148772:2980] Owner: [3:7587127743272148770:2978]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710687 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2025-12-23T18:50:35.059895Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7587127743272148772:2980] Owner: [3:7587127743272148770:2978]. Subscribe on create table tx: 281474976710687 2025-12-23T18:50:35.062153Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587127743272148772:2980] Owner: [3:7587127743272148770:2978]. Subscribe on tx: 281474976710687 registered 2025-12-23T18:50:35.091466Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587127743272148772:2980] Owner: [3:7587127743272148770:2978]. Request: create. Transaction completed: 281474976710687. Doublechecking... 2025-12-23T18:50:35.191026Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587127743272148772:2980] Owner: [3:7587127743272148770:2978]. Table already exists, number of columns: 3, has SecurityObject: true 2025-12-23T18:50:35.191080Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587127743272148772:2980] Owner: [3:7587127743272148770:2978]. Column diff is empty, finishing 2025-12-23T18:50:35.214661Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd68s0aed5wfscmjb4t6pvrz", Request has 18444977557874.336989s seconds to be completed 2025-12-23T18:50:35.217010Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd68s0aed5wfscmjb4t6pvrz", Created new session, sessionId: ydb://session/3?node_id=3&id=NGIxMDlkZGItM2MyMWIxZjYtMTExMzI3YmEtYzdjZmExMjg=, workerId: [3:7587127743272148885:2538], database: /dc-1, longSession: 1, local sessions count: 2 2025-12-23T18:50:35.217207Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd68s0aed5wfscmjb4t6pvrz 2025-12-23T18:50:35.235552Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587127743272148891:3068] Owner: [3:7587127743272148890:3067]. Table already exists, number of columns: 3, has SecurityObject: true 2025-12-23T18:50:35.235602Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587127743272148891:3068] Owner: [3:7587127743272148890:3067]. Column diff is empty, finishing 2025-12-23T18:50:35.235713Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7587127743272148891:3068] Owner: [3:7587127743272148890:3067]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2025-12-23T18:50:35.236827Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:50:35.240102Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7587127743272148891:3068] Owner: [3:7587127743272148890:3067]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710688 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2025-12-23T18:50:35.240130Z node 3 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [3:7587127743272148891:3068] Owner: [3:7587127743272148890:3067]. Successful alter request: ExecComplete 2025-12-23T18:50:35.242865Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=NGIxMDlkZGItM2MyMWIxZjYtMTExMzI3YmEtYzdjZmExMjg=, workerId: [3:7587127743272148885:2538], local sessions count: 1 2025-12-23T18:50:35.248809Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd68s0bg6tcf59cd1qg658j6", Request has 18444977557874.302837s seconds to be completed 2025-12-23T18:50:35.250650Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd68s0bg6tcf59cd1qg658j6", Created new session, sessionId: ydb://session/3?node_id=3&id=YzE2MmQ4MDEtYjQzNGY3NmUtMjY4YTY0Ny0yYzkzZWU0Ng==, workerId: [3:7587127743272148903:2542], database: /dc-1, longSession: 1, local sessions count: 2 2025-12-23T18:50:35.250881Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd68s0bg6tcf59cd1qg658j6 2025-12-23T18:50:35.275963Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YzE2MmQ4MDEtYjQzNGY3NmUtMjY4YTY0Ny0yYzkzZWU0Ng==, workerId: [3:7587127743272148903:2542], local sessions count: 1 2025-12-23T18:50:35.277694Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=MjA4M2ViMjMtYWJmZmE1NjEtMTNkZWIzYTgtNWYzODgxNjA=, workerId: [3:7587127743272148742:2528], local sessions count: 0 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:91:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:94:2057] recipient: [13:93:2122] Leader for TabletID 72057594037927937 is [13:95:2123] sender: [13:96:2057] recipient: [13:93:2122] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:95:2123] Leader for TabletID 72057594037927937 is [13:95:2123] sender: [13:211:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:91:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:94:2057] recipient: [14:93:2122] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:96:2057] recipient: [14:93:2122] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! 
new actor is[14:95:2123] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:211:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:92:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:95:2057] recipient: [15:94:2122] Leader for TabletID 72057594037927937 is [15:96:2123] sender: [15:97:2057] recipient: [15:94:2122] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:96:2123] Leader for TabletID 72057594037927937 is [15:96:2123] sender: [15:212:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD] Test command err: 2025-12-23T18:50:12.289872Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127644106444711:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:12.290081Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00601b/r3tmp/tmph0x4iK/pdisk_1.dat 2025-12-23T18:50:12.550285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:12.550406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:12.751772Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127644106444475:2081] 1766515812235423 != 1766515812235426 2025-12-23T18:50:12.777846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:12.785433Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:16474 TServer::EnableGrpc on GrpcPort 1796, node 1 2025-12-23T18:50:13.059072Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:50:13.059091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:50:13.059125Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-12-23T18:50:13.059223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515812873 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-12-23T18:50:13.170902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:50:13.290924Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:15.324638Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:50:15.329250Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:50:15.329280Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:50:15.329295Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:50:15.336813Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7587127656991347195:2488] Owner: [1:7587127656991347193:2487]. Describe result: PathErrorUnknown 2025-12-23T18:50:15.336832Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7587127656991347195:2488] Owner: [1:7587127656991347193:2487]. Creating table 2025-12-23T18:50:15.336869Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7587127656991347195:2488] Owner: [1:7587127656991347193:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2025-12-23T18:50:15.336885Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7587127656991347200:2493] Owner: [1:7587127656991347193:2487]. Describe result: PathErrorUnknown 2025-12-23T18:50:15.336898Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7587127656991347200:2493] Owner: [1:7587127656991347193:2487]. Creating table 2025-12-23T18:50:15.336927Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7587127656991347200:2493] Owner: [1:7587127656991347193:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2025-12-23T18:50:15.337062Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. 
SelfId: [1:7587127656991347199:2492] Owner: [1:7587127656991347193:2487]. Describe result: PathErrorUnknown 2025-12-23T18:50:15.337076Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7587127656991347199:2492] Owner: [1:7587127656991347193:2487]. Creating table 2025-12-23T18:50:15.337095Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7587127656991347199:2492] Owner: [1:7587127656991347193:2487]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2025-12-23T18:50:15.342429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:15.346184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:15.347783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:15.353151Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7587127656991347200:2493] Owner: [1:7587127656991347193:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2025-12-23T18:50:15.353153Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7587127656991347199:2492] Owner: [1:7587127656991347193:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2025-12-23T18:50:15.353187Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7587127656991347200:2493] Owner: [1:7587127656991347193:2487]. Subscribe on create table tx: 281474976710659 2025-12-23T18:50:15.353188Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7587127656991347199:2492] Owner: [1:7587127656991347193:2487]. Subscribe on create table tx: 281474976710660 2025-12-23T18:50:15.353253Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7587127656991347195:2488] Owner: [1:7587127656991347193:2487]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2025-12-23T18:50:15.353268Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7587127656991347195:2488] Owner: [1:7587127656991347193:2487]. Subscribe on create table tx: 281474976710658 2025-12-23T18:50:15.356146Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7587127656991347199:2492] Owner: [1:7587127656991347193:2487]. 
Subscribe on tx: 281474976710660 registered 2025-12-23T18:50:15.356157Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7587127656991347200:2493] Owner: [1:7587127656991347193:2487]. Subscribe on tx: 281474976710659 registered 2025-12-23T18:50:15.356203Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7587127656991347195:2488] Owner: [1:7587127656991347193:2487]. Subscribe on tx: 281474976710658 registered 2025-12-23T18:50:15.466383Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7587127656991347199:2492] Owner: [1:7587127656991347193:2487]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-12-23T18:50:15.512511Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7587127656991347195:2488] Owner: [1:7587127656991347193:2487]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-12-23T18:50:15.514221Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7587127656991347200:2493] Owner: [1:7587127656991347193:2487]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-12-23T18:50:15.525044Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7587127656991347199:2492] Owner: [1:7587127656991347193:2487]. Table already exists, number of columns: 6, has SecurityObject: true 2025-12-23T18:50:15.525083Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7587127656991347199:2492] Owner: [1:7587127656991347193:2487]. Column diff is empty, finishing 2025-12-23T18:50:15.526269Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7587127656991347199:2492] Owner: [1:7587127656991347193:2487]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2025-12-23T18:50:15.527325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:50:15.528960Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7587127656991347199:2492] Owner: [1:7587127656991347193:2487]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2025-12-23T18:50:15.529003Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_execution_leases updater. SelfId: [1:7587127656991347199:2492] Owner: [ ... ExecutionId: cf984e10-32e3d4d6-a08ccde5-d1cdd8a2, RequestDatabase: /dc-1, Bootstrap. 
Database: /dc-1, IsSystemUser: 1, run create session 2025-12-23T18:50:35.104774Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977557874.446869s seconds to be completed 2025-12-23T18:50:35.107365Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=3&id=ZmQ4OTlmOS1kMmZmYmM2ZC01MDEyNDgwOS1mZTc4ZDgxMw==, workerId: [3:7587127742346174849:2515], database: /dc-1, longSession: 1, local sessions count: 1 2025-12-23T18:50:35.107583Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T18:50:35.107824Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587127742346174846:2942], ActorId: [3:7587127742346174847:2943], TraceId: ExecutionId: cf984e10-32e3d4d6-a08ccde5-d1cdd8a2, RequestDatabase: /dc-1, RunDataQuery with SessionId: ydb://session/3?node_id=3&id=ZmQ4OTlmOS1kMmZmYmM2ZC01MDEyNDgwOS1mZTc4ZDgxMw==, TxId: , text: -- TGetScriptExecutionOperationQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT run_script_actor_id, operation_status, execution_status, finalization_status, query_text, syntax, execution_mode, result_set_metas, plan, plan_compressed, plan_compression_method, issues, transient_issues, stats, ast, ast_compressed, ast_compression_method, graph_compressed IS NOT NULL AS has_graph, retry_state, user_token FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-12-23T18:50:35.108187Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZmQ4OTlmOS1kMmZmYmM2ZC01MDEyNDgwOS1mZTc4ZDgxMw==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 40, targetId: [3:7587127742346174849:2515] 2025-12-23T18:50:35.108226Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 40 timeout: 300.000000s actor id: [3:7587127742346174851:2944] 2025-12-23T18:50:35.117833Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 40, sender: [3:7587127742346174850:2516], selfId: [3:7587127712281402242:2265], source: [3:7587127742346174849:2515] 2025-12-23T18:50:35.118856Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587127742346174846:2942], ActorId: [3:7587127742346174847:2943], TraceId: ExecutionId: cf984e10-32e3d4d6-a08ccde5-d1cdd8a2, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ZmQ4OTlmOS1kMmZmYmM2ZC01MDEyNDgwOS1mZTc4ZDgxMw==, TxId: 2025-12-23T18:50:35.119436Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587127742346174846:2942], ActorId: [3:7587127742346174847:2943], TraceId: ExecutionId: cf984e10-32e3d4d6-a08ccde5-d1cdd8a2, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ZmQ4OTlmOS1kMmZmYmM2ZC01MDEyNDgwOS1mZTc4ZDgxMw==, TxId: 2025-12-23T18:50:35.119474Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587127742346174846:2942], ActorId: [3:7587127742346174847:2943], TraceId: ExecutionId: cf984e10-32e3d4d6-a08ccde5-d1cdd8a2, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2025-12-23T18:50:35.119605Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7587127742346174845:2941], ActorId: [3:7587127742346174846:2942], TraceId: ExecutionId: cf984e10-32e3d4d6-a08ccde5-d1cdd8a2, RequestDatabase: /dc-1, Got response [3:7587127742346174847:2943] SUCCESS 2025-12-23T18:50:35.119670Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7587127742346174844:2940] ActorId: [3:7587127742346174845:2941] Database: /dc-1 ExecutionId: cf984e10-32e3d4d6-a08ccde5-d1cdd8a2. Extracted script execution operation [3:7587127742346174847:2943], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7587127725166304970:2663], LeaseGeneration: 0 2025-12-23T18:50:35.119692Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7587127742346174844:2940] ActorId: [3:7587127742346174845:2941] Database: /dc-1 ExecutionId: cf984e10-32e3d4d6-a08ccde5-d1cdd8a2. 
Reply success 2025-12-23T18:50:35.120452Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZmQ4OTlmOS1kMmZmYmM2ZC01MDEyNDgwOS1mZTc4ZDgxMw==, workerId: [3:7587127742346174849:2515], local sessions count: 0 2025-12-23T18:50:35.138857Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd68s082425ah20z1maa86ar", Request has 18444977557874.412831s seconds to be completed 2025-12-23T18:50:35.141141Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd68s082425ah20z1maa86ar", Created new session, sessionId: ydb://session/3?node_id=3&id=ZTMyNmYyZDktMjZhOTQwZGItOThkNGNlMzQtY2ZiNTJjMjQ=, workerId: [3:7587127742346174884:2529], database: /dc-1, longSession: 1, local sessions count: 1 2025-12-23T18:50:35.141354Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd68s082425ah20z1maa86ar 2025-12-23T18:50:35.151292Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kd68s08e0xhz9hkedr69z1s5, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZTMyNmYyZDktMjZhOTQwZGItOThkNGNlMzQtY2ZiNTJjMjQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 42, targetId: [3:7587127742346174884:2529] 2025-12-23T18:50:35.151338Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 42 timeout: 600.000000s actor id: [3:7587127742346174887:2953] 2025-12-23T18:50:35.166237Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:50:35.172136Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd68s08e0xhz9hkedr69z1s5", Forwarded response to sender actor, requestId: 42, sender: [3:7587127742346174886:2530], selfId: [3:7587127712281402242:2265], source: [3:7587127742346174884:2529] --------------------------- INIT FINISHED --------------------------- 2025-12-23T18:50:35.181548Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7587127742346174905:2967] Owner: [3:7587127742346174904:2966]. Describe result: PathErrorUnknown 2025-12-23T18:50:35.181598Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7587127742346174905:2967] Owner: [3:7587127742346174904:2966]. Creating table 2025-12-23T18:50:35.181643Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7587127742346174905:2967] Owner: [3:7587127742346174904:2966]. Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2025-12-23T18:50:35.187598Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:35.189316Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7587127742346174905:2967] Owner: [3:7587127742346174904:2966]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715687 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2025-12-23T18:50:35.189358Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7587127742346174905:2967] Owner: [3:7587127742346174904:2966]. Subscribe on create table tx: 281474976715687 2025-12-23T18:50:35.191591Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7587127742346174905:2967] Owner: [3:7587127742346174904:2966]. Subscribe on tx: 281474976715687 registered 2025-12-23T18:50:35.226020Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7587127742346174905:2967] Owner: [3:7587127742346174904:2966]. Request: create. Transaction completed: 281474976715687. Doublechecking... 2025-12-23T18:50:35.294385Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587127742346174905:2967] Owner: [3:7587127742346174904:2966]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:50:35.294422Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587127742346174905:2967] Owner: [3:7587127742346174904:2966]. Column diff is empty, finishing 2025-12-23T18:50:35.294961Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7587127742346175024:3060] Owner: [3:7587127742346175023:3059]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:50:35.294986Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7587127742346175024:3060] Owner: [3:7587127742346175023:3059]. Column diff is empty, finishing 2025-12-23T18:50:35.321387Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd68s0dscn4wh7633wx82qbh", Request has 18444977557874.230259s seconds to be completed 2025-12-23T18:50:35.323270Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd68s0dscn4wh7633wx82qbh", Created new session, sessionId: ydb://session/3?node_id=3&id=Zjc3NGM1NDYtMjQxMDkyZjktMmI2YzNkNjktNWNhZTc2ZTg=, workerId: [3:7587127742346175030:2539], database: /dc-1, longSession: 1, local sessions count: 2 2025-12-23T18:50:35.323422Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd68s0dscn4wh7633wx82qbh 2025-12-23T18:50:35.347594Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=Zjc3NGM1NDYtMjQxMDkyZjktMmI2YzNkNjktNWNhZTc2ZTg=, workerId: [3:7587127742346175030:2539], local sessions count: 1 2025-12-23T18:50:35.347934Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZTMyNmYyZDktMjZhOTQwZGItOThkNGNlMzQtY2ZiNTJjMjQ=, workerId: [3:7587127742346174884:2529], local sessions count: 0 |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> test_canonical_records.py::test_dml_through_http |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] Test command err: 2025-12-23T18:50:08.277849Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127623517769107:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:08.277904Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:50:08.352555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:50:08.369081Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127625789833553:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:08.369189Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:50:08.373897Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587127625964080332:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:08.373951Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:50:08.435566Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587127623484356187:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:08.439604Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00600e/r3tmp/tmpvWML4A/pdisk_1.dat 2025-12-23T18:50:08.486236Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587127626791859599:2152];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:08.486409Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:50:08.766385Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:08.819836Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:08.855932Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:08.866844Z node 3 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:08.926876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:09.036409Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:09.059540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:09.059639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:09.061293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:09.061351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:09.061989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:09.062036Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:09.062258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:09.062323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:09.063133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:09.063182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:09.110994Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:50:09.111052Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:50:09.114482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:09.119608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:09.137346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:09.139324Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-12-23T18:50:09.139369Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:50:09.140256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:09.145831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-12-23T18:50:09.163414Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:09.194672Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:50:09.193483Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:09.196523Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:50:09.202358Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:50:09.292567Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:09.426603Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:09.459447Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:09.462590Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:09.532821Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:65215 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: 2025-12-23T18:50:09.588950Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:09.590132Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:50:09.590185Z node 4 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:148: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table dc-1/.metadata/script_executions not found Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515809254 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:09.719806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976735657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:12.493463Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:50:12.551694Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:50:12.551733Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:50:12.551752Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:50:12.553835Z node 4 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [4:7587127640664225682:2131] Owner: [4:7587127640664225679:2129]. Describe result: PathErrorUnknown 2025-12-23T18:50:12.553847Z node 4 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [4:7 ... 
X_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:50:28.139210Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7587127710260876990:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:28.139279Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:50:28.154357Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:28.154442Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:28.156604Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-shared/.metadata/script_executions 2025-12-23T18:50:28.157718Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-12-23T18:50:28.158524Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:28.201080Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:50:28.201216Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:50:28.201338Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:50:28.201440Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:50:28.201560Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:50:28.201675Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:50:28.201837Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:50:28.202008Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:50:28.202093Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:50:28.263790Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:28.263912Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:28.267636Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:28.362422Z node 10 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:28.372509Z node 10 :STATISTICS WARN: tx_init.cpp:298: [72075186224038895] TTxInit::Complete. EnableColumnStatistics=false TServer::EnableGrpc on GrpcPort 28991, node 10 2025-12-23T18:50:28.501295Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:50:28.501324Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:50:28.501333Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:50:28.501414Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:50:28.552236Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:50:28.603016Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:28.658135Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:28.812251Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [10:7587127710260878379:2922], Database: /Root/test-serverless, Start database fetching 2025-12-23T18:50:28.812461Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [10:7587127710260878379:2922], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2025-12-23T18:50:29.147587Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:32.036988Z node 11 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:50:32.037469Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:50:32.037485Z node 11 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:50:32.037949Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7587127726601394875:2356], Start check tables existence, number paths: 2 2025-12-23T18:50:32.040111Z node 11 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-12-23T18:50:32.040376Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7587127726601394875:2356], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:50:32.040455Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7587127726601394875:2356], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:50:32.040498Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: 
table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7587127726601394875:2356], Successfully finished 2025-12-23T18:50:32.040544Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-12-23T18:50:32.511252Z node 10 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:50:32.511382Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7587127727440747787:2388], Start check tables existence, number paths: 2 2025-12-23T18:50:32.511959Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:50:32.511997Z node 10 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:50:32.513474Z node 10 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-12-23T18:50:32.514660Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7587127727440747787:2388], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:50:32.514732Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7587127727440747787:2388], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:50:32.514785Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7587127727440747787:2388], Successfully finished 2025-12-23T18:50:32.514858Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-12-23T18:50:32.602880Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7587127705126557331:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:32.602983Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:50:33.139383Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7587127710260876990:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:33.139476Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:50:33.831159Z node 9 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-12-23T18:50:33.842929Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:50:33.843345Z node 9 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-12-23T18:50:33.843653Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:50:33.844521Z node 9 :KQP_SESSION INFO: 
kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=9&id=ZTBkYzY1NjQtYjdlMDk1YTYtMjQ0MzViOGMtZmM1ZjI4ZjE=, ActorId: [9:7587127706168281656:2329], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:50:33.844575Z node 9 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=9&id=ZTBkYzY1NjQtYjdlMDk1YTYtMjQ0MzViOGMtZmM1ZjI4ZjE=, ActorId: [9:7587127706168281656:2329], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:50:33.844601Z node 9 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=9&id=ZTBkYzY1NjQtYjdlMDk1YTYtMjQ0MzViOGMtZmM1ZjI4ZjE=, ActorId: [9:7587127706168281656:2329], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:50:33.844636Z node 9 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=9&id=ZTBkYzY1NjQtYjdlMDk1YTYtMjQ0MzViOGMtZmM1ZjI4ZjE=, ActorId: [9:7587127706168281656:2329], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:50:33.844720Z node 9 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=9&id=ZTBkYzY1NjQtYjdlMDk1YTYtMjQ0MzViOGMtZmM1ZjI4ZjE=, ActorId: [9:7587127706168281656:2329], ActorState: unknown state, Session actor destroyed |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> test_auditlog.py::test_single_dml_query_logged[update] >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TPQTest::TestPQReadAhead [GOOD] >> TPQTest::TestOwnership |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-system |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/005668/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk17/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit_log._udlut2f.txt 2025-12-23T18:50:27.512505Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:50:27.512451Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-12-23T18:50:27.274398Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig >> TPQTest::TestWritePQ [GOOD] >> TPQTest::TestWriteOffsetWithBigMessage >> KqpLimits::OutOfSpaceYQLUpsertFail [GOOD] >> KqpLimits::ManyPartitionsSortingLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2025-12-23T18:50:17.502224Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127665688026109:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:17.502421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00600f/r3tmp/tmpuonXmo/pdisk_1.dat 2025-12-23T18:50:17.771973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:17.772122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:17.799269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:17.885274Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:50:17.889279Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127665688026083:2081] 1766515817500595 != 1766515817500598 2025-12-23T18:50:17.893061Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:14950 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515817899 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:50:18.096620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:50:18.103693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:50:18.127853Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:50:18.127900Z node 1 :KQP_PROXY DEBUG: kqp_finalize_script_service.cpp:148: [ScriptExecutions] [TKqpFinalizeScriptService] Script execution table dc-1/.metadata/script_executions not found 2025-12-23T18:50:18.507394Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:50:20.472353Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:50:20.476528Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1454: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-12-23T18:50:20.504718Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127678572928804:2301], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:20.504784Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 } 2025-12-23T18:50:20.504987Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T18:50:20.505044Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T18:50:20.505067Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T18:50:20.505234Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 2, sender: [1:7587127669982994143:2475], selfId: [1:7587127665688026344:2265], source: [1:7587127665688026344:2265] 2025-12-23T18:50:20.505375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:20.506354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127678572928821:2302], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:20.506568Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1454: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2025-12-23T18:50:20.506616Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 } 2025-12-23T18:50:20.506692Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [1:7587127669982994143:2475], selfId: [1:7587127665688026344:2265], source: [1:7587127665688026344:2265] 2025-12-23T18:50:20.506734Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:20.507405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127678572928823:2303], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:20.507592Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1454: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq 2025-12-23T18:50:20.507630Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq } 2025-12-23T18:50:20.507679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:20.507697Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 4, sender: [1:7587127669982994143:2475], selfId: [1:7587127665688026344:2265], source: [1:7587127665688026344:2265] 2025-12-23T18:50:20.508191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127678572928826:2305], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:20.508261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:20.508550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127678572928829:2306], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:20.508582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... waiting for SysViewsRoster update finished 2025-12-23T18:50:24.141074Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:50:24.151522Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:50:24.151912Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:50:24.152002Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:50:24.417932Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:50:24.418071Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:50:24.439259Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766515821098835 != 1766515821098838 2025-12-23T18:50:24.447190Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:24.495499Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:50:24.568544Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00600f/r3tmp/tmpYJ9E0D/pdisk_1.dat 2025-12-23T18:50:24.881696Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183 ... p, Database: /Root, SessionId: ydb://session/3?node_id=5&id=OWYyNzdlYTctNTM5ZmU4NWItNzJjM2Y1ZTgtNTQxYWYxZDk=, PoolId: , DatabaseId: , CustomerSuppliedId: 01kd68s7d0ddcsk7t3fpmkz8jp, CurrentExecutionId: 2216dfc9-f907e465-e79ea844-af5ef843, RunScriptActorId: [5:7587127773451497876:3320], IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 7, targetId: [5:7587127777746465239:2364] 2025-12-23T18:50:43.033803Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 7 timeout: 604800.000000s actor id: [5:7587127777746465242:3363] 2025-12-23T18:50:43.033967Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977557866.517662s seconds to be completed 2025-12-23T18:50:43.034668Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587127777746465243:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:43.034821Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:43.035509Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587127777746465249:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:43.035603Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587127777746465248:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:43.035658Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:43.036259Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=5&id=YzM1Mjk5MTMtZDEzMTA4ZTAtOGFlZWY3Zi03ZTEzMjUzMQ==, workerId: [5:7587127777746465256:2372], database: /Root, longSession: 1, local sessions count: 2 2025-12-23T18:50:43.036478Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T18:50:43.037277Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TScriptProgressActor] OwnerId: [5:7587127773451497876:3320], ActorId: [5:7587127777746465240:3362], TraceId: ExecutionId: 2216dfc9-f907e465-e79ea844-af5ef843, RequestDatabase: /Root, LeaseGeneration: 1, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=YzM1Mjk5MTMtZDEzMTA4ZTAtOGFlZWY3Zi03ZTEzMjUzMQ==, TxId: , text: -- TScriptProgressActor::OnRunQuery DECLARE $execution_id AS Text; DECLARE $database AS Text; DECLARE $plan_compressed AS Optional; DECLARE $plan_compression_method AS Optional; DECLARE $execution_status AS Int32; DECLARE $lease_generation AS Int64; UPDATE `.metadata/script_executions` SET plan_compressed = $plan_compressed, plan_compression_method = $plan_compression_method, execution_status = $execution_status WHERE database = $database AND execution_id = $execution_id AND (lease_generation IS NULL OR lease_generation = $lease_generation); 2025-12-23T18:50:43.037699Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=5&id=YzM1Mjk5MTMtZDEzMTA4ZTAtOGFlZWY3Zi03ZTEzMjUzMQ==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 9, targetId: [5:7587127777746465256:2372] 2025-12-23T18:50:43.037731Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 9 timeout: 300.000000s actor id: [5:7587127777746465271:3372] 2025-12-23T18:50:43.039544Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710666:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:50:43.055258Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01kd68s7ze2fe1b9rw44ynwmp3", Request has 18444977557866.496378s seconds to be completed 2025-12-23T18:50:43.057076Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd68s7ze2fe1b9rw44ynwmp3", Created new session, sessionId: ydb://session/3?node_id=5&id=MzEyNTdmMjAtZDdlZTJiZjgtZjhmMzE4YjgtYThmYjdhM2Y=, workerId: [5:7587127777746465306:2377], database: /Root, longSession: 1, local sessions count: 3 2025-12-23T18:50:43.057222Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01kd68s7ze2fe1b9rw44ynwmp3 2025-12-23T18:50:43.067678Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587127777746465252:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710666 completed, doublechecking } 2025-12-23T18:50:43.075496Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kd68s8024g6srwghbk6xss9m, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzEyNTdmMjAtZDdlZTJiZjgtZjhmMzE4YjgtYThmYjdhM2Y=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 11, targetId: [5:7587127777746465306:2377] 2025-12-23T18:50:43.075629Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 11 timeout: 300.000000s actor id: [5:7587127777746465327:3420] 2025-12-23T18:50:43.090981Z node 5 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [5:7587127777746465345:3426], for# user@builtin, access# DescribeSchema 2025-12-23T18:50:43.091020Z node 5 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [5:7587127777746465345:3426], for# user@builtin, access# DescribeSchema 2025-12-23T18:50:43.093197Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7587127777746465341:2383], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:50:43.096172Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=5&id=MzEyNTdmMjAtZDdlZTJiZjgtZjhmMzE4YjgtYThmYjdhM2Y=, ActorId: [5:7587127777746465306:2377], ActorState: ExecuteState, TraceId: 01kd68s8024g6srwghbk6xss9m, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/script_executions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:50:43.096530Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd68s8024g6srwghbk6xss9m", Forwarded response to sender actor, requestId: 11, sender: [5:7587127777746465326:2378], selfId: [5:7587127756271627043:2268], source: [5:7587127777746465306:2377] 2025-12-23T18:50:43.157022Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587127777746465352:3431] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:50:43.251289Z node 5 :KQP_PROXY DEBUG: query_actor.h:292: [TQueryRetryActor] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7587127773451497876:3320], ActorId: [5:7587127777746465385:3442], TraceId: ExecutionId: 2216dfc9-f907e465-e79ea844-af5ef843, RequestDatabase: /Root, LeaseGeneration: 1, Starting query actor #1 [5:7587127777746465386:3443] 2025-12-23T18:50:43.251342Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7587127777746465385:3442], ActorId: [5:7587127777746465386:3443], TraceId: ExecutionId: 2216dfc9-f907e465-e79ea844-af5ef843, RequestDatabase: /Root, LeaseGeneration: 1, Bootstrap. 
Database: /Root, IsSystemUser: 1, run create session 2025-12-23T18:50:43.252779Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977557866.298858s seconds to be completed 2025-12-23T18:50:43.254960Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=5&id=MzM4MTg3NDItNWY0Y2Y3ZDktNTc5NzdiLWU5NTNlZDI2, workerId: [5:7587127777746465388:2390], database: /Root, longSession: 1, local sessions count: 4 2025-12-23T18:50:43.255117Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T18:50:43.255544Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd68s7d0ddcsk7t3fpmkz8jp", Forwarded response to sender actor, requestId: 7, sender: [5:7587127773451497876:3320], selfId: [5:7587127756271627043:2268], source: [5:7587127777746465239:2364] 2025-12-23T18:50:43.255755Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] OwnerId: [5:7587127777746465385:3442], ActorId: [5:7587127777746465386:3443], TraceId: ExecutionId: 2216dfc9-f907e465-e79ea844-af5ef843, RequestDatabase: /Root, LeaseGeneration: 1, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=MzM4MTg3NDItNWY0Y2Y3ZDktNTc5NzdiLWU5NTNlZDI2, TxId: , text: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; DECLARE $lease_generation AS Int64; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id AND (lease_generation IS NULL OR lease_generation = $lease_generation); 2025-12-23T18:50:43.256103Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=5&id=MzM4MTg3NDItNWY0Y2Y3ZDktNTc5NzdiLWU5NTNlZDI2, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 13, targetId: [5:7587127777746465388:2390] 2025-12-23T18:50:43.256133Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 13 timeout: 300.000000s actor id: [5:7587127777746465390:3444] |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TA] $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_auditlog.py::test_create_and_remove_tenant |96.4%| [TA] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] >> KqpBatchUpdate::ManyPartitions_3 [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpStats::DeferredEffects+UseSink [FAIL] >> KqpStats::DeferredEffects-UseSink >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-anonymous |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] Test command err: 2025-12-23T18:48:56.147297Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127315702170753:2263];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:56.148142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:56.210284Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127318114293824:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:56.210325Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:56.216143Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # 
/home/runner/.ya/build/build_root/8ptz/00347b/r3tmp/tmpGNx0hA/pdisk_1.dat 2025-12-23T18:48:56.246158Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:48:56.447189Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:56.502822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:56.554362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:56.554481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:56.565392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:56.565473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:56.579025Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:48:56.579336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:56.579749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:56.595676Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15053, node 1 2025-12-23T18:48:56.695788Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:56.711687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/00347b/r3tmp/yandexWLZ1u2.tmp 2025-12-23T18:48:56.711714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/00347b/r3tmp/yandexWLZ1u2.tmp 2025-12-23T18:48:56.711902Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/00347b/r3tmp/yandexWLZ1u2.tmp 2025-12-23T18:48:56.711991Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:56.752571Z INFO: TTestServer started on Port 20393 GrpcPort 15053 2025-12-23T18:48:56.783173Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20393 PQClient connected to localhost:15053 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 1766515736692 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:57.035595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:48:57.070824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:48:57.146300Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:48:57.230882Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:48:59.397362Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127330999196113:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:59.397363Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127330999196108:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:59.397476Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:59.397764Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127330999196124:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:59.397820Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:59.402082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:59.431557Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587127330999196123:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-12-23T18:48:59.508691Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127330999196152:2182] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:59.795073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:59.796975Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587127328587073910:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:48:59.799239Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YzE3MjhjODYtZTNkNTIyZWYtYzgzZTllNjEtYWFmNjZjMDY=, ActorId: [1:7587127328587073884:2329], ActorState: ExecuteState, TraceId: 01kd68p2wz6jdj5brne7hd59g7, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:48:59.801634Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:48:59.804698Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587127330999196166:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. ... ntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (3059208687299993691, "Root", "00595F536F757263655F37", 1766515844429, 1766515844429, 0, 13); 2025-12-23T18:50:44.621720Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-12-23T18:50:44.621745Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-12-23T18:50:44.621754Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-12-23T18:50:44.621775Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__sm_chooser_actor.h:116: TPartitionChooser [11:7587127781137482708:4195] (SourceId=Y_Source_7, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2025-12-23T18:50:44.621895Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [11:7587127781137482709:4195], Recipient [11:7587127759662645198:3600]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [11:7587127781137482708:4195] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-12-23T18:50:44.621985Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [11:7587127781137482708:4195], Recipient [11:7587127759662645198:3600]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "Y_Source_7" 2025-12-23T18:50:44.622071Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:139: StateOwnershipFast, received event# 271188558, Sender [11:7587127759662645198:3600], Recipient [11:7587127781137482708:4195]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-12-23T18:50:44.622108Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:88: TPartitionChooser [11:7587127781137482708:4195] (SourceId=Y_Source_7, PreferedPartition=(NULL)) InitTable: SourceId=Y_Source_7 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-12-23T18:50:44.622181Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [11:7587127781137482708:4195], Recipient [11:7587127759662645198:3600]: 
NActors::TEvents::TEvPoison 2025-12-23T18:50:44.622408Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:101: StateInitTable, received event# 277020685, Sender [11:7587127703828068026:2070], Recipient [11:7587127781137482708:4195]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-12-23T18:50:44.622451Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:111: TPartitionChooser [11:7587127781137482708:4195] (SourceId=Y_Source_7, PreferedPartition=(NULL)) StartKqpSession 2025-12-23T18:50:44.626213Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:132: StateCreateKqpSession, received event# 271646728, Sender [11:7587127703828068249:2266], Recipient [11:7587127781137482708:4195]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=11&id=YzUwZDM1M2EtYTU5MWRiZjctYTdkNjVhN2YtM2U4OTJhMGI=" NodeId: 11 } YdbStatus: SUCCESS ResourceExhausted: false 2025-12-23T18:50:44.626264Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:142: TPartitionChooser [11:7587127781137482708:4195] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Select from the table 2025-12-23T18:50:44.852432Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:163: StateSelect, received event# 271646721, Sender [11:7587127703828068249:2266], Recipient [11:7587127781137482708:4195]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=11&id=YzUwZDM1M2EtYTU5MWRiZjctYTdkNjVhN2YtM2U4OTJhMGI=" PreparedQuery: "209121cf-7b22eff0-5efd4355-9ec27206" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01kd68s9q7ejs1q7tqzyjd858x" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1766515844429 } items { uint64_value: 1766515844429 } items { uint64_value: 13 } } format: FORMAT_VALUE } QueryDiagnostics: "" } YdbStatus: SUCCESS 2025-12-23T18:50:44.852685Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:151: TPartitionChooser [11:7587127781137482708:4195] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2025-12-23T18:50:44.852757Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__sm_chooser_actor.h:151: TPartitionChooser [11:7587127781137482708:4195] (SourceId=Y_Source_7, PreferedPartition=(NULL)) GetOldSeqNo 2025-12-23T18:50:44.852934Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [11:7587127781137482738:4195], Recipient [11:7587127759662645197:3599]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1000 Status: OK ServerId: [11:7587127781137482708:4195] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-12-23T18:50:44.853057Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271187968, Sender [11:7587127781137482708:4195], Recipient [11:7587127759662645197:3599]: 
NKikimrClient.TPersQueueRequest PartitionRequest { Partition: 0 CmdGetMaxSeqNo { SourceId: "\000Y_Source_7" } PipeClient { RawX1: 7587127781137482738 RawX2: 47244644451 } } 2025-12-23T18:50:44.853130Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:209: TPartitionChooser [11:7587127781137482708:4195] (SourceId=Y_Source_7, PreferedPartition=(NULL)) OnPartitionChosen 2025-12-23T18:50:44.853209Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [11:7587127781137482708:4195], Recipient [11:7587127759662645197:3599]: NActors::TEvents::TEvPoison 2025-12-23T18:50:44.853259Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [11:7587127781137482739:4195], Recipient [11:7587127759662645199:3601]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1002 Status: OK ServerId: [11:7587127781137482708:4195] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-12-23T18:50:44.853319Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [11:7587127781137482708:4195], Recipient [11:7587127759662645199:3601]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 2 2025-12-23T18:50:44.853402Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:240: StateCheckPartition, received event# 271188558, Sender [11:7587127759662645199:3601], Recipient [11:7587127781137482708:4195]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-12-23T18:50:44.853457Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:174: TPartitionChooser [11:7587127781137482708:4195] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Update the table 2025-12-23T18:50:44.853742Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [11:7587127781137482708:4195], Recipient [11:7587127759662645199:3601]: NActors::TEvents::TEvPoison Received TEvChooseResult: 22025-12-23T18:50:45.190733Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:212: StateUpdate, received event# 271646721, Sender [11:7587127703828068249:2266], Recipient [11:7587127781137482708:4195]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=11&id=YzUwZDM1M2EtYTU5MWRiZjctYTdkNjVhN2YtM2U4OTJhMGI=" PreparedQuery: "f9225ee2-7469360e-f032296b-7af177e5" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 208 2025-12-23T18:50:45.190831Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [11:7587127781137482708:4195] (SourceId=Y_Source_7, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-12-23T18:50:45.190868Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [11:7587127781137482708:4195] 
(SourceId=Y_Source_7, PreferedPartition=(NULL)) ReplyResult: Partition=2, SeqNo=157 2025-12-23T18:50:45.190916Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [11:7587127781137482708:4195] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 3059208687299993691 AND Topic = "Root" AND ProducerId = "00595F536F757263655F37" 2025-12-23T18:50:46.571203Z node 11 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1177: ActorId: [11:7587127789727417506:2693] TxId: 281474976710704. Ctx: { TraceId: 01kd68sb6kcqs25ekxrb00jv78, Database: /Root, SessionId: ydb://session/3?node_id=11&id=NjRmNzI5Y2UtZDY2MjBjYjctZGQ5ZDk3YzktYjUyOGUyMw==, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 12 2025-12-23T18:50:46.571432Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [11:7587127789727417510:2693], TxId: 281474976710704, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68sb6kcqs25ekxrb00jv78. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=11&id=NjRmNzI5Y2UtZDY2MjBjYjctZGQ5ZDk3YzktYjUyOGUyMw==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [11:7587127789727417506:2693], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_3 [GOOD] Test command err: Trying to start YDB, gRPC: 10237, MsgBus: 10963 2025-12-23T18:47:12.159680Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126867527605698:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:12.159974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bf2/r3tmp/tmpc7M3FU/pdisk_1.dat 2025-12-23T18:47:12.222686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:47:12.457095Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:12.515482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:12.515590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:12.517431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:12.595186Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10237, node 1 2025-12-23T18:47:12.672819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:12.691205Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:12.691237Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:12.691245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:12.691356Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10963 TClient is connected to server localhost:10963 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:13.179228Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515632602 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:13.205037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:13.211442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:13.226673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:13.371198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:13.532625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:13.607888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:15.684044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126880412509419:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.684207Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.685914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126880412509429:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.686013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.994601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.026174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.062477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.092753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.126054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.164349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.206700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.259140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.335498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126884707477598:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.335587Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.335680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126884707477603:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.335765Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126884707477605:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.335806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.339368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, ... N: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:50:30.863221Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:50:30.863236Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:50:30.863385Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:50:30.864255Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10811 2025-12-23T18:50:31.574935Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515830849 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:50:31.712090Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:31.733271Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:31.830710Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:50:32.134863Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:32.251484Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:35.558909Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7587127719714763829:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:35.559018Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:50:36.396572Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127745484569431:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:36.396721Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:36.397285Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127745484569440:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:36.397361Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:36.608391Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:36.668061Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:36.724531Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:36.771188Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:36.820847Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:36.875498Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:36.937958Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:37.097948Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:37.221042Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127749779537632:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:37.221196Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:37.221512Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127749779537637:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:37.221579Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7587127749779537638:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:37.221664Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:37.227690Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:50:37.241794Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7587127749779537641:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:50:37.300143Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7587127749779537692:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:50:40.137089Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:45.717171Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:50:45.717222Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TPQTest::TestOwnership [GOOD] >> TPQTest::TestPQCacheSizeManagement >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-anonymous >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> KqpLimits::QSReplySize+useSink |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-12-23T18:48:41.474966Z :WriteAndReadSomeMessagesWithSyncCompression INFO: Random seed for debugging is 1766515721474735 2025-12-23T18:48:41.710163Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127253531714213:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.710245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:41.790617Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:48:41.814922Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127253669978588:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.822446Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005842/r3tmp/tmpzyORDN/pdisk_1.dat 2025-12-23T18:48:41.848272Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:48:41.993189Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:41.993659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:42.177359Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:42.252326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:42.252435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:42.253366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:42.253419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:42.289639Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:48:42.289828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:42.292686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:42.307569Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127253531714165:2082] 1766515721704834 != 1766515721704837 2025-12-23T18:48:42.342830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:42.362380Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15072, node 1 2025-12-23T18:48:42.709785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005842/r3tmp/yandexx5OnPq.tmp 2025-12-23T18:48:42.709826Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005842/r3tmp/yandexx5OnPq.tmp 2025-12-23T18:48:42.709978Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005842/r3tmp/yandexx5OnPq.tmp 2025-12-23T18:48:42.710045Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:42.710404Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:42.721600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:42.845621Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:42.939254Z INFO: TTestServer started on Port 24997 GrpcPort 15072 TClient is connected to server localhost:24997 PQClient connected to localhost:15072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515722426 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:43.247507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... 2025-12-23T18:48:43.315439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:48:45.658094Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127270711584711:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.658280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.658113Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127270849848092:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.658113Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127270849848105:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.658269Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.658678Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127270849848109:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.658757Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.658667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127270711584724:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.659288Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127270711584727:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.659347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.662436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:45.668340Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587127270849848111:2133] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:48:45.670944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127270711584766:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.671031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.685582Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587127270711584726:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-12-23T18:48:45.685644Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587127270849848108:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for err ... nt message batch for topic 'rt3.dc1--test-topic' partition 0 2025-12-23T18:50:49.637690Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-12-23T18:50:49.638145Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|276aa444-83f07965-36645710-20ba14b7_0 2025-12-23T18:50:49.640528Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1766515849640 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T18:50:49.640661Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|276aa444-83f07965-36645710-20ba14b7_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-12-23T18:50:49.642102Z :INFO: [] MessageGroupId [src] SessionId [src|276aa444-83f07965-36645710-20ba14b7_0] Write session: close. Timeout = 0 ms 2025-12-23T18:50:49.642148Z :INFO: [] MessageGroupId [src] SessionId [src|276aa444-83f07965-36645710-20ba14b7_0] Write session will now close 2025-12-23T18:50:49.642204Z :DEBUG: [] MessageGroupId [src] SessionId [src|276aa444-83f07965-36645710-20ba14b7_0] Write session: aborting 2025-12-23T18:50:49.642689Z :INFO: [] MessageGroupId [src] SessionId [src|276aa444-83f07965-36645710-20ba14b7_0] Write session: gracefully shut down, all writes complete 2025-12-23T18:50:49.642730Z :DEBUG: [] MessageGroupId [src] SessionId [src|276aa444-83f07965-36645710-20ba14b7_0] Write session: destroy 2025-12-23T18:50:49.643814Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|276aa444-83f07965-36645710-20ba14b7_0 grpc read done: success: 0 data: 2025-12-23T18:50:49.643851Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|276aa444-83f07965-36645710-20ba14b7_0 grpc read failed 2025-12-23T18:50:49.643890Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|276aa444-83f07965-36645710-20ba14b7_0 grpc closed 2025-12-23T18:50:49.643916Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|276aa444-83f07965-36645710-20ba14b7_0 is DEAD 2025-12-23T18:50:49.645114Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:50:49.645796Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [15:7587127801812896080:2463] destroyed 2025-12-23T18:50:49.660373Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2025-12-23T18:50:49.660433Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:50:49.660455Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:49.660472Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:50:49.660495Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:49.660510Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:50:49.660610Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:50:49.660629Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:49.660645Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:50:49.660666Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:49.660684Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:50:49.703341Z :INFO: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Starting read session 2025-12-23T18:50:49.703395Z :DEBUG: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Starting cluster discovery 2025-12-23T18:50:49.703629Z :INFO: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10549: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:10549
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:10549. " 2025-12-23T18:50:49.703683Z :DEBUG: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Restart cluster discovery in 0.008657s 2025-12-23T18:50:49.714929Z :DEBUG: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Starting cluster discovery 2025-12-23T18:50:49.715332Z :INFO: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10549: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:10549
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:10549. " 2025-12-23T18:50:49.715391Z :DEBUG: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Restart cluster discovery in 0.013922s 2025-12-23T18:50:49.734838Z :DEBUG: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Starting cluster discovery 2025-12-23T18:50:49.735068Z :INFO: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10549: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:10549
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:10549. " 2025-12-23T18:50:49.735103Z :DEBUG: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Restart cluster discovery in 0.036366s 2025-12-23T18:50:49.759579Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:50:49.759620Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:49.759635Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:50:49.759658Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:49.759676Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:50:49.778830Z :DEBUG: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Starting cluster discovery 2025-12-23T18:50:49.779151Z :NOTICE: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10549: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:10549
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:10549. " } 2025-12-23T18:50:49.779347Z :NOTICE: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10549: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:10549
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:10549. " } 2025-12-23T18:50:49.779456Z :INFO: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Closing read session. Close timeout: 0.000000s 2025-12-23T18:50:49.779529Z :NOTICE: [/Root] [/Root] [780ad918-40006b3f-ea5a6358-b9f1d8e3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T18:50:49.859937Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:50:49.859978Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:49.859995Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:50:49.860019Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:49.860048Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:50:49.960713Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:50:49.960755Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:49.960771Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:50:49.960795Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:50:49.960814Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:50:50.176335Z node 15 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1177: ActorId: [15:7587127806107863410:2476] TxId: 281474976710676. Ctx: { TraceId: 01kd68seg5e77zxp3hzmep6fw8, Database: /Root, SessionId: ydb://session/3?node_id=15&id=MjUyZmYzODctNDRkZWRmOWMtYmY5MDBjNTEtZDc2MjhlYw==, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 16 2025-12-23T18:50:50.176857Z node 15 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [15:7587127806107863417:2476], TxId: 281474976710676, task: 3. Ctx: { TraceId : 01kd68seg5e77zxp3hzmep6fw8. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=15&id=MjUyZmYzODctNDRkZWRmOWMtYmY5MDBjNTEtZDc2MjhlYw==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [15:7587127806107863410:2476], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-12-23T18:50:50.556235Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:50:50.556265Z node 15 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-ordinaryuser |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:50:29.640474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, 
MaxRate# 1 2025-12-23T18:50:29.640586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:29.640648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:50:29.640685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:50:29.640723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:50:29.640763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:50:29.640877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:29.640949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:50:29.641891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:50:29.642171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:50:29.727348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:50:29.727420Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:29.735208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:50:29.735435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:50:29.735593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:50:29.738608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:50:29.738860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:50:29.739607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:29.739854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:50:29.741838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:29.742053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:50:29.743300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:29.743365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:29.743511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:50:29.743559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:29.743607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:50:29.743727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:50:29.891636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.892632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.892781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.892862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.892926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.892988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:50:29.893060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.893168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.893261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.893354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.893420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.893496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.893614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.893698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:29.893779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... T18:50:54.714720Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:50:54.714843Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:50:54.714879Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:50:54.714910Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:50:54.714947Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:50:54.715021Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-12-23T18:50:54.716671Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1167 } } CommitVersion { Step: 5000038 TxId: 101 } 2025-12-23T18:50:54.716716Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:50:54.716852Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1167 } } CommitVersion { Step: 5000038 TxId: 101 } 2025-12-23T18:50:54.716976Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 
ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1167 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2025-12-23T18:50:54.718484Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 706 RawX2: 77309414020 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:50:54.718535Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:50:54.718651Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 706 RawX2: 77309414020 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:50:54.718709Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:50:54.718907Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 706 RawX2: 77309414020 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:50:54.718984Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:54.719032Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:54.719073Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:50:54.719120Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2025-12-23T18:50:54.719916Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:50:54.719969Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:50:54.721149Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:54.721236Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:54.721400Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:50:54.721432Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 
ProgressState 2025-12-23T18:50:54.721499Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:50:54.721529Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:50:54.721560Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:50:54.721588Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:50:54.721618Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-12-23T18:50:54.721666Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:732:2710] message: TxId: 101 2025-12-23T18:50:54.721707Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:50:54.721743Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:50:54.721770Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:50:54.721857Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:50:54.723141Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:50:54.723175Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:733:2711] TestWaitNotification: OK eventTxId 101 2025-12-23T18:50:54.723479Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:50:54.723627Z node 18 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 169us result status StatusSuccess 2025-12-23T18:50:54.724010Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 
Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] >> KqpStats::DeferredEffects-UseSink [GOOD] >> KqpStats::DataQueryWithEffects-UseSink >> TFlatTest::AutoSplitMergeQueue [GOOD] >> KqpSinkTx::TIsolationSettingTest-IsOlap+UsePragma [GOOD] >> KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD] Test command err: 2025-12-23T18:49:22.815504Z node 1 
:METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127429406816299:2082];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:49:22.825857Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0057c1/r3tmp/tmpF7EYSO/pdisk_1.dat 2025-12-23T18:49:23.078660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:49:23.115454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:49:23.115560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:49:23.158448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:49:23.210779Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:49:23.212209Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587127429406816246:2081] 1766515762810867 != 1766515762810870 2025-12-23T18:49:23.386933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:10008 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515763236 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:49:23.501811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... 
2025-12-23T18:49:23.541239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515763649 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "String" TypeId: 4097 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyC... (TRUNCATED) A-0 2025-12-23T18:49:23.834215Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; B-0 2025-12-23T18:49:24.056117Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.9, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.033s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-12-23T18:49:24.066867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-12-23T18:49:24.095359Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.031s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-12-23T18:49:24.098208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-12-23T18:49:24.170004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-12-23T18:49:24.170156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 6291502 row count 1 2025-12-23T18:49:24.170216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 1, DataSize 6291502 2025-12-23T18:49:24.170376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2025-12-23T18:49:24.170491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-12-23T18:49:24.482158Z node 1 :OPS_COMPACT 
INFO: Compact{72075186224037888.1.14, eph 2} end=Done, 2 blobs 1r (max 1), put Spent{time=0.020s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-12-23T18:49:24.503375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 A-1 2025-12-23T18:49:24.547255Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 3 blobs 2r (max 2), put Spent{time=0.057s,wait=0.006s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583126 0 0)b }, ecr=1.000 2025-12-23T18:49:24.559703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-12-23T18:49:24.602849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-12-23T18:49:24.602957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 12583004 row count 2 2025-12-23T18:49:24.602996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 2, DataSize 12583004 2025-12-23T18:49:24.603071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2025-12-23T18:49:24.606840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-1 2025-12-23T18:49:24.919184Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.19, eph 3} end=Done, 2 blobs 1r (max 1), put Spent{time=0.047s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-12-23T18:49:24.939501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 18874506 rowCount 3 cpuUsage 0 2025-12-23T18:49:24.998582Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 3r (max 3), put Spent{time=0.074s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874656 0 0)b }, ecr=1.000 2025-12-23T18:49:25.042845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-12-23T18:49:25.042953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 18874506 row count 3 2025-12-23T18:49:25.042994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874506 
2025-12-23T18:49:25.046828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 39], datashard# 72075186224037888, compactionInfo# {72057594046644480:1, SH# 2, Rows# 3, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2025-12-23T18:49:25.048435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 599 seconds 2025-12-23T18:49:25.048494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2025-12-23T18:49:25.048643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-12-23T18:49:25.048695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 18874506 rowCount 3 cpuUsage 0 2025-12-23T18: ... { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyC... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515818312 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 32 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 32 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 30 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyC... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515818312 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 32 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 32 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 30 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyC... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515818312 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 32 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 32 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 30 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyC... (TRUNCATED) 2025-12-23T18:50:52.661662Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037927 not found 2025-12-23T18:50:52.661706Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037917 not found 2025-12-23T18:50:52.665089Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037932 not found 2025-12-23T18:50:53.285077Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037928 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515818312 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 34 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 34 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 32 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyC... 
(TRUNCATED) 2025-12-23T18:50:53.377189Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusInvalidParameter Reached MaxPartitionsCount limit: 6, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976715729:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037936 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\334\000\000\000" } SchemeshardId: 72057594046644480 2025-12-23T18:50:53.524739Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037937 not found 2025-12-23T18:50:54.327345Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037935 not found 2025-12-23T18:50:54.327395Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037936 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515818312 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 36 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 36 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 34 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyC... (TRUNCATED) 2025-12-23T18:50:54.471967Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037938 not found 2025-12-23T18:50:54.472013Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037940 not found 2025-12-23T18:50:54.562290Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037941 not found 2025-12-23T18:50:54.562326Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037939 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515818312 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 38 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 38 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 36 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyC... 
(TRUNCATED) 2025-12-23T18:50:55.427482Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037942 not found 2025-12-23T18:50:55.427527Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037933 not found 2025-12-23T18:50:55.575858Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037934 not found 2025-12-23T18:50:55.575920Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037943 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515818312 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 40 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 40 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 38 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyC... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766515818312 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 40 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 40 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 38 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyC... 
(TRUNCATED) |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_database |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TPQTest::TestPQCacheSizeManagement [GOOD] >> TPQTest::TestOffsetEstimation [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TA] $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestOffsetEstimation [GOOD] Test command err: 2025-12-23T18:49:05.581379Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2025-12-23T18:49:05.646974Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:05.647067Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:05.647142Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:05.647241Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2025-12-23T18:49:05.668787Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:05.690522Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-12-23T18:49:05.691706Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2025-12-23T18:49:05.694185Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2025-12-23T18:49:05.697023Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2025-12-23T18:49:05.698479Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2025-12-23T18:49:05.713340Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 
2025-12-23T18:49:05.713823Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d7940bf2-3a19b8d7-cdccd5db-b57b1063_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [1:181:2194] 2025-12-23T18:49:06.207925Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:113:2057] recipient: [2:106:2138] 2025-12-23T18:49:06.248953Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:06.249025Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:06.249100Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:06.249150Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927938 is [2:158:2176] sender: [2:159:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:184:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:112:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:186:2057] recipient: [2:104:2137] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:189:2057] recipient: [2:188:2197] Leader for TabletID 72057594037927937 is [2:190:2198] sender: [2:191:2057] recipient: [2:188:2197] 2025-12-23T18:49:06.297561Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:49:06.297625Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:49:06.297680Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:06.297739Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:112:2142]) rebooted! 2025-12-23T18:49:06.308288Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 !Reboot 72057594037927937 (actor [2:112:2142]) tablet resolver refreshed! 
new actor is[2:190:2198] 2025-12-23T18:49:06.370456Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:06.401581Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:06.412216Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:06.433149Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:06.496384Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:06.537864Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:06.651713Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:06.683187Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:06.969497Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:06.994568Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:07.346960Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:07.579553Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:49:07.710763Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:190:2198] sender: [2:270:2057] recipient: [2:14:2061] 2025-12-23T18:49:07.904512Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:49:07.905394Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 2 actor [2:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } Consumers { Name: "aaa" Generation: 2 Important: true } 2025-12-23T18:49:07.906306Z node 2 :PERSQUEUE INFO: 
partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:276:2198] 2025-12-23T18:49:07.908911Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:276:2198] 2025-12-23T18:49:07.912370Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:277:2198] 2025-12-23T18:49:07.914226Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:277:2198] 2025-12-23T18:49:07.929407Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:49:07.929915Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|70dfbb7a-642b39a9-90c5f7d3-19d63042_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [2:182:2194] 2025-12-23T18:49:08.442515Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:108:2057] recipient: [3:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:108:2057] recipient: [3:106:2138] Leader for TabletID 72057594037927937 is [3:112:2142] sender: [3:113:2057] recipient: [3:106:2138] 2025-12-23T18:49:08.490078Z node 3 :PQ_TX IN ... 3557Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:776:2549] 2025-12-23T18:50:58.556105Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:777:2549] 2025-12-23T18:50:58.566663Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:50:58.577119Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 8 [69:777:2549] 2025-12-23T18:50:58.599058Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:50:58.600279Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 8 [69:776:2549] 2025-12-23T18:50:58.633729Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:182:2194] 2025-12-23T18:50:58.681481Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:717:2549] sender: [69:811:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:717:2549] sender: [69:814:2057] recipient: [69:104:2137] Leader for TabletID 72057594037927937 is [69:717:2549] sender: [69:817:2057] recipient: [69:816:2617] Leader for TabletID 72057594037927937 is [69:818:2618] sender: [69:819:2057] recipient: [69:816:2617] 2025-12-23T18:50:58.735104Z node 69 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:50:58.735187Z node 69 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:50:58.735958Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:50:58.736009Z node 69 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:50:58.736729Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:879:2618] 2025-12-23T18:50:58.739369Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:880:2618] 2025-12-23T18:50:58.749799Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:50:58.750916Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 9 [69:880:2618] 2025-12-23T18:50:58.769430Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:50:58.781243Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 9 [69:879:2618] 2025-12-23T18:50:58.829002Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:182:2194] 2025-12-23T18:50:58.853793Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:818:2618] sender: [69:912:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:818:2618] sender: [69:915:2057] recipient: [69:104:2137] Leader for TabletID 72057594037927937 is [69:818:2618] sender: [69:918:2057] recipient: [69:917:2686] Leader for TabletID 72057594037927937 is [69:919:2687] sender: [69:920:2057] recipient: [69:917:2686] 2025-12-23T18:50:58.913213Z node 69 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:50:58.913278Z node 69 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:50:58.914168Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:50:58.914220Z node 69 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:50:58.915193Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:982:2687] 2025-12-23T18:50:58.918136Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:983:2687] 2025-12-23T18:50:58.929266Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:50:58.929956Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 10 [69:983:2687] 2025-12-23T18:50:58.949518Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:50:58.950666Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 10 [69:982:2687] 2025-12-23T18:50:58.993346Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:182:2194] 2025-12-23T18:50:59.033764Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:919:2687] sender: [69:1015:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:919:2687] sender: [69:1018:2057] recipient: [69:104:2137] Leader for TabletID 72057594037927937 is [69:919:2687] sender: [69:1021:2057] recipient: [69:1020:2757] Leader for TabletID 72057594037927937 is [69:1022:2758] sender: [69:1023:2057] recipient: [69:1020:2757] 2025-12-23T18:50:59.097632Z node 69 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:50:59.097690Z node 69 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:50:59.098433Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:50:59.098482Z node 69 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:50:59.099227Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:1087:2758] 2025-12-23T18:50:59.101735Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:1088:2758] 2025-12-23T18:50:59.111535Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:50:59.112473Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 11 [69:1088:2758] 2025-12-23T18:50:59.131903Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:50:59.133036Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 11 [69:1087:2758] 2025-12-23T18:50:59.159351Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:182:2194] 2025-12-23T18:50:59.202968Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:1022:2758] sender: [69:1120:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:1022:2758] sender: [69:1123:2057] recipient: [69:104:2137] Leader for TabletID 72057594037927937 is [69:1022:2758] sender: [69:1126:2057] recipient: [69:1125:2830] Leader for TabletID 72057594037927937 is [69:1127:2831] sender: [69:1128:2057] recipient: [69:1125:2830] 2025-12-23T18:50:59.249720Z node 69 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:50:59.249777Z node 69 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:50:59.250531Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:50:59.250584Z node 69 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:50:59.251356Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:1194:2831] 2025-12-23T18:50:59.254121Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:1195:2831] 2025-12-23T18:50:59.264984Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:50:59.266163Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 12 [69:1195:2831] 2025-12-23T18:50:59.292456Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:50:59.293574Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 12 [69:1194:2831] 2025-12-23T18:50:59.319968Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-dbadmin |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] Test command err: Trying to start YDB, gRPC: 14742, MsgBus: 29902 2025-12-23T18:44:57.598267Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126291033231902:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:57.598572Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033d6/r3tmp/tmpPg5Y4n/pdisk_1.dat 2025-12-23T18:44:57.799799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:57.802533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:57.802654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:57.805130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:44:57.874599Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:57.876781Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126291033231859:2081] 1766515497594995 != 1766515497594998 TServer::EnableGrpc on GrpcPort 14742, node 1 2025-12-23T18:44:57.914619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:57.914648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:57.914658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:57.914789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:44:57.956236Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29902 TClient is connected to server localhost:29902 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515497957 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:58.394298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:58.413049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.541861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.647254Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:58.689013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.756489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:00.389178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126303918135616:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.389326Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.389753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126303918135626:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.389812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.670108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.696391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.723594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.749430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.774780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.802876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.854939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.893708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.960414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126303918136499:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.960473Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.960605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126303918136504:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.960679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126303918136506:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.960736Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.963969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:00.973404Z node 1 ... { message: "Request canceled after 449ms" severity: 1 }{ message: "Cancelling after 449ms during compilation" severity: 1 } 2025-12-23T18:50:39.144334Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68s3q49xwhzbd0pca4mmq5, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 451ms" severity: 1 }{ message: "Cancelling after 451ms during compilation" severity: 1 } 2025-12-23T18:50:39.961555Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68s4gk997s5bjnyn74325z, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 453ms" severity: 1 }{ message: "Cancelling after 453ms during compilation" severity: 1 } 2025-12-23T18:50:40.747596Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68s5929dxne2k0rparwfmk, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 455ms" severity: 1 }{ message: "Cancelling after 455ms during compilation" severity: 1 } 2025-12-23T18:50:41.606158Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68s63w44gt7s773gc3bjt0, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 457ms" severity: 1 }{ message: "Cancelling after 456ms during compilation" severity: 1 } 2025-12-23T18:50:42.304741Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68s6sjd3c1edhz2r9akpqq, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 459ms" severity: 1 }{ message: "Cancelling after 461ms during compilation" severity: 1 } 2025-12-23T18:50:43.008654Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68s7fjdyfab6nszk2at2b1, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 461ms" severity: 1 }{ message: "Cancelling after 461ms during compilation" severity: 1 } 2025-12-23T18:50:43.899097Z node 5 :KQP_SESSION WARN: 
kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68s8bc2v7cyq7ccyc7e63c, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 463ms" severity: 1 }{ message: "Cancelling after 462ms during compilation" severity: 1 } 2025-12-23T18:50:44.607140Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68s91cfx8715v2dmvh3fyx, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 465ms" severity: 1 }{ message: "Cancelling after 465ms during compilation" severity: 1 } 2025-12-23T18:50:45.353078Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68s9rn2dwz19sgzvkq01z0, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 467ms" severity: 1 }{ message: "Cancelling after 467ms during compilation" severity: 1 } 2025-12-23T18:50:46.122503Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68sagm9sg3vjnf1j7s0nza, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 469ms" severity: 1 }{ message: "Cancelling after 469ms during compilation" severity: 1 } 2025-12-23T18:50:46.919150Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68sb9fb89pdpw8w2t6pmd1, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 471ms" severity: 1 }{ message: "Cancelling after 471ms during compilation" severity: 1 } 2025-12-23T18:50:47.701460Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68sc1va8xs3z0nrfe7hzkt, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 473ms" severity: 1 }{ message: "Cancelling after 473ms during compilation" severity: 1 } 2025-12-23T18:50:48.491009Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68sctf4hcafhyhrgcwm6bf, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 475ms" severity: 1 }{ message: "Cancelling after 475ms during compilation" severity: 1 } 2025-12-23T18:50:49.375099Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68sdp00n1nbwqz8r3pqs93, Create 
QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 477ms" severity: 1 }{ message: "Cancelling after 478ms during compilation" severity: 1 } 2025-12-23T18:50:50.078217Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68sebyd8eqnv8ybxk0arda, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 479ms" severity: 1 }{ message: "Cancelling after 479ms during compilation" severity: 1 } 2025-12-23T18:50:50.961944Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68sf7g9cremadzz82vw2ga, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 481ms" severity: 1 }{ message: "Cancelling after 480ms during compilation" severity: 1 } 2025-12-23T18:50:51.662854Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68sfxb4hsfbxn5pm0gxqqd, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 483ms" severity: 1 }{ message: "Cancelling after 483ms during compilation" severity: 1 } 2025-12-23T18:50:52.273100Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68sggb2hrd96v7jnk61eq1, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 485ms" severity: 1 }{ message: "Cancelling after 485ms during compilation" severity: 1 } 2025-12-23T18:50:53.152722Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68shbr0f5hb8s5a30apks9, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 487ms" severity: 1 }{ message: "Cancelling after 487ms during compilation" severity: 1 } 2025-12-23T18:50:54.030884Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68sj75ertb5jp7abn0e9y9, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 489ms" severity: 1 }{ message: "Cancelling after 489ms during compilation" severity: 1 } 2025-12-23T18:50:54.836253Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68sk08dnx7n6jt2p6htr7k, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 491ms" severity: 1 }{ message: "Cancelling after 491ms during compilation" severity: 1 } 2025-12-23T18:50:55.631234Z node 5 
:KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68skrz3mz9fk4nfywvt79e, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 493ms" severity: 1 }{ message: "Cancelling after 495ms during compilation" severity: 1 } 2025-12-23T18:50:56.342695Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68smf649cp3j77ggsjx9b7, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 495ms" severity: 1 }{ message: "Cancelling after 495ms during compilation" severity: 1 } 2025-12-23T18:50:57.155049Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68sn8h2npevrh5hgfs6ypk, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 497ms" severity: 1 }{ message: "Cancelling after 497ms during compilation" severity: 1 } 2025-12-23T18:50:57.855195Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MTdjMjI3ZjctNGY0NmFmOTQtZTIzNzJmMzAtNjFkMmZhZGQ=, ActorId: [5:7587127326877408221:2534], ActorState: ExecuteState, TraceId: 01kd68snybcm1czd5w8j1syjej, Create QueryResponse for error on request, msg: , status: CANCELLED, issues: { message: "Request canceled after 499ms" severity: 1 }{ message: "Cancelling after 499ms during compilation" severity: 1 } |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpBatchDelete::ManyPartitions_1 [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/00561f/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit_log.y0sg4a_9.txt 2025-12-23T18:50:48.361021Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpStats::DataQueryWithEffects-UseSink [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-dbadmin |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_1 [GOOD] Test command err: Trying to start YDB, gRPC: 29386, MsgBus: 7701 2025-12-23T18:47:12.386165Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126867344140362:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:12.386240Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:47:12.419398Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bf1/r3tmp/tmpE8NZ23/pdisk_1.dat 2025-12-23T18:47:12.668131Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:12.670596Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126867344140325:2081] 1766515632378829 != 1766515632378832 2025-12-23T18:47:12.684218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:12.684314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:12.712282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:12.719398Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 29386, node 1 2025-12-23T18:47:12.848242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:12.848283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:12.848293Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:12.848404Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:12.911937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7701 TClient is connected to server localhost:7701 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515632805 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:47:13.323851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:13.334302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:13.394448Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:47:13.470097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:13.623934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:47:13.696907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:15.615826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126880229044087:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.615946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.616303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126880229044097:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.616360Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:15.986834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.026120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.059833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.091544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.126265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.162617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.226841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.274406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:16.346964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126884524012263:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.347091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.347195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126884524012268:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.347280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126884524012270:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.347323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:16.351319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateRes ... 17Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13657, node 20 2025-12-23T18:50:49.484256Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:50:49.515827Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:50:49.515864Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:50:49.515878Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:50:49.516043Z node 20 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31764 2025-12-23T18:50:50.198372Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:31764 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515849490 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:50:50.642150Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:50.668650Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:50:50.777702Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:51.061273Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:51.191827Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:54.189857Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7587127801169478786:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:50:54.189961Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:50:55.984931Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7587127826939284424:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:55.985106Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:55.985534Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7587127826939284434:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:55.985610Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:56.114198Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:56.172483Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:56.239120Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:56.308415Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:56.378289Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:56.453015Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:56.521621Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:56.613308Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:50:56.747525Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7587127831234252626:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:56.747668Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:56.747670Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7587127831234252631:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:56.748342Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7587127831234252633:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:56.748435Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:50:56.755298Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:50:56.772938Z node 20 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7587127831234252634:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:50:56.972218Z node 20 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [20:7587127831234252686:3794] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:51:00.063573Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryWithEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18659, MsgBus: 13420 2025-12-23T18:45:28.013816Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126422475364826:2134];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:28.014477Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033b0/r3tmp/tmpxLl1No/pdisk_1.dat 2025-12-23T18:45:28.250896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:28.282756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:28.282869Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:28.333860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:28.400483Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:28.402380Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126418180397436:2081] 1766515528003094 != 1766515528003097 2025-12-23T18:45:28.424726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 18659, node 1 2025-12-23T18:45:28.474621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:28.474648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:28.474685Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:28.474809Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13420 TClient is connected to server localhost:13420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515528414 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:28.969874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:45:28.976593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:45:28.981753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:45:29.014427Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:29.106801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:29.237098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:29.303955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:31.153704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126435360268496:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.153824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.154238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126435360268506:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.154305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.496666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.529672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.557456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.589723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.617019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.651975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.714777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.758611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:31.836922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126435360269378:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.836997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.837066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126435360269383:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.837162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126435360269385:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.837243Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:31.841256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sc ... IER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:50:57.185827Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:50:57.185910Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:50:57.297322Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17303 TClient is connected to server localhost:17303 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515857176 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:50:57.733755Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:50:57.739204Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:50:57.752186Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:57.824389Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:50:58.001557Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:50:58.070011Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:50:58.099484Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:00.870891Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127850314629715:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:00.871239Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:00.871751Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127850314629725:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:00.871815Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:00.872272Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127850314629727:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:00.872370Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:00.935143Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:00.973859Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:01.013545Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:01.048549Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:01.082078Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:01.117277Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:01.154081Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:01.212341Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:01.323716Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127854609597896:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:01.323834Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:01.324089Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127854609597901:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:01.324118Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587127854609597902:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:01.324176Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:01.329836Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:51:01.343771Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587127854609597905:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:51:01.444525Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587127854609597956:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:51:02.015693Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587127837429725987:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:02.015799Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-clusteradmin >> test_auditlog.py::test_create_and_remove_tenant [GOOD] |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TKeyValueTest::TestConcatWorksNewApi [GOOD] >> TKeyValueTest::TestConcatToLongKey |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpLimits::QSReplySize+useSink [GOOD] >> KqpLimits::QSReplySize-useSink |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TPQTest::TestPartitionedBlobFails [GOOD] >> TPQTest::TestPQSmallRead ------- [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/005605/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit_log.s798fr7w.txt 2025-12-23T18:50:55.411626Z: {"tx_id":"01kd68sm1k47w5hn63e4k2h3y1","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:50:55.411581Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-12-23T18:50:55.410679Z","grpc_method":"Ydb.Table.V1.TableService/BeginTransaction","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2025-12-23T18:50:55.591595Z: {"tx_id":"01kd68sm1k47w5hn63e4k2h3y1","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:50:55.591551Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-12-23T18:50:55.419473Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-12-23T18:50:55.618984Z: {"tx_id":"01kd68sm1k47w5hn63e4k2h3y1","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:50:55.618937Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-12-23T18:50:55.607024Z","grpc_method":"Ydb.Table.V1.TableService/CommitTransaction","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/00560c/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit_log.w2hm6be4.txt 2025-12-23T18:50:51.968165Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:50:51.968125Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 
1","start_time":"2025-12-23T18:50:51.803261Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-clusteradmin |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/0055f6/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_good_dynconfig/audit_log.wckgqqr9.txt 2025-12-23T18:50:58.316965Z: {"sanitized_token":"othe****ltin (27F910A9)","subject":"other-user@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-system >> test_canonical_records.py::test_dstool_add_group_http |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/0055ef/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk16/testing_out_stuff/test_auditlog.py.test_dynconfig/audit_log.n1kqpinx.txt 2025-12-23T18:50:58.944319Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_create_and_remove_tenant [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/0055ec/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_create_and_remove_tenant/audit_log.fhmn90g7.txt 2025-12-23T18:50:52.638089Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"BEGIN INIT DATABASE CONFIG","remote_address":"::1","database":"/Root/users/database"} 2025-12-23T18:50:52.647521Z: {"paths":"[/Root/users/database]","tx_id":"281474976710660","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DATABASE","component":"schemeshard"} 2025-12-23T18:50:52.680848Z: 
{"paths":"[/Root/users/database]","tx_id":"281474976710661","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"ALTER DATABASE","component":"schemeshard"} 2025-12-23T18:50:56.932430Z: {"paths":"[/Root/users/database/.sys]","tx_id":"281474976725657","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} 2025-12-23T18:50:56.932883Z: {"paths":"[/Root/users/database/.sys/auth_owners]","tx_id":"281474976725658","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.933140Z: {"paths":"[/Root/users/database/.sys/query_sessions]","tx_id":"281474976725659","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.933329Z: {"paths":"[/Root/users/database/.sys/tables]","tx_id":"281474976725660","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.933609Z: {"paths":"[/Root/users/database/.sys/pg_class]","tx_id":"281474976725661","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.933783Z: {"paths":"[/Root/users/database/.sys/partition_stats]","tx_id":"281474976725662","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.935277Z: {"paths":"[/Root/users/database/.sys/query_metrics_one_minute]","tx_id":"281474976725663","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.935592Z: {"paths":"[/Root/users/database/.sys/top_queries_by_read_bytes_one_hour]","tx_id":"281474976725664","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.935902Z: {"paths":"[/Root/users/database/.sys/top_queries_by_duration_one_hour]","tx_id":"281474976725665","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.936119Z: 
{"paths":"[/Root/users/database/.sys/compile_cache_queries]","tx_id":"281474976725666","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.936336Z: {"paths":"[/Root/users/database/.sys/top_queries_by_request_units_one_minute]","tx_id":"281474976725667","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.936704Z: {"paths":"[/Root/users/database/.sys/top_queries_by_read_bytes_one_minute]","tx_id":"281474976725668","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.936901Z: {"paths":"[/Root/users/database/.sys/resource_pool_classifiers]","tx_id":"281474976725669","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.937061Z: {"paths":"[/Root/users/database/.sys/nodes]","tx_id":"281474976725670","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.937260Z: {"paths":"[/Root/users/database/.sys/top_queries_by_cpu_time_one_hour]","tx_id":"281474976725671","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.937456Z: {"paths":"[/Root/users/database/.sys/top_partitions_by_tli_one_minute]","tx_id":"281474976725672","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.937640Z: {"paths":"[/Root/users/database/.sys/top_queries_by_cpu_time_one_minute]","tx_id":"281474976725673","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.938037Z: {"paths":"[/Root/users/database/.sys/top_partitions_by_tli_one_hour]","tx_id":"281474976725674","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.938230Z: {"paths":"[/Root/users/database/.sys/top_queries_by_request_units_one_hour]","tx_id":"281474976725675","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.938408Z: 
{"paths":"[/Root/users/database/.sys/top_partitions_one_hour]","tx_id":"281474976725676","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.938616Z: {"paths":"[/Root/users/database/.sys/pg_tables]","tx_id":"281474976725677","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.938833Z: {"paths":"[/Root/users/database/.sys/top_queries_by_duration_one_minute]","tx_id":"281474976725678","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.939002Z: {"paths":"[/Root/users/database/.sys/auth_group_members]","tx_id":"281474976725679","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.939182Z: {"paths":"[/Root/users/database/.sys/auth_users]","tx_id":"281474976725680","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.939397Z: {"paths":"[/Root/users/database/.sys/resource_pools]","tx_id":"281474976725681","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.939555Z: {"paths":"[/Root/users/database/.sys/streaming_queries]","tx_id":"281474976725682","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.939757Z: {"paths":"[/Root/users/database/.sys/auth_permissions]","tx_id":"281474976725683","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.939921Z: {"paths":"[/Root/users/database/.sys/top_partitions_one_minute]","tx_id":"281474976725684","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.942189Z: {"paths":"[/Root/users/database/.sys/auth_effective_permissions]","tx_id":"281474976725685","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.942394Z: 
{"paths":"[/Root/users/database/.sys/auth_groups]","tx_id":"281474976725686","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2025-12-23T18:50:56.947285Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"END INIT DATABASE CONFIG","remote_address":"::1","database":"/Root/users/database"} 2025-12-23T18:50:58.005116Z: {"paths":"[.metadata/workload_manager/pools/default]","tx_id":"281474976720657","new_owner":"metadata@system","acl_add":"[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin]","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE RESOURCE POOL","component":"schemeshard"} 2025-12-23T18:50:58.140344Z: {"reason":"Check failed: path: '/Root/users/database/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 35], type: EPathTypeResourcePool, state: EPathStateNoChanges)","paths":"[default]","tx_id":"281474976720658","new_owner":"metadata@system","acl_add":"[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin]","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAlreadyExists","operation":"CREATE RESOURCE POOL","component":"schemeshard"} 2025-12-23T18:51:00.985259Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"BEGIN REMOVE DATABASE","remote_address":"::1","database":"/Root/users/database"} 2025-12-23T18:51:00.992139Z: {"paths":"[/Root/users/database]","tx_id":"281474976710662","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DATABASE","component":"schemeshard"} 2025-12-23T18:51:01.015409Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"END REMOVE DATABASE","remote_address":"::1","database":"/Root/users/database"} |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] Test command err: 2025-12-23T18:48:54.389016Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2025-12-23T18:48:54.458805Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:54.458891Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:54.458948Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:54.459031Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for 
TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2025-12-23T18:48:54.475983Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2025-12-23T18:48:54.476100Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:48:54.501269Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "test" Generation: 1 Important: false } 2025-12-23T18:48:54.501449Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:54.504082Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "test" Generation: 1 Important: false } 2025-12-23T18:48:54.504244Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:54.504324Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:54.504370Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:54.504431Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:3:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:48:54.505299Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:54.505786Z node 1 
:PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2025-12-23T18:48:54.508674Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:54.508735Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-12-23T18:48:54.508798Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2025-12-23T18:48:54.508851Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:48:54.508914Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:54.511654Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:48:54.511706Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:48:54.511751Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:54.511815Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:54.511864Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2025-12-23T18:48:54.511908Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:54.511957Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:54.512054Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-12-23T18:48:54.512105Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-12-23T18:48:54.512151Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:54.512185Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2025-12-23T18:48:54.512209Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2025-12-23T18:48:54.512238Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2025-12-23T18:48:54.512274Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2025-12-23T18:48:54.512316Z node 1 :PERSQUEUE 
DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2025-12-23T18:48:54.512570Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:54.512623Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-12-23T18:48:54.512699Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:48:54.512901Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:48:54.513105Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2025-12-23T18:48:54.515195Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:48:54.515248Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2025-12-23T18:48:54.515291Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2025-12-23T18:48:54.515339Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:48:54.515410Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:48:54.517437Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2025-12-23T18:48:54.517480Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:48:54.517509Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:54.517549Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:54.517593Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2025-12-23T18:48:54.517626Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:48:54.517657Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:54.517704Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2025-12-23T18:48:54.517735Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2025-12-23T18:48:54.517783Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2025-12-23T18:48:54.517819Z node 1 :PERSQUEUE DEBUG: p ... request needed for owner 2025-12-23T18:51:17.421796Z node 74 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|48145074-b44c4b8c-c378746f-1aae350f_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:51:17.574691Z node 74 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:51:17.575183Z node 74 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ad81e29c-c64692ae-35d19dda-23c22ec8_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:51:18.187919Z node 75 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 75 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [75:107:2057] recipient: [75:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [75:107:2057] recipient: [75:105:2138] Leader for TabletID 72057594037927937 is [75:111:2142] sender: [75:112:2057] recipient: [75:105:2138] 2025-12-23T18:51:18.260405Z node 75 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:51:18.260470Z node 75 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:51:18.260519Z node 75 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:51:18.260575Z node 75 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [75:153:2057] recipient: [75:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [75:153:2057] recipient: [75:151:2172] 
Leader for TabletID 72057594037927938 is [75:157:2176] sender: [75:158:2057] recipient: [75:151:2172] Leader for TabletID 72057594037927937 is [75:111:2142] sender: [75:183:2057] recipient: [75:14:2061] 2025-12-23T18:51:18.282358Z node 75 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:51:18.283608Z node 75 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 73 actor [75:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 73 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 73 } Consumers { Name: "user" Generation: 73 Important: true } 2025-12-23T18:51:18.285049Z node 75 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [75:189:2142] 2025-12-23T18:51:18.288365Z node 75 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [75:189:2142] 2025-12-23T18:51:18.290571Z node 75 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [75:190:2142] 2025-12-23T18:51:18.293106Z node 75 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [75:190:2142] 2025-12-23T18:51:18.294983Z node 75 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [75:191:2142] 2025-12-23T18:51:18.297574Z node 75 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [75:191:2142] 2025-12-23T18:51:18.328092Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:51:18.328648Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|94f77548-7709f173-8266aa9a-f06c63dc_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:51:18.470640Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:51:18.471251Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6af006ca-a62f0be-60e4d756-f755722e_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:51:18.598259Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:51:18.598698Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a6a96545-d622ed57-d7dfb2b-a030b52e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start 
offset = 80000 2025-12-23T18:51:18.878925Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:51:18.879367Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e372345c-51c0ac3b-b1c872c2-16060d31_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:51:19.036016Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:51:19.036480Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|50bd17e3-bcdd12c6-b132e309-5b81037b_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:51:19.725416Z node 76 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 76 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [76:107:2057] recipient: [76:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [76:107:2057] recipient: [76:105:2138] Leader for TabletID 72057594037927937 is [76:111:2142] sender: [76:112:2057] recipient: [76:105:2138] 2025-12-23T18:51:19.798083Z node 76 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:51:19.798137Z node 76 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:51:19.798175Z node 76 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:51:19.798240Z node 76 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [76:153:2057] recipient: [76:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [76:153:2057] recipient: [76:151:2172] Leader for TabletID 72057594037927938 is [76:157:2176] sender: [76:158:2057] recipient: [76:151:2172] Leader for TabletID 72057594037927937 is [76:111:2142] sender: [76:183:2057] recipient: [76:14:2061] 2025-12-23T18:51:19.818658Z node 76 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:51:19.819756Z node 76 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 74 actor [76:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 74 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 74 } Consumers { Name: "user" Generation: 74 Important: true } 2025-12-23T18:51:19.821334Z node 76 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [76:189:2142] 2025-12-23T18:51:19.823880Z node 76 :PERSQUEUE INFO: partition.cpp:712: 
[72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [76:189:2142] 2025-12-23T18:51:19.825584Z node 76 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [76:190:2142] 2025-12-23T18:51:19.827944Z node 76 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [76:190:2142] 2025-12-23T18:51:19.829540Z node 76 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [76:191:2142] 2025-12-23T18:51:19.831520Z node 76 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [76:191:2142] 2025-12-23T18:51:19.862266Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:51:19.862813Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b6171edb-cc1dadbf-27c1eb2d-e6659bee_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:51:20.000047Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:51:20.000651Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|61a31027-d86eaa73-f9d5910d-4a3d2fab_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:51:20.136491Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:51:20.136874Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|495e68c7-53e5442-fcabaea0-74213cbe_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 80000 2025-12-23T18:51:20.427221Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:51:20.427602Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|fb7e2019-eada9ce8-2b8800c3-11eb912c_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:51:20.573994Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:51:20.574325Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|aa0653c8-77c66cb3-c1b25062-8e3b4f82_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/0055ce/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit_log.6fv6ew0n.txt 2025-12-23T18:51:08.315825Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/0055c8/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk19/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit_log._e2zq6xu.txt 
2025-12-23T18:51:08.785045Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:51:08.784999Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-12-23T18:51:08.685426Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TPQTest::TestPQSmallRead [GOOD] >> TPQTest::TestReadAndDeleteConsumer |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/0055be/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit_log.ynhrguyk.txt 2025-12-23T18:51:11.383692Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system [GOOD] >> TKeyValueTest::TestConcatToLongKey [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:92:2057] recipient: [11:91:2120] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:94:2057] recipient: [11:91:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:93:2121] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:209:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... t: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:96:2057] recipient: [32:95:2123] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:98:2057] recipient: [32:95:2123] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:97:2124] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:213:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:94:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:97:2057] recipient: [33:96:2123] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:99:2057] recipient: [33:96:2123] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:98:2124] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:78:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:81:2057] recipient: [36:80:2112] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:83:2057] recipient: [36:80:2112] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:82:2113] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:198:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:78:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:81:2057] recipient: [37:80:2112] Leader for TabletID 72057594037927937 is [37:82:2113] sender: [37:83:2057] recipient: [37:80:2112] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:82:2113] Leader for TabletID 72057594037927937 is [37:82:2113] sender: [37:198:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:79:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:82:2057] recipient: [38:81:2112] Leader for TabletID 72057594037927937 is [38:83:2113] sender: [38:84:2057] recipient: [38:81:2112] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:83:2113] Leader for TabletID 72057594037927937 is [38:83:2113] sender: [38:199:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:82:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:85:2057] recipient: [39:84:2115] Leader for TabletID 72057594037927937 is [39:86:2116] sender: [39:87:2057] recipient: [39:84:2115] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! 
new actor is[39:86:2116] Leader for TabletID 72057594037927937 is [39:86:2116] sender: [39:202:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:82:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:85:2057] recipient: [40:84:2115] Leader for TabletID 72057594037927937 is [40:86:2116] sender: [40:87:2057] recipient: [40:84:2115] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:86:2116] Leader for TabletID 72057594037927937 is [40:86:2116] sender: [40:202:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:83:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:86:2057] recipient: [41:85:2115] Leader for TabletID 72057594037927937 is [41:87:2116] sender: [41:88:2057] recipient: [41:85:2115] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:87:2116] Leader for TabletID 72057594037927937 is [41:87:2116] sender: [41:203:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:86:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:89:2057] recipient: [42:88:2118] Leader for TabletID 72057594037927937 is [42:90:2119] sender: [42:91:2057] recipient: [42:88:2118] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! 
new actor is[42:90:2119] Leader for TabletID 72057594037927937 is [42:90:2119] sender: [42:206:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:86:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:89:2057] recipient: [43:88:2118] Leader for TabletID 72057594037927937 is [43:90:2119] sender: [43:91:2057] recipient: [43:88:2118] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:90:2119] Leader for TabletID 72057594037927937 is [43:90:2119] sender: [43:206:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:87:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:90:2057] recipient: [44:89:2118] Leader for TabletID 72057594037927937 is [44:91:2119] sender: [44:92:2057] recipient: [44:89:2118] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! 
new actor is[44:91:2119] Leader for TabletID 72057594037927937 is [44:91:2119] sender: [44:207:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpBatchDelete::SimplePartitions [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpLimits::QSReplySize-useSink [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/0055b5/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit_log.v7ca0oeq.txt 2025-12-23T18:51:13.484194Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] 
sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-12-23T18:47:43.458370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:47:43.458493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:43.458546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:47:43.458592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:47:43.458626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:47:43.458654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:47:43.458705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:47:43.458805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:47:43.459792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:43.460052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:47:43.581244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:47:43.581322Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:43.582115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:47:43.603344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:47:43.603541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:47:43.603728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:47:43.614851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:47:43.615139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:47:43.615829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:43.616206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:43.619328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:43.619511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:47:43.620674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:47:43.620737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:47:43.620876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:47:43.620920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:47:43.620970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:47:43.621191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:47:43.628183Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:244:2058] recipient: [1:15:2062] 2025-12-23T18:47:43.763071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:47:43.763292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:43.763478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:47:43.763535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:47:43.763766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:47:43.763828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:47:43.765825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:43.765994Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:47:43.766207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:43.766259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:47:43.766295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:47:43.766328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:47:43.768135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:43.768203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:47:43.768251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:47:43.769864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:43.769912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:47:43.769964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:47:43.770011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:47:43.778629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:47:43.780236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:47:43.780432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:47:43.781355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:47:43.781465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:47:43.781514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:47:43.781754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:47:43.781801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:47:43.781966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:47:43.782038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:47:43.783926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard ... as 4 2025-12-23T18:51:26.630189Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2025-12-23T18:51:26.630219Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2025-12-23T18:51:26.630250Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2025-12-23T18:51:26.630282Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 5 2025-12-23T18:51:26.630309Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2025-12-23T18:51:26.631976Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:51:26.632082Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:51:26.632119Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:51:26.632156Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2025-12-23T18:51:26.632193Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2025-12-23T18:51:26.633550Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 
72057594046678944, cookie: 202 2025-12-23T18:51:26.633643Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:51:26.633676Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:51:26.633709Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2025-12-23T18:51:26.633744Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2025-12-23T18:51:26.634923Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:51:26.635019Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:51:26.635051Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:51:26.635081Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 5 2025-12-23T18:51:26.635116Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2025-12-23T18:51:26.636586Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:51:26.636682Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2025-12-23T18:51:26.636735Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2025-12-23T18:51:26.636771Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2025-12-23T18:51:26.636804Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 3 2025-12-23T18:51:26.636872Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2025-12-23T18:51:26.639994Z 
node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:51:26.640104Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:51:26.642087Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2025-12-23T18:51:26.642216Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2025-12-23T18:51:26.643932Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2025-12-23T18:51:26.643987Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2025-12-23T18:51:26.645854Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2025-12-23T18:51:26.645979Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2025-12-23T18:51:26.646021Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:3905:5621] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2025-12-23T18:51:26.647499Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2025-12-23T18:51:26.647542Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2025-12-23T18:51:26.647626Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2025-12-23T18:51:26.647656Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2025-12-23T18:51:26.647721Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2025-12-23T18:51:26.647748Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2025-12-23T18:51:26.647812Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2025-12-23T18:51:26.647841Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2025-12-23T18:51:26.647902Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2025-12-23T18:51:26.647930Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2025-12-23T18:51:26.649934Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 
72057594046678944 2025-12-23T18:51:26.650299Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2025-12-23T18:51:26.650364Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2025-12-23T18:51:26.650401Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:3908:5624] 2025-12-23T18:51:26.650702Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2025-12-23T18:51:26.650836Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2025-12-23T18:51:26.650872Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:3908:5624] 2025-12-23T18:51:26.651073Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2025-12-23T18:51:26.651219Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2025-12-23T18:51:26.651267Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2025-12-23T18:51:26.651298Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:3908:5624] 2025-12-23T18:51:26.651437Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2025-12-23T18:51:26.651469Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:3908:5624] 2025-12-23T18:51:26.651624Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2025-12-23T18:51:26.651656Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:3908:5624] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimplePartitions [GOOD] Test command err: Trying to start YDB, gRPC: 19892, MsgBus: 31057 2025-12-23T18:47:23.735230Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126915501326085:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:23.740895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # 
/home/runner/.ya/build/build_root/8ptz/005be3/r3tmp/tmpdgPB1G/pdisk_1.dat 2025-12-23T18:47:24.078529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:24.086360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:24.086476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:24.147540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:24.191536Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19892, node 1 2025-12-23T18:47:24.294950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:24.294985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:24.294999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:24.295103Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:24.295497Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:31057 TClient is connected to server localhost:31057 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:24.756107Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515644215 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:24.829187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:47:24.835858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:47:24.847068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:24.991399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:25.149722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:25.215655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:27.162205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126932681197075:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:27.162303Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:27.162628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126932681197085:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:27.162715Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:27.468459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:27.517481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:27.550393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:27.581006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:27.618406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:27.660707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:27.702845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:27.774800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:27.878836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126932681197954:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:27.878939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:27.879344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126932681197957:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:27.879434Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:27.879659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126932681197961:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:27.884234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:47:27.897344Z node 1 :KQP_WORKLOAD ... ARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:12.840178Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:12.840301Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:12.847830Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11147, node 16 2025-12-23T18:51:12.911556Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:12.911587Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:12.911597Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:12.911713Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:12.928422Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16266 TClient is connected to server localhost:16266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515872926 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:51:13.460209Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:51:13.480163Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:13.539407Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:13.701847Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:13.705605Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:51:13.774503Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:17.183440Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587127922213180126:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:17.183642Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:17.183971Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587127922213180135:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:17.184035Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:17.290470Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:17.327700Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:17.361279Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:17.393868Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:17.427305Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:17.462861Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:17.500129Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:17.551799Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:17.634012Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587127922213181004:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:17.634109Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:17.634171Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587127922213181009:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:17.634353Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587127922213181011:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:17.634410Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:17.638218Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:51:17.648597Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7587127922213181013:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:51:17.699766Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7587127900738341806:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:17.699856Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:51:17.738565Z node 16 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [16:7587127922213181067:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_restart_pdisk [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_table |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpBatchUpdate::ManyPartitions_1 [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_1 [GOOD] Test command err: Trying to start YDB, gRPC: 11450, MsgBus: 30638 2025-12-23T18:47:10.807657Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126860241936063:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:10.808412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bf6/r3tmp/tmp0WlVjW/pdisk_1.dat 2025-12-23T18:47:11.016315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:47:11.030322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:11.030432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:11.056470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:11.124211Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:11.125548Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126860241936037:2081] 1766515630806128 != 1766515630806131 TServer::EnableGrpc on GrpcPort 11450, node 1 2025-12-23T18:47:11.226551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:11.234269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:11.234289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:11.234312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:11.234461Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30638 TClient is connected to server localhost:30638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:47:11.819572Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515631167 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:11.838792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:47:11.873063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:11.879863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:12.022455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:47:12.193351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:12.271979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.139879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126877421807096:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.140038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.142390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126877421807106:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.142497Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.473460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.507945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.541343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.573480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.605495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.667867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.701844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.749612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:14.844687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126877421807976:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.844775Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.845101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126877421807981:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.845153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126877421807982:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.845503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:14.849830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sc ... 0602Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23555, node 20 2025-12-23T18:51:18.073508Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:18.073539Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:18.073570Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:18.073722Z node 20 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:18.115100Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5140 2025-12-23T18:51:18.849990Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515878064 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:51:18.968343Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:51:18.985950Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:51:19.076595Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:19.314310Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:19.421951Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:22.840499Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7587127919665051077:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:22.840613Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:51:24.340033Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7587127949729824021:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:24.340184Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:24.340524Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7587127949729824030:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:24.340579Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:24.479248Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:24.535229Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:24.587365Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:24.640136Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:24.688981Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:24.737522Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:24.799511Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:24.880720Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:25.065103Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7587127954024792214:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:25.065181Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7587127954024792219:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:25.065264Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:25.065578Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7587127954024792222:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:25.065648Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:25.071544Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:51:25.089739Z node 20 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7587127954024792221:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:51:25.172792Z node 20 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [20:7587127954024792274:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:51:27.808026Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpLimits::TooBigQuery-useSink [GOOD] >> KqpLimits::TooBigKey+useSink |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TPQTest::TestReadAndDeleteConsumer [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 22422, MsgBus: 25467 2025-12-23T18:45:25.267202Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126408943004214:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:45:25.267264Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033b7/r3tmp/tmpvKfqnI/pdisk_1.dat 2025-12-23T18:45:25.500683Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:45:25.527030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:45:25.527129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:45:25.545142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:45:25.624740Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:45:25.626164Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126408943004187:2081] 1766515525265463 != 1766515525265466 TServer::EnableGrpc on GrpcPort 22422, node 1 2025-12-23T18:45:25.705816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:45:25.705838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:45:25.705845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:45:25.705944Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:45:25.713144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25467 TClient is connected to server localhost:25467 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515525649 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:45:26.210633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:45:26.235501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:26.296117Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:45:26.396312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:26.578093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:26.666710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:45:28.427373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126421827909044:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.427513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.427940Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126421827909054:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.427992Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:28.723628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.782225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.830843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.883352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:28.963057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:29.038902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:29.101310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:29.171316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:29.294584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126426122877837:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:29.294648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:29.294850Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126426122877843:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:29.294884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126426122877842:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:29.294946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:29.298293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:29.319751Z node 1 ... } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515870588 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:51:11.090276Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:51:11.109308Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:11.176589Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:11.378598Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:51:11.390559Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:11.535819Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:14.197787Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587127906732977716:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:14.197901Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:14.198451Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587127906732977726:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:14.198517Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:14.198717Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587127906732977728:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:14.198784Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:14.266698Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:14.303529Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:14.341307Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:14.378784Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:14.418774Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:14.460985Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:14.502324Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:14.569045Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:51:14.656972Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587127906732978599:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:14.657126Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:14.657245Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587127906732978604:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:14.657508Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587127906732978606:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:14.657570Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:51:14.662102Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:51:14.677556Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587127906732978607:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:51:14.733801Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587127906732978659:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:51:15.365302Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587127889553106690:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:15.365406Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:51:16.667134Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:25.467438Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:51:25.467466Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:28.051553Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=OTMwYmM3OTUtYTIxZGQwZjUtYzFjYTEzNjItZGQxMGI3MzA=, ActorId: [5:7587127953977620114:2691], ActorState: ExecuteState, TraceId: 01kd68thf5afrwj3px61rg1wwk, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Intermediate data materialization exceeded size limit (88240925 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data." issue_code: 2013 severity: 1 }
: Error: Intermediate data materialization exceeded size limit (88240925 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013 |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [GOOD] Test command err: 2025-12-23T18:48:50.241721Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2025-12-23T18:48:50.310488Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:50.310558Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:50.310622Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:50.310730Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2025-12-23T18:48:50.332335Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:50.356141Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "important_user" Generation: 1 Important: true } 2025-12-23T18:48:50.357106Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] 
bootstrapping 0 [1:189:2142] 2025-12-23T18:48:50.360019Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2025-12-23T18:48:50.371381Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:48:50.371890Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5c570c89-40815c2b-3a40d6a9-6867d04_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:181:2194] 2025-12-23T18:48:50.426494Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:50.469449Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:50.490520Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:50.501121Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:50.542467Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:50.584097Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:50.615259Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:50.761457Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:50.813073Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:51.035067Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:51.045613Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:51.311505Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:51.548841Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:51.569656Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:51.865565Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 
disallowed 0 2025-12-23T18:48:52.128733Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:52.420556Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:52.677037Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:52.718540Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:53.049011Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:181:2194] 2025-12-23T18:48:53.363667Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:53.632673Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:53.871152Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:54.098882Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:54.129969Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:54.434788Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:54.703508Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:54.964802Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:55.233604Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:55.443862Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:55.506857Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:55.839437Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:56.089436Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:56.400680Z node 1 :TABLET_RESOLVER 
INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:56.675297Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:56.758394Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:56.936917Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.175013Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.444175Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.693017Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.954719Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.996132Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.552018Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates fo ... sn't have tx info 2025-12-23T18:51:29.878969Z node 36 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:51:29.879048Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:51:29.879141Z node 36 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:154:2057] recipient: [36:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:154:2057] recipient: [36:152:2172] Leader for TabletID 72057594037927938 is [36:158:2176] sender: [36:159:2057] recipient: [36:152:2172] Leader for TabletID 72057594037927937 is [36:112:2142] sender: [36:184:2057] recipient: [36:14:2061] 2025-12-23T18:51:29.905484Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:51:29.906325Z node 36 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1002 actor [36:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1002 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1002 } Consumers { Name: "user1" Generation: 1002 Important: true } Consumers { Name: "user2" Generation: 1002 Important: true } 
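Earlier in this chunk the KQP_SESSION error with issue code 2013 ("Intermediate data materialization exceeded size limit (88240925 > 50331648)") explicitly advises writing in smaller batches. A minimal sketch of that idea is below; it is not YDB SDK code, and `rows`, `MAX_BATCH_BYTES` and `write_batch` are hypothetical placeholders for whatever produces the data and performs one bounded write.

    import json

    # 50331648 bytes = 48 MiB, the materialization limit reported in the log above;
    # keep each batch well under it.
    MAX_BATCH_BYTES = 50331648 // 8

    def batches_by_size(rows, max_bytes=MAX_BATCH_BYTES):
        """Yield lists of rows whose rough serialized size stays below max_bytes."""
        batch, size = [], 0
        for row in rows:
            row_size = len(json.dumps(row).encode("utf-8"))  # rough size estimate
            if batch and size + row_size > max_bytes:
                yield batch
                batch, size = [], 0
            batch.append(row)
            size += row_size
        if batch:
            yield batch

    def write_all(rows, write_batch):
        """write_batch is a placeholder for whatever issues one bounded upsert."""
        for batch in batches_by_size(rows):
            write_batch(batch)

The only point the sketch makes is the one the error message itself makes: bound each write by estimated payload size instead of materializing the whole dataset in a single query.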
2025-12-23T18:51:29.907136Z node 36 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [36:190:2142] 2025-12-23T18:51:29.911046Z node 36 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [36:190:2142] 2025-12-23T18:51:29.926873Z node 36 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:51:29.927579Z node 36 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|46d8628b-bb772961-38eb9d33-ac891cee_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [36:112:2142] sender: [36:262:2057] recipient: [36:104:2137] Leader for TabletID 72057594037927937 is [36:112:2142] sender: [36:265:2057] recipient: [36:264:2258] Leader for TabletID 72057594037927937 is [36:266:2259] sender: [36:267:2057] recipient: [36:264:2258] 2025-12-23T18:51:32.337442Z node 36 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:51:32.337542Z node 36 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:51:32.338363Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:51:32.338448Z node 36 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:51:32.339307Z node 36 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [36:315:2259] 2025-12-23T18:51:32.373106Z node 36 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:51:32.377328Z node 36 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [36:315:2259] 2025-12-23T18:51:32.410112Z node 36 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 suffix '0' size 8296398 2025-12-23T18:51:32.439947Z node 36 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 36 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:51:32.440176Z node 36 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 36 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [36:266:2259] sender: [36:339:2057] recipient: [36:14:2061] 2025-12-23T18:51:32.445795Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:51:32.451296Z node 36 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: cannot finish read request. 
Consumer user1 is gone from partition 2025-12-23T18:51:32.451678Z node 36 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1003 actor [36:336:2310] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1003 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1002 Important: true } 2025-12-23T18:51:32.980993Z node 37 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 37 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:107:2057] recipient: [37:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:107:2057] recipient: [37:105:2138] Leader for TabletID 72057594037927937 is [37:111:2142] sender: [37:112:2057] recipient: [37:105:2138] 2025-12-23T18:51:33.052272Z node 37 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:51:33.052343Z node 37 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:51:33.052393Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:51:33.052451Z node 37 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [37:153:2057] recipient: [37:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [37:153:2057] recipient: [37:151:2172] Leader for TabletID 72057594037927938 is [37:157:2176] sender: [37:158:2057] recipient: [37:151:2172] Leader for TabletID 72057594037927937 is [37:111:2142] sender: [37:183:2057] recipient: [37:14:2061] 2025-12-23T18:51:33.075893Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:51:33.076557Z node 37 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1004 actor [37:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1004 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1004 } Consumers { Name: "user1" Generation: 1004 Important: true } Consumers { Name: "user2" Generation: 1004 Important: true } 2025-12-23T18:51:33.077150Z node 37 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [37:189:2142] 2025-12-23T18:51:33.080504Z node 37 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [37:189:2142] 2025-12-23T18:51:33.095209Z node 37 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 
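The two "Config applied" dumps above first register consumers user, user1 and user2 and then drop user1, after which the pending read fails with "Consumer user1 is gone from partition". When digging through such output it can help to pull the consumer list out of a config dump; the following is a rough sketch that assumes only the flat `Consumers { ... }` formatting visible in this log, nothing about the actual protobuf schema.

    import re

    # Matches blocks like: Consumers { Name: "user1" Generation: 1002 Important: true }
    CONSUMER_RE = re.compile(r'Consumers \{ Name: "([^"]+)"(.*?)\}')

    def consumers_in(config_dump: str) -> dict:
        """Map consumer name -> True if its block contains 'Important: true'."""
        return {
            name: "Important: true" in body
            for name, body in CONSUMER_RE.findall(config_dump)
        }

    sample = ('Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1002 } '
              'Consumers { Name: "user1" Generation: 1002 Important: true } '
              'Consumers { Name: "user2" Generation: 1002 Important: true }')
    assert consumers_in(sample) == {"user": False, "user1": True, "user2": True}

Running the same helper over the version 1003 dump would show only user2 left, which is exactly why the read for user1 is rejected.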
2025-12-23T18:51:33.095710Z node 37 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|39bddc89-dc022e-40006439-30a5b144_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [37:111:2142] sender: [37:261:2057] recipient: [37:103:2137] Leader for TabletID 72057594037927937 is [37:111:2142] sender: [37:264:2057] recipient: [37:263:2258] Leader for TabletID 72057594037927937 is [37:265:2259] sender: [37:266:2057] recipient: [37:263:2258] 2025-12-23T18:51:35.845535Z node 37 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:51:35.845603Z node 37 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:51:35.846136Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:51:35.846189Z node 37 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:51:35.846778Z node 37 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [37:314:2259] 2025-12-23T18:51:35.878099Z node 37 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:51:35.883176Z node 37 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [37:314:2259] 2025-12-23T18:51:35.910580Z node 37 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 suffix '0' size 8296398 2025-12-23T18:51:35.938863Z node 37 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 37 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:51:35.939006Z node 37 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 37 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [37:265:2259] sender: [37:338:2057] recipient: [37:14:2061] 2025-12-23T18:51:35.944748Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:51:35.952826Z node 37 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: cannot finish read request. 
Consumer user1 is gone from partition 2025-12-23T18:51:35.953103Z node 37 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1005 actor [37:335:2310] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1005 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1004 Important: true } |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/00559c/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit_log.7c6nmsdc.txt 2025-12-23T18:51:17.057360Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_evict_vdisk_grpc >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_topic [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_execute_minikql [GOOD] |96.8%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_restart_pdisk [GOOD] >> test_canonical_records.py::test_dml_through_http [GOOD] >> KqpLimits::TooBigKey+useSink [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] >> KqpLimits::TooBigKey-useSink |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_execute_minikql [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_topic [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| 
[TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/00553a/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit_log.4qt_d520.txt 2025-12-23T18:51:38.124181Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:51:38.124136Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-12-23T18:51:37.991461Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_replace_config |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpLimits::TooBigKey-useSink [GOOD] >> KqpLimits::TooBigColumn-useSink |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml_through_http [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/005530/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit_log.roxcs__x.txt 2025-12-23T18:51:36.790184Z: {"database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:51:36.790140Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-12-23T18:51:36.774125Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-12-23T18:51:36.919958Z: {"database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:51:36.919925Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 
101","start_time":"2025-12-23T18:51:36.900794Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-12-23T18:51:37.049897Z: {"database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:51:37.049861Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-12-23T18:51:37.031578Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-12-23T18:51:37.180916Z: {"database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:51:37.180879Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-12-23T18:51:37.159790Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-12-23T18:51:37.308990Z: {"database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:51:37.308891Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-12-23T18:51:37.291291Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-12-23T18:51:37.437296Z: {"database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:51:37.437247Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-12-23T18:51:37.420056Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView >> KqpSystemView::FailResolve >> KqpSystemView::QuerySessionsOrderByDesc >> KqpSysColV1::StreamInnerJoinSelect >> KqpSystemView::TopQueriesOrderByDesc >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView >> KqpSystemView::PartitionStatsFollower >> KqpSystemView::NodesSimple >> KqpSysColV1::SelectRowById >> KqpSysColV0::SelectRowAsterisk >> KqpSystemView::PartitionStatsRange1 >> KqpSysColV0::InnerJoinSelectAsterisk >> KqpSysColV1::InnerJoinTables >> KqpSystemView::Sessions+EnableRealSystemViewPaths >> KqpSysColV1::UpdateAndDelete >> KqpSysColV1::SelectRowAsterisk >> KqpSysColV1::StreamInnerJoinSelectAsterisk >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView >> KqpSystemView::PartitionStatsParametricRanges 
>> KqpSystemView::PartitionStatsRanges >> KqpSystemView::FailNavigate >> KqpSysColV1::SelectRange >> KqpSystemView::QueryStatsScan >> KqpSystemView::PartitionStatsRange3 >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView >> KqpSysColV1::InnerJoinSelect >> KqpSystemView::PartitionStatsRange2 >> KqpSystemView::ReadSuccess >> KqpSystemView::PartitionStatsOrderByDesc >> KqpSystemView::NodesRange1 >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] >> KqpSystemView::QuerySessionsOrderByDesc [GOOD] >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:113:2143] Leader for TabletID 72057594046447617 is [1:132:2155] sender: [1:136:2058] recipient: [1:114:2144] Leader for TabletID 72057594046316545 is [1:139:2160] sender: [1:141:2058] recipient: [1:115:2145] 2025-12-23T18:50:15.457974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:50:15.458050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:15.458084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:50:15.458110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:50:15.458139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:50:15.458172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:50:15.458228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:50:15.458292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-12-23T18:50:15.459060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:50:15.459334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:50:15.593581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:50:15.593672Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:50:15.594584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:132:2155] sender: [1:178:2058] recipient: [1:15:2062] 2025-12-23T18:50:15.608349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:50:15.608468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:50:15.608655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:50:15.621133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:50:15.623447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:50:15.624317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:50:15.624710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:50:15.633898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:15.634145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:50:15.636240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:50:15.636311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:50:15.636601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:50:15.636656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:50:15.636715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:50:15.636926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:50:15.805776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir 
MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.806972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.807131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.807213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.807287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.807351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.807457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.807535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.807610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.807766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.807890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.807995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:50:15.808074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 
52:02.424000Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-12-23T18:52:02.424041Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-12-23T18:52:02.424082Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2025-12-23T18:52:02.424128Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:52:02.424219Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2025-12-23T18:52:02.427504Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000039 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1261 } } CommitVersion { Step: 5000039 TxId: 1002 } 2025-12-23T18:52:02.427561Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-12-23T18:52:02.427715Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000039 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1261 } } CommitVersion { Step: 5000039 TxId: 1002 } 2025-12-23T18:52:02.427843Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000039 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1261 } } CommitVersion { Step: 5000039 TxId: 1002 } debug: NTableState::TProposedWaitParts operationId# 1002:0 2025-12-23T18:52:02.429797Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 309237648031 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-12-23T18:52:02.429838Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-12-23T18:52:02.429927Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 309237648031 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 
2025-12-23T18:52:02.429973Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:52:02.430042Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 734 RawX2: 309237648031 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-12-23T18:52:02.430101Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:02.430135Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2025-12-23T18:52:02.430167Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:52:02.430207Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1002:0 129 -> 240 2025-12-23T18:52:02.431436Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-12-23T18:52:02.431575Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-12-23T18:52:02.434340Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-12-23T18:52:02.434449Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-12-23T18:52:02.434837Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-12-23T18:52:02.434885Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-12-23T18:52:02.435110Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1002:0 progress is 1/1 2025-12-23T18:52:02.435144Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-12-23T18:52:02.435180Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1002:0 progress is 1/1 2025-12-23T18:52:02.435206Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-12-23T18:52:02.435246Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-12-23T18:52:02.435292Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-12-23T18:52:02.435334Z node 72 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1002:0 2025-12-23T18:52:02.435373Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1002:0 2025-12-23T18:52:02.435480Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-12-23T18:52:02.439267Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-12-23T18:52:02.439319Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-12-23T18:52:02.439705Z node 72 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-12-23T18:52:02.439802Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-12-23T18:52:02.439841Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:813:2784] TestWaitNotification: OK eventTxId 1002 2025-12-23T18:52:02.440302Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:52:02.440544Z node 72 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 291us result status StatusSuccess 2025-12-23T18:52:02.441097Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 
Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/0054e1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs1/audit_log.9embnex1.txt 2025-12-23T18:51:49.841451Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:51:49.841408Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-12-23T18:51:49.629624Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QuerySessionsOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 10321, MsgBus: 26782 2025-12-23T18:51:58.908657Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128097881837000:2167];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.908740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e53/r3tmp/tmpcFinsi/pdisk_1.dat 2025-12-23T18:51:59.286852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.313655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.313742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.319024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.482824Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128097881836865:2081] 1766515918896596 != 1766515918896599 2025-12-23T18:51:59.483566Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10321, node 1 2025-12-23T18:51:59.507002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.620892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.620924Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.620930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.621004Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:59.929910Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:26782 TClient is connected to server localhost:26782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515919497 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.464199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:52:00.476442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:52:00.496481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:02.942177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128115061707017:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.942301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.944093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128115061707029:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.944158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128115061707030:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.944314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.959536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:02.977803Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128115061707033:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:52:03.046565Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128119356674381:2580] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:03.908128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128097881837000:2167];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:03.911515Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD] >> KqpSysColV0::SelectRowAsterisk [GOOD] >> KqpSystemView::FailNavigate [GOOD] >> KqpSystemView::PartitionStatsRange1 [GOOD] >> KqpSysColV1::InnerJoinTables [GOOD] >> KqpSysColV1::SelectRowById [GOOD] >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] >> KqpSysColV1::SelectRowAsterisk [GOOD] >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView [GOOD] >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] >> KqpSysColV1::StreamInnerJoinSelect [GOOD] >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] >> KqpSysColV1::SelectRange [GOOD] >> KqpSystemView::PartitionStatsRange3 [GOOD] >> KqpSystemView::PartitionStatsParametricRanges [GOOD] >> KqpSysColV0::SelectRange >> KqpSystemView::PartitionStatsRange2 [GOOD] >> KqpSystemView::PartitionStatsRanges [GOOD] >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView [GOOD] >> KqpSystemView::ReadSuccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD] Test command err: Trying to start YDB, gRPC: 23631, MsgBus: 6494 2025-12-23T18:51:58.900176Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128097212595431:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.900238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e55/r3tmp/tmpwj1xgY/pdisk_1.dat 2025-12-23T18:51:59.318249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.363387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.363488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.408673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23631, node 1 2025-12-23T18:51:59.489595Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.581489Z node 1 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.683531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.683582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.683590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.683666Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:59.926888Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6494 TClient is connected to server localhost:6494 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515919546 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.470542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:00.486825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:52:00.501672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.676606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:00.862265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.963328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.800366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128114392466453:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.800453Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.800702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128114392466463:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.800734Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.315792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.350067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.384510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.447922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.482572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.527475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.562637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.610796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.699041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118687434633:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.699115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.699126Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118687434638:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.699534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118687434640:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.699584Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.702460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.712948Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128118687434641:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:52:03.786462Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128118687434695:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:03.900635Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128097212595431:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:03.900698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:05.365958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:05.528057Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7587128127277369688:4043], for# user0@builtin, access# SelectRow 2025-12-23T18:52:05.528221Z node 1 :KQP_EXECUTER ERROR: kqp_table_resolver.cpp:274: TxId: 281474976715675. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 30] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-12-23T18:52:05.538171Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=MWM5OWIzOWItYjIyMTZkOGQtOWY4MGVjOTQtZGE1YjNmZDY=, ActorId: [1:7587128127277369661:2536], ActorState: ExecuteState, TraceId: 01kd68vrcrcw07n2excz0z60g9, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Failed to resolve table `/Root/.sys/partition_stats` status: AccessDenied." 
issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } 2025-12-23T18:52:05.538583Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515925526, txId: 281474976715674] shutting down >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView [GOOD] >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 29615, MsgBus: 28514 2025-12-23T18:51:58.900081Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128099257818938:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.900136Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:58.949488Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e60/r3tmp/tmpao5C9B/pdisk_1.dat 2025-12-23T18:51:59.258935Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.309922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.310018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.391509Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.401744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29615, node 1 2025-12-23T18:51:59.484034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.608802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.608884Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.608891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.608980Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:59.959188Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28514 TClient is connected to server localhost:28514 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515919448 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.681820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:00.717478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.896812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:52:01.147312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:01.228773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.978059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128116437689965:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.978169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.978777Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128116437689975:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.978835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.376661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.412184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.441504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.499410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.536252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.580535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.621030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.682564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.756714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120732658141:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.756801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.757118Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120732658147:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.757153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120732658146:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.757177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.760340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.769438Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128120732658150:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:52:03.854426Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128120732658201:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:03.902909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128099257818938:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:03.902992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 2937, MsgBus: 8879 2025-12-23T18:51:58.956671Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128098081620747:2256];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.957060Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e50/r3tmp/tmpWtf2Zh/pdisk_1.dat 2025-12-23T18:51:59.272793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.286121Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.310928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.311010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.316144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2937, node 1 2025-12-23T18:51:59.459159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.611420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.611452Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.611460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.611589Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:59.962941Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:8879 
TClient is connected to server localhost:8879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515919392 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:52:00.574834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:00.598464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:52:00.759468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:01.023052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.127153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.784914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128115261491586:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.785033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.785332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128115261491596:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.785391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.315702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.345155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.377477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.434278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.461611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.495624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.526884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.602102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.696149Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119556459777:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.696242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.696439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119556459782:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.696447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119556459783:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.696481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.700327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.715982Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128119556459786:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:52:03.774603Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128119556459839:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:03.957418Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128098081620747:2256];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:03.957485Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:05.625660Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515925613, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: Trying to start YDB, gRPC: 9715, MsgBus: 24669 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e4a/r3tmp/tmpoL1uDE/pdisk_1.dat 2025-12-23T18:51:59.855684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.855785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.908392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.926988Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.927105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:51:59.994943Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128101152952569:2081] 1766515919383890 != 1766515919383893 TServer::EnableGrpc on GrpcPort 9715, node 1 2025-12-23T18:52:00.014705Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:00.116615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:00.119199Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:00.119210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.119303Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:00.132884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24669 2025-12-23T18:52:00.532554Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24669 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515920001 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.787642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:52:00.825364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:01.006618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.254516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.355088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:03.348496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118332823618:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.348581Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.349025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118332823628:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.349079Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.649045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.686043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.721382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.757934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.790695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.845343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.874617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.921729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.002246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122627791796:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.002343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.002589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122627791801:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.002669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122627791802:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.002825Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.006430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:04.018146Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128122627791805:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:04.094513Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128122627791856:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:05.680258Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7587128126922759482:3979], for# user0@builtin, access# DescribeSchema 2025-12-23T18:52:05.680297Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7587128126922759482:3979], for# user0@builtin, access# DescribeSchema 2025-12-23T18:52:05.693508Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587128126922759472:2537], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:52:05.695112Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=Mzk2MzNmODAtMmNhOWZiYzktZjUzNjZmNTgtM2U4Y2VhZmU=, ActorId: [1:7587128126922759465:2533], ActorState: ExecuteState, TraceId: 01kd68vrm1cd2edtqh4xs7q6qq, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 13 } message: "At function: KiReadTable!" end_position { row: 2 column: 13 } severity: 1 issues { position { row: 2 column: 13 } message: "Cannot find table \'db.[/Root/.sys/partition_stats]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 13 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 9733, MsgBus: 65515 2025-12-23T18:51:58.911601Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128096358684750:2084];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.923884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:58.970572Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e58/r3tmp/tmpprdAhJ/pdisk_1.dat 2025-12-23T18:51:59.422192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.422284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.457164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.464937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.470872Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128096358684693:2081] 1766515918902913 != 1766515918902916 2025-12-23T18:51:59.472939Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9733, node 1 2025-12-23T18:51:59.646242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.646276Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.646284Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-12-23T18:51:59.646363Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:59.716283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.945545Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:65515 TClient is connected to server localhost:65515 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515919546 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:52:00.578336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:00.608681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:52:00.622826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.818099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.060233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:52:01.158752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.041031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128117833523046:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.041167Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.042125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128117833523056:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.042251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.495750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.528058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.562054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.602064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.645156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.685335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.717021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.755731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.840299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128117833523926:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.840396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.842036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128117833523931:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.842096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128117833523932:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.842417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.845774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.860344Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128117833523935:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:52:03.908119Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128096358684750:2084];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:03.908231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:03.920316Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128117833523986:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSysColV1::UpdateAndDelete [GOOD] >> KqpSystemView::QueryStatsScan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 62844, MsgBus: 7389 2025-12-23T18:51:58.899809Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128096591933424:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.899885Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e66/r3tmp/tmpZz9Dsz/pdisk_1.dat 2025-12-23T18:51:59.305011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.343078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.343216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.403186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.437429Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.443713Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128096591933394:2081] 1766515918897114 != 1766515918897117 TServer::EnableGrpc on GrpcPort 62844, node 1 2025-12-23T18:51:59.494196Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.622451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.622482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.622492Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.622563Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2025-12-23T18:51:59.914898Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7389 TClient is connected to server localhost:7389 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515919455 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.552128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:52:00.589025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:00.759786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.055561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.199948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.934848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128113771804453:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.934948Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.935386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128113771804463:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.935446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.315732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.350521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.381342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.418856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.453299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.492851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.530920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.578604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.661603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118066772631:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.661696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.662111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118066772636:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.662164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118066772637:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.662434Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.665735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.677605Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128118066772640:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:03.733603Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128118066772691:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:03.900244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128096591933424:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:03.900321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 11181, MsgBus: 1315 2025-12-23T18:51:59.343796Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128102065101967:2259];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.343880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e4f/r3tmp/tmp8UyjPc/pdisk_1.dat 2025-12-23T18:51:59.445049Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:51:59.782957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.811869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.811966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.884479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.934820Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128102065101730:2081] 1766515919286864 != 1766515919286867 2025-12-23T18:51:59.940491Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11181, node 1 2025-12-23T18:52:00.089906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:00.172541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:00.172572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:00.172578Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.172644Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:00.346906Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1315 TClient is connected to server localhost:1315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515919994 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.822164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:00.853910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.077094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.263650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.373888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:03.376374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119244972800:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.376471Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.376971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119244972810:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.377012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.715523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.746726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.780223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.810221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.845395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.909824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.957485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.006319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.099950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128123539940981:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.100061Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.100279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128123539940987:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.100312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128123539940986:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.100320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.104102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:04.120059Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128123539940990:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:52:04.194533Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128123539941041:3782] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:04.343819Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128102065101967:2259];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.343908Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 18792, MsgBus: 7217 2025-12-23T18:51:58.900556Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128099559382760:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.900669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e5f/r3tmp/tmpXaew8b/pdisk_1.dat 2025-12-23T18:51:59.203065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.220912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.221078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.324784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.399402Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18792, node 1 2025-12-23T18:51:59.479021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.612964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.612990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.612996Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.613072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:59.927019Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7217 
TClient is connected to server localhost:7217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515919399 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.507379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:00.515842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:52:00.525091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.781402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.998231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.094850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:03.012804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121034221083:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.012908Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.013272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121034221093:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.013361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.557565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.592754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.633140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.664809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.695952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.737139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.769015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.810542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.900427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128099559382760:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:03.900486Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:03.908913Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121034221975:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.909024Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.909167Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121034221980:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.909421Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121034221982:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.909545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.914514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.938239Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128121034221984:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:52:04.035028Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128125329189336:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:05.812836Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515925846, txId: 281474976715673] shutting down >> TSchemeShardTest::MkRmDir |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 27193, MsgBus: 61900 2025-12-23T18:51:58.900904Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128096795557175:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.900958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:58.922259Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.006092s test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e5b/r3tmp/tmp8AcUzy/pdisk_1.dat 2025-12-23T18:51:59.210091Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.237661Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.237778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.294191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.416539Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.421397Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128096795557150:2081] 1766515918899239 != 1766515918899242 TServer::EnableGrpc on GrpcPort 27193, node 1 2025-12-23T18:51:59.487405Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.608800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.608883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.608891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.608960Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:59.922885Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:61900 TClient is connected to server localhost:61900 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515919392 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.603505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:00.613392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:52:00.633507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.809851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.029716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting...2025-12-23T18:52:01.113188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:02.863266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128113975428206:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.863392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.863807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128113975428216:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.863855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.316039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.349288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.384897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.420148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.456518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.493396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.527675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.570208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.650231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118270396385:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.650359Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.652748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118270396390:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.652812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118270396391:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.652853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.657488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.673054Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128118270396394:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:03.761526Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128118270396445:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:03.901165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128096795557175:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:03.901305Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 27304, MsgBus: 17735 2025-12-23T18:51:58.899668Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128097057283999:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.899726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e6e/r3tmp/tmpsOjzfP/pdisk_1.dat 2025-12-23T18:51:59.275410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.275532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.334141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.377443Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.379845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27304, node 1 2025-12-23T18:51:59.502844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.614856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.614877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.614883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.614977Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:59.916522Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected 
to server localhost:17735 TClient is connected to server localhost:17735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515919441 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.440544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:00.455233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:52:00.495685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.704191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.887191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.981171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.836362Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128114237155016:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.836490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.836809Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128114237155026:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.836853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.326291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.360310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.392287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.430573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.471984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.503695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.540329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.583562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.657934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118532123202:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.658005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.658448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118532123207:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.658500Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118532123208:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.658766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.662609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.672654Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128118532123211:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:03.736003Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128118532123262:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:03.900173Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128097057283999:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:03.900237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:05.367694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 3294, MsgBus: 3869 2025-12-23T18:51:59.296669Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128102789293597:2250];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.297420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:59.323947Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e4b/r3tmp/tmpv8nkls/pdisk_1.dat 2025-12-23T18:51:59.721786Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.740925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.750880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.802433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.939322Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.946919Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128102789293385:2081] 1766515919247410 != 1766515919247413 TServer::EnableGrpc on GrpcPort 3294, node 1 2025-12-23T18:52:00.020220Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:00.090164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:00.090189Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:00.090199Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.090290Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:00.286938Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3869 TClient is connected to server localhost:3869 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515919938 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.795313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:00.818823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.994065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:52:01.206909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:01.316888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:03.234358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119969164442:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.234475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.236160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119969164452:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.236224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.560646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.596118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.627492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.678651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.710832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.748470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.784515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.826122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.916703Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119969165321:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.916780Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.916969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119969165327:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.917009Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.917038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119969165326:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.921214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.938082Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128119969165330:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:04.009197Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128124264132677:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:04.286010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128102789293597:2250];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.286077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:06.078775Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515926098, txId: 281474976710673] shutting down |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 31402, MsgBus: 27862 2025-12-23T18:51:58.904324Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128099908735407:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.904483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e7b/r3tmp/tmpQLumXs/pdisk_1.dat 2025-12-23T18:51:59.327152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.327273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.424035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.428329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.477061Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.481358Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128099908735365:2081] 1766515918896960 != 1766515918896963 TServer::EnableGrpc on GrpcPort 31402, node 1 2025-12-23T18:51:59.583430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.609971Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.609998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.610005Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.610072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:59.918493Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:27862 TClient is connected to server localhost:27862 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515919518 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:52:00.535264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:00.539638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:52:00.551105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.693402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.926338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.078565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:03.087636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121383573720:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.087738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.091943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121383573730:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.092031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.433580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.466715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.533533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.562452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.589493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.627783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.667247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.722344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.805817Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121383574602:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.805909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.806231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121383574607:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.806270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121383574608:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.806279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.810019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.820025Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128121383574611:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:03.877739Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128121383574662:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:03.905002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128099908735407:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:03.905092Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:06.106130Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515926140, txId: 281474976710673] shutting down |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] Test command err: Trying to start YDB, gRPC: 9839, MsgBus: 61185 2025-12-23T18:51:59.649542Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128103546198807:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.649741Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:59.682145Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e46/r3tmp/tmpIgminf/pdisk_1.dat 2025-12-23T18:52:00.045286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:00.045396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:00.054380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:00.118129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:00.163624Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9839, node 1 2025-12-23T18:52:00.171339Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128103546198673:2081] 1766515919617032 != 1766515919617035 2025-12-23T18:52:00.331259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:00.331295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2025-12-23T18:52:00.331302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.331368Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:00.407523Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61185 2025-12-23T18:52:00.663028Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:61185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515920204 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:01.025783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:01.037929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:52:01.053707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.209597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.416512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:01.500052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:03.483639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120726069740:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.483774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.485246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120726069750:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.485338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.003521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.071894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.144736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.181374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.222193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.266033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.298028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.347584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.435908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128125021037923:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.436028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.436290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128125021037928:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.436316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128125021037929:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.436365Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.440406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:04.451768Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128125021037932:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:52:04.527343Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128125021037985:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:04.648726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128103546198807:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.681182Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:06.387583Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515926380, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 2289, MsgBus: 10303 2025-12-23T18:51:59.544671Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128100415030437:2181];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.544728Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:59.583287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e47/r3tmp/tmpCmWvjm/pdisk_1.dat 2025-12-23T18:52:00.045202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:00.045299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:00.076937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:00.085807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:00.104513Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2289, node 1 2025-12-23T18:52:00.351368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:00.351400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:00.351406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.351484Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:00.399861Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:00.541940Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10303 TClient is connected to server localhost:10303 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515920155 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:01.069907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:01.078221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:52:01.086045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:01.294655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.497647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.583120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:03.524765Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128117594901338:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.524859Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.525182Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128117594901348:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.525243Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.843180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.876550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.949491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.986374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.020681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.081264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.118532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.161636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.242491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121889869519:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.242569Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.242827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121889869524:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.242865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121889869525:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.242996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.246239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:04.259138Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128121889869528:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:52:04.348568Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128121889869579:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:04.539655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128100415030437:2181];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.539736Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: Trying to start YDB, gRPC: 19883, MsgBus: 62401 2025-12-23T18:51:59.389099Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128103256927438:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.403770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:59.492362Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e49/r3tmp/tmpbXyFuJ/pdisk_1.dat 2025-12-23T18:51:59.780608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.780720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.852041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.853663Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.931761Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.933630Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128103256927404:2081] 1766515919380642 != 1766515919380645 TServer::EnableGrpc on GrpcPort 19883, node 1 2025-12-23T18:52:00.119148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:00.119185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:00.119196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.132396Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:00.132518Z 
node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:62401 2025-12-23T18:52:00.416498Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:62401 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515920015 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.850070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:00.857369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:52:00.877437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:52:01.114516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:01.326852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.407905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:03.527194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120436798467:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.527315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.529099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120436798477:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.529176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.861636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.898170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.938772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.009637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.041215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.077545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.147083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.193139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.272119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124731766647:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.272192Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.272394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124731766652:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.272418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124731766653:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.272471Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.275649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:04.286962Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128124731766656:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:04.370543Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128124731766707:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:04.383832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128103256927438:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.383893Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:06.361785Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515926347, txId: 281474976710673] shutting down >> KqpSystemView::PartitionStatsOrderByDesc [GOOD] >> KqpLimits::TooBigColumn-useSink [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 14904, MsgBus: 15737 2025-12-23T18:51:59.930674Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128103911792775:2059];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.930719Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e42/r3tmp/tmpZ6iKvk/pdisk_1.dat 2025-12-23T18:52:00.442357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:00.442458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:00.478428Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:00.509679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:00.516767Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:00.519277Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128103911792757:2081] 1766515919929120 != 1766515919929123 TServer::EnableGrpc on GrpcPort 14904, node 1 2025-12-23T18:52:00.705489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:00.826119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-12-23T18:52:00.826147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:00.826154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.826538Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:00.959460Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:15737 TClient is connected to server localhost:15737 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515920561 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:01.665218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:01.689044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.837305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.004800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.082274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:03.899943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121091663816:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.900078Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.900461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128121091663826:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.900614Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.231388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.265767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.296141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.363542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.405808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.445046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.494051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.545686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.623367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128125386631995:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.623458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.623869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128125386632000:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.623944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128125386632001:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.624052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.627516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:04.637104Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128125386632004:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:52:04.733847Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128125386632055:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:04.932931Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128103911792775:2059];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.933063Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:06.563284Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515926555, txId: 281474976715673] shutting down |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 62599, MsgBus: 10951 2025-12-23T18:52:00.621483Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128105181636125:2233];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:00.621827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e41/r3tmp/tmp49vEFr/pdisk_1.dat 2025-12-23T18:52:01.032025Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:01.048339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:01.048461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:01.168618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62599, node 1 2025-12-23T18:52:01.235529Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:01.356719Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:01.426858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:01.426878Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:01.426884Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:01.426961Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2025-12-23T18:52:01.622297Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10951 TClient is connected to server localhost:10951 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515921254 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:02.010527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:52:02.027506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:02.190011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.365573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.449264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:04.258059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122361506961:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.258169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.258787Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122361506971:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.258850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.580548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.612533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.645490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.680356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.725070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.795376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.828652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.875687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.952921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122361507843:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.953085Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.953768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122361507849:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.953779Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122361507848:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.953860Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.957927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:04.972877Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128122361507852:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:05.027586Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128126656475199:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:05.619139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128105181636125:2233];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:05.619209Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:06.538093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:06.688733Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515926679, txId: 281474976710674] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD] Test command err: Trying to start YDB, gRPC: 9824, MsgBus: 64954 2025-12-23T18:51:59.457351Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128102323412723:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.459391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e48/r3tmp/tmpdXPKzZ/pdisk_1.dat 2025-12-23T18:51:59.822687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.822827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.824305Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.866919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.999582Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:00.026908Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128102323412679:2081] 1766515919441265 != 1766515919441268 TServer::EnableGrpc on GrpcPort 9824, node 1 2025-12-23T18:52:00.162303Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:00.243436Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:00.243476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:00.243483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.243582Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:00.474521Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:64954 TClient is connected to server localhost:64954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515920001 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:01.033310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:01.057114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:52:01.077105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.435455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.649895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:01.741906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:03.689283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119503283742:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.689399Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.689724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119503283752:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.689775Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.982879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.025443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.054700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.085820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.117997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.186528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.218103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.260540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.333255Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128123798251920:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.333332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.333399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128123798251925:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.333499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128123798251927:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.333540Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.337178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:04.348099Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128123798251929:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:04.419350Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128123798251980:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:04.457500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128102323412723:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.457565Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:06.555475Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515926542, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 19528, MsgBus: 4397 2025-12-23T18:51:58.899403Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128099233319164:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.899462Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e5a/r3tmp/tmpKj2X17/pdisk_1.dat 2025-12-23T18:51:59.247502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.247641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.357251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.402631Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.410181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19528, node 1 2025-12-23T18:51:59.643730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.643781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.643796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.651110Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:59.669187Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.913751Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:4397 TClient is connected to server localhost:4397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515919462 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.598161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:52:00.620033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:00.785625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.014855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.086574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:03.036245Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120708157495:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.036368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.036839Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120708157505:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.036923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.371288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.409703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.459400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.491415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.525119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.561111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.595068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.640392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.721773Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120708158374:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.721837Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.722308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120708158380:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.722357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120708158379:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.722397Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.727191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.739316Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128120708158383:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:03.802163Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128120708158434:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:03.899649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128099233319164:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:03.899751Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 23960, MsgBus: 15286 2025-12-23T18:51:59.702530Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128102721462582:2246];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.702638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e44/r3tmp/tmp9MSc4n/pdisk_1.dat 2025-12-23T18:52:00.252862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:00.252963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:00.350226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:00.391757Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:00.426938Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:00.428092Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128102721462374:2081] 1766515919679188 != 1766515919679191 TServer::EnableGrpc on GrpcPort 23960, node 1 2025-12-23T18:52:00.606814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:00.657227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:00.657260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:00.657267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.657353Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2025-12-23T18:52:00.698486Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:15286 TClient is connected to server localhost:15286 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515920498 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:52:01.601659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:01.608824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:52:01.614601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:52:01.828146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:02.010686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.089867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:04.091703Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124196300731:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.091815Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.092178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124196300741:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.092243Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.440528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.478364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.512452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.546713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.581591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.640907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.697072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128102721462582:2246];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.697139Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:04.709852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.762451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.853706Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124196301621:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.853798Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.854180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124196301625:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.854234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124196301627:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.854303Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.859379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:04.871189Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128124196301630:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:52:04.962353Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128124196301681:3778] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:06.376399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSystemView::Sessions+EnableRealSystemViewPaths [GOOD] >> KqpSystemView::QueryStatsSimple |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 7039, MsgBus: 3792 2025-12-23T18:51:59.369191Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128102540314793:2259];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.369466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:59.393589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e4d/r3tmp/tmpHp8nk9/pdisk_1.dat 2025-12-23T18:51:59.775223Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.778452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.778556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.792077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.899145Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128102540314561:2081] 1766515919345115 != 1766515919345118 2025-12-23T18:51:59.904367Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7039, node 1 2025-12-23T18:52:00.051984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:00.151364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-12-23T18:52:00.151396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:00.151403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.151503Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:00.367408Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3792 TClient is connected to server localhost:3792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515919973 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.853110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:00.858038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:52:00.863102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.086585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.324699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:01.429983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:03.315878Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119720185628:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.316014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.316432Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128119720185638:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.316486Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.666090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.703882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.739400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.790676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.817371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.849837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.884135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.936653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.031231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124015153810:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.031308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.031550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124015153816:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.031579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.031618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124015153815:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.035801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:04.048065Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128124015153819:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:04.112071Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128124015153870:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:04.365714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128102540314793:2259];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.365812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 5759, MsgBus: 28565 2025-12-23T18:51:59.787217Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128100889557703:2193];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.787570Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:59.856092Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e43/r3tmp/tmpmtgdcP/pdisk_1.dat 2025-12-23T18:52:00.304961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:00.305106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:00.383328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:00.396065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:00.396423Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128100889557536:2081] 1766515919765779 != 1766515919765782 2025-12-23T18:52:00.413496Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5759, node 1 2025-12-23T18:52:00.612519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:00.612551Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:00.612559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.612638Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:00.628246Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:00.788932Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28565 TClient is connected to server localhost:28565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515920477 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:01.442292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:01.451949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:52:01.474319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.690098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.868955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.951623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:03.946382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118069428589:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.946509Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.947487Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118069428599:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.947544Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.317810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.348029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.382734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.410781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.441546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.481865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.527020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.572556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.653688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122364396766:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.653836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.654166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122364396771:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.654218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122364396772:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.654330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.657205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:04.666454Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128122364396775:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:04.769919Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128122364396826:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:04.784362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128100889557703:2193];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.784449Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 62689, MsgBus: 8947 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e51/r3tmp/tmpiDfOw6/pdisk_1.dat 2025-12-23T18:51:59.401660Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:51:59.407932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.479373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.479462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.489514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.607411Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.615004Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128100383324592:2081] 1766515919052899 != 1766515919052902 TServer::EnableGrpc on GrpcPort 62689, node 1 2025-12-23T18:51:59.746826Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.795459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.795499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.795513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.795592Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8947 2025-12-23T18:52:00.129556Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to 
server localhost:8947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515919581 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.478297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:00.508798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.682620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.858971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:00.953714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:03.134869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128117563195656:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.134971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.135585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128117563195666:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.135677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.469298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.507403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.538882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.608834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.637697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.680159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.715513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.765191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.831844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128117563196537:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.831916Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.832342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128117563196543:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.832351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128117563196542:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.832401Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.835409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.844909Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128117563196546:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:52:03.911274Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128117563196597:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [GOOD] Test command err: Trying to start YDB, gRPC: 64915, MsgBus: 13883 2025-12-23T18:51:59.657054Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128101386140815:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.659585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e45/r3tmp/tmpKhECtN/pdisk_1.dat 2025-12-23T18:52:00.042208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:00.080717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:00.080825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:00.135359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:00.136920Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128101386140778:2081] 1766515919649700 != 1766515919649703 2025-12-23T18:52:00.144360Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64915, node 1 2025-12-23T18:52:00.322986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:00.380141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:00.380172Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:00.380179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.380252Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13883 2025-12-23T18:52:00.675135Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13883 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515920197 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:01.007011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:01.013617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:52:01.019772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.170594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.381171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:01.482098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:03.500743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118566011832:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.500891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.501296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128118566011842:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.501369Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.831567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.880219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.926651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.961039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.998307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.034084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.066439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.110306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:04.213265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122860980006:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.213398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.213908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122860980011:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.214008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128122860980012:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.214338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:04.218009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:04.229209Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128122860980015:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:04.291056Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128122860980066:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:04.653806Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128101386140815:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.653864Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:07.120529Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515926343, txId: 281474976710673] shutting down 2025-12-23T18:52:07.264021Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515927255, txId: 281474976710676] shutting down |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 31191, MsgBus: 23602 2025-12-23T18:52:01.332635Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128110492419759:2236];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:01.332726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e40/r3tmp/tmpfDFqPt/pdisk_1.dat 2025-12-23T18:52:01.738499Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:01.753463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:01.753864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:01.820189Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128110492419551:2081] 1766515921267221 != 1766515921267224 2025-12-23T18:52:01.831544Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:01.831665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31191, node 1 2025-12-23T18:52:02.031177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:02.055591Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-12-23T18:52:02.055621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:02.055632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:02.055739Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23602 2025-12-23T18:52:02.335378Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:23602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515921891 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:02.768622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:02.781342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.914163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:03.089626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
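The records a few lines above show how the default workload-manager resource pool gets bootstrapped on a fresh test database: TPoolFetcherActor reports the pool as NOT_FOUND, an ESchemeOpCreateResourcePool suboperation is proposed, TPoolCreatorActor schedules a retry with "Transaction ... completed, doublechecking", and the follow-up request is answered by TX_PROXY with "path exist, request accepts it", so the warnings resolve themselves once the pool becomes visible. The sketch below is only a generic illustration of that create-if-missing-then-verify pattern, not YDB's implementation; fetch_pool and propose_create_pool are hypothetical callbacks standing in for the fetcher and creator actors.

import time

class AlreadyExists(Exception):
    """Raised by the (hypothetical) propose step when the path already exists."""

def ensure_default_pool(fetch_pool, propose_create_pool, retries=3, delay=0.1):
    # Create-if-missing with a verification pass, mirroring the sequence in the
    # log: fetch -> NOT_FOUND -> propose create -> doublecheck by fetching again.
    for _ in range(retries):
        if fetch_pool() is not None:       # pool already visible: nothing to do
            return True
        try:
            propose_create_pool()          # may race with a concurrent creator
        except AlreadyExists:
            pass                           # "path exist, request accepts it"
        time.sleep(delay)                  # let the schema transaction settle
        if fetch_pool() is not None:       # the "doublechecking" pass
            return True
    return False

A caller would pass real lookup/create functions; the point is only that a second fetch after the create makes the sequence idempotent, which is why the repeated NOT_FOUND warnings above are harmless.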
2025-12-23T18:52:03.151307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.078328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128127672290612:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:05.078473Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:05.078921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128127672290622:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:05.078970Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:05.383624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.429812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.465974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.496181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.531638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.593812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.621651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.666026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.750046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128127672291496:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:05.750124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:05.750254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128127672291501:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:05.750477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128127672291503:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:05.750515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:05.755568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:05.769150Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128127672291504:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:05.833177Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128127672291556:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:06.331551Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128110492419759:2236];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:06.331639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] >> TSchemeShardTest::Boot >> TSchemeShardTest::CreateTableWithUnknownNamedConfig >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions+EnableRealSystemViewPaths [GOOD] Test command err: Trying to start YDB, gRPC: 11038, MsgBus: 25071 2025-12-23T18:51:58.936619Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128098788706618:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.936697Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e52/r3tmp/tmpfvMwBa/pdisk_1.dat 2025-12-23T18:51:59.182917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.213588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.213708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.270526Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.309312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11038, node 1 2025-12-23T18:51:59.413703Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:51:59.609275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.609298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.609304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.609379Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2025-12-23T18:51:59.938873Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25071 TClient is connected to server localhost:25071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515919364 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:00.521418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:00.528056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:52:00.532522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 1 2025-12-23T18:52:03.269442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120263544039:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.269478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120263544028:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.269585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.269937Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128120263544043:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.270041Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.273040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.283216Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128120263544042:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:52:03.367051Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128120263544095:2580] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ydb-cpp-sdk/dev 2025-12-23T18:52:03.935135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128098788706618:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:03.935631Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:08.285320Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515928279, txId: 281474976710673] shutting down |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::TopQueriesOrderByDesc [GOOD] >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 4686, MsgBus: 18346 2025-12-23T18:44:57.497975Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126287943878032:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:44:57.499043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0033d7/r3tmp/tmpCendVf/pdisk_1.dat 2025-12-23T18:44:57.735173Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:44:57.738052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:44:57.738180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:44:57.742506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:44:57.836811Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:44:57.839386Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126287943877926:2081] 1766515497492659 != 1766515497492662 TServer::EnableGrpc on GrpcPort 4686, node 1 2025-12-23T18:44:57.891947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:44:57.891969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:44:57.891984Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:44:57.892126Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:44:57.922307Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18346 TClient is connected to server localhost:18346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515497859 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:44:58.325375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:44:58.344164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.448499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.503947Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:44:58.587409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:44:58.645927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
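Nearly every test block in this section repeats the same start-up noise: FLAT_TX_SCHEMESHARD warnings that an operation part "proposed ok, but propose itself is undo unsafe" for each ESchemeOpAlterSubDomain/ESchemeOpCreateTable suboperation, and KQP_WORKLOAD_SERVICE warnings that the default resource pool was not found while it is still being created. When triaging a run like this, it helps to tally records by message so the setup noise collapses into a few counters and genuinely new WARN/ERROR lines stand out. A minimal sketch with the Python standard library, assuming the raw log has been saved locally as ya_test.log (a hypothetical file name, not something produced by this run):

import re
from collections import Counter

# Strip the parts of a record that vary between occurrences (timestamps and
# numeric ids) so identical messages group together.
TIMESTAMP = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z")
NUMBERS = re.compile(r"\b\d+\b")

def normalize(fragment: str) -> str:
    return NUMBERS.sub("<n>", TIMESTAMP.sub("<ts>", fragment))

counts = Counter()
with open("ya_test.log", encoding="utf-8", errors="replace") as log:
    for line in log:
        # Take the first WARN:/ERROR: marker on each line; a rough tally is enough.
        for severity in ("WARN:", "ERROR:"):
            at = line.find(severity)
            if at != -1:
                counts[normalize(line[at:at + 160]).strip()] += 1

for message, n in counts.most_common(20):
    print(f"{n:6d}  {message}")

On this run the top entries would be the schemeshard and workload-service warnings quoted above, which makes the rarer records (for example the TX_PROXY and TX_DATASHARD errors) much easier to spot.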
2025-12-23T18:45:00.468911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126300828781691:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.468999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.469275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126300828781701:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.469340Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:00.730838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.760611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.785270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.809894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.835707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.862840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.909484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:00.946340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:45:01.016430Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126305123749865:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:01.016498Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:01.016618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126305123749870:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:01.016679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126305123749872:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:01.016718Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:45:01.020135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:45:01.030047Z node 1 : ... 594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515916865 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:51:57.463331Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:51:57.477189Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:57.617168Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:57.743815Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:51:57.797077Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:51:57.934701Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:01.657872Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587128089313465849:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:01.657970Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:02.584380Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587128115083271478:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.584514Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.584849Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587128115083271488:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.584916Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.703929Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:02.749425Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:02.792664Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:02.856375Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:02.898346Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:02.946544Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:02.995510Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.082130Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.228261Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587128119378239688:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.228395Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.228840Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587128119378239694:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.228850Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587128119378239693:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.228890Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.235050Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:03.250565Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587128119378239697:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:03.312826Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587128119378239748:3784] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:07.239391Z node 5 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:186: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2025-12-23T18:52:07.239526Z node 5 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710673 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2025-12-23T18:52:07.239745Z node 5 :KQP_EXECUTER ERROR: kqp_data_executer.cpp:843: ActorId: [5:7587128132263141933:2529] TxId: 281474976710673. Ctx: { TraceId: 01kd68vseqa2s5e1k3dw7tzx5z, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MmRkNGJlY2YtZTU1Mzg1ZjYtNGRlNzUyYjMtZTQ3MzhmZTE=, PoolId: default, IsStreamingQuery: 0}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; 2025-12-23T18:52:07.240372Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=5&id=MmRkNGJlY2YtZTU1Mzg1ZjYtNGRlNzUyYjMtZTQ3MzhmZTE=, ActorId: [5:7587128132263141909:2529], ActorState: ExecuteState, TraceId: 01kd68vseqa2s5e1k3dw7tzx5z, Create QueryResponse for error on request, msg: , status: GENERIC_ERROR, issues: { message: "Error executing transaction (ExecError): Execution failed" severity: 1 issues { message: "[BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold" severity: 1 } }
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> TSchemeShardTest::ConsistentCopyTable >> test_canonical_records.py::test_dml [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::DependentOps |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TResourceBroker::TestCounters >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] Test command err: Trying to start YDB, gRPC: 12248, MsgBus: 21180 2025-12-23T18:51:59.062189Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128103106326051:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.086903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:59.175081Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:51:59.250589Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128101865925151:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.258811Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:59.276874Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.020580s 2025-12-23T18:51:59.290809Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.033926s 2025-12-23T18:51:59.294872Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.014375s 2025-12-23T18:51:59.307854Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587128100950557321:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.307916Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:59.381415Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e5e/r3tmp/tmpkZji93/pdisk_1.dat 2025-12-23T18:51:59.731456Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.734452Z node 3 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.734834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.948217Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.948245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.967253Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:00.192093Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:00.194198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:00.194292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:00.202855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:00.202918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:00.203072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:00.203112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:00.263928Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:00.267203Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:52:00.290945Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:00.296337Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:00.301577Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:00.310254Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:00.345275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:00.348965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:00.375401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:00.375602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:00.384692Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:00.391167Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:00.408809Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:00.411095Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 12248, node 1 2025-12-23T18:52:00.855060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:00.855103Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:00.855113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:00.855225Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:01.029978Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:01.143980Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21180 2025-12-23T18:52:01.598909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21180 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515920526 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:02.106831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:02.173395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.378673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.678076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.810673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:04.062575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128103106326051:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.062684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:04.249903Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128101865925151:2260];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.249962Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:04.310840Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587128100950557321:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.310930Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:04.857757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124581164835:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:04.858017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:04.858654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124581164845:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:04.858721Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:05.236633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.314235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.380352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.455872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.518301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.585404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.643454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.743327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.837044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128128876133240:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:05.837123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:05.837197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128128876133245:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:05.837309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128128876133247:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:05.837342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:05.840977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2025-12-23T18:52:05.860987Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128128876133248:2418], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:05.943167Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128128876133327:4761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:07.869665Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515927859, txId: 281474976710673] shutting down |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> BasicStatistics::ServerlessGlobalIndex >> BsControllerTest::TestLocalBrokenRelocation >> BsControllerTest::DecommitRejected >> BsControllerTest::SelfHealBlock4Plus2 >> SelfHealActorTest::SingleErrorDisk >> SelfHealActorTest::NoMoreThanOneReplicating >> BsControllerTest::TestLocalSelfHeal |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> TResourceBroker::TestCounters [GOOD] >> TResourceBroker::TestChangeTaskType >> SelfHealActorTest::SingleErrorDisk [GOOD] >> BsControllerTest::SelfHealMirror3dc >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest |97.0%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/0054a3/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit_log.sr9s84dr.txt 2025-12-23T18:51:56.269199Z: {"commit_tx":"1","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","cloud_id":"cloud-id-A","begin_tx":"1","resource_id":"database-id-C","end_time":"2025-12-23T18:51:56.269135Z","tx_id":"{none}","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-12-23T18:51:56.106735Z","database":"/Root/test_auditlog.py","subject":"root@builtin","status":"SUCCESS","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","component":"grpc-proxy","sanitized_token":"**** (B6C6F477)","detailed_status":"SUCCESS","remote_address":"127.0.0.1"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::TopQueriesOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 4863, MsgBus: 2923 2025-12-23T18:51:59.136650Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128102689787220:2266];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.136695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:51:59.250425Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.015253s 2025-12-23T18:51:59.294401Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587128103271077108:2197];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:59.294604Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e78/r3tmp/tmp5yyQbk/pdisk_1.dat 2025-12-23T18:51:59.738944Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.739149Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:51:59.745392Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.742858Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.979762Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:00.031783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:00.075126Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:00.130715Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:00.173662Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:00.306968Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:00.307066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:00.313786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:00.313862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:00.313966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:00.314022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-12-23T18:52:00.348568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:00.421157Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:00.421191Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:52:00.421310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:00.439659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:00.495267Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:00.500591Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:00.525251Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:00.590911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:00.614284Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:00.622465Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 4863, node 1 2025-12-23T18:52:01.027045Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:01.027076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:01.027088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:01.027213Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2923 TClient is connected to server localhost:2923 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976725657 CreateStep: 1766515920673 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:01.941424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:01.988456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.406222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.705739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:02.938509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:04.138195Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128102689787220:2266];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.138277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:04.296492Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587128103271077108:2197];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:04.296575Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:04.985056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124164625783:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:04.985155Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:04.990251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128124164625793:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:04.990344Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:05.323521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.373639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.426378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.474286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.537329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.624398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.680476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.776710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:05.853541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128128459594196:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:05.853632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:05.853760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128128459594201:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:05.853820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128128459594203:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:05.853855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-12-23T18:52:05.857360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2025-12-23T18:52:05.875975Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128128459594205:2417], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720670 completed, doublechecking } 2025-12-23T18:52:05.953393Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128128459594279:4736] txid# 281474976720671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardTTLTests::AlterTableShouldSuccess >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> TSchemeShardTest::ModifyACL [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardTest::NameFormat >> TResourceBroker::TestChangeTaskType [GOOD] |97.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> BsControllerTest::DecommitRejected [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] |97.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpSystemView::NodesRange1 [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] Test command err: 2025-12-23T18:52:11.582835Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-12-23T18:52:11.583534Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-12-23T18:52:11.584749Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-12-23T18:52:11.584781Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-12-23T18:52:11.584820Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-12-23T18:52:11.584848Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-12-23T18:52:11.584892Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-12-23T18:52:11.584920Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-12-23T18:52:11.584952Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-12-23T18:52:11.584973Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-12-23T18:52:11.585029Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-12-23T18:52:11.585092Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-12-23T18:52:11.585148Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-12-23T18:52:11.585173Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-12-23T18:52:11.585206Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-12-23T18:52:11.585225Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 
2025-12-23T18:52:11.585261Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-12-23T18:52:11.585281Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-12-23T18:52:11.585313Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-12-23T18:52:11.585332Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-12-23T18:52:11.585361Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-12-23T18:52:11.585382Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-12-23T18:52:11.585466Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-12-23T18:52:11.585485Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-12-23T18:52:11.585516Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-12-23T18:52:11.585535Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-12-23T18:52:11.585566Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-12-23T18:52:11.585584Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-12-23T18:52:11.585623Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-12-23T18:52:11.585650Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-12-23T18:52:11.611187Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:514:38] Status# ERROR ClientId# [1:514:38] ServerId# [0:0:0] PipeClient# [1:514:38] 2025-12-23T18:52:11.612047Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:515:20] Status# ERROR ClientId# [2:515:20] ServerId# [0:0:0] PipeClient# [2:515:20] 2025-12-23T18:52:11.612138Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:516:20] Status# ERROR ClientId# [3:516:20] ServerId# [0:0:0] PipeClient# [3:516:20] 2025-12-23T18:52:11.612197Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:517:20] Status# ERROR ClientId# [4:517:20] ServerId# [0:0:0] PipeClient# [4:517:20] 2025-12-23T18:52:11.612240Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:518:20] Status# ERROR ClientId# [5:518:20] ServerId# [0:0:0] PipeClient# [5:518:20] 2025-12-23T18:52:11.612294Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:519:20] Status# ERROR ClientId# [6:519:20] ServerId# [0:0:0] PipeClient# [6:519:20] 2025-12-23T18:52:11.612343Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:520:20] Status# ERROR ClientId# [7:520:20] ServerId# [0:0:0] PipeClient# [7:520:20] 2025-12-23T18:52:11.612389Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:521:20] Status# ERROR ClientId# [8:521:20] ServerId# [0:0:0] PipeClient# [8:521:20] 2025-12-23T18:52:11.612432Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:522:20] Status# ERROR ClientId# [9:522:20] ServerId# [0:0:0] PipeClient# [9:522:20] 2025-12-23T18:52:11.612480Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:523:20] Status# ERROR ClientId# [10:523:20] ServerId# [0:0:0] PipeClient# [10:523:20] 2025-12-23T18:52:11.612549Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:524:20] Status# ERROR ClientId# [11:524:20] ServerId# [0:0:0] PipeClient# [11:524:20] 2025-12-23T18:52:11.612607Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:525:20] Status# ERROR ClientId# [12:525:20] ServerId# [0:0:0] PipeClient# [12:525:20] 2025-12-23T18:52:11.612647Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:526:20] Status# ERROR ClientId# [13:526:20] ServerId# [0:0:0] PipeClient# [13:526:20] 2025-12-23T18:52:11.612689Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:527:20] Status# ERROR ClientId# 
[14:527:20] ServerId# [0:0:0] PipeClient# [14:527:20] 2025-12-23T18:52:11.612730Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:528:20] Status# ERROR ClientId# [15:528:20] ServerId# [0:0:0] PipeClient# [15:528:20] 2025-12-23T18:52:11.718860Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect 2025-12-23T18:52:11.718925Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect 2025-12-23T18:52:11.718960Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect 2025-12-23T18:52:11.719034Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect 2025-12-23T18:52:11.719075Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect 2025-12-23T18:52:11.719151Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect 2025-12-23T18:52:11.719194Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect 2025-12-23T18:52:11.719246Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect 2025-12-23T18:52:11.719277Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect 2025-12-23T18:52:11.719303Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect 2025-12-23T18:52:11.719334Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect 2025-12-23T18:52:11.719366Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect 2025-12-23T18:52:11.719396Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect 2025-12-23T18:52:11.719422Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect 2025-12-23T18:52:11.719457Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect 2025-12-23T18:52:11.722245Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:589:66] Status# OK ClientId# [1:589:66] ServerId# [1:618:67] PipeClient# [1:589:66] 2025-12-23T18:52:11.722298Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1 2025-12-23T18:52:11.728168Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:590:21] Status# OK ClientId# [2:590:21] ServerId# [1:619:68] PipeClient# [2:590:21] 2025-12-23T18:52:11.728213Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1 2025-12-23T18:52:11.728251Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:591:21] Status# OK ClientId# [3:591:21] ServerId# [1:620:69] PipeClient# [3:591:21] 2025-12-23T18:52:11.728268Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1 2025-12-23T18:52:11.728292Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:592:21] Status# OK ClientId# [4:592:21] ServerId# [1:621:70] PipeClient# [4:592:21] 2025-12-23T18:52:11.728307Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State switched from 0 to 1 2025-12-23T18:52:11.728329Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:593:21] Status# OK ClientId# [5:593:21] ServerId# [1:622:71] PipeClient# [5:593:21] 2025-12-23T18:52:11.728359Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1 2025-12-23T18:52:11.728398Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:594:21] Status# OK ClientId# [6:594:21] ServerId# [1:623:72] PipeClient# [6:594:21] 2025-12-23T18:52:11.728424Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1 2025-12-23T18:52:11.728452Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:595:21] Status# OK ClientId# [7:595:21] ServerId# [1:624:73] PipeClient# [7:595:21] 2025-12-23T18:52:11.728471Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] State switched from 0 to 1 2025-12-23T18:52:11.728496Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:596:21] Status# OK ClientId# [8:596:21] ServerId# [1:625:74] PipeClient# [8:596:21] 2025-12-23T18:52:11.728512Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1 
2025-12-23T18:52:11.728549Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:597:21] Status# OK ClientId# [9:597:21] ServerId# [1:626:75] PipeClient# [9:597:21] 2025-12-23T18:52:11.728566Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1 2025-12-23T18:52:11.728588Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:598:21] Status# OK ClientId# [10:598:21] ServerId# [1:627:76] PipeClient# [10:598:21] 2025-12-23T18:52:11.728603Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1 2025-12-23T18:52:11.728629Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:599:21] Status# OK ClientId# [11:599:21] ServerId# [1:628:77] PipeClient# [11:599:21] 2025-12-23T18:52:11.728644Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1 2025-12-23T18:52:11.728670Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:600:21] Status# OK ClientId# [12:600:21] ServerId# [1:629:78] PipeClient# [12:600:21] 2025-12-23T18:52:11.728688Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1 2025-12-23T18:52:11.728712Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:601:21] Status# OK ClientId# [13:601:21] ServerId# [1:630:79] PipeClient# [13:601:21] 2025-12-23T18:52:11.728727Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1 2025-12-23T18:52:11.728763Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:602:21] Status# OK ClientId# [14:602:21] ServerId# [1:631:80] PipeClient# [14:602:21] 2025-12-23T18:52:11.728781Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1 2025-12-23T18:52:11.728804Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:603:21] Status# OK ClientId# [15:603:21] ServerId# [1:632:81] PipeClient# [15:603:21] 2025-12-23T18:52:11.728827Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1 2025-12-23T18:52:11.732190Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:11.732273Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created 2025-12-23T18:52:11.753373Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING 2025-12-23T18:52:11.754412Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-12-23T18:52:11.754474Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created 2025-12-23T18:52:11.754549Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING 2025-12-23T18:52:11.754638Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-12-23T18:52:11.754662Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created 2025-12-23T18:52:11.754702Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING 2025-12-23T18:52:11.754819Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-12-23T18:52:11.754845Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created 2025-12-23T18:52:11.754886Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2025-12-23T18:52:11.754974Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-12-23T18:52:11.755000Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] PDiskId# 1000 VSlotId# 1000 created 
2025-12-23T18:52:11.755041Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] status changed to INIT_PENDING 2025-12-23T1 ... right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-12-23T18:52:11.997893Z 13 00h01m26.933512s :BS_NODE DEBUG: [13] VDiskId# [80000001:1:1:0:0] status changed to READY 2025-12-23T18:52:11.998252Z 1 00h01m26.933512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-12-23T18:52:11.998620Z 1 00h01m30.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-12-23T18:52:11.998832Z 11 00h01m31.034512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to READY 2025-12-23T18:52:11.999142Z 1 00h01m31.034512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-12-23T18:52:11.999394Z 10 00h01m31.141512s :BS_NODE DEBUG: [10] VDiskId# [80000001:1:0:0:0] status changed to READY 2025-12-23T18:52:11.999671Z 1 00h01m31.141512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-12-23T18:52:11.999815Z 2 00h01m31.302512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to READY 2025-12-23T18:52:12.000212Z 1 00h01m31.302512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-12-23T18:52:12.000503Z 1 00h01m32.602512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 
2025-12-23T18:52:12.000885Z 1 00h01m34.202512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-12-23T18:52:12.001095Z 13 00h01m36.271024s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] status changed to READY 2025-12-23T18:52:12.001426Z 1 00h01m36.271024s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-12-23T18:52:12.002003Z 7 00h01m36.271536s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-12-23T18:52:12.002050Z 7 00h01m36.271536s :BS_NODE DEBUG: [7] VDiskId# [80000000:1:2:0:0] destroyed 2025-12-23T18:52:12.002164Z 1 00h01m36.339512s :BS_NODE DEBUG: [1] VDiskId# [80000001:1:2:0:0] status changed to READY 2025-12-23T18:52:12.002400Z 1 00h01m36.339512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-12-23T18:52:12.002550Z 3 00h01m36.486512s :BS_NODE DEBUG: [3] VDiskId# [80000001:1:2:2:0] status changed to READY 2025-12-23T18:52:12.002963Z 1 00h01m36.486512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-12-23T18:52:12.003159Z 14 00h01m38.643536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY 2025-12-23T18:52:12.003496Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483648 2025-12-23T18:52:12.004130Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:12.004169Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true 2025-12-23T18:52:12.004454Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:12.004504Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:1:0] DiskIsOk# true 2025-12-23T18:52:12.004546Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:12.004574Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: 
{BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true 2025-12-23T18:52:12.004605Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:12.004645Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true 2025-12-23T18:52:12.004688Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:12.004723Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true 2025-12-23T18:52:12.004758Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:12.004801Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true 2025-12-23T18:52:12.004854Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:12.004894Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true 2025-12-23T18:52:12.004933Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:12.004962Z 1 00h01m38.643536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true 2025-12-23T18:52:12.007022Z 1 00h01m38.644048s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:12.007097Z 1 00h01m38.644048s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2025-12-23T18:52:12.007537Z 1 00h01m38.644048s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23 2025-12-23T18:52:12.007565Z 1 00h01m38.644048s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483648 Success# true 2025-12-23T18:52:12.007652Z 8 00h01m38.644048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-12-23T18:52:12.007698Z 8 00h01m38.644048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed 2025-12-23T18:52:12.007786Z 2 00h01m38.644048s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-12-23T18:52:12.007832Z 2 00h01m38.644048s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2025-12-23T18:52:12.007909Z 3 00h01m38.644048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-12-23T18:52:12.007958Z 3 00h01m38.644048s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2025-12-23T18:52:12.008034Z 4 00h01m38.644048s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-12-23T18:52:12.008075Z 4 00h01m38.644048s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0] 2025-12-23T18:52:12.008184Z 5 00h01m38.644048s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-12-23T18:52:12.008221Z 5 00h01m38.644048s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> 
[80000000:4:1:1:0] 2025-12-23T18:52:12.008281Z 6 00h01m38.644048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-12-23T18:52:12.008324Z 6 00h01m38.644048s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:1:2:0] -> [80000000:4:1:2:0] 2025-12-23T18:52:12.008363Z 9 00h01m38.644048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-12-23T18:52:12.008424Z 13 00h01m38.644048s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:12.008467Z 13 00h01m38.644048s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0] 2025-12-23T18:52:12.008533Z 14 00h01m38.644048s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-12-23T18:52:12.008578Z 14 00h01m38.644048s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0] 2025-12-23T18:52:12.008661Z 15 00h01m38.644048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-12-23T18:52:12.008696Z 15 00h01m38.644048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created 2025-12-23T18:52:12.008747Z 15 00h01m38.644048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING 2025-12-23T18:52:12.010056Z 15 00h01m40.854048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING 2025-12-23T18:52:12.013645Z 15 00h02m07.100048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY 2025-12-23T18:52:12.014339Z 9 00h02m07.100560s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-12-23T18:52:12.014391Z 9 00h02m07.100560s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/0054b6/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_bad_dynconfig/audit_log.po2f0fk3.txt 2025-12-23T18:51:57.071927Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> ColumnStatistics::SimpleColumnStatistics ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/0054b4/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk13/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit_log.010q7hcx.txt >> KqpSysColV0::SelectRange [GOOD] >> BasicStatistics::TwoNodes >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait >> TSchemeShardTest::NameFormat [GOOD] >> TSchemeShardTest::ParallelCreateTable |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest 
|97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] >> TSchemeShardTest::DropBlockStoreVolume >> BasicStatistics::Simple >> HttpRequest::ProbeServerless >> BasicStatistics::SimpleGlobalIndex |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::CorruptedDyNumber >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> TExportToS3Tests::UidAsIdempotencyKey >> TExportToS3Tests::RebootDuringCompletion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 19612, MsgBus: 13678 2025-12-23T18:52:02.016422Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128114141012096:2158];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:02.016488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:52:02.083445Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128116441500186:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:02.083500Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:52:02.176810Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587128116538921695:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:02.176869Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:52:02.225426Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587128114570024325:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:02.225476Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e3f/r3tmp/tmpdQrM7v/pdisk_1.dat 2025-12-23T18:52:02.578711Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:02.672290Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:02.674234Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:02.674890Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:02.675079Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:02.687203Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:02.817329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:02.817463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:02.821078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:02.821140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:02.821255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:02.821292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:02.821392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:02.821437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:02.821865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:02.821911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:02.846379Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:02.846417Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:52:02.846433Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:52:02.846447Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-12-23T18:52:02.851380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:02.880007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:02.887320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:02.887547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:02.887683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:03.115185Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for 
task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:03.229686Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:03.230552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:03.228084Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:03.247158Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:03.256805Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:03.268166Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:03.268981Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:03.272589Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 19612, node 1 2025-12-23T18:52:03.295265Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:03.403172Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:03.413197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:03.413228Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:03.413245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:03.413346Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13678 TClient is connected to server localhost:13678 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515923214 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:04.234655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976730657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:04.272760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976730657, at schemeshard: 72057594046644480 2025-12-23T18:52:04.307504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:04.574186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:04.804075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:04.892893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:07.016410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128114141012096:2158];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:07.016478Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:07.083929Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128116441500186:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:07.083999Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:07.177181Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587128116538921695:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:07.177283Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:07.195476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128135615850578:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:07.195589Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:07.195880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128135615850588:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:07.195928Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:07.225797Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587128114570024325:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:07.225872Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:07.543442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:07.588407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:07.622834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:07.666052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:07.718889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:07.776487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:07.829429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:07.928699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976730669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:08.002082Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128139910818861:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:08.002162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:08.002313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128139910818866:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:08.002337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128139910818867:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:08.002372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:08.006096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:08.025364Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128139910818870:2397], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730670 completed, doublechecking } 2025-12-23T18:52:08.107630Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128139910818947:4543] txid# 281474976730671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:09.848784Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515929838, txId: 281474976730673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:52:11.942015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:11.942098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:11.942136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:11.942169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:11.942203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:11.942234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:11.942290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:11.942350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:11.943208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:11.943453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:12.024240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:12.024301Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:12.029597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:12.029797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: 
TTxUpgradeSchema.Execute 2025-12-23T18:52:12.029915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:12.032657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:12.032829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:12.033501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:12.033719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:12.035531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:12.035713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:12.036741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:12.036797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:12.036942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:12.036987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:12.037023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:12.037135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:12.158373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:12.159966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 104, path id: 38 2025-12-23T18:52:13.037686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:52:13.037756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:52:13.038472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:52:13.038567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:52:13.038604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:52:13.038650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:52:13.038706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:52:13.038820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-12-23T18:52:13.041666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:52:13.053950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1171 } } CommitVersion { Step: 5000040 TxId: 104 } 2025-12-23T18:52:13.054017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-12-23T18:52:13.054185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1171 } } CommitVersion { Step: 5000040 TxId: 104 } 2025-12-23T18:52:13.054323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1171 } } 
CommitVersion { Step: 5000040 TxId: 104 } debug: NTableState::TProposedWaitParts operationId# 104:0 FAKE_COORDINATOR: Erasing txId 104 2025-12-23T18:52:13.055567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-12-23T18:52:13.055634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-12-23T18:52:13.055776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-12-23T18:52:13.055839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:52:13.055951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-12-23T18:52:13.056032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:13.056070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:52:13.056106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:52:13.056149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2025-12-23T18:52:13.058780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:52:13.059110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:52:13.059401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:52:13.059450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:52:13.059541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:52:13.059571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:52:13.059626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:52:13.059656Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:52:13.059693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-12-23T18:52:13.059751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2715] message: TxId: 104 2025-12-23T18:52:13.059795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:52:13.059854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:52:13.059896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:52:13.059997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:52:13.061609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:52:13.061655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:844:2814] TestWaitNotification: OK eventTxId 104 2025-12-23T18:52:13.062204Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:52:13.062476Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 274us result status StatusSuccess 2025-12-23T18:52:13.062954Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { Disabled { } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TExportToS3Tests::DropSourceTableBeforeTransferring >> TSchemeShardExportToFsTests::ShouldSucceedCreateExportToFs >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 12685, MsgBus: 1211 2025-12-23T18:52:07.343481Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128134428343221:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:07.343771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e3e/r3tmp/tmpO95huD/pdisk_1.dat 2025-12-23T18:52:07.576277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:07.584487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:07.584646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-12-23T18:52:07.664028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:07.690182Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:07.694885Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128134428343199:2081] 1766515927342401 != 1766515927342404 TServer::EnableGrpc on GrpcPort 12685, node 1 2025-12-23T18:52:07.745200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:07.748495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:07.748532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:07.748541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:07.748662Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1211 TClient is connected to server localhost:1211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515927743 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:08.254290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:08.270901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:08.353820Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:08.415291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:52:08.567114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:08.631606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:10.438978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128147313246968:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:10.439115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:10.439437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128147313246978:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:10.439536Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:10.696137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:10.721341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:10.747746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:10.771079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:10.794522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:10.825549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:10.871926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:10.904795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:10.961931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128147313247849:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:10.962011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128147313247854:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:10.962013Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:10.962173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128147313247856:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:10.962209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:10.964829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:10.972984Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128147313247858:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:52:11.074036Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128151608215205:3778] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:12.343316Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128134428343221:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:12.343401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TSchemeShardTest::ParallelCreateTable [GOOD] >> TSchemeShardTest::ParallelCreateSameTable |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2025-12-23T18:52:11.679402Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-12-23T18:52:11.679451Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-12-23T18:52:11.679506Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-12-23T18:52:11.679521Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-12-23T18:52:11.679546Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-12-23T18:52:11.679572Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-12-23T18:52:11.679599Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-12-23T18:52:11.679613Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-12-23T18:52:11.679634Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-12-23T18:52:11.679653Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-12-23T18:52:11.679691Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-12-23T18:52:11.679710Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-12-23T18:52:11.679730Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-12-23T18:52:11.679746Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-12-23T18:52:11.679766Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-12-23T18:52:11.679780Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-12-23T18:52:11.679816Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-12-23T18:52:11.679831Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-12-23T18:52:11.679852Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-12-23T18:52:11.679876Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-12-23T18:52:11.679899Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-12-23T18:52:11.679912Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] 
Connect 2025-12-23T18:52:11.679976Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-12-23T18:52:11.679991Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-12-23T18:52:11.680012Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-12-23T18:52:11.680031Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-12-23T18:52:11.680058Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-12-23T18:52:11.680075Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-12-23T18:52:11.680100Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-12-23T18:52:11.680113Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-12-23T18:52:11.680134Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-12-23T18:52:11.680149Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-12-23T18:52:11.680176Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-12-23T18:52:11.680190Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-12-23T18:52:11.680224Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-12-23T18:52:11.680240Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-12-23T18:52:11.680260Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-12-23T18:52:11.680273Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-12-23T18:52:11.680294Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-12-23T18:52:11.680307Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-12-23T18:52:11.680349Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-12-23T18:52:11.680371Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-12-23T18:52:11.680399Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-12-23T18:52:11.680414Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-12-23T18:52:11.680432Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-12-23T18:52:11.680445Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-12-23T18:52:11.680467Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-12-23T18:52:11.680479Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-12-23T18:52:11.680499Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-12-23T18:52:11.680511Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-12-23T18:52:11.680533Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-12-23T18:52:11.680555Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-12-23T18:52:11.680579Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-12-23T18:52:11.680594Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-12-23T18:52:11.680627Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-12-23T18:52:11.680641Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-12-23T18:52:11.680660Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-12-23T18:52:11.680679Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-12-23T18:52:11.680722Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-12-23T18:52:11.680742Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-12-23T18:52:11.680776Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-12-23T18:52:11.680802Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-12-23T18:52:11.680828Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-12-23T18:52:11.680843Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-12-23T18:52:11.680866Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-12-23T18:52:11.680880Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-12-23T18:52:11.680904Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-12-23T18:52:11.680916Z 34 
00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-12-23T18:52:11.680935Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-12-23T18:52:11.680948Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-12-23T18:52:11.680969Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-12-23T18:52:11.680983Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-12-23T18:52:11.696041Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2025-12-23T18:52:11.696982Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2025-12-23T18:52:11.697023Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2025-12-23T18:52:11.697060Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2025-12-23T18:52:11.697099Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2025-12-23T18:52:11.697122Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2025-12-23T18:52:11.697146Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2025-12-23T18:52:11.697170Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2025-12-23T18:52:11.697194Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2025-12-23T18:52:11.697243Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2025-12-23T18:52:11.697266Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2025-12-23T18:52:11.697302Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2025-12-23T18:52:11.697325Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2025-12-23T18:52:11.697352Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2025-12-23T18:52:11.697378Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2025-12-23T18:52:11.697406Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2025-12-23T18:52:11.697433Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2025-12-23T18:52:11.697466Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] 
ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2025-12-23T18:52:11.697496Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2025-12-23T18:52:11.697519Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2025-12-23T18:52:11.697542Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2025-12-23T18:52:11.697568Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2025-12-23T18:52:11.697603Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2025-12-23T18:52:11.697627Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2025-12-23T18:52:11.697660Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2025-12-23T18:52:11.697698Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2025-12-23T18:52:11.697738Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2025-12-23T18:52:11.697787Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2025-12-23T18:52:11.697811Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2025-12-23T18:52:11.697835Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2025-12-23T18:52:11.697881Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2025-12-23T18:52:11.697909Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2025-12-23T18:52:11.697931Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2025-12-23T18:52:11.697967Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2025-12-23T18:52:11.697995Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 
icated# true 2025-12-23T18:52:13.380270Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483671 VDiskId# [80000017:1:2:1:0] DiskIsOk# true 2025-12-23T18:52:13.380314Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483671 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:13.380343Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483671 VDiskId# [80000017:1:2:2:0] DiskIsOk# true 2025-12-23T18:52:13.385812Z 1 00h05m00.105120s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483671 Items# [80000017:1:1:2:0]: 23:1001:1001 -> 23:1000:1010 ConfigTxSeqNo# 48 2025-12-23T18:52:13.385866Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483671 Success# true 2025-12-23T18:52:13.386074Z 17 00h05m00.105120s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-12-23T18:52:13.386145Z 17 00h05m00.105120s :BS_NODE DEBUG: [17] VDiskId# [80000017:1:1:0:0] -> [80000017:2:1:0:0] 2025-12-23T18:52:13.386249Z 35 00h05m00.105120s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2025-12-23T18:52:13.386312Z 35 00h05m00.105120s :BS_NODE DEBUG: [35] VDiskId# [80000017:1:2:2:0] -> [80000017:2:2:2:0] 2025-12-23T18:52:13.386412Z 20 00h05m00.105120s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-12-23T18:52:13.386467Z 20 00h05m00.105120s :BS_NODE DEBUG: [20] VDiskId# [80000017:1:1:1:0] -> [80000017:2:1:1:0] 2025-12-23T18:52:13.386586Z 5 00h05m00.105120s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-12-23T18:52:13.386641Z 5 00h05m00.105120s :BS_NODE DEBUG: [5] VDiskId# [80000017:1:0:0:0] -> [80000017:2:0:0:0] 2025-12-23T18:52:13.386822Z 23 00h05m00.105120s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-12-23T18:52:13.386872Z 23 00h05m00.105120s :BS_NODE DEBUG: [23] VDiskId# [80000017:2:1:2:0] PDiskId# 1000 VSlotId# 1010 created 2025-12-23T18:52:13.386959Z 23 00h05m00.105120s :BS_NODE DEBUG: [23] VDiskId# [80000017:2:1:2:0] status changed to INIT_PENDING 2025-12-23T18:52:13.387093Z 8 00h05m00.105120s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-12-23T18:52:13.387158Z 8 00h05m00.105120s :BS_NODE DEBUG: [8] VDiskId# [80000017:1:0:1:0] -> [80000017:2:0:1:0] 2025-12-23T18:52:13.387255Z 11 00h05m00.105120s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2025-12-23T18:52:13.387310Z 11 00h05m00.105120s :BS_NODE DEBUG: [11] VDiskId# [80000017:1:0:2:0] -> [80000017:2:0:2:0] 2025-12-23T18:52:13.387412Z 29 00h05m00.105120s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-12-23T18:52:13.387459Z 29 00h05m00.105120s :BS_NODE DEBUG: [29] VDiskId# [80000017:1:2:0:0] -> [80000017:2:2:0:0] 2025-12-23T18:52:13.387555Z 32 00h05m00.105120s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-12-23T18:52:13.387605Z 32 00h05m00.105120s :BS_NODE DEBUG: [32] VDiskId# [80000017:1:2:1:0] -> [80000017:2:2:1:0] 2025-12-23T18:52:13.388010Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483655 2025-12-23T18:52:13.388955Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:13.388999Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:1:0:0:0] DiskIsOk# true 2025-12-23T18:52:13.389049Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} 
Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:13.389077Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:1:0:1:0] DiskIsOk# true 2025-12-23T18:52:13.389107Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:13.389140Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:1:0:2:0] DiskIsOk# true 2025-12-23T18:52:13.389172Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:13.389193Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:1:1:0:0] DiskIsOk# true 2025-12-23T18:52:13.389212Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:13.389230Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:1:1:1:0] DiskIsOk# true 2025-12-23T18:52:13.389265Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:13.389286Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:1:2:0:0] DiskIsOk# true 2025-12-23T18:52:13.389321Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:13.389340Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:1:2:1:0] DiskIsOk# true 2025-12-23T18:52:13.389370Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483655 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:13.389402Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483655 VDiskId# [80000007:1:2:2:0] DiskIsOk# true 2025-12-23T18:52:13.394706Z 1 00h05m00.105632s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483655 Items# [80000007:1:1:2:0]: 23:1001:1000 -> 23:1002:1010 ConfigTxSeqNo# 49 2025-12-23T18:52:13.395451Z 1 00h05m00.105632s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483655 Success# true 2025-12-23T18:52:13.395721Z 17 00h05m00.105632s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-12-23T18:52:13.395819Z 17 00h05m00.105632s :BS_NODE DEBUG: [17] VDiskId# [80000007:1:1:0:0] -> [80000007:2:1:0:0] 2025-12-23T18:52:13.395963Z 35 00h05m00.105632s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2025-12-23T18:52:13.396022Z 35 00h05m00.105632s :BS_NODE DEBUG: [35] VDiskId# [80000007:1:2:2:0] -> [80000007:2:2:2:0] 2025-12-23T18:52:13.396129Z 20 00h05m00.105632s :BS_NODE DEBUG: [20] NodeServiceSetUpdate 2025-12-23T18:52:13.396192Z 20 00h05m00.105632s :BS_NODE DEBUG: [20] VDiskId# [80000007:1:1:1:0] -> [80000007:2:1:1:0] 2025-12-23T18:52:13.396297Z 5 00h05m00.105632s 
:BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-12-23T18:52:13.396347Z 5 00h05m00.105632s :BS_NODE DEBUG: [5] VDiskId# [80000007:1:0:0:0] -> [80000007:2:0:0:0] 2025-12-23T18:52:13.396466Z 23 00h05m00.105632s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-12-23T18:52:13.396520Z 23 00h05m00.105632s :BS_NODE DEBUG: [23] VDiskId# [80000007:2:1:2:0] PDiskId# 1002 VSlotId# 1010 created 2025-12-23T18:52:13.396609Z 23 00h05m00.105632s :BS_NODE DEBUG: [23] VDiskId# [80000007:2:1:2:0] status changed to INIT_PENDING 2025-12-23T18:52:13.396744Z 8 00h05m00.105632s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-12-23T18:52:13.396792Z 8 00h05m00.105632s :BS_NODE DEBUG: [8] VDiskId# [80000007:1:0:1:0] -> [80000007:2:0:1:0] 2025-12-23T18:52:13.396921Z 11 00h05m00.105632s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2025-12-23T18:52:13.397001Z 11 00h05m00.105632s :BS_NODE DEBUG: [11] VDiskId# [80000007:1:0:2:0] -> [80000007:2:0:2:0] 2025-12-23T18:52:13.397138Z 29 00h05m00.105632s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-12-23T18:52:13.397191Z 29 00h05m00.105632s :BS_NODE DEBUG: [29] VDiskId# [80000007:1:2:0:0] -> [80000007:2:2:0:0] 2025-12-23T18:52:13.397371Z 32 00h05m00.105632s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-12-23T18:52:13.397428Z 32 00h05m00.105632s :BS_NODE DEBUG: [32] VDiskId# [80000007:1:2:1:0] -> [80000007:2:2:1:0] 2025-12-23T18:52:13.398774Z 23 00h05m01.798632s :BS_NODE DEBUG: [23] VDiskId# [80000007:2:1:2:0] status changed to REPLICATING 2025-12-23T18:52:13.399456Z 23 00h05m02.174072s :BS_NODE DEBUG: [23] VDiskId# [80000057:2:1:2:0] status changed to REPLICATING 2025-12-23T18:52:13.400077Z 23 00h05m03.903096s :BS_NODE DEBUG: [23] VDiskId# [80000037:2:1:2:0] status changed to REPLICATING 2025-12-23T18:52:13.400840Z 23 00h05m04.785608s :BS_NODE DEBUG: [23] VDiskId# [80000027:2:1:2:0] status changed to REPLICATING 2025-12-23T18:52:13.401492Z 23 00h05m04.918560s :BS_NODE DEBUG: [23] VDiskId# [80000067:2:1:2:0] status changed to REPLICATING 2025-12-23T18:52:13.403171Z 23 00h05m05.087120s :BS_NODE DEBUG: [23] VDiskId# [80000017:2:1:2:0] status changed to REPLICATING 2025-12-23T18:52:13.404003Z 23 00h05m05.683584s :BS_NODE DEBUG: [23] VDiskId# [80000047:2:1:2:0] status changed to REPLICATING 2025-12-23T18:52:13.404777Z 23 00h05m05.918048s :BS_NODE DEBUG: [23] VDiskId# [80000077:2:1:2:0] status changed to REPLICATING 2025-12-23T18:52:13.405601Z 23 00h05m09.687072s :BS_NODE DEBUG: [23] VDiskId# [80000057:2:1:2:0] status changed to READY 2025-12-23T18:52:13.407088Z 23 00h05m09.687584s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-12-23T18:52:13.407154Z 23 00h05m09.687584s :BS_NODE DEBUG: [23] VDiskId# [80000057:1:1:2:0] destroyed 2025-12-23T18:52:13.407614Z 23 00h05m10.216632s :BS_NODE DEBUG: [23] VDiskId# [80000007:2:1:2:0] status changed to READY 2025-12-23T18:52:13.408996Z 23 00h05m10.217144s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-12-23T18:52:13.409039Z 23 00h05m10.217144s :BS_NODE DEBUG: [23] VDiskId# [80000007:1:1:2:0] destroyed 2025-12-23T18:52:13.409224Z 23 00h05m14.354120s :BS_NODE DEBUG: [23] VDiskId# [80000017:2:1:2:0] status changed to READY 2025-12-23T18:52:13.410570Z 23 00h05m14.354632s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-12-23T18:52:13.410608Z 23 00h05m14.354632s :BS_NODE DEBUG: [23] VDiskId# [80000017:1:1:2:0] destroyed 2025-12-23T18:52:13.411511Z 23 00h05m19.960048s :BS_NODE DEBUG: [23] VDiskId# [80000077:2:1:2:0] status changed to READY 2025-12-23T18:52:13.412807Z 23 00h05m19.960560s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-12-23T18:52:13.412877Z 23 
00h05m19.960560s :BS_NODE DEBUG: [23] VDiskId# [80000077:1:1:2:0] destroyed 2025-12-23T18:52:13.413260Z 23 00h05m21.882608s :BS_NODE DEBUG: [23] VDiskId# [80000027:2:1:2:0] status changed to READY 2025-12-23T18:52:13.414612Z 23 00h05m21.883120s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-12-23T18:52:13.414649Z 23 00h05m21.883120s :BS_NODE DEBUG: [23] VDiskId# [80000027:1:1:2:0] destroyed 2025-12-23T18:52:13.415571Z 23 00h05m25.847560s :BS_NODE DEBUG: [23] VDiskId# [80000067:2:1:2:0] status changed to READY 2025-12-23T18:52:13.416894Z 23 00h05m25.848072s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-12-23T18:52:13.416934Z 23 00h05m25.848072s :BS_NODE DEBUG: [23] VDiskId# [80000067:1:1:2:0] destroyed 2025-12-23T18:52:13.417116Z 23 00h05m28.885096s :BS_NODE DEBUG: [23] VDiskId# [80000037:2:1:2:0] status changed to READY 2025-12-23T18:52:13.418516Z 23 00h05m28.885608s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-12-23T18:52:13.418574Z 23 00h05m28.885608s :BS_NODE DEBUG: [23] VDiskId# [80000037:1:1:2:0] destroyed 2025-12-23T18:52:13.418987Z 23 00h05m29.666584s :BS_NODE DEBUG: [23] VDiskId# [80000047:2:1:2:0] status changed to READY 2025-12-23T18:52:13.420508Z 23 00h05m29.667096s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-12-23T18:52:13.420575Z 23 00h05m29.667096s :BS_NODE DEBUG: [23] VDiskId# [80000047:1:1:2:0] destroyed |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BasicStatistics::StatisticsOnShardsRestart >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TSchemeShardExportToFsTests::ShouldSucceedCreateExportToFs [GOOD] >> TSchemeShardExportToFsTests::ShouldAcceptCompressionForFs |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> BasicStatistics::TwoTables >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> BsControllerTest::TestLocalBrokenRelocation [GOOD] >> ColumnStatistics::CountMinSketchServerlessStatistics |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::CompletedExportEndTime >> TExportToS3Tests::UserSID >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> TExportToS3Tests::ShouldPreserveIncrBackupFlag |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TExportToS3Tests::RebootDuringAbortion >> 
TSchemeShardTest::ParallelCreateSameTable [GOOD] >> TSchemeShardTest::MultipleColumnFamilies >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs >> HttpRequest::ProbeBaseStatsServerless |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation [GOOD] Test command err: 2025-12-23T18:52:11.658457Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-12-23T18:52:11.658507Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-12-23T18:52:11.658566Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-12-23T18:52:11.658588Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-12-23T18:52:11.658648Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-12-23T18:52:11.658667Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-12-23T18:52:11.658703Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-12-23T18:52:11.658724Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-12-23T18:52:11.658777Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-12-23T18:52:11.658819Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-12-23T18:52:11.658864Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-12-23T18:52:11.658883Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-12-23T18:52:11.658920Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-12-23T18:52:11.658941Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-12-23T18:52:11.658974Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-12-23T18:52:11.658993Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-12-23T18:52:11.659043Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-12-23T18:52:11.659067Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-12-23T18:52:11.659109Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-12-23T18:52:11.659133Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-12-23T18:52:11.659212Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-12-23T18:52:11.659230Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-12-23T18:52:11.659252Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-12-23T18:52:11.659265Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-12-23T18:52:11.659285Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-12-23T18:52:11.659309Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-12-23T18:52:11.659361Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-12-23T18:52:11.659386Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-12-23T18:52:11.659411Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-12-23T18:52:11.659426Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-12-23T18:52:11.659446Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-12-23T18:52:11.659466Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-12-23T18:52:11.659490Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-12-23T18:52:11.659503Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-12-23T18:52:11.659522Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-12-23T18:52:11.659535Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-12-23T18:52:11.659555Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-12-23T18:52:11.659568Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-12-23T18:52:11.659593Z 20 00h00m00.000000s 
:BS_NODE DEBUG: [20] Bootstrap 2025-12-23T18:52:11.659605Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-12-23T18:52:11.659648Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-12-23T18:52:11.659667Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-12-23T18:52:11.659696Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-12-23T18:52:11.659710Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-12-23T18:52:11.659732Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-12-23T18:52:11.659745Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-12-23T18:52:11.659764Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-12-23T18:52:11.659776Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-12-23T18:52:11.659794Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-12-23T18:52:11.659807Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-12-23T18:52:11.659845Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-12-23T18:52:11.659859Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-12-23T18:52:11.659899Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-12-23T18:52:11.659915Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-12-23T18:52:11.659938Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-12-23T18:52:11.659951Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-12-23T18:52:11.659970Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-12-23T18:52:11.659989Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-12-23T18:52:11.660024Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-12-23T18:52:11.660036Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-12-23T18:52:11.660055Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-12-23T18:52:11.660080Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-12-23T18:52:11.660104Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-12-23T18:52:11.660117Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-12-23T18:52:11.660136Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-12-23T18:52:11.660149Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-12-23T18:52:11.660170Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-12-23T18:52:11.660182Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-12-23T18:52:11.660200Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-12-23T18:52:11.660215Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-12-23T18:52:11.660237Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-12-23T18:52:11.660253Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-12-23T18:52:11.676847Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2025-12-23T18:52:11.677913Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2025-12-23T18:52:11.677983Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2025-12-23T18:52:11.678039Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2025-12-23T18:52:11.678084Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2025-12-23T18:52:11.678113Z 6 
00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2025-12-23T18:52:11.678136Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2025-12-23T18:52:11.678160Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2025-12-23T18:52:11.678193Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2025-12-23T18:52:11.678237Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2025-12-23T18:52:11.678261Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2025-12-23T18:52:11.678297Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2025-12-23T18:52:11.678320Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2025-12-23T18:52:11.678346Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2025-12-23T18:52:11.678370Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2025-12-23T18:52:11.678396Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2025-12-23T18:52:11.678419Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2025-12-23T18:52:11.678447Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2025-12-23T18:52:11.678479Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2025-12-23T18:52:11.678505Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2025-12-23T18:52:11.678527Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2025-12-23T18:52:11.678564Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2025-12-23T18:52:11.678601Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2025-12-23T18:52:11.678633Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2025-12-23T18:52:11.678662Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] 
ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2025-12-23T18:52:11.678716Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2025-12-23T18:52:11.678783Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2025-12-23T18:52:11.678861Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2025-12-23T18:52:11.678891Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2025-12-23T18:52:11.678914Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2025-12-23T18:52:11.678960Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2025-12-23T18:52:11.678989Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2025-12-23T18:52:11.679012Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2025-12-23T18:52:11.679053Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2025-12-23T18:52:11.679090Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 
25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2025-12-23T18:52:14.844942Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2025-12-23T18:52:14.844998Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2025-12-23T18:52:14.845046Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2025-12-23T18:52:14.845085Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2025-12-23T18:52:14.845746Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:14.845827Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2025-12-23T18:52:14.845869Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2025-12-23T18:52:14.845944Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2025-12-23T18:52:14.845997Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2025-12-23T18:52:14.846056Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2025-12-23T18:52:14.846115Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2025-12-23T18:52:14.846174Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2025-12-23T18:52:14.846219Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2025-12-23T18:52:14.846259Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2025-12-23T18:52:14.846302Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2025-12-23T18:52:14.846341Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000022:1:1:2:0] -> [80000022:2:1:2:0] 2025-12-23T18:52:14.846379Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2025-12-23T18:52:14.846419Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2025-12-23T18:52:14.846475Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000052:1:1:2:0] -> [80000052:2:1:2:0] 2025-12-23T18:52:14.846518Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2025-12-23T18:52:14.846561Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2025-12-23T18:52:14.847293Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-12-23T18:52:14.847352Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2025-12-23T18:52:14.847392Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000040:2:2:2:0] -> [80000040:3:2:2:0] 2025-12-23T18:52:14.847428Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2025-12-23T18:52:14.847475Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2025-12-23T18:52:14.847534Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2025-12-23T18:52:14.847574Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2025-12-23T18:52:14.847613Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2025-12-23T18:52:14.847652Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] 
VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2025-12-23T18:52:14.847695Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2025-12-23T18:52:14.847731Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2025-12-23T18:52:14.847782Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2025-12-23T18:52:14.848233Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-12-23T18:52:14.848300Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2025-12-23T18:52:14.848342Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2025-12-23T18:52:14.848379Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2025-12-23T18:52:14.848414Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2025-12-23T18:52:14.848449Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2025-12-23T18:52:14.848488Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2025-12-23T18:52:14.848526Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2025-12-23T18:52:14.848566Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2025-12-23T18:52:14.851153Z 4 01h25m01.295560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2025-12-23T18:52:14.851659Z 10 01h25m01.596560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 2025-12-23T18:52:14.852047Z 2 01h25m01.883560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2025-12-23T18:52:14.852425Z 7 01h25m01.901560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2025-12-23T18:52:14.852827Z 4 01h25m02.548560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2025-12-23T18:52:14.853222Z 5 01h25m02.654560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2025-12-23T18:52:14.853571Z 7 01h25m03.272560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2025-12-23T18:52:14.853944Z 5 01h25m03.300560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 2025-12-23T18:52:14.854343Z 7 01h25m04.555560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2025-12-23T18:52:14.854705Z 4 01h25m04.770560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2025-12-23T18:52:14.855090Z 10 01h25m04.977560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2025-12-23T18:52:14.856943Z 4 01h25m05.140560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2025-12-23T18:52:14.857399Z 7 01h25m05.438560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to REPLICATING 2025-12-23T18:52:14.857772Z 10 01h25m05.688560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2025-12-23T18:52:14.858117Z 8 01h25m05.872560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2025-12-23T18:52:14.858489Z 2 01h25m05.970560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2025-12-23T18:52:14.859241Z 10 01h25m12.248560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] 
status changed to READY 2025-12-23T18:52:14.860210Z 1 01h25m12.249072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.860285Z 1 01h25m12.249072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2025-12-23T18:52:14.860446Z 2 01h25m14.991560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2025-12-23T18:52:14.861232Z 1 01h25m14.992072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.861278Z 1 01h25m14.992072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed 2025-12-23T18:52:14.862118Z 2 01h25m17.318560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to READY 2025-12-23T18:52:14.862954Z 1 01h25m17.319072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.863005Z 1 01h25m17.319072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed 2025-12-23T18:52:14.863108Z 4 01h25m17.866560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to READY 2025-12-23T18:52:14.863820Z 1 01h25m17.867072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.863866Z 1 01h25m17.867072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2025-12-23T18:52:14.863961Z 5 01h25m17.905560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2025-12-23T18:52:14.864715Z 1 01h25m17.906072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.864759Z 1 01h25m17.906072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed 2025-12-23T18:52:14.864858Z 10 01h25m18.113560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2025-12-23T18:52:14.865571Z 1 01h25m18.114072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.865644Z 1 01h25m18.114072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed 2025-12-23T18:52:14.866129Z 7 01h25m20.135560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2025-12-23T18:52:14.866916Z 1 01h25m20.136072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.866961Z 1 01h25m20.136072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed 2025-12-23T18:52:14.867100Z 10 01h25m23.015560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2025-12-23T18:52:14.867870Z 1 01h25m23.016072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.867925Z 1 01h25m23.016072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2025-12-23T18:52:14.868303Z 7 01h25m25.171560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to READY 2025-12-23T18:52:14.868994Z 1 01h25m25.172072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.869037Z 1 01h25m25.172072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2025-12-23T18:52:14.869139Z 4 01h25m26.007560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2025-12-23T18:52:14.869848Z 1 01h25m26.008072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.869893Z 1 01h25m26.008072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2025-12-23T18:52:14.870268Z 4 01h25m27.268560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to READY 2025-12-23T18:52:14.870995Z 1 01h25m27.269072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.871037Z 1 01h25m27.269072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed 2025-12-23T18:52:14.871173Z 4 01h25m29.277560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2025-12-23T18:52:14.871827Z 1 01h25m29.278072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 
2025-12-23T18:52:14.871871Z 1 01h25m29.278072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed 2025-12-23T18:52:14.872990Z 8 01h25m31.483560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2025-12-23T18:52:14.873751Z 1 01h25m31.484072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.873826Z 1 01h25m31.484072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2025-12-23T18:52:14.874683Z 7 01h25m33.699560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2025-12-23T18:52:14.875430Z 1 01h25m33.700072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.875478Z 1 01h25m33.700072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed 2025-12-23T18:52:14.875560Z 5 01h25m33.734560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2025-12-23T18:52:14.876262Z 1 01h25m33.735072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.876304Z 1 01h25m33.735072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2025-12-23T18:52:14.877237Z 7 01h25m38.628560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2025-12-23T18:52:14.877984Z 1 01h25m38.629072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:14.878032Z 1 01h25m38.629072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed >> TSchemeShardExportToFsTests::ShouldAcceptCompressionForFs [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnNonExistentPath |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath >> BasicStatistics::TwoServerlessDbs >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::TopicWithPermissionsExport |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/00548d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit_log.df__xaxw.txt 2025-12-23T18:52:01.735170Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnNonExistentPath [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnDeletedPath |97.1%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::ShouldSucceedOnManyTables |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::IndexMaterializationGlobal >> BasicStatistics::Serverless ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/00548c/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk12/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_anonymous/audit_log.khtaydz8.txt |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TExportToS3Tests::TopicWithPermissionsExport [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion >> TExportToS3Tests::TopicsExport |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml [GOOD] >> BasicStatistics::ServerlessTimeIntervals |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] >> TSchemeShardTest::CopyIndexedTable >> BasicStatistics::NotFullStatisticsColumnshard >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnDeletedPath [GOOD] >> TSchemeShardExportToFsTests::FsExportWithMultipleTables >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage >> HttpRequest::ProbeBaseStats >> BasicStatistics::TwoServerlessTwoSharedDbs >> TExportToS3Tests::IndexMaterializationGlobal [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::TopicsExport [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalAsync >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> TExportToS3Tests::ShouldSucceedOnConcurrentExport |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status >> TExportToS3Tests::TopicsWithPermissionsExport >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTable ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] Test command err: 2025-12-23T18:52:11.649693Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-12-23T18:52:11.649759Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-12-23T18:52:11.649841Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-12-23T18:52:11.649867Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-12-23T18:52:11.649906Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-12-23T18:52:11.649926Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-12-23T18:52:11.649961Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-12-23T18:52:11.649982Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-12-23T18:52:11.650011Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-12-23T18:52:11.650031Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-12-23T18:52:11.650063Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-12-23T18:52:11.650091Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-12-23T18:52:11.650139Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-12-23T18:52:11.650161Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-12-23T18:52:11.650193Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-12-23T18:52:11.650224Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-12-23T18:52:11.650264Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-12-23T18:52:11.650286Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-12-23T18:52:11.650324Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-12-23T18:52:11.650346Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-12-23T18:52:11.650376Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-12-23T18:52:11.650397Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-12-23T18:52:11.650427Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-12-23T18:52:11.650447Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-12-23T18:52:11.650509Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-12-23T18:52:11.650532Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-12-23T18:52:11.650582Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-12-23T18:52:11.650602Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-12-23T18:52:11.650634Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-12-23T18:52:11.650654Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-12-23T18:52:11.650683Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-12-23T18:52:11.650710Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-12-23T18:52:11.650778Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-12-23T18:52:11.650822Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-12-23T18:52:11.650862Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-12-23T18:52:11.650884Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-12-23T18:52:11.650919Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-12-23T18:52:11.650944Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-12-23T18:52:11.650993Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-12-23T18:52:11.651019Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-12-23T18:52:11.651051Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-12-23T18:52:11.651072Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-12-23T18:52:11.651103Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-12-23T18:52:11.651121Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-12-23T18:52:11.651155Z 23 
00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-12-23T18:52:11.651191Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-12-23T18:52:11.651229Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-12-23T18:52:11.651258Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-12-23T18:52:11.651322Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-12-23T18:52:11.651345Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-12-23T18:52:11.651379Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-12-23T18:52:11.651398Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-12-23T18:52:11.651426Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-12-23T18:52:11.651446Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-12-23T18:52:11.651475Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-12-23T18:52:11.651495Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-12-23T18:52:11.651543Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-12-23T18:52:11.651566Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-12-23T18:52:11.651600Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-12-23T18:52:11.651618Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-12-23T18:52:11.651653Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-12-23T18:52:11.651672Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-12-23T18:52:11.651714Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-12-23T18:52:11.651741Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-12-23T18:52:11.668321Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2163:55] Status# ERROR ClientId# [1:2163:55] ServerId# [0:0:0] PipeClient# [1:2163:55] 2025-12-23T18:52:11.669236Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2164:37] Status# ERROR ClientId# [2:2164:37] ServerId# [0:0:0] PipeClient# [2:2164:37] 2025-12-23T18:52:11.669280Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2165:37] Status# ERROR ClientId# [3:2165:37] ServerId# [0:0:0] PipeClient# [3:2165:37] 2025-12-23T18:52:11.669304Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2166:37] Status# ERROR ClientId# [4:2166:37] ServerId# [0:0:0] PipeClient# [4:2166:37] 2025-12-23T18:52:11.669339Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2167:37] Status# ERROR ClientId# [5:2167:37] ServerId# [0:0:0] PipeClient# [5:2167:37] 2025-12-23T18:52:11.669364Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2168:37] Status# ERROR ClientId# [6:2168:37] ServerId# [0:0:0] PipeClient# [6:2168:37] 2025-12-23T18:52:11.669390Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2169:37] Status# ERROR ClientId# [7:2169:37] ServerId# [0:0:0] PipeClient# [7:2169:37] 2025-12-23T18:52:11.669438Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2170:37] Status# ERROR ClientId# [8:2170:37] ServerId# [0:0:0] PipeClient# [8:2170:37] 2025-12-23T18:52:11.669488Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2171:37] Status# ERROR ClientId# [9:2171:37] ServerId# [0:0:0] PipeClient# [9:2171:37] 2025-12-23T18:52:11.669515Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2172:37] Status# ERROR ClientId# [10:2172:37] ServerId# [0:0:0] PipeClient# [10:2172:37] 2025-12-23T18:52:11.669552Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2173:37] Status# ERROR ClientId# [11:2173:37] ServerId# [0:0:0] PipeClient# [11:2173:37] 
2025-12-23T18:52:11.669623Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2174:37] Status# ERROR ClientId# [12:2174:37] ServerId# [0:0:0] PipeClient# [12:2174:37] 2025-12-23T18:52:11.669673Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2175:37] Status# ERROR ClientId# [13:2175:37] ServerId# [0:0:0] PipeClient# [13:2175:37] 2025-12-23T18:52:11.669701Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2176:37] Status# ERROR ClientId# [14:2176:37] ServerId# [0:0:0] PipeClient# [14:2176:37] 2025-12-23T18:52:11.669723Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2177:37] Status# ERROR ClientId# [15:2177:37] ServerId# [0:0:0] PipeClient# [15:2177:37] 2025-12-23T18:52:11.669744Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2178:37] Status# ERROR ClientId# [16:2178:37] ServerId# [0:0:0] PipeClient# [16:2178:37] 2025-12-23T18:52:11.669769Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2179:37] Status# ERROR ClientId# [17:2179:37] ServerId# [0:0:0] PipeClient# [17:2179:37] 2025-12-23T18:52:11.669791Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2180:37] Status# ERROR ClientId# [18:2180:37] ServerId# [0:0:0] PipeClient# [18:2180:37] 2025-12-23T18:52:11.669815Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2181:37] Status# ERROR ClientId# [19:2181:37] ServerId# [0:0:0] PipeClient# [19:2181:37] 2025-12-23T18:52:11.669838Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2182:37] Status# ERROR ClientId# [20:2182:37] ServerId# [0:0:0] PipeClient# [20:2182:37] 2025-12-23T18:52:11.669860Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2183:37] Status# ERROR ClientId# [21:2183:37] ServerId# [0:0:0] PipeClient# [21:2183:37] 2025-12-23T18:52:11.669887Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2184:37] Status# ERROR ClientId# [22:2184:37] ServerId# [0:0:0] PipeClient# [22:2184:37] 2025-12-23T18:52:11.669909Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2185:37] Status# ERROR ClientId# [23:2185:37] ServerId# [0:0:0] PipeClient# [23:2185:37] 2025-12-23T18:52:11.669948Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2186:37] Status# ERROR ClientId# [24:2186:37] ServerId# [0:0:0] PipeClient# [24:2186:37] 2025-12-23T18:52:11.669972Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2187:37] Status# ERROR ClientId# [25:2187:37] ServerId# [0:0:0] PipeClient# [25:2187:37] 2025-12-23T18:52:11.669997Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2188:37] Status# ERROR ClientId# [26:2188:37] ServerId# [0:0:0] PipeClient# [26:2188:37] 2025-12-23T18:52:11.670021Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2189:37] Status# ERROR ClientId# [27:2189:37] ServerId# [0:0:0] PipeClient# [27:2189:37] 2025-12-23T18:52:11.670057Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2190:37] Status# ERROR ClientId# [28:2190:37] ServerId# [0:0:0] PipeClient# [28:2190:37] 2025-12-23T18:52:11.670089Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2191:37] Status# ERROR ClientId# [29:2191:37] ServerId# [0:0:0] PipeClient# [29:2191:37] 2025-12-23T18:52:11.670123Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2192:37] Status# ERROR ClientId# [30:2192:37] ServerId# [0:0:0] PipeClient# [30:2192:37] 
2025-12-23T18:52:11.670146Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2193:37] Status# ERROR ClientId# [31:2193:37] ServerId# [0:0:0] PipeClient# [31:2193:37] 2025-12-23T18:52:11.670168Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2194:37] Status# ERROR ClientId# [32:2194:37] ServerId# [0:0:0] PipeClient# [32:2194:37] 2025-12-23T18:52:11.789393Z 1 00h00m00.002560s :BS_NODE DEBUG: [1] CheckState from [1:2264:79] expected 1 current 0 2025-12-23T18:52:11.789453Z 2 00h00m00.002560s :BS_NODE DEBUG: [2] CheckState from [2:2265:38] expected 1 current 0 2025-12-23T18:52:11.789481Z 3 00h00m00.002560s :BS_NODE DEBUG: [3] CheckState from [3:2266:38] expected 1 current 0 2025-12-23T18:52:11.789504Z 4 00h00m00.002560s :BS_NODE DEBUG: [4] CheckState from [4:2267:38] expected 1 current 0 2025-12-23T18:52:11.789526Z 5 00h00m00.002560s :BS_NODE DEBUG: [5] CheckState from [5:2268:38] expected 1 current 0 2025-12-23T18:52:11.789554Z 6 00h00m00.002560s :BS_NODE DEBUG: [6] CheckState from [6:2269:38] expected 1 current 0 2025-12-23T18:52:11.789584Z 7 00h00m00.002560s :BS_NODE DEBUG: [7] CheckState from [7:2270:38] expected 1 current 0 2025-12-23T18:52:11.789605Z 8 00h00m00.002560s :BS_NODE DEBUG: [8] CheckState from [8:2271:38] expected 1 current 0 2025-12-23T18:52:11.789627Z 9 00h00m00.002560s :BS_NODE DEBUG: [9] CheckState from [9:2272:38] expected 1 current 0 2025-12-23T18:52:11.789649Z 10 00h00m00.002560s :BS_NODE DEBUG: [10] CheckState from [10:2273 ... FHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483670 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:18.892972Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483670 VDiskId# [80000016:4:0:7:0] DiskIsOk# true 2025-12-23T18:52:18.895758Z 1 05h15m00.122016s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:18.895811Z 1 05h15m00.122016s :BS_NODE DEBUG: [1] VDiskId# [80000016:5:0:4:0] PDiskId# 1001 VSlotId# 1017 created 2025-12-23T18:52:18.895875Z 1 05h15m00.122016s :BS_NODE DEBUG: [1] VDiskId# [80000016:5:0:4:0] status changed to INIT_PENDING 2025-12-23T18:52:18.896289Z 1 05h15m00.122016s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483670 Items# [80000016:4:0:4:0]: 21:1001:1002 -> 1:1001:1017 ConfigTxSeqNo# 504 2025-12-23T18:52:18.896333Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483670 Success# true 2025-12-23T18:52:18.896463Z 17 05h15m00.122016s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-12-23T18:52:18.896509Z 17 05h15m00.122016s :BS_NODE DEBUG: [17] VDiskId# [80000016:4:0:0:0] -> [80000016:5:0:0:0] 2025-12-23T18:52:18.896600Z 18 05h15m00.122016s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-12-23T18:52:18.896648Z 18 05h15m00.122016s :BS_NODE DEBUG: [18] VDiskId# [80000016:4:0:6:0] -> [80000016:5:0:6:0] 2025-12-23T18:52:18.896738Z 19 05h15m00.122016s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-12-23T18:52:18.896786Z 19 05h15m00.122016s :BS_NODE DEBUG: [19] VDiskId# [80000016:4:0:2:0] -> [80000016:5:0:2:0] 2025-12-23T18:52:18.896849Z 21 05h15m00.122016s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-12-23T18:52:18.896926Z 22 05h15m00.122016s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-12-23T18:52:18.896977Z 22 05h15m00.122016s :BS_NODE DEBUG: [22] VDiskId# [80000016:4:0:5:0] -> [80000016:5:0:5:0] 2025-12-23T18:52:18.897061Z 24 05h15m00.122016s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 
2025-12-23T18:52:18.897109Z 24 05h15m00.122016s :BS_NODE DEBUG: [24] VDiskId# [80000016:4:0:7:0] -> [80000016:5:0:7:0] 2025-12-23T18:52:18.897188Z 27 05h15m00.122016s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-12-23T18:52:18.897224Z 27 05h15m00.122016s :BS_NODE DEBUG: [27] VDiskId# [80000016:4:0:3:0] -> [80000016:5:0:3:0] 2025-12-23T18:52:18.897276Z 16 05h15m00.122016s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-12-23T18:52:18.897310Z 16 05h15m00.122016s :BS_NODE DEBUG: [16] VDiskId# [80000016:4:0:1:0] -> [80000016:5:0:1:0] 2025-12-23T18:52:18.897528Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483654 2025-12-23T18:52:18.898194Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:18.898232Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:4:0:0:0] DiskIsOk# true 2025-12-23T18:52:18.898263Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:18.898286Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:4:0:1:0] DiskIsOk# true 2025-12-23T18:52:18.898308Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:18.898328Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:4:0:2:0] DiskIsOk# true 2025-12-23T18:52:18.898350Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:18.898368Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:4:0:3:0] DiskIsOk# true 2025-12-23T18:52:18.898388Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:18.898408Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:4:0:5:0] DiskIsOk# true 2025-12-23T18:52:18.898427Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:18.898446Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:4:0:6:0] DiskIsOk# true 2025-12-23T18:52:18.898467Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483654 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:18.898488Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483654 VDiskId# [80000006:4:0:7:0] DiskIsOk# true 2025-12-23T18:52:18.901658Z 1 05h15m00.122528s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483654 Items# [80000006:4:0:4:0]: 21:1001:1000 -> 23:1001:1017 ConfigTxSeqNo# 505 2025-12-23T18:52:18.901708Z 1 05h15m00.122528s 
:BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483654 Success# true 2025-12-23T18:52:18.901865Z 17 05h15m00.122528s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-12-23T18:52:18.901933Z 17 05h15m00.122528s :BS_NODE DEBUG: [17] VDiskId# [80000006:4:0:0:0] -> [80000006:5:0:0:0] 2025-12-23T18:52:18.902010Z 18 05h15m00.122528s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-12-23T18:52:18.902061Z 18 05h15m00.122528s :BS_NODE DEBUG: [18] VDiskId# [80000006:4:0:6:0] -> [80000006:5:0:6:0] 2025-12-23T18:52:18.902143Z 19 05h15m00.122528s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-12-23T18:52:18.902191Z 19 05h15m00.122528s :BS_NODE DEBUG: [19] VDiskId# [80000006:4:0:2:0] -> [80000006:5:0:2:0] 2025-12-23T18:52:18.902256Z 21 05h15m00.122528s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-12-23T18:52:18.902320Z 22 05h15m00.122528s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-12-23T18:52:18.902356Z 22 05h15m00.122528s :BS_NODE DEBUG: [22] VDiskId# [80000006:4:0:5:0] -> [80000006:5:0:5:0] 2025-12-23T18:52:18.902440Z 23 05h15m00.122528s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-12-23T18:52:18.902485Z 23 05h15m00.122528s :BS_NODE DEBUG: [23] VDiskId# [80000006:5:0:4:0] PDiskId# 1001 VSlotId# 1017 created 2025-12-23T18:52:18.902564Z 23 05h15m00.122528s :BS_NODE DEBUG: [23] VDiskId# [80000006:5:0:4:0] status changed to INIT_PENDING 2025-12-23T18:52:18.902672Z 24 05h15m00.122528s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-12-23T18:52:18.902725Z 24 05h15m00.122528s :BS_NODE DEBUG: [24] VDiskId# [80000006:4:0:7:0] -> [80000006:5:0:7:0] 2025-12-23T18:52:18.902823Z 28 05h15m00.122528s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-12-23T18:52:18.902861Z 28 05h15m00.122528s :BS_NODE DEBUG: [28] VDiskId# [80000006:4:0:3:0] -> [80000006:5:0:3:0] 2025-12-23T18:52:18.902918Z 16 05h15m00.122528s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-12-23T18:52:18.902952Z 16 05h15m00.122528s :BS_NODE DEBUG: [16] VDiskId# [80000006:4:0:1:0] -> [80000006:5:0:1:0] 2025-12-23T18:52:18.903784Z 1 05h15m02.830016s :BS_NODE DEBUG: [1] VDiskId# [80000016:5:0:4:0] status changed to REPLICATING 2025-12-23T18:52:18.904027Z 1 05h15m02.841432s :BS_NODE DEBUG: [1] VDiskId# [8000003e:6:0:4:0] status changed to REPLICATING 2025-12-23T18:52:18.904650Z 23 05h15m04.108528s :BS_NODE DEBUG: [23] VDiskId# [80000006:5:0:4:0] status changed to REPLICATING 2025-12-23T18:52:18.904997Z 1 05h15m04.347920s :BS_NODE DEBUG: [1] VDiskId# [8000002b:6:0:3:0] status changed to REPLICATING 2025-12-23T18:52:18.905524Z 1 05h15m04.361504s :BS_NODE DEBUG: [1] VDiskId# [80000026:5:0:4:0] status changed to REPLICATING 2025-12-23T18:52:18.906044Z 1 05h15m04.416944s :BS_NODE DEBUG: [1] VDiskId# [8000002e:5:0:4:0] status changed to REPLICATING 2025-12-23T18:52:18.906472Z 1 05h15m04.453992s :BS_NODE DEBUG: [1] VDiskId# [80000036:6:0:4:0] status changed to REPLICATING 2025-12-23T18:52:18.906878Z 1 05h15m04.924480s :BS_NODE DEBUG: [1] VDiskId# [80000003:7:0:0:0] status changed to REPLICATING 2025-12-23T18:52:18.908115Z 1 05h15m05.640968s :BS_NODE DEBUG: [1] VDiskId# [8000000e:6:0:4:0] status changed to REPLICATING 2025-12-23T18:52:18.908758Z 1 05h15m06.028456s :BS_NODE DEBUG: [1] VDiskId# [8000001e:5:0:4:0] status changed to REPLICATING 2025-12-23T18:52:18.909529Z 1 05h15m10.078992s :BS_NODE DEBUG: [1] VDiskId# [80000036:6:0:4:0] status changed to READY 2025-12-23T18:52:18.910432Z 21 05h15m10.079504s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-12-23T18:52:18.910494Z 21 05h15m10.079504s :BS_NODE DEBUG: [21] VDiskId# 
[80000036:5:0:4:0] destroyed 2025-12-23T18:52:18.910621Z 1 05h15m10.208504s :BS_NODE DEBUG: [1] VDiskId# [80000026:5:0:4:0] status changed to READY 2025-12-23T18:52:18.911533Z 21 05h15m10.209016s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-12-23T18:52:18.911574Z 21 05h15m10.209016s :BS_NODE DEBUG: [21] VDiskId# [80000026:4:0:4:0] destroyed 2025-12-23T18:52:18.911703Z 1 05h15m12.209944s :BS_NODE DEBUG: [1] VDiskId# [8000002e:5:0:4:0] status changed to READY 2025-12-23T18:52:18.912429Z 21 05h15m12.210456s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-12-23T18:52:18.912469Z 21 05h15m12.210456s :BS_NODE DEBUG: [21] VDiskId# [8000002e:4:0:4:0] destroyed 2025-12-23T18:52:18.912606Z 1 05h15m14.281016s :BS_NODE DEBUG: [1] VDiskId# [80000016:5:0:4:0] status changed to READY 2025-12-23T18:52:18.913377Z 21 05h15m14.281528s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-12-23T18:52:18.913416Z 21 05h15m14.281528s :BS_NODE DEBUG: [21] VDiskId# [80000016:4:0:4:0] destroyed 2025-12-23T18:52:18.913511Z 1 05h15m14.381432s :BS_NODE DEBUG: [1] VDiskId# [8000003e:6:0:4:0] status changed to READY 2025-12-23T18:52:18.914289Z 21 05h15m14.381944s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-12-23T18:52:18.914329Z 21 05h15m14.381944s :BS_NODE DEBUG: [21] VDiskId# [8000003e:5:0:4:0] destroyed 2025-12-23T18:52:18.914995Z 1 05h15m16.598456s :BS_NODE DEBUG: [1] VDiskId# [8000001e:5:0:4:0] status changed to READY 2025-12-23T18:52:18.915843Z 21 05h15m16.598968s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-12-23T18:52:18.916049Z 21 05h15m16.598968s :BS_NODE DEBUG: [21] VDiskId# [8000001e:4:0:4:0] destroyed 2025-12-23T18:52:18.917643Z 1 05h15m26.840968s :BS_NODE DEBUG: [1] VDiskId# [8000000e:6:0:4:0] status changed to READY 2025-12-23T18:52:18.918901Z 21 05h15m26.841480s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-12-23T18:52:18.918964Z 21 05h15m26.841480s :BS_NODE DEBUG: [21] VDiskId# [8000000e:5:0:4:0] destroyed 2025-12-23T18:52:18.920892Z 1 05h15m32.342480s :BS_NODE DEBUG: [1] VDiskId# [80000003:7:0:0:0] status changed to READY 2025-12-23T18:52:18.922116Z 21 05h15m32.342992s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-12-23T18:52:18.922174Z 21 05h15m32.342992s :BS_NODE DEBUG: [21] VDiskId# [80000003:6:0:0:0] destroyed 2025-12-23T18:52:18.922388Z 23 05h15m33.278528s :BS_NODE DEBUG: [23] VDiskId# [80000006:5:0:4:0] status changed to READY 2025-12-23T18:52:18.923251Z 21 05h15m33.279040s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-12-23T18:52:18.923306Z 21 05h15m33.279040s :BS_NODE DEBUG: [21] VDiskId# [80000006:4:0:4:0] destroyed 2025-12-23T18:52:18.923789Z 1 05h15m37.531920s :BS_NODE DEBUG: [1] VDiskId# [8000002b:6:0:3:0] status changed to READY 2025-12-23T18:52:18.924743Z 21 05h15m37.532432s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-12-23T18:52:18.924807Z 21 05h15m37.532432s :BS_NODE DEBUG: [21] VDiskId# [8000002b:5:0:3:0] destroyed |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> TSchemeShardExportToFsTests::FsExportWithMultipleTables [GOOD] >> ColumnStatistics::CountMinSketchStatistics >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance [FAIL] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> TExportToS3Tests::EncryptedExport >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> KqpBatchUpdate::SimplePartitions [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} 
ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalAsync [GOOD] >> TExportToS3Tests::TablePermissions >> BasicStatistics::PersistenceWithStorageFailuresAndReboots |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTree ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeShardExportToFsTests::FsExportWithMultipleTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:14.383230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:14.383339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:14.383454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:14.383511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:14.383545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:14.383625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:14.384519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:14.386776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:14.475702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:14.475756Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:14.486782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:14.486975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: 
TTxUpgradeSchema.Execute 2025-12-23T18:52:14.487180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:14.499041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:14.499294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:14.500029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:14.502849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:14.510432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.511183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:14.517954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:14.518283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:14.518332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:14.518470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:14.649799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.651756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.651880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.651959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rd: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 42], version: 1 2025-12-23T18:52:20.302753Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 4 2025-12-23T18:52:20.302849Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/2, is published: true 2025-12-23T18:52:20.303523Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:4 msg type: 268697601 2025-12-23T18:52:20.303673Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 1, tablet: 72057594037968897 2025-12-23T18:52:20.303740Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:4, partId: 1 2025-12-23T18:52:20.304074Z node 5 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 4 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 42 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 4 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 42 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-12-23T18:52:20.304301Z node 5 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 4, type DataShard, boot OK, tablet id 72075186233409548 2025-12-23T18:52:20.304849Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697601 2025-12-23T18:52:20.305127Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1813: TOperation RegisterRelationByTabletId collision in routes has found, TxId# 281474976710758, partId# 0, prevPartId# 1, tablet# 72057594037968897, guessDefaultRootHive# yes, prevTx# WorkingDir: "/MyRoot/export-103" OperationType: ESchemeOpCreateTable CreateTable { Name: "1" CopyFromTable: "/MyRoot/Table2" OmitFollowers: true IsBackup: true AllowUnderSameOperation: false OmitIndexes: true } FailOnExist: true, newTx# WorkingDir: "/MyRoot/export-103" OperationType: ESchemeOpCreateTable CreateTable { Name: "0" CopyFromTable: "/MyRoot/Table1" OmitFollowers: true IsBackup: true AllowUnderSameOperation: false OmitIndexes: true } FailOnExist: true 2025-12-23T18:52:20.305197Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:3, partId: 0 2025-12-23T18:52:20.305663Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6306: Handle TEvCreateTabletReply at schemeshard: 
72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2025-12-23T18:52:20.305728Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:4, partId: 1 2025-12-23T18:52:20.305890Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:1, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2025-12-23T18:52:20.305949Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710758:1 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-12-23T18:52:20.306029Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710758:1 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2025-12-23T18:52:20.306125Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710758:1 2 -> 3 2025-12-23T18:52:20.307260Z node 5 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 41 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 41 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-12-23T18:52:20.307485Z node 5 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 3, type DataShard, boot OK, tablet id 72075186233409549 2025-12-23T18:52:20.307753Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6306: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 2025-12-23T18:52:20.307789Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:3, partId: 0 2025-12-23T18:52:20.307927Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 2025-12-23T18:52:20.307970Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-12-23T18:52:20.308039Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 
2025-12-23T18:52:20.308223Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710758:0 2 -> 3 2025-12-23T18:52:20.309701Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:20.309828Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:20.312152Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:20.312466Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:20.314722Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:1, at schemeshard: 72057594046678944 2025-12-23T18:52:20.315213Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:1, at schemeshard: 72057594046678944 2025-12-23T18:52:20.315269Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 281474976710758:1 ProgressState at tablet# 72057594046678944 2025-12-23T18:52:20.315331Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 281474976710758:1 Propose modify scheme on dstDatashard# 72075186233409548 idx# 72057594046678944:4 srcDatashard# 72075186233409547 idx# 72057594046678944:2 operationId# 281474976710758:1 seqNo# 2:3 at tablet# 72057594046678944 2025-12-23T18:52:20.316085Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-12-23T18:52:20.316332Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2025-12-23T18:52:20.316364Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 281474976710758:0 ProgressState at tablet# 72057594046678944 2025-12-23T18:52:20.316406Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 281474976710758:0 Propose modify scheme on dstDatashard# 72075186233409549 idx# 72057594046678944:3 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 281474976710758:0 seqNo# 2:4 at tablet# 72057594046678944 2025-12-23T18:52:20.321505Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:4 msg type: 269549568 2025-12-23T18:52:20.321675Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-12-23T18:52:20.321757Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, 
TxId: 281474976710758, partId: 1, tablet: 72075186233409548 2025-12-23T18:52:20.321784Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 1, tablet: 72075186233409547 2025-12-23T18:52:20.322338Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:3 msg type: 269549568 2025-12-23T18:52:20.322450Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-12-23T18:52:20.322546Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409549 2025-12-23T18:52:20.322588Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409546 >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup >> BasicStatistics::TwoDatabases |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/00545f/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk22/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.upsert/audit_log.o3l6nkmr.txt 2025-12-23T18:52:06.693178Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:52:06.693123Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-12-23T18:52:06.620817Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> BasicStatistics::NotFullStatisticsDatashard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimplePartitions [GOOD] Test command err: Trying to start YDB, gRPC: 17207, MsgBus: 62954 2025-12-23T18:47:31.404187Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587126950758702416:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:47:31.404236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005bd8/r3tmp/tmpgZL2Me/pdisk_1.dat 2025-12-23T18:47:31.675646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-12-23T18:47:31.785571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:31.785684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:31.809337Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:31.810684Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587126950758702310:2081] 1766515651398280 != 1766515651398283 2025-12-23T18:47:31.810766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17207, node 1 2025-12-23T18:47:31.936911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:47:31.938522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:31.938531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:31.938537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:31.938629Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62954 TClient is connected to server localhost:62954 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-12-23T18:47:32.415901Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515651859 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:47:32.524011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:47:32.531135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:47:32.540219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:32.685478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:32.860283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:32.951798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:47:35.039685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126967938573384:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.039810Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.046973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126967938573394:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.047071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.426480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:35.460020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:35.495405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:35.546851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:35.580581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:35.621333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:35.666335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:35.715260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:47:35.815231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126967938574272:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.815330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.815440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126967938574277:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.815732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587126967938574279:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.815779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:47:35.819280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sc ... ode 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:02.559817Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:02.559917Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 31240, node 16 2025-12-23T18:52:02.623261Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:02.623286Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:02.623296Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:02.623386Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3043 2025-12-23T18:52:02.940457Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515922752 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:03.200707Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:52:03.207595Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:52:03.219036Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:03.303020Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:03.308874Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:52:03.624130Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:03.729395Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:07.760073Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587128137249703759:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:07.760178Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:07.760459Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587128137249703768:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:07.760512Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:07.859726Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:07.894079Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:07.926542Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:07.971462Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:08.008002Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:08.047672Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:08.088859Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:08.196424Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:08.285239Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587128141544671936:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:08.285348Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:08.285590Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587128141544671941:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:08.285658Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587128141544671942:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:08.285713Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:08.290536Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:08.306719Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7587128141544671945:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:08.384345Z node 16 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [16:7587128141544671996:3784] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:17.460662Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:52:17.460694Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TExportToS3Tests::TablePermissions [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> TExportToS3Tests::EncryptedExport [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable >> TExportToS3Tests::TopicExport |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:14.383207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:14.383300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:14.383386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:14.383440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:14.383478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:14.383556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:14.384460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-12-23T18:52:14.386725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:14.483299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:14.483363Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:14.496432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:14.496574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:14.496749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:14.506814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:14.507023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:14.507651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:14.507803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:14.511425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.511583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:14.517925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:14.518321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:14.518376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:14.518525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:14.695720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.696738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.696846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.696918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.696998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697409Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
5-12-23T18:52:21.534572Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 3 2025-12-23T18:52:21.534603Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2025-12-23T18:52:21.534666Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-12-23T18:52:21.536581Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-12-23T18:52:21.536636Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-12-23T18:52:21.536688Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-12-23T18:52:21.537396Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-12-23T18:52:21.537499Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:52:21.537844Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000044 FAKE_COORDINATOR: advance: minStep5000044 State->FrontStep: 5000043 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000044 2025-12-23T18:52:21.538499Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000044, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:21.538597Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 21474838640 } } Step: 5000044 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:21.538639Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000044, at schemeshard: 72057594046678944 2025-12-23T18:52:21.538732Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-12-23T18:52:21.538831Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2025-12-23T18:52:21.538862Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-12-23T18:52:21.538907Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 
2025-12-23T18:52:21.538936Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-12-23T18:52:21.538985Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 7 2025-12-23T18:52:21.539037Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2025-12-23T18:52:21.539078Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-12-23T18:52:21.539118Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-12-23T18:52:21.539146Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 2025-12-23T18:52:21.539173Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710758:0 2025-12-23T18:52:21.539224Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2025-12-23T18:52:21.539259Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-12-23T18:52:21.539289Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 20 2025-12-23T18:52:21.539318Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 43], 18446744073709551615 2025-12-23T18:52:21.539887Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:21.541132Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:21.541171Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:21.541323Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2025-12-23T18:52:21.541443Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:21.541479Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-12-23T18:52:21.541532Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:209:2210], at schemeshard: 72057594046678944, txId: 
281474976710758, path id: 43 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-12-23T18:52:21.542286Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 20 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:21.542360Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 20 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:21.542401Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-12-23T18:52:21.542439Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 20 2025-12-23T18:52:21.542473Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 8 2025-12-23T18:52:21.542875Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:21.542961Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:21.542994Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-12-23T18:52:21.543020Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2025-12-23T18:52:21.543050Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2025-12-23T18:52:21.543124Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-12-23T18:52:21.543170Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:128:2153] 2025-12-23T18:52:21.543429Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:52:21.543460Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2025-12-23T18:52:21.543529Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 7 2025-12-23T18:52:21.545710Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:21.546892Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:21.546990Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-12-23T18:52:21.547050Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710758 2025-12-23T18:52:21.547470Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:52:21.548661Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:52:21.548701Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [5:1493:3316] TestWaitNotification: OK eventTxId 106 >> TExportToS3Tests::DisableAutoDropping |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] |97.1%| [TA] $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTree [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTreePrefix |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::TopicExport [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] Test command err: 2025-12-23T18:52:11.746728Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-12-23T18:52:11.746795Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-12-23T18:52:11.746860Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-12-23T18:52:11.746875Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-12-23T18:52:11.746901Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-12-23T18:52:11.746915Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-12-23T18:52:11.746937Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-12-23T18:52:11.746961Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-12-23T18:52:11.746984Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-12-23T18:52:11.747002Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-12-23T18:52:11.747032Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-12-23T18:52:11.747045Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-12-23T18:52:11.747068Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-12-23T18:52:11.747087Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-12-23T18:52:11.747113Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-12-23T18:52:11.747126Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-12-23T18:52:11.747165Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-12-23T18:52:11.747179Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-12-23T18:52:11.747198Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-12-23T18:52:11.747219Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-12-23T18:52:11.747245Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-12-23T18:52:11.747257Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-12-23T18:52:11.747312Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-12-23T18:52:11.747326Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-12-23T18:52:11.747346Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-12-23T18:52:11.747365Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-12-23T18:52:11.747392Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-12-23T18:52:11.747408Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-12-23T18:52:11.747434Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-12-23T18:52:11.747446Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-12-23T18:52:11.747463Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-12-23T18:52:11.747478Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-12-23T18:52:11.747501Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-12-23T18:52:11.747523Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-12-23T18:52:11.747550Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-12-23T18:52:11.747563Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 
2025-12-23T18:52:11.747581Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-12-23T18:52:11.747594Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-12-23T18:52:11.747615Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-12-23T18:52:11.747627Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-12-23T18:52:11.747653Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-12-23T18:52:11.747686Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-12-23T18:52:11.747723Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-12-23T18:52:11.747740Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-12-23T18:52:11.747759Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-12-23T18:52:11.747770Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-12-23T18:52:11.747791Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-12-23T18:52:11.747803Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-12-23T18:52:11.747822Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-12-23T18:52:11.747839Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-12-23T18:52:11.747873Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-12-23T18:52:11.747892Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-12-23T18:52:11.747923Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-12-23T18:52:11.747939Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-12-23T18:52:11.747974Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-12-23T18:52:11.747988Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-12-23T18:52:11.748007Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-12-23T18:52:11.748026Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-12-23T18:52:11.748062Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-12-23T18:52:11.748075Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-12-23T18:52:11.748095Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-12-23T18:52:11.748124Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-12-23T18:52:11.748158Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-12-23T18:52:11.748171Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-12-23T18:52:11.748189Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-12-23T18:52:11.748201Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-12-23T18:52:11.748224Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-12-23T18:52:11.748235Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-12-23T18:52:11.748253Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-12-23T18:52:11.748265Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-12-23T18:52:11.748283Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-12-23T18:52:11.748298Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-12-23T18:52:11.762856Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2025-12-23T18:52:11.763757Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2025-12-23T18:52:11.763798Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2025-12-23T18:52:11.763822Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 
2025-12-23T18:52:11.763861Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2025-12-23T18:52:11.763894Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2025-12-23T18:52:11.763919Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2025-12-23T18:52:11.763941Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2025-12-23T18:52:11.763963Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2025-12-23T18:52:11.764015Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2025-12-23T18:52:11.764043Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2025-12-23T18:52:11.764079Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2025-12-23T18:52:11.764102Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2025-12-23T18:52:11.764131Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2025-12-23T18:52:11.764153Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2025-12-23T18:52:11.764174Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2025-12-23T18:52:11.764199Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2025-12-23T18:52:11.764226Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2025-12-23T18:52:11.764256Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2025-12-23T18:52:11.764278Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2025-12-23T18:52:11.764299Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2025-12-23T18:52:11.764322Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2025-12-23T18:52:11.764357Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2025-12-23T18:52:11.764382Z 24 
00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2025-12-23T18:52:11.764413Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2025-12-23T18:52:11.764451Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2025-12-23T18:52:11.764485Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2025-12-23T18:52:11.764521Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2025-12-23T18:52:11.764548Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2025-12-23T18:52:11.764571Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2025-12-23T18:52:11.764601Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2025-12-23T18:52:11.764651Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2025-12-23T18:52:11.764688Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2025-12-23T18:52:11.764726Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2025-12-23T18:52:11.764756Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 
:BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:22.861796Z 1 05h45m00.123040s :BS_NODE DEBUG: [1] VDiskId# [80000014:1:0:0:0] -> [80000014:2:0:0:0] 2025-12-23T18:52:22.862292Z 1 05h45m00.123040s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483668 Items# [80000014:1:1:0:0]: 13:1003:1001 -> 14:1002:1016 ConfigTxSeqNo# 537 2025-12-23T18:52:22.862319Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483668 Success# true 2025-12-23T18:52:22.862434Z 19 05h45m00.123040s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-12-23T18:52:22.862471Z 19 05h45m00.123040s :BS_NODE DEBUG: [19] VDiskId# [80000014:1:1:2:0] -> [80000014:2:1:2:0] 2025-12-23T18:52:22.862534Z 4 05h45m00.123040s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-12-23T18:52:22.862565Z 4 05h45m00.123040s :BS_NODE DEBUG: [4] VDiskId# [80000014:1:0:1:0] -> [80000014:2:0:1:0] 2025-12-23T18:52:22.862619Z 7 05h45m00.123040s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-12-23T18:52:22.862648Z 7 05h45m00.123040s :BS_NODE DEBUG: [7] VDiskId# [80000014:1:0:2:0] -> [80000014:2:0:2:0] 2025-12-23T18:52:22.862703Z 25 05h45m00.123040s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-12-23T18:52:22.862752Z 25 05h45m00.123040s :BS_NODE DEBUG: [25] VDiskId# [80000014:1:2:0:0] -> [80000014:2:2:0:0] 2025-12-23T18:52:22.862818Z 28 05h45m00.123040s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-12-23T18:52:22.862849Z 28 05h45m00.123040s :BS_NODE DEBUG: [28] VDiskId# [80000014:1:2:1:0] -> [80000014:2:2:1:0] 2025-12-23T18:52:22.862889Z 13 05h45m00.123040s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:22.862939Z 31 05h45m00.123040s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-12-23T18:52:22.862968Z 31 05h45m00.123040s :BS_NODE DEBUG: [31] VDiskId# [80000014:1:2:2:0] -> [80000014:2:2:2:0] 2025-12-23T18:52:22.863024Z 14 05h45m00.123040s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-12-23T18:52:22.863050Z 14 05h45m00.123040s :BS_NODE DEBUG: [14] VDiskId# [80000014:2:1:0:0] PDiskId# 1002 VSlotId# 1016 created 2025-12-23T18:52:22.863101Z 14 05h45m00.123040s :BS_NODE DEBUG: [14] VDiskId# [80000014:2:1:0:0] status changed to INIT_PENDING 2025-12-23T18:52:22.863164Z 16 05h45m00.123040s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-12-23T18:52:22.863194Z 16 05h45m00.123040s :BS_NODE DEBUG: [16] VDiskId# [80000014:1:1:1:0] -> [80000014:2:1:1:0] 2025-12-23T18:52:22.863377Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483652 2025-12-23T18:52:22.863696Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:22.863724Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:1:0:0:0] DiskIsOk# true 2025-12-23T18:52:22.863897Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:22.863917Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:1:0:1:0] DiskIsOk# true 2025-12-23T18:52:22.863937Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:22.863956Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: 
{BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:1:0:2:0] DiskIsOk# true 2025-12-23T18:52:22.863974Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:22.863992Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:1:1:1:0] DiskIsOk# true 2025-12-23T18:52:22.864008Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:22.864041Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:1:1:2:0] DiskIsOk# true 2025-12-23T18:52:22.864067Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:22.864094Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:1:2:0:0] DiskIsOk# true 2025-12-23T18:52:22.864122Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:22.864148Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:1:2:1:0] DiskIsOk# true 2025-12-23T18:52:22.864178Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-12-23T18:52:22.864206Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:1:2:2:0] DiskIsOk# true 2025-12-23T18:52:22.867566Z 1 05h45m00.123552s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-12-23T18:52:22.867614Z 1 05h45m00.123552s :BS_NODE DEBUG: [1] VDiskId# [80000004:1:0:0:0] -> [80000004:2:0:0:0] 2025-12-23T18:52:22.868019Z 1 05h45m00.123552s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483652 Items# [80000004:1:1:0:0]: 13:1003:1000 -> 14:1002:1017 ConfigTxSeqNo# 538 2025-12-23T18:52:22.868044Z 1 05h45m00.123552s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483652 Success# true 2025-12-23T18:52:22.868149Z 19 05h45m00.123552s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-12-23T18:52:22.868185Z 19 05h45m00.123552s :BS_NODE DEBUG: [19] VDiskId# [80000004:1:1:2:0] -> [80000004:2:1:2:0] 2025-12-23T18:52:22.868248Z 4 05h45m00.123552s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-12-23T18:52:22.868277Z 4 05h45m00.123552s :BS_NODE DEBUG: [4] VDiskId# [80000004:1:0:1:0] -> [80000004:2:0:1:0] 2025-12-23T18:52:22.868328Z 7 05h45m00.123552s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-12-23T18:52:22.868357Z 7 05h45m00.123552s :BS_NODE DEBUG: [7] VDiskId# [80000004:1:0:2:0] -> [80000004:2:0:2:0] 2025-12-23T18:52:22.868409Z 25 05h45m00.123552s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-12-23T18:52:22.868437Z 25 05h45m00.123552s :BS_NODE DEBUG: [25] VDiskId# [80000004:1:2:0:0] -> [80000004:2:2:0:0] 2025-12-23T18:52:22.868485Z 28 05h45m00.123552s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-12-23T18:52:22.868512Z 28 05h45m00.123552s :BS_NODE DEBUG: [28] VDiskId# 
[80000004:1:2:1:0] -> [80000004:2:2:1:0] 2025-12-23T18:52:22.868549Z 13 05h45m00.123552s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:22.868597Z 31 05h45m00.123552s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-12-23T18:52:22.868626Z 31 05h45m00.123552s :BS_NODE DEBUG: [31] VDiskId# [80000004:1:2:2:0] -> [80000004:2:2:2:0] 2025-12-23T18:52:22.868678Z 14 05h45m00.123552s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-12-23T18:52:22.868704Z 14 05h45m00.123552s :BS_NODE DEBUG: [14] VDiskId# [80000004:2:1:0:0] PDiskId# 1002 VSlotId# 1017 created 2025-12-23T18:52:22.868751Z 14 05h45m00.123552s :BS_NODE DEBUG: [14] VDiskId# [80000004:2:1:0:0] status changed to INIT_PENDING 2025-12-23T18:52:22.868812Z 16 05h45m00.123552s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-12-23T18:52:22.868841Z 16 05h45m00.123552s :BS_NODE DEBUG: [16] VDiskId# [80000004:1:1:1:0] -> [80000004:2:1:1:0] 2025-12-23T18:52:22.869488Z 14 05h45m01.487480s :BS_NODE DEBUG: [14] VDiskId# [80000054:2:1:0:0] status changed to REPLICATING 2025-12-23T18:52:22.869957Z 14 05h45m02.756504s :BS_NODE DEBUG: [14] VDiskId# [80000045:7:1:1:0] status changed to REPLICATING 2025-12-23T18:52:22.870367Z 14 05h45m02.763040s :BS_NODE DEBUG: [14] VDiskId# [80000014:2:1:0:0] status changed to REPLICATING 2025-12-23T18:52:22.870770Z 14 05h45m02.919968s :BS_NODE DEBUG: [14] VDiskId# [80000064:2:1:0:0] status changed to REPLICATING 2025-12-23T18:52:22.871245Z 14 05h45m03.823528s :BS_NODE DEBUG: [14] VDiskId# [80000024:2:1:0:0] status changed to REPLICATING 2025-12-23T18:52:22.871659Z 14 05h45m03.887552s :BS_NODE DEBUG: [14] VDiskId# [80000004:2:1:0:0] status changed to REPLICATING 2025-12-23T18:52:22.872255Z 14 05h45m04.204016s :BS_NODE DEBUG: [14] VDiskId# [80000034:2:1:0:0] status changed to REPLICATING 2025-12-23T18:52:22.874119Z 14 05h45m05.048456s :BS_NODE DEBUG: [14] VDiskId# [80000074:2:1:0:0] status changed to REPLICATING 2025-12-23T18:52:22.875154Z 14 05h45m05.382992s :BS_NODE DEBUG: [14] VDiskId# [80000044:2:1:0:0] status changed to REPLICATING 2025-12-23T18:52:22.876381Z 14 05h45m12.655480s :BS_NODE DEBUG: [14] VDiskId# [80000054:2:1:0:0] status changed to READY 2025-12-23T18:52:22.877853Z 13 05h45m12.655992s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:22.877912Z 13 05h45m12.655992s :BS_NODE DEBUG: [13] VDiskId# [80000054:1:1:0:0] destroyed 2025-12-23T18:52:22.878104Z 14 05h45m14.694968s :BS_NODE DEBUG: [14] VDiskId# [80000064:2:1:0:0] status changed to READY 2025-12-23T18:52:22.879557Z 13 05h45m14.695480s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:22.879608Z 13 05h45m14.695480s :BS_NODE DEBUG: [13] VDiskId# [80000064:1:1:0:0] destroyed 2025-12-23T18:52:22.880756Z 14 05h45m25.764528s :BS_NODE DEBUG: [14] VDiskId# [80000024:2:1:0:0] status changed to READY 2025-12-23T18:52:22.882167Z 13 05h45m25.765040s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:22.882220Z 13 05h45m25.765040s :BS_NODE DEBUG: [13] VDiskId# [80000024:1:1:0:0] destroyed 2025-12-23T18:52:22.882675Z 14 05h45m29.476504s :BS_NODE DEBUG: [14] VDiskId# [80000045:7:1:1:0] status changed to READY 2025-12-23T18:52:22.884051Z 13 05h45m29.477016s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:22.884102Z 13 05h45m29.477016s :BS_NODE DEBUG: [13] VDiskId# [80000045:6:1:1:0] destroyed 2025-12-23T18:52:22.885222Z 14 05h45m30.836456s :BS_NODE DEBUG: [14] VDiskId# [80000074:2:1:0:0] status changed to READY 2025-12-23T18:52:22.886500Z 13 05h45m30.836968s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:22.886552Z 
13 05h45m30.836968s :BS_NODE DEBUG: [13] VDiskId# [80000074:1:1:0:0] destroyed 2025-12-23T18:52:22.886757Z 14 05h45m33.447016s :BS_NODE DEBUG: [14] VDiskId# [80000034:2:1:0:0] status changed to READY 2025-12-23T18:52:22.888012Z 13 05h45m33.447528s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:22.888073Z 13 05h45m33.447528s :BS_NODE DEBUG: [13] VDiskId# [80000034:1:1:0:0] destroyed 2025-12-23T18:52:22.888223Z 14 05h45m33.908992s :BS_NODE DEBUG: [14] VDiskId# [80000044:2:1:0:0] status changed to READY 2025-12-23T18:52:22.889183Z 13 05h45m33.909504s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:22.889216Z 13 05h45m33.909504s :BS_NODE DEBUG: [13] VDiskId# [80000044:1:1:0:0] destroyed 2025-12-23T18:52:22.914303Z 14 05h45m35.047040s :BS_NODE DEBUG: [14] VDiskId# [80000014:2:1:0:0] status changed to READY 2025-12-23T18:52:22.915696Z 13 05h45m35.047552s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:22.915743Z 13 05h45m35.047552s :BS_NODE DEBUG: [13] VDiskId# [80000014:1:1:0:0] destroyed 2025-12-23T18:52:22.915872Z 14 05h45m36.641552s :BS_NODE DEBUG: [14] VDiskId# [80000004:2:1:0:0] status changed to READY 2025-12-23T18:52:22.916744Z 13 05h45m36.642064s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-12-23T18:52:22.916782Z 13 05h45m36.642064s :BS_NODE DEBUG: [13] VDiskId# [80000004:1:1:0:0] destroyed |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_selfheal/unittest >> TExportToS3Tests::DisableAutoDropping [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/005471/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit_log.spo4__fm.txt 2025-12-23T18:52:04.573399Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:52:04.573353Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-12-23T18:52:04.492101Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.1%| [TA] $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq |97.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CheckItemProgress >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TopicExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:14.383222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:14.383334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:14.383435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:14.383502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:14.383532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:14.383604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:14.384641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:14.386766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:14.485699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:14.485755Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:14.497439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:14.497647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:14.497839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:14.510253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:14.510522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:14.511326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:14.511523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:14.515736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.515927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:14.517993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:14.518302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:14.518355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:14.518471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:14.696182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.697970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.698075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 
281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.698150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.698250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.698323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.698439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
25-12-23T18:52:24.111018Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2025-12-23T18:52:24.111042Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:52:24.111095Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-12-23T18:52:24.112685Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-12-23T18:52:24.112739Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-12-23T18:52:24.112781Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-12-23T18:52:24.113279Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-12-23T18:52:24.113394Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:52:24.113566Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000040 2025-12-23T18:52:24.114097Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:24.114198Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 25769805936 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:24.114241Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000040, at schemeshard: 72057594046678944 2025-12-23T18:52:24.114335Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-12-23T18:52:24.114398Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2025-12-23T18:52:24.114431Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-12-23T18:52:24.114472Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 
2025-12-23T18:52:24.114504Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-12-23T18:52:24.114551Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:52:24.114599Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:52:24.114632Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-12-23T18:52:24.114671Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-12-23T18:52:24.114703Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 2025-12-23T18:52:24.114733Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710758:0 2025-12-23T18:52:24.114841Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:52:24.114884Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-12-23T18:52:24.114931Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-12-23T18:52:24.114980Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2025-12-23T18:52:24.115544Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:24.116533Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:24.116566Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:24.116679Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:52:24.116791Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:24.116818Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:209:2210], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-12-23T18:52:24.116847Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:209:2210], at schemeshard: 72057594046678944, txId: 
281474976710758, path id: 39 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-12-23T18:52:24.117374Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:24.117433Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:24.117461Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-12-23T18:52:24.117505Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-12-23T18:52:24.117545Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:52:24.118026Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:24.118086Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:24.118110Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-12-23T18:52:24.118131Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2025-12-23T18:52:24.118153Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:52:24.118232Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-12-23T18:52:24.118277Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [6:127:2152] 2025-12-23T18:52:24.118551Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:52:24.118595Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:52:24.118662Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:52:24.119788Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:24.120694Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:24.120769Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:52:24.120829Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-12-23T18:52:24.120880Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710758 2025-12-23T18:52:24.122117Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:52:24.122167Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:877:2828] TestWaitNotification: OK eventTxId 102 |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTreePrefix [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::ReadOnlyMode >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon >> TExportToS3Tests::AuditCompletedExport >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits >> KqpSystemView::PartitionStatsFollower [GOOD] >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::CancelledExportEndTime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTreePrefix [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:14.383228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:14.383312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:14.383409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:14.383461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-12-23T18:52:14.383499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:14.383566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:14.384515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:14.386773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:14.468092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:14.468154Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:14.481071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:14.481248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:14.482472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:14.496272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:14.496483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:14.499726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:14.502862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:14.510637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.511259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:14.517997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:14.518320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:14.518371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:14.518511Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:14.649756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.651711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.651835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.651913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.651987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.652690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... to actorId: [6:127:2152] 2025-12-23T18:52:26.196160Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710764 2025-12-23T18:52:26.196248Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710764 2025-12-23T18:52:26.196313Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710764 2025-12-23T18:52:26.196347Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710764 2025-12-23T18:52:26.197789Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:52:26.197839Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:1228:3111] TestWaitNotification: OK eventTxId 102 2025-12-23T18:52:26.198275Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:52:26.198586Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index" took 322us result status StatusSuccess 2025-12-23T18:52:26.199583Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/index" PathDescription { Self { Name: "index" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplLevelTable" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPrefixTable" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: 
EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "index" LocalPathId: 39 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "prefix" KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 
16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::PathErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] Test command err: Trying to start YDB, gRPC: 11701, MsgBus: 17800 2025-12-23T18:51:58.902002Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128098933207949:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:51:58.902055Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e7a/r3tmp/tmpOtElUM/pdisk_1.dat 2025-12-23T18:51:59.194268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:51:59.204479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:51:59.204617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:51:59.325200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:51:59.331718Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:51:59.341736Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128098933207918:2081] 1766515918899218 != 1766515918899221 2025-12-23T18:51:59.415295Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 11701, node 1 2025-12-23T18:51:59.615602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:51:59.615627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:51:59.615637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:51:59.615707Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:51:59.915152Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:17800 TClient is connected to server localhost:17800 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515919392 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-12-23T18:52:00.496276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:00.504601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:52:01.223729Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:52:01.223765Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:52:01.223802Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:7587128103228175566:2146], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:52:01.223820Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:52:02.226945Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:52:02.226988Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:52:02.227031Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:7587128103228175566:2146], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:52:02.227044Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:52:02.468886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128116113077989:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.468990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.469400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128116113077999:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:02.469443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:03.228474Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:52:03.228511Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:52:03.228559Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:7587128103228175566:2146], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:52:03.228572Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:52:03.314410Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269877761, Sender [1:7587128120408045315:2509], Recipient [1:7587128103228175566:2146]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:52:03.314441Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:52:03.314455Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6166: Pipe server connected, at tablet: 72057594046644480 2025-12-23T18:52:03.314493Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122432, Sender [1:7587128120408045311:2506], Recipient [1:7587128103228175566:2146]: {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-12-23T18:52:03.314506Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5258: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-12-23T18:52:03.369850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T18:52:03.370224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-12-23T18:52:03.370344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2025-12-23T18:52:03.371074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2025-12-23T18:52:03.371112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 0 2025-12-23T18:52:03.371140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976710658:0 type: TxCreateTable target path: [OwnerId: 72057594046644480, LocalPathId: 38] source path: 2025-12-23T18:52:03.371163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2025-12-23T18:52:03.371276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2025-12-23T18:52:03.371286Z node 1 :FLAT_TX_SCHEMESH ... e TTxStoreStats, queue# 2 2025-12-23T18:52:23.589826Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-12-23T18:52:23.589890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046644480:1 data size 800 row count 4 2025-12-23T18:52:23.589935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], pathId map=Followers, is column=0, is olap=0, RowCount 4, DataSize 800 2025-12-23T18:52:23.589964Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2025-12-23T18:52:23.590029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:235: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-12-23T18:52:23.590087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2025-12-23T18:52:23.590116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046644480:1 data size 0 row count 0 2025-12-23T18:52:23.590137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=2, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], pathId map=Followers, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:52:23.590144Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037888, followerId 2 2025-12-23T18:52:23.590182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:393: Do not want to split tablet 72075186224037888 by the CPU load from the follower ID 2, reason: SplitByLoadNotEnabledForTable 2025-12-23T18:52:23.590243Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-12-23T18:52:23.590370Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:52:23.590390Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:52:23.590419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-12-23T18:52:23.590519Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:152: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 800 RowCount: 4 IndexSize: 0 CPUCores: 0.001199 TabletId: 72075186224037888 NodeId: 1 StartTime: 1766515923461 AccessTime: 1766515924107 UpdateTime: 1766515923892 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 0 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:52:23.590614Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:152: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] shardIdx 72057594046644480 1 followerId 2 stats DataSize: 0 RowCount: 0 IndexSize: 0 CPUCores: 0.000127 TabletId: 72075186224037888 NodeId: 1 StartTime: 1766515923494 AccessTime: 0 UpdateTime: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:52:24.239904Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:52:24.239943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:52:24.239984Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:7587128103228175566:2146], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:52:24.239998Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:52:25.000674Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:595: Handle TEvPrivate::TEvProcessInterval: service id# [1:7587128098933207907:2070], interval end# 2025-12-23T18:52:25.000000Z, event interval end# 2025-12-23T18:52:25.000000Z 2025-12-23T18:52:25.000708Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7587128098933207907:2070], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-12-23T18:52:25.000703Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:595: Handle TEvPrivate::TEvProcessInterval: service id# [1:7587128098933208020:2135], interval 
end# 2025-12-23T18:52:25.000000Z, event interval end# 2025-12-23T18:52:25.000000Z 2025-12-23T18:52:25.000731Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7587128098933208020:2135], query logs count# 1, processor ids count# 1, processor id to database count# 0 2025-12-23T18:52:25.240415Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:52:25.240458Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:52:25.240510Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:7587128103228175566:2146], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:52:25.240537Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:52:26.240751Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:52:26.240801Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:52:26.240844Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:7587128103228175566:2146], Recipient [1:7587128103228175566:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:52:26.240859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... 
SELECT from partition_stats for /Root/Followers , attempt 2 2025-12-23T18:52:26.410375Z node 1 :SYSTEM_VIEWS INFO: sysview_service.cpp:888: Navigate by database succeeded: service id# [1:7587128098933208020:2135], database# /Root, no sysview processor 2025-12-23T18:52:26.554299Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7587128219192293701:2494], owner: [1:7587128219192293698:2492], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-12-23T18:52:26.555289Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7587128219192293701:2494], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-12-23T18:52:26.555530Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 274595843, Sender [1:7587128219192293701:2494], Recipient [1:7587128103228175566:2146]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2025-12-23T18:52:26.555571Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5312: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2025-12-23T18:52:26.555665Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7587128219192293701:2494], row count: 2, finished: 1 2025-12-23T18:52:26.555729Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7587128219192293701:2494], owner: [1:7587128219192293698:2492], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-12-23T18:52:26.556482Z node 1 :SYSTEM_VIEWS TRACE: sysview_service.cpp:902: Collect query stats: service id# [1:7587128098933208020:2135], database# /Root, query hash# 3266603936201095014, cpu time# 269189 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId != 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/Followers' ... 
SELECT from partition_stats, attempt 0 2025-12-23T18:52:26.870130Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7587128219192293719:2502], owner: [1:7587128219192293715:2500], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-12-23T18:52:26.870720Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7587128219192293719:2502], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-12-23T18:52:26.870978Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 274595843, Sender [1:7587128219192293719:2502], Recipient [1:7587128103228175566:2146]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2025-12-23T18:52:26.871005Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5312: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2025-12-23T18:52:26.871096Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7587128219192293719:2502], row count: 2, finished: 1 2025-12-23T18:52:26.871177Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7587128219192293719:2502], owner: [1:7587128219192293715:2500], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2025-12-23T18:52:26.874276Z node 1 :SYSTEM_VIEWS TRACE: sysview_service.cpp:902: Collect query stats: service id# [1:7587128098933208020:2135], database# /Root, query hash# 18339066598126957035, cpu time# 303271 |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> test_canonical_records.py::test_dstool_add_group_http [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::DefaultStorageConfig >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::NestedDirs >> TExportToS3Tests::Changefeeds >> ActorHandler::NoValidGroupForbidden >> MonPage::HttpOk >> ActorPage::InvalidTokenForbidden >> ActorPage::NoValidGroupForbidden >> ActorHandler::OptionsNoContent >> Other::TraceInvalidTokenForbidden >> Other::TraceHttpOk >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes >> TSchemeShardTest::DefaultStorageConfig [GOOD] >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex >> TSchemeShardTest::NestedDirs [GOOD] >> TSchemeShardTest::NewOwnerOnDatabase >> Other::UnknownPathNotFound >> 
HttpRequest::AnalyzeServerless [GOOD] >> TExportToS3Tests::Changefeeds [GOOD] >> Other::TraceNoValidGroupForbidden >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes [GOOD] >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex [GOOD] >> TSchemeShardTest::NewOwnerOnDatabase [GOOD] >> TSchemeShardTest::PreserveColumnOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:17.413551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:17.498917Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:17.502889Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:17.503224Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:17.503315Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:17.827443Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:17.910885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:17.911019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:17.968896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:18.060623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00323b/r3tmp/tmpvQvY0i/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24635, node 1 2025-12-23T18:52:18.712110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:18.713210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:18.713257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:18.713290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:18.713512Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:18.778667Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18611 2025-12-23T18:52:19.303897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:21.965420Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:21.970717Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:21.973435Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:22.000686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:22.000786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:22.039478Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:22.041896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:22.284942Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.285626Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.286529Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.287328Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.287683Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.287875Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.288039Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.288209Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.288381Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.361017Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:52:22.443116Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:22.443202Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:22.456029Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:22.613742Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:22.665161Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:22.665255Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:22.695588Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:22.697054Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:22.697270Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:22.697331Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column 
count# 0 2025-12-23T18:52:22.697379Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:22.697431Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:22.697479Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:22.697527Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:22.697932Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:22.741825Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:22.741929Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:22.755274Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:52:22.764924Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:52:22.766098Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:52:22.771692Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:52:22.805738Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:52:22.805808Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:52:22.805882Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:52:22.830791Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:22.841508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:22.850198Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:22.850331Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:22.864065Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:22.995569Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:23.326982Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:23.427932Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:23.428026Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. ... rc=[2:2925:3026];cookie=101:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.134906Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;self_id=[2:3609:3455];ev=NActors::IEventHandle;tablet_id=72075186224037912;tx_id=281474976715661;this=137017762679456;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4507;max=18446744073709551615;plan=0;src=[2:2925:3026];cookie=141:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.135272Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;self_id=[2:3610:3456];ev=NActors::IEventHandle;tablet_id=72075186224037913;tx_id=281474976715661;this=137017762672512;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4507;max=18446744073709551615;plan=0;src=[2:2925:3026];cookie=151:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.171895Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[2:3615:3458];ev=NActors::IEventHandle;tablet_id=72075186224037911;tx_id=281474976715661;this=137017762664896;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4508;max=18446744073709551615;plan=0;src=[2:2925:3026];cookie=131:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.172295Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;self_id=[2:3623:3462];ev=NActors::IEventHandle;tablet_id=72075186224037910;tx_id=281474976715661;this=137017762663776;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4508;max=18446744073709551615;plan=0;src=[2:2925:3026];cookie=121:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.172626Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;self_id=[2:3633:3470];ev=NActors::IEventHandle;tablet_id=72075186224037909;tx_id=281474976715661;this=137017762662880;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4508;max=18446744073709551615;plan=0;src=[2:2925:3026];cookie=111:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.172908Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037907;self_id=[2:3634:3471];ev=NActors::IEventHandle;tablet_id=72075186224037907;tx_id=281474976715661;this=137017762444928;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4508;max=18446744073709551615;plan=0;src=[2:2925:3026];cookie=91:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.189165Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;self_id=[2:3635:3472];ev=NActors::IEventHandle;tablet_id=72075186224037914;tx_id=281474976715661;this=137017762442688;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4509;max=18446744073709551615;plan=0;src=[2:2925:3026];cookie=161:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.288011Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.288128Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.288177Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.311193Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.311291Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.311322Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.321177Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.321249Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.321287Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.326914Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.326987Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.327016Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.334365Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.334436Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.334463Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.340549Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.340639Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.340680Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.348165Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.348234Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.348263Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.354631Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.354714Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.354753Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.360452Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.360523Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.360550Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.365785Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.365853Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2025-12-23T18:52:27.365880Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; waiting actualization: 0/0.000015s 2025-12-23T18:52:29.513473Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4486:4114] 2025-12-23T18:52:29.532556Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4454:3709] , Record { OperationId: "\000\000\000\000\026\021 \362\372\245}\363\'\234Y\034" Tables { PathId { OwnerId: 72057594046644480 LocalId: 38 } } Database: "" } 2025-12-23T18:52:29.532624Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: ` }'Y ', DatabaseName: `', Types: 2025-12-23T18:52:29.532662Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: ` }'Y ', PathId: [OwnerId: 72057594046644480, LocalPathId: 38], ColumnTags: Answer: 'Analyze sent. 
OperationId: 00000005gh43sfn9bxycksrp8w' |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes [GOOD] Test command err: 2025-12-23T18:52:29.261893Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128230976651668:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:29.262043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056e3/r3tmp/tmpSMWZTe/pdisk_1.dat 2025-12-23T18:52:29.477226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:29.537112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:29.537218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:29.549248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:29.588557Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128230976651632:2081] 1766515949256573 != 1766515949256576 2025-12-23T18:52:29.591640Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:29.647674Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.clusters.get ydb.clusters.manage ydb.clusters.monitor) 2025-12-23T18:52:29.647751Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:29.647785Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d24424f07d0] Connect to grpc://localhost:23757 2025-12-23T18:52:29.650601Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d24424f07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.monitor" } } result_filter: ALL_FAILED } 2025-12-23T18:52:29.669558Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d24424f07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-12-23T18:52:29.670412Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2025-12-23T18:52:29.670508Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from 
ip: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:14.383226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:14.383337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:14.383439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:14.383507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:14.383539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:14.383608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:14.384558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:14.386780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:14.484281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:14.484335Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:14.496290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:14.496462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:14.496637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:14.509168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:14.509417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:14.510240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:14.510471Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:14.514873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.515054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:14.518009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:14.518315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:14.518371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:14.518505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:14.691044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.693033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.693103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: 
ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.693171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.693259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.693367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-12-23T18:52:31.214715Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-12-23T18:52:31.214791Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:52:31.215313Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:31.215387Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:31.215415Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-12-23T18:52:31.215444Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 45], version: 18446744073709551615 2025-12-23T18:52:31.215489Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 2 2025-12-23T18:52:31.215557Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at 
schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-12-23T18:52:31.215626Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:128:2153] 2025-12-23T18:52:31.218489Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:31.218867Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:31.218949Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-12-23T18:52:31.218997Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710761 2025-12-23T18:52:31.219039Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:351: TExport::TTxProgress: DoExecute 2025-12-23T18:52:31.219062Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1314: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-12-23T18:52:31.219093Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1345: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-12-23T18:52:31.220755Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete 2025-12-23T18:52:31.220831Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:52:31.220868Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [5:1530:3322] TestWaitNotification: OK eventTxId 105 2025-12-23T18:52:31.221991Z node 5 :EXPORT DEBUG: schemeshard_export__forget.cpp:79: TExport::TTxForget, dropping export tables, info: { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-12-23T18:52:31.223882Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:351: TExport::TTxProgress: DoExecute 2025-12-23T18:52:31.223927Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:798: TExport::TTxProgress: Resume: id# 105 2025-12-23T18:52:31.224014Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:579: TExport::TTxProgress: Allocate txId: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-12-23T18:52:31.224064Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete 2025-12-23T18:52:31.224192Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 105, at schemeshard: 72057594046678944 2025-12-23T18:52:31.224243Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:351: TExport::TTxProgress: DoExecute 2025-12-23T18:52:31.224286Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:919: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 105 2025-12-23T18:52:31.224352Z node 5 
:EXPORT INFO: schemeshard_export__create.cpp:571: TExport::TTxProgress: Drop propose: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2025-12-23T18:52:31.224438Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete 2025-12-23T18:52:31.226627Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-105" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2025-12-23T18:52:31.226783Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/export-105, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2025-12-23T18:52:31.226927Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-105', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761, at schemeshard: 72057594046678944 2025-12-23T18:52:31.228635Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 45 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2025-12-23T18:52:31.228833Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-105', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761, operation: DROP DIRECTORY, path: /MyRoot/export-105 2025-12-23T18:52:31.229003Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7144: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-12-23T18:52:31.229079Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7146: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 45 PathDropTxId: 281474976710761 2025-12-23T18:52:31.229129Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:351: TExport::TTxProgress: DoExecute 2025-12-23T18:52:31.229182Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:980: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-12-23T18:52:31.229264Z node 5 :EXPORT TRACE: 
schemeshard_export__create.cpp:981: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 45 PathDropTxId: 281474976710761 2025-12-23T18:52:31.229356Z node 5 :EXPORT INFO: schemeshard_export__create.cpp:1162: TExport::TTxProgress: Wait for completion: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2025-12-23T18:52:31.230635Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete 2025-12-23T18:52:31.230786Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2025-12-23T18:52:31.230868Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-12-23T18:52:31.230917Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710761 2025-12-23T18:52:31.230987Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:351: TExport::TTxProgress: DoExecute 2025-12-23T18:52:31.231030Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1314: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-12-23T18:52:31.231069Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:1345: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-12-23T18:52:31.232394Z node 5 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 105 2025-12-23T18:52:31.232615Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:52:31.232654Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:52:31.233075Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:52:31.233176Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:52:31.233225Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [5:1754:3542] TestWaitNotification: OK eventTxId 105 >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-false |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest |97.1%| [TA] $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.1%| [TA] {RESULT} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:52:10.241744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:10.241800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:10.241832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:10.241868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:10.241894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:10.241916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:10.241967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:10.242034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:10.242680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:10.242919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:10.309626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:10.309687Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:10.313954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:10.314097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:10.314189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:10.316584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:10.316742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:10.317248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2025-12-23T18:52:10.317389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:10.318975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:10.319146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:10.320038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:10.320100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:10.320215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:10.320286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:10.320328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:10.320422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:10.437862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.438495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.438558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.438608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.438704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.438811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.438872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.438932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.439016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.439091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.439144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.439185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.439228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.439285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.439338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... status: StatusAccepted 2025-12-23T18:52:32.137775Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-12-23T18:52:32.137823Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:30: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2025-12-23T18:52:32.137881Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:52:32.137993Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:52:32.139327Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2025-12-23T18:52:32.139413Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:823:2793], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 
281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:52:32.139491Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-12-23T18:52:32.139560Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:52:32.139656Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-12-23T18:52:32.139683Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-12-23T18:52:32.139717Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000043 2025-12-23T18:52:32.139957Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:32.140041Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 73014446192 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:32.140093Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000043 2025-12-23T18:52:32.140175Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2025-12-23T18:52:32.141810Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-12-23T18:52:32.141856Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-12-23T18:52:32.141923Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2025-12-23T18:52:32.141950Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:52:32.141989Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2025-12-23T18:52:32.142015Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: 
TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:52:32.142049Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-12-23T18:52:32.142102Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [17:125:2150] message: TxId: 281474976710760 2025-12-23T18:52:32.142161Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:52:32.142202Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2025-12-23T18:52:32.142224Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710760:0 2025-12-23T18:52:32.142277Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-12-23T18:52:32.143751Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-12-23T18:52:32.143797Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710760 2025-12-23T18:52:32.143850Z node 17 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 103, txId# 281474976710760 2025-12-23T18:52:32.143929Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:823:2793], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2025-12-23T18:52:32.145118Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2025-12-23T18:52:32.145200Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, 
SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:823:2793], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:52:32.145240Z node 17 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-12-23T18:52:32.146399Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done 2025-12-23T18:52:32.146536Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:823:2793], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:52:32.146594Z node 17 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 103, subscribers count# 1 2025-12-23T18:52:32.146729Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:52:32.146829Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [17:912:2871] TestWaitNotification: OK eventTxId 103 |97.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::PreserveColumnOrder [GOOD] >> HttpRequest::Status [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-false [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-true >> ActorPage::InvalidTokenForbidden [GOOD] >> ActorPage::NoUseAuthOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::PreserveColumnOrder [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:52:08.507594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:08.508319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:08.508381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:08.508424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:08.508468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:08.508505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:08.508612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:08.508726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:08.509756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:08.510080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:08.604907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:08.604975Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:08.611460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:08.611705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:08.611868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:08.615339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:08.615598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:08.616352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:08.616564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:08.618631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:08.618875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:08.620129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:08.620215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:08.620392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:08.620446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:08.620494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:08.620638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:08.628102Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:52:08.748204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:52:08.748450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:52:08.748657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:52:08.748700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:52:08.748932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:52:08.749003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:08.751962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:08.752189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:52:08.752403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:52:08.752558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:52:08.752620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:52:08.752661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:52:08.755152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:52:08.755226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:52:08.755275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:52:08.757332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:52:08.757386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:52:08.757441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:52:08.757507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:52:08.761994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:52:08.764245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:52:08.764451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:52:08.765668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:08.765836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:08.765888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:52:08.766167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:52:08.766225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:52:08.766429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:52:08.766532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:52:08.769077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:08.769127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
ecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1982 } } CommitVersion { Step: 5000038 TxId: 101 } 2025-12-23T18:52:33.485560Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1982 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2025-12-23T18:52:33.486857Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 68719479442 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:52:33.486934Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:52:33.487118Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 720 RawX2: 68719479442 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:52:33.487221Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:52:33.487382Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 720 RawX2: 68719479442 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:52:33.487490Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:33.487552Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:52:33.487607Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:52:33.487670Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2025-12-23T18:52:33.490486Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:52:33.491050Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:52:33.491147Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 
2025-12-23T18:52:33.492415Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:52:33.492790Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:52:33.492848Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:52:33.493049Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:52:33.493117Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:52:33.493184Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:52:33.493241Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:52:33.493300Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-12-23T18:52:33.493404Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:748:2726] message: TxId: 101 2025-12-23T18:52:33.493478Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:52:33.493544Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:52:33.493598Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:52:33.493777Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:52:33.495728Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:52:33.495812Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [16:749:2727] TestWaitNotification: OK eventTxId 101 2025-12-23T18:52:33.496502Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:52:33.496943Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 482us result status StatusSuccess 2025-12-23T18:52:33.497907Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "col01" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col02" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col03" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "col04" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "col05" Type: "Utf8" TypeId: 4608 Id: 5 NotNull: false IsBuildInProgress: false } Columns { Name: "col06" Type: "Utf8" TypeId: 4608 Id: 6 NotNull: false IsBuildInProgress: false } Columns { Name: "col07" Type: "Utf8" TypeId: 4608 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "col08" Type: "Utf8" TypeId: 4608 Id: 8 NotNull: false IsBuildInProgress: false } Columns { Name: "col09" Type: "Utf8" TypeId: 4608 Id: 9 NotNull: false IsBuildInProgress: false } Columns { Name: "col10" Type: "Utf8" TypeId: 4608 Id: 10 NotNull: false IsBuildInProgress: false } Columns { Name: "col11" Type: "Utf8" TypeId: 4608 Id: 11 NotNull: false IsBuildInProgress: false } Columns { Name: "col12" Type: "Utf8" TypeId: 4608 Id: 12 NotNull: false IsBuildInProgress: false } Columns { Name: "col13" Type: "Utf8" TypeId: 4608 Id: 13 NotNull: false IsBuildInProgress: false } Columns { Name: "col14" Type: "Utf8" TypeId: 4608 Id: 14 NotNull: false IsBuildInProgress: false } Columns { Name: "col15" Type: "Utf8" TypeId: 4608 Id: 15 NotNull: false IsBuildInProgress: false } Columns { Name: "col16" Type: "Utf8" TypeId: 4608 Id: 16 NotNull: false IsBuildInProgress: false } Columns { Name: "col17" Type: "Utf8" TypeId: 4608 Id: 17 NotNull: false IsBuildInProgress: false } Columns { Name: "col18" Type: "Utf8" TypeId: 4608 Id: 18 NotNull: false IsBuildInProgress: false } Columns { Name: "col19" Type: "Utf8" TypeId: 4608 Id: 19 NotNull: false IsBuildInProgress: false } Columns { Name: "col20" Type: "Utf8" TypeId: 4608 Id: 20 NotNull: false IsBuildInProgress: false } Columns { Name: "col21" Type: "Utf8" TypeId: 4608 Id: 21 NotNull: false IsBuildInProgress: false } Columns { Name: "col22" Type: "Utf8" TypeId: 4608 Id: 22 NotNull: false IsBuildInProgress: false } Columns { Name: "col23" Type: "Utf8" TypeId: 4608 Id: 23 NotNull: false IsBuildInProgress: false } Columns { Name: "col24" Type: "Utf8" TypeId: 4608 Id: 24 NotNull: false IsBuildInProgress: false } Columns { Name: "col25" Type: "Utf8" TypeId: 4608 Id: 25 NotNull: false IsBuildInProgress: false } Columns { Name: "col26" Type: "Utf8" TypeId: 4608 Id: 26 NotNull: false IsBuildInProgress: false } Columns { Name: "col27" Type: "Utf8" TypeId: 4608 Id: 27 NotNull: false IsBuildInProgress: false } Columns { Name: "col28" Type: "Utf8" TypeId: 4608 Id: 28 NotNull: false IsBuildInProgress: false } Columns { Name: "col29" Type: "Utf8" TypeId: 4608 Id: 29 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col01" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 
Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ActorPage::NoValidGroupForbidden [GOOD] >> ActorPage::OptionsNoContent >> ActorHandler::NoValidGroupForbidden [GOOD] >> ActorHandler::NoUseAuthOk >> MonPage::HttpOk [GOOD] >> MonPage::OptionsNoContent |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> ActorHandler::OptionsNoContent [GOOD] >> ActorPage::HttpOk >> Other::TraceInvalidTokenForbidden [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:22.684579Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:22.779856Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:22.785960Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:22.786317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:22.786386Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:23.138850Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:23.225210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:23.225344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:23.283810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:23.391202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0031f7/r3tmp/tmpziErOc/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2032, node 1 2025-12-23T18:52:24.028823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:24.029583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:24.029632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:24.029655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:24.029799Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:24.093419Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22510 2025-12-23T18:52:24.629783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:27.134133Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:27.139778Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:27.142282Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:27.166783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:27.166886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:27.205604Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:27.207359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:27.455213Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:27.455737Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:27.456388Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:27.456862Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:27.457076Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:27.457219Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:27.457326Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:27.457428Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:27.457502Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:27.569342Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:52:27.580967Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:27.581075Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:27.636020Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:27.770779Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:27.807777Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:27.807848Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:27.829809Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:27.831002Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:27.831154Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:27.831198Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:52:27.831241Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:27.831285Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:27.831324Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:27.831365Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:27.831631Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:27.871109Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:27.871198Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:27.881392Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:52:27.889132Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:52:27.889968Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:52:27.894005Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:52:27.917940Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:52:27.917998Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:52:27.918078Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:52:27.936853Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2352:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:27.945025Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:27.951331Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:27.951488Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:27.960648Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:28.056253Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:28.391656Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:28.458415Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:28.458483Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:26 ... ;this=136552101417120;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3010;max=18446744073709551615;plan=0;src=[2:1942:2455];cookie=131:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.024641Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;self_id=[2:2901:3063];ev=NActors::IEventHandle;tablet_id=72075186224037902;tx_id=281474976715659;this=136552101418240;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3010;max=18446744073709551615;plan=0;src=[2:1942:2455];cookie=151:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.025089Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;self_id=[2:2912:3072];ev=NActors::IEventHandle;tablet_id=72075186224037901;tx_id=281474976715659;this=136552101419360;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3010;max=18446744073709551615;plan=0;src=[2:1942:2455];cookie=141:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.044988Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[2:2919:3075];ev=NActors::IEventHandle;tablet_id=72075186224037903;tx_id=281474976715659;this=136552101422720;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3011;max=18446744073709551615;plan=0;src=[2:1942:2455];cookie=161:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.045514Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;self_id=[2:2927:3079];ev=NActors::IEventHandle;tablet_id=72075186224037904;tx_id=281474976715659;this=136552101423840;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3011;max=18446744073709551615;plan=0;src=[2:1942:2455];cookie=171:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.075400Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;self_id=[2:2935:3085];ev=NActors::IEventHandle;tablet_id=72075186224037906;tx_id=281474976715659;this=136552101427648;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3012;max=18446744073709551615;plan=0;src=[2:1942:2455];cookie=191:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.075968Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037908;self_id=[2:2936:3086];ev=NActors::IEventHandle;tablet_id=72075186224037908;tx_id=281474976715659;this=136552101428768;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3012;max=18446744073709551615;plan=0;src=[2:1942:2455];cookie=211:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.107888Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;self_id=[2:2938:3088];ev=NActors::IEventHandle;tablet_id=72075186224037907;tx_id=281474976715659;this=136552101432800;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3013;max=18446744073709551615;plan=0;src=[2:1942:2455];cookie=201:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.108424Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;self_id=[2:2939:3089];ev=NActors::IEventHandle;tablet_id=72075186224037905;tx_id=281474976715659;this=136552101433920;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3013;max=18446744073709551615;plan=0;src=[2:1942:2455];cookie=181:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.304931Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.305074Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.305117Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.328160Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.328231Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.328254Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.334636Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.334733Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.334789Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.341022Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.341127Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.341171Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.346855Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.346939Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.346976Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.353321Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.353407Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.353448Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.360775Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.360849Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.360873Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.366040Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.366120Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.366150Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.371011Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.371072Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.371095Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.375767Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.375844Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:52:30.375872Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000017s 2025-12-23T18:52:32.593294Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3729:3672] 2025-12-23T18:52:32.653169Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION Answer: 'No analyze operation' |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> Other::TraceHttpOk [GOOD] >> Other::UnknownPathNotFound [GOOD] >> AutoConfig::GetASPoolsith1CPU [GOOD] >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> XdsBootstrapConfigInitializer::CanNotSetGrpcXdsBootstrapConfigEnvIfVariableAlreadySet [GOOD] >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnv [GOOD] >> AutoConfig::GetASPoolsWith2CPUs [GOOD] >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> Other::TraceNoValidGroupForbidden [GOOD] >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers [GOOD] |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanNotSetGrpcXdsBootstrapConfigEnvIfVariableAlreadySet [GOOD] |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |97.1%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceInvalidTokenForbidden [GOOD] Test command err: 2025-12-23T18:52:30.193336Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128234668779037:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:30.193396Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:30.484755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:30.514225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:30.514361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:30.563831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:30.596302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2916, node 1 2025-12-23T18:52:30.704205Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:30.902380Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:30.902426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:30.902440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:30.902526Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:31.197809Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server 
localhost:4646 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515950626 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:31.379473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:31.432282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:31.436677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
|97.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnv [GOOD] |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceHttpOk [GOOD] Test command err: 2025-12-23T18:52:30.289474Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128234102541015:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:30.289735Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:30.529405Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:30.537628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:30.537743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:30.593733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:30.604752Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:30.605960Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128234102540911:2081] 1766515950286259 != 1766515950286262 TServer::EnableGrpc on GrpcPort 2553, node 1 2025-12-23T18:52:30.718363Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:30.902382Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:30.902419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:30.902430Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:30.902494Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:31.294544Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3307 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515950661 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:31.398372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:31.456787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:31.461023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
|97.2%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-true [GOOD] >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers [GOOD] |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::UnknownPathNotFound [GOOD] Test command err: 2025-12-23T18:52:30.932369Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128234775466319:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:30.932437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:31.131836Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:31.162370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:31.162461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:31.207781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:31.239494Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:31.240990Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128234775466286:2081] 1766515950931084 != 1766515950931087 TServer::EnableGrpc on GrpcPort 5507, node 1 2025-12-23T18:52:31.319203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:31.319258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:31.319267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:31.319461Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:31.395288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11181 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515951256 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:31.555792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:31.602826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:31.605667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
|97.2%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceNoValidGroupForbidden [GOOD] Test command err: 2025-12-23T18:52:31.871558Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128237652999735:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:31.871966Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:32.055184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:32.059612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:32.059689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:32.066341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:32.172935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128237652999632:2081] 1766515951867929 != 1766515951867932 2025-12-23T18:52:32.174947Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5874, node 1 2025-12-23T18:52:32.211985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:32.212008Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:32.212017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:32.212135Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:32.306383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515952180 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:52:32.421880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:32.475318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:32.478335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |97.2%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::DisableAutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:14.383229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:14.383323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:14.383424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:14.383490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:14.383534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:14.383622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:14.384606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:14.386777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:14.484713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:14.484769Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:14.496551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:14.496726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:14.496945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:14.507975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:14.508237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:14.509034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:14.509276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:14.514048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.514246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:14.517968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:14.518244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:14.518288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:14.518435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:14.670187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:52:14.673051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.673212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.673279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.673366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.673465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.673531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.673623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.673728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.673796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.673852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.673930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.673992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.674053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.674162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
82Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-12-23T18:52:24.591817Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2025-12-23T18:52:24.591855Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:52:24.591909Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-12-23T18:52:24.594059Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete 2025-12-23T18:52:24.594275Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-12-23T18:52:24.594314Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-12-23T18:52:24.594355Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-12-23T18:52:24.594419Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-12-23T18:52:24.594497Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000043 2025-12-23T18:52:24.595303Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:24.595377Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 25769805936 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:24.595445Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000043, at schemeshard: 72057594046678944 2025-12-23T18:52:24.595543Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-12-23T18:52:24.595598Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2025-12-23T18:52:24.595637Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-12-23T18:52:24.595682Z node 6 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2025-12-23T18:52:24.595723Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-12-23T18:52:24.595781Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:52:24.595833Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:52:24.595861Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-12-23T18:52:24.595894Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-12-23T18:52:24.595927Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710761:0 2025-12-23T18:52:24.595959Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710761:0 2025-12-23T18:52:24.596027Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:52:24.596061Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-12-23T18:52:24.596093Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-12-23T18:52:24.596134Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2025-12-23T18:52:24.596962Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:24.597062Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:24.598106Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:24.598136Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:24.598266Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:52:24.598394Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:24.598433Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to 
populator: [6:209:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-12-23T18:52:24.598470Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:209:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 39 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-12-23T18:52:24.599164Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:24.599238Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:24.599265Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-12-23T18:52:24.599322Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-12-23T18:52:24.599381Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:52:24.599817Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:24.599872Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:24.599891Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-12-23T18:52:24.599913Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2025-12-23T18:52:24.599935Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:52:24.599996Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-12-23T18:52:24.600048Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [6:127:2152] 2025-12-23T18:52:24.602095Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 
2025-12-23T18:52:24.602439Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:24.602504Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-12-23T18:52:24.602550Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710761 2025-12-23T18:52:24.602594Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:351: TExport::TTxProgress: DoExecute 2025-12-23T18:52:24.602625Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1314: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-12-23T18:52:24.602652Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1345: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-12-23T18:52:24.603992Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete 2025-12-23T18:52:24.604063Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:52:24.604111Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:1012:2966] TestWaitNotification: OK eventTxId 102 |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:52:09.982824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:09.982894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:09.982937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:09.982961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:09.982985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:09.983004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:09.983059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:09.983151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, 
Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:09.983832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:09.984084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:10.045723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:10.045763Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:10.050447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:10.050603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:10.050712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:10.053319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:10.053484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:10.053938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:10.054095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:10.055885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:10.056077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:10.056931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:10.056970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:10.057045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:10.057075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:10.057099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:10.057190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:10.171981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:52:10.172711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.172808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.172894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.172998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.173063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.173120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.173182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.173246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.173331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.173410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.173487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.173549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.173618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:10.173697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
end, to populator: [16:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-12-23T18:52:37.799181Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [16:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:52:37.799717Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:52:37.799794Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:52:37.800778Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:52:37.800898Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:52:37.800950Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:52:37.801001Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-12-23T18:52:37.801060Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:52:37.801746Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:52:37.801807Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:52:37.801831Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:52:37.801858Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:52:37.801887Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:52:37.801949Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:52:37.804087Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 1 ProposeLatency: 3 
DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1684 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:52:37.804135Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:52:37.804252Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1684 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:52:37.804405Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 1 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1684 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2025-12-23T18:52:37.805542Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 745 RawX2: 68719479467 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:52:37.805629Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:52:37.805841Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 745 RawX2: 68719479467 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:52:37.805962Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:52:37.806153Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 745 RawX2: 68719479467 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:52:37.806277Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:37.806337Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:52:37.806390Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:52:37.806453Z node 16 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:52:37.807706Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:52:37.807802Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:52:37.808983Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:52:37.809096Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:52:37.809368Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:52:37.809415Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:52:37.809580Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:52:37.809631Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:52:37.809680Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:52:37.809728Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:52:37.809778Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:52:37.809860Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:712:2702] message: TxId: 102 2025-12-23T18:52:37.809927Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:52:37.809985Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:52:37.810026Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:52:37.810167Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:52:37.811483Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:52:37.811548Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [16:774:2752] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-12-23T18:52:37.814901Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table1" PartitionConfig { ChannelProfileId: 0 } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:52:37.815140Z node 16 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table1, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:52:37.815450Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Setting ChannelProfileId to 0 for tables with storage config is not allowed, at schemeshard: 72057594046678944 2025-12-23T18:52:37.817379Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Setting ChannelProfileId to 0 for tables with storage config is not allowed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:37.817696Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Setting ChannelProfileId to 0 for tables with storage config is not allowed, operation: ALTER TABLE, path: /MyRoot/Table1 TestModificationResult got TxId: 103, wait until txId: 103 |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> ActorPage::OptionsNoContent [GOOD] >> ActorHandler::NoUseAuthOk [GOOD] >> MonPage::OptionsNoContent [GOOD] >> ActorPage::NoUseAuthOk [GOOD] >> ActorPage::HttpOk [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative >> TargetDiscoverer::Transfer >> TargetDiscoverer::Basic >> TargetDiscoverer::InvalidCredentials >> TargetDiscoverer::RetryableError >> TargetDiscoverer::SystemObjects >> TargetDiscoverer::IndexedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::OptionsNoContent [GOOD] Test command err: 2025-12-23T18:52:30.192992Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128235923529759:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:30.193049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:30.479544Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:30.553972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:30.554079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:30.586208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:30.608240Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13021, node 1 2025-12-23T18:52:30.734920Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:30.902349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:30.902401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:30.902410Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:30.902507Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:31.198485Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:26370 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515950654 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:31.370459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:31.430051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:31.433545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:52:34.988932Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128251546711006:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:34.989027Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:35.002136Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:35.100817Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:35.110005Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587128251546710981:2081] 1766515954988406 != 1766515954988409 2025-12-23T18:52:35.114478Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:35.114601Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:35.119104Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4533, node 2 2025-12-23T18:52:35.158255Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:35.158308Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:35.158338Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:35.158421Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:35.198094Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18424 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515955176 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:52:35.402403Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:35.454073Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:35.458120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorHandler::NoUseAuthOk [GOOD] Test command err: 2025-12-23T18:52:30.193042Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128236393114070:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:30.193096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:30.446159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:30.512499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:30.512598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:30.578832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:30.582701Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30091, node 1 2025-12-23T18:52:30.740414Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:30.902429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:30.902471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:30.902478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:30.902561Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:31.197963Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:17325 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515950640 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:31.405800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:31.454029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:31.458305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:52:35.125563Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128257772354306:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:35.125652Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:35.139212Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:35.183174Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587128257772354280:2081] 1766515955124641 != 1766515955124644 TServer::EnableGrpc on GrpcPort 28190, node 2 2025-12-23T18:52:35.230046Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:52:35.231504Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:52:35.237787Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:35.237918Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:35.239714Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:35.259630Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:35.262604Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:35.262635Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:35.262643Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:35.262734Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:35.424979Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:14370 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515955337 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:35.509899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:35.557463Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:35.561226Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> MonPage::OptionsNoContent [GOOD] Test command err: 2025-12-23T18:52:30.193011Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128236312477068:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:30.193099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:30.506952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:30.586545Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:30.590105Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128236312476958:2081] 1766515950181733 != 1766515950181736 2025-12-23T18:52:30.606454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:30.606552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:30.609740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21793, node 1 2025-12-23T18:52:30.761735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:30.902392Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:30.902423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:30.902429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:30.902557Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:31.197085Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515950654 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:31.374430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:31.430284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:31.449214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:52:35.127078Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128257423332623:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:35.127149Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:35.144755Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:35.232531Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587128257423332595:2081] 1766515955125927 != 1766515955125930 2025-12-23T18:52:35.241423Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:35.266588Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:35.266688Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:35.272968Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21207, node 2 2025-12-23T18:52:35.316653Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:35.316670Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:35.316677Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:35.316761Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:35.356507Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2575 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515955323 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:52:35.583619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:35.629175Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:35.632995Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::NoUseAuthOk [GOOD] Test command err: 2025-12-23T18:52:30.191640Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128236753585713:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:30.191684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:30.497568Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:30.535271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:30.535382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:30.578225Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:30.583397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26289, node 1 2025-12-23T18:52:30.780365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:30.902487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:30.902523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:30.902531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:30.902610Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:31.197659Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event 
for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:32627 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515950633 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:31.365803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:31.431815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:31.436046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:52:34.835006Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128250467737248:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:34.835049Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:34.848162Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:34.929807Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:34.961677Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:34.961754Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:34.971245Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17337, node 2 2025-12-23T18:52:35.007694Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:35.007725Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:35.007734Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:35.007841Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:35.016020Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23191 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515955022 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:35.210633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:52:35.250117Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:35.253838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::HttpOk [GOOD] Test command err: 2025-12-23T18:52:30.193227Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128235806602321:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:30.193408Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:30.495977Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:30.534995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:30.535097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:30.583273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:30.615280Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30742, node 1 2025-12-23T18:52:30.762103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:30.902266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:30.902352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:30.902397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:30.902499Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:31.198916Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24461 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515950647 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:31.416434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:31.470728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:31.474384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-12-23T18:52:35.248695Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128258331612158:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:35.248807Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # SectorMap:test-client[:2000] 2025-12-23T18:52:35.263480Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:35.361183Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:35.367181Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587128258331612135:2081] 1766515955247450 != 1766515955247453 2025-12-23T18:52:35.379442Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:35.379519Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:35.393155Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9515, node 2 2025-12-23T18:52:35.442053Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:35.442076Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:35.442084Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:35.442201Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:35.528213Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515955442 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:52:35.675063Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:35.721697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:52:35.725053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_add_group_http [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups |97.2%| [TA] $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {RESULT} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> TargetDiscoverer::Negative [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |97.2%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> TargetDiscoverer::Basic [GOOD] >> TargetDiscoverer::InvalidCredentials [GOOD] >> TargetDiscoverer::RetryableError [GOOD] >> TargetDiscoverer::Transfer [GOOD] >> TargetDiscoverer::IndexedTable [GOOD] >> TargetDiscoverer::SystemObjects [GOOD] |97.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2025-12-23T18:52:40.606489Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128277664624446:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:40.606540Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040f7/r3tmp/tmp2GVlKd/pdisk_1.dat 2025-12-23T18:52:40.819286Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:40.856011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:40.856184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:40.883893Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:40.900290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:40.971918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10411 TServer::EnableGrpc on GrpcPort 28089, node 1 2025-12-23T18:52:41.237276Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:41.237355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:41.237363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:41.237430Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10411 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:52:41.614203Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515960951 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:41.692372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:41.746812Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-12-23T18:52:41.746862Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found }, iteration# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Transfer [GOOD] Test command err: 2025-12-23T18:52:40.606561Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128277403248289:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:40.606619Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040ed/r3tmp/tmpyKlnl0/pdisk_1.dat 2025-12-23T18:52:40.821311Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:40.839866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:40.839984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:40.890684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:40.916657Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:40.974240Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22743 TServer::EnableGrpc on GrpcPort 23224, node 1 2025-12-23T18:52:41.237437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:41.237472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:41.237478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:41.237555Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22743 2025-12-23T18:52:41.614057Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515960951 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:41.693628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:52:41.833807Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Topic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1766515961833, tx_id: 281474976715658 } } } 2025-12-23T18:52:41.833842Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root/Topic 2025-12-23T18:52:41.863422Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:168: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTopicResponse { Result: { status: SUCCESS, issues: } } 2025-12-23T18:52:41.863472Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:183: [TargetDiscoverer][rid 1] Describe topic succeeded: path# /Root/Topic 2025-12-23T18:52:41.863560Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:193: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Topic, dstPath# /Root/Replicated/Table, kind# Transfer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2025-12-23T18:52:40.606244Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128278331561272:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:40.606284Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040f1/r3tmp/tmp068shC/pdisk_1.dat 2025-12-23T18:52:40.821535Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:40.826656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-12-23T18:52:40.826726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:40.858061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:40.898365Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:12063 TServer::EnableGrpc on GrpcPort 4810, node 1 2025-12-23T18:52:41.072252Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:41.237239Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:41.237266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:41.237275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:41.237339Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12063 2025-12-23T18:52:41.614039Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515960923 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:41.707345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:41.715534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:41.834980Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1766515961756, tx_id: 1 } } } 2025-12-23T18:52:41.835015Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-12-23T18:52:41.842924Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1766515960923, tx_id: 281474976715657 } }, { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766515961805, tx_id: 281474976710658 } }] } } 2025-12-23T18:52:41.842958Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-12-23T18:52:42.894708Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766515961805, tx_id: 281474976710658 } } } 2025-12-23T18:52:42.894760Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-12-23T18:52:42.894821Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2025-12-23T18:52:40.606497Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128279845288378:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:40.606596Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040ee/r3tmp/tmpoGDepf/pdisk_1.dat 2025-12-23T18:52:40.801552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:40.827953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:40.828037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:40.877960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:40.909867Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:41.023007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17994 TServer::EnableGrpc on GrpcPort 25606, node 1 2025-12-23T18:52:41.237226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:41.237260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:41.237266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:41.237333Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:41.613676Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:17994 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515960930 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:41.739945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:41.748534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:41.925076Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:219: Cannot find user: user } } } 2025-12-23T18:52:41.925121Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:219: Cannot find user: user }, iteration# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::RetryableError [GOOD] Test command err: 2025-12-23T18:52:40.606638Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128276838636560:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:40.606682Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040eb/r3tmp/tmpjiPSPs/pdisk_1.dat 2025-12-23T18:52:40.843226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:40.843371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:40.844858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:40.865366Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:40.866718Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128276838636523:2081] 1766515960602795 != 1766515960602798 2025-12-23T18:52:40.902202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8405 TServer::EnableGrpc on GrpcPort 23543, node 1 2025-12-23T18:52:41.108064Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:41.237212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:41.237255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:41.237270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:41.237333Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8405 2025-12-23T18:52:41.614568Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515960965 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:41.685158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:41.694627Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-0 } } } 2025-12-23T18:52:41.694686Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-0 }, iteration# 0 2025-12-23T18:52:41.694796Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-1 } } } 2025-12-23T18:52:41.694831Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-1 }, iteration# 1 2025-12-23T18:52:41.694883Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-2 } } } 2025-12-23T18:52:41.694921Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-2 }, iteration# 2 2025-12-23T18:52:41.694963Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-3 } } } 2025-12-23T18:52:41.694978Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-3 }, iteration# 3 2025-12-23T18:52:41.695032Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-4 } } } 2025-12-23T18:52:41.695049Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-4 }, iteration# 4 2025-12-23T18:52:41.695096Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-5 } } } 2025-12-23T18:52:41.695145Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-5 }, iteration# 5 |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |97.2%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2025-12-23T18:52:40.606840Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128278663856212:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:40.606879Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040f5/r3tmp/tmp09rb6W/pdisk_1.dat 2025-12-23T18:52:40.802089Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:40.827937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:40.828064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:40.861683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:40.901546Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128278663856175:2081] 1766515960602720 != 1766515960602723 2025-12-23T18:52:40.909684Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:41.020303Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13680 TServer::EnableGrpc on GrpcPort 20368, node 1 2025-12-23T18:52:41.237122Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:41.237210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:41.237222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:41.237296Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13680 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:52:41.614894Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515960923 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:41.670725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:41.685979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:52:41.814424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:41.872451Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1766515961721, tx_id: 1 } } } 2025-12-23T18:52:41.872484Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-12-23T18:52:41.881408Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1766515960923, tx_id: 281474976715657 } }, { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766515961791, tx_id: 281474976710658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1766515961854, tx_id: 281474976710659 } }] } } 2025-12-23T18:52:41.881453Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-12-23T18:52:42.950100Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766515961791, tx_id: 281474976710658 } } } 2025-12-23T18:52:42.950124Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-12-23T18:52:42.950171Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err: 2025-12-23T18:52:40.606402Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128277011428564:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:40.606449Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040f8/r3tmp/tmpf8OZ8Z/pdisk_1.dat 2025-12-23T18:52:40.803372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:40.818649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:40.818782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:40.830438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:40.894697Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2025-12-23T18:52:40.959446Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18048 TServer::EnableGrpc on GrpcPort 5342, node 1 2025-12-23T18:52:41.237168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:41.237191Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:41.237196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:41.237279Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18048 2025-12-23T18:52:41.613471Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515960958 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:41.700033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:41.709697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:42.020113Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1766515961749, tx_id: 1 } } } 2025-12-23T18:52:42.020142Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-12-23T18:52:42.028635Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1766515960958, tx_id: 281474976715657 } }, { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766515961980, tx_id: 281474976710658 } }] } } 2025-12-23T18:52:42.028672Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-12-23T18:52:42.970177Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766515961980, tx_id: 281474976710658 } } } 2025-12-23T18:52:42.970202Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-12-23T18:52:42.970258Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2025-12-23T18:52:42.970342Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:142: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] |97.2%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |97.2%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] |97.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> TExportToS3Tests::CompletedExportEndTime [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> TExportToS3Tests::ShouldCheckQuotasExportsLimited >> TExportToS3Tests::Checksums >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >> TPQTest::TestTimeRetention >> TExportToS3Tests::ShouldCheckQuotasExportsLimited [GOOD] >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited >> TExportToS3Tests::Checksums [FAIL] >> TExportToS3Tests::ChecksumsWithCompression >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited [GOOD] >> TPQTest::TestTimeRetention [GOOD] >> TPQTest::TestStorageRetention |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> TExportToS3Tests::SchemaMapping >> TExportToS3Tests::ChecksumsWithCompression [GOOD] |97.2%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExportToS3Tests::DecimalOutOfRange >> TPQTest::TestStorageRetention [GOOD] >> TPQTest::TestStatusWithMultipleConsumers >> TExportToS3Tests::SchemaMapping [GOOD] |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> test_canonical_records.py::test_dstool_evict_vdisk_grpc [FAIL] >> TPQTest::TestStatusWithMultipleConsumers [GOOD] >> TPQTest::TestTabletRestoreEventsOrder |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink >> TExportToS3Tests::SchemaMappingEncryption >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink >> KqpDataIntegrityTrails::Ddl >> KqpDataIntegrityTrails::Select |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] >> TExportToS3Tests::DecimalOutOfRange [GOOD] >> TExportToS3Tests::CorruptedDecimalValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: 2025-12-23T18:48:56.739794Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2025-12-23T18:48:56.820136Z node 1 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:56.820226Z node 1 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:56.820313Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in 
BillingMeteringConfig 2025-12-23T18:48:56.820399Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2025-12-23T18:48:56.842243Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:56.864250Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2025-12-23T18:48:56.865383Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2025-12-23T18:48:56.868168Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2025-12-23T18:48:56.870379Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2025-12-23T18:48:56.872314Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2025-12-23T18:48:56.880027Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:48:56.880506Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|19b89030-6f182156-f9c27baf-67577552_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 100 2025-12-23T18:48:56.893299Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:48:56.893735Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|839d3c8b-8a5247d8-3015ba85-321474db_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:48:56.908238Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:48:56.908701Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a4197073-ed44f39-a4223b66-c9ba34c6_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 2000 2025-12-23T18:48:56.926275Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 
requestId: error: new GetOwnership request needed for owner 2025-12-23T18:48:56.926680Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|934edcaf-91be3225-fdf30472-f37f6163_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 2000 2025-12-23T18:48:56.928837Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:48:56.931400Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|33fb610-1c6c25be-eeaa668d-bdb910d4_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-12-23T18:48:56.970644Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-12-23T18:48:56.971099Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|81545a73-6575fd23-143da49-93a24863_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 2000 2025-12-23T18:48:57.358323Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:113:2057] recipient: [2:106:2138] 2025-12-23T18:48:57.406931Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:57.407008Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:57.407068Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:57.407140Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927938 is [2:158:2176] sender: [2:159:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:184:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:112:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:186:2057] recipient: [2:104:2137] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:189:2057] recipient: [2:188:2197] Leader for TabletID 72057594037927937 is [2:190:2198] sender: [2:191:2057] recipient: [2:188:2197] 2025-12-23T18:48:57.449716Z node 2 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:48:57.449774Z node 2 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:48:57.449815Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:57.449858Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:112:2142]) rebooted! 
2025-12-23T18:48:57.460363Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 !Reboot 72057594037927937 (actor [2:112:2142]) tablet resolver refreshed! new actor is[2:190:2198] 2025-12-23T18:48:57.522657Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.554124Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.564923Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.585888Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.649266Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.690820Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.807051Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:57.838977Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.120741Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.142332Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.501341Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.750233Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:48:58.865018Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:190:2198] sender: [2:270:2057] recipient: [2:14:2061] 2025-12-23T18:48:59.051572Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:48:59.052670Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 2 actor [2:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: 
METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } 2025-12-23T18:48:59.053889Z node 2 :PERSQUEUE INFO: partition_init ... 2025-12-23T18:52:48.697485Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:588:2576] connected; active server actors: 1 2025-12-23T18:52:48.699092Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:593:2581] connected; active server actors: 1 2025-12-23T18:52:48.700652Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:598:2586] connected; active server actors: 1 2025-12-23T18:52:48.702123Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:603:2591] connected; active server actors: 1 2025-12-23T18:52:48.703796Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:608:2596] connected; active server actors: 1 2025-12-23T18:52:48.705291Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:613:2601] connected; active server actors: 1 2025-12-23T18:52:48.706964Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:618:2606] connected; active server actors: 1 2025-12-23T18:52:48.708312Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:623:2611] connected; active server actors: 1 2025-12-23T18:52:48.709763Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:628:2616] connected; active server actors: 1 2025-12-23T18:52:48.711383Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:633:2621] connected; active server actors: 1 2025-12-23T18:52:48.712913Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:638:2626] connected; active server actors: 1 2025-12-23T18:52:48.714302Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:643:2631] connected; active server actors: 1 2025-12-23T18:52:48.715835Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:648:2636] connected; active server actors: 1 2025-12-23T18:52:48.717222Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:653:2641] connected; active server actors: 1 2025-12-23T18:52:48.718698Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:658:2646] connected; active server actors: 1 2025-12-23T18:52:48.720673Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:663:2651] connected; active server actors: 1 2025-12-23T18:52:48.722290Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:668:2656] connected; active server actors: 1 
2025-12-23T18:52:48.723925Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:673:2661] connected; active server actors: 1 2025-12-23T18:52:48.725421Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:678:2666] connected; active server actors: 1 2025-12-23T18:52:48.726856Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:683:2671] connected; active server actors: 1 2025-12-23T18:52:48.728252Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:688:2676] connected; active server actors: 1 2025-12-23T18:52:48.729677Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:693:2681] connected; active server actors: 1 2025-12-23T18:52:48.731237Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:698:2686] connected; active server actors: 1 2025-12-23T18:52:48.732746Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:703:2691] connected; active server actors: 1 2025-12-23T18:52:48.734224Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:708:2696] connected; active server actors: 1 2025-12-23T18:52:48.735702Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:713:2701] connected; active server actors: 1 2025-12-23T18:52:48.737294Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:718:2706] connected; active server actors: 1 2025-12-23T18:52:48.738781Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:723:2711] connected; active server actors: 1 2025-12-23T18:52:48.740265Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:728:2716] connected; active server actors: 1 2025-12-23T18:52:48.742145Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:733:2721] connected; active server actors: 1 2025-12-23T18:52:48.743879Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:738:2726] connected; active server actors: 1 2025-12-23T18:52:48.745457Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:743:2731] connected; active server actors: 1 2025-12-23T18:52:48.746892Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:748:2736] connected; active server actors: 1 2025-12-23T18:52:48.748472Z node 156 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [156:753:2741], now have 1 active actors on pipe 2025-12-23T18:52:48.749567Z node 156 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server connected, pipe [156:756:2744], now have 1 active actors on pipe 2025-12-23T18:52:48.750442Z node 156 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037927937] server 
connected, pipe [156:759:2747], now have 1 active actors on pipe 2025-12-23T18:52:48.751426Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:762:2750] connected; active server actors: 1 2025-12-23T18:52:49.163015Z node 157 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 157 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:52:49.203736Z node 157 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:52:49.203801Z node 157 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:52:49.203856Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:52:49.203915Z node 157 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:52:49.237604Z node 157 :PQ_TX INFO: pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:52:49.237676Z node 157 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:52:49.237728Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:52:49.237793Z node 157 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:52:49.238067Z node 157 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 157 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:52:49.240707Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:52:49.241744Z node 157 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037927937] Config applied version 163 actor [157:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 163 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 163 } Consumers { Name: "aaa" Generation: 163 Important: true } 2025-12-23T18:52:49.242450Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [157:248:2196] 2025-12-23T18:52:49.243550Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 3 [157:248:2196] 2025-12-23T18:52:49.244685Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [157:250:2196] 2025-12-23T18:52:49.245326Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 3 [157:250:2196] 2025-12-23T18:52:49.272514Z node 157 :PQ_TX INFO: 
pq_impl.cpp:563: [PQ: 72057594037927937] doesn't have tx info 2025-12-23T18:52:49.272578Z node 157 :PQ_TX INFO: pq_impl.cpp:575: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-12-23T18:52:49.273835Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:52:49.273890Z node 157 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037927937] doesn't have tx writes info 2025-12-23T18:52:49.274601Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [157:324:2252] 2025-12-23T18:52:49.275920Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [157:326:2252] 2025-12-23T18:52:49.279906Z node 157 :PERSQUEUE INFO: partition_init.cpp:1016: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:52:49.280085Z node 157 :PERSQUEUE INFO: partition_init.cpp:1016: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:52:49.280348Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 4 [157:324:2252] 2025-12-23T18:52:49.280558Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 4 [157:326:2252] |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TExportToS3Tests::SchemaMappingEncryption [GOOD] |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx |97.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TExportToS3Tests::CorruptedDecimalValue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:14.383204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:14.383326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:14.383421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:14.383478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:14.383520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:14.383597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:14.384539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:14.386777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:14.482318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:14.482380Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:14.493524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:14.493692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:14.493863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:14.505311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:14.505527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:14.506170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:14.506356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:14.510691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.511198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:14.517957Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:14.518254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:14.518302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:14.518463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:14.690673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.694617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.694876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.694969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.695058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.695161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.695248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.695322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.695403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.695478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.695549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.695633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.695730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.695852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.695947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... emove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:52:50.357396Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:50.357477Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:50.357505Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-12-23T18:52:50.357535Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2025-12-23T18:52:50.357566Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:52:50.357635Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-12-23T18:52:50.358729Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-12-23T18:52:50.358791Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-12-23T18:52:50.358842Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-12-23T18:52:50.360017Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-12-23T18:52:50.360145Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2025-12-23T18:52:50.360433Z node 6 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000043 2025-12-23T18:52:50.360688Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:50.360788Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 25769805936 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:50.360836Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000043, at schemeshard: 72057594046678944 2025-12-23T18:52:50.360973Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-12-23T18:52:50.361049Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2025-12-23T18:52:50.361091Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-12-23T18:52:50.361140Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2025-12-23T18:52:50.361180Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-12-23T18:52:50.361238Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:52:50.361316Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:52:50.361357Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-12-23T18:52:50.361404Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-12-23T18:52:50.361450Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710761:0 2025-12-23T18:52:50.361488Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710761:0 2025-12-23T18:52:50.361561Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:52:50.361608Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-12-23T18:52:50.361654Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication 
details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-12-23T18:52:50.361698Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2025-12-23T18:52:50.363164Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:50.364387Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:50.364430Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:50.364582Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:52:50.364670Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:50.364695Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:209:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-12-23T18:52:50.364751Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:209:2210], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 39 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-12-23T18:52:50.365377Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:50.365458Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:50.365491Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-12-23T18:52:50.365534Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-12-23T18:52:50.365577Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:52:50.366146Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:50.366200Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:50.366223Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-12-23T18:52:50.366246Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2025-12-23T18:52:50.366269Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:52:50.366341Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-12-23T18:52:50.366391Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [6:127:2152] 2025-12-23T18:52:50.368335Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:50.368719Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-12-23T18:52:50.368775Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-12-23T18:52:50.368810Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710761 2025-12-23T18:52:50.370096Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:52:50.370149Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:999:2951] TestWaitNotification: OK eventTxId 102 |97.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest |97.3%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless >> KqpWorkloadService::TestQueueSizeSimple |97.3%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest >> DefaultPoolSettings::TestResourcePoolsSysViewFilters >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:14.383159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:14.383257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:14.383358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:14.383466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:14.383508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:14.383577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:14.384531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:14.386699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:14.482773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:14.482841Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:14.494354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:14.494548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:14.494716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:14.505465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-12-23T18:52:14.505650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:14.506290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:14.506458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:14.510320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.511211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:14.517982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:14.518307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:14.518363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:14.518516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:14.697870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.698983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.699122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.699201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.699298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.699409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.699487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.699594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.699678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.699794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.699872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:52:14.699980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
otify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-12-23T18:52:52.122842Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-12-23T18:52:52.122958Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000041 2025-12-23T18:52:52.123408Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:52.123510Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 25769805936 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:52.123561Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000041, at schemeshard: 72057594046678944 2025-12-23T18:52:52.123684Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-12-23T18:52:52.123771Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2025-12-23T18:52:52.123809Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-12-23T18:52:52.123849Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2025-12-23T18:52:52.123881Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-12-23T18:52:52.123935Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:52:52.123991Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:52:52.124024Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-12-23T18:52:52.124068Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-12-23T18:52:52.124107Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 2025-12-23T18:52:52.124138Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710758:0 2025-12-23T18:52:52.124196Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:52:52.124232Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-12-23T18:52:52.124268Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2025-12-23T18:52:52.124304Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 40], 18446744073709551615 2025-12-23T18:52:52.124581Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:52.124731Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-12-23T18:52:52.125895Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:52.125924Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:52.126035Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2025-12-23T18:52:52.126139Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:52.126163Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:209:2210], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-12-23T18:52:52.126191Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:209:2210], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 40 2025-12-23T18:52:52.126716Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:52.126790Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:52.126829Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-12-23T18:52:52.126872Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 
2025-12-23T18:52:52.126913Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-12-23T18:52:52.127269Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:52.127326Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:52.127347Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-12-23T18:52:52.127371Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2025-12-23T18:52:52.127396Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:52:52.127456Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-12-23T18:52:52.127491Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [6:127:2152] 2025-12-23T18:52:52.127611Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:52:52.127666Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2025-12-23T18:52:52.127733Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:52:52.129201Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:52.130033Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-12-23T18:52:52.130107Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-12-23T18:52:52.130165Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710758 2025-12-23T18:52:52.130213Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:351: TExport::TTxProgress: DoExecute 2025-12-23T18:52:52.130253Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1314: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-12-23T18:52:52.130297Z node 6 :EXPORT DEBUG: 
schemeshard_export__create.cpp:1345: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295 2025-12-23T18:52:52.130557Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:52:52.131547Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2025-12-23T18:52:52.131746Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:52:52.131801Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:52:52.132184Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:52:52.132252Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:52:52.132285Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [6:949:2907] TestWaitNotification: OK eventTxId 103 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithOnePartition >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> TSchemeShardTopicSplitMergeTest::SetBoundWithWrongPartition >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> TSchemeShardTopicSplitMergeTest::Boot >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] >> KqpDataIntegrityTrails::Select [GOOD] >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::Ddl [GOOD] >> TExportToS3Tests::AuditCompletedExport [GOOD] >> TExportToS3Tests::AuditCancelledExport >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CorruptedDecimalValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:14.383242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:14.383325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:14.383383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: 
OperationsProcessing config: using default configuration 2025-12-23T18:52:14.383426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:14.383471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:14.383571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:14.384386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:14.386713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:14.473044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:14.473104Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:14.483990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:14.484126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:14.484254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:14.494701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:14.496160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:14.499751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:14.502858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:14.510980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.511180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:14.517920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:14.517995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:14.518210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-12-23T18:52:14.518260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:14.518385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:14.691006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:52:14.692561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.692984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.693055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.693148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... ATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000041 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000041 2025-12-23T18:52:51.156035Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:51.156163Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 25769805936 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:51.156228Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000041, at schemeshard: 72057594046678944 2025-12-23T18:52:51.156381Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710759:0 128 -> 129 2025-12-23T18:52:51.156533Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 REQUEST: PUT /Backup1/metadata.json HTTP/1.1 HEADERS: Host: localhost:24724 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 73714699-5B53-488E-85A9-A0E2540948F5 amz-sdk-request: attempt=1 content-length: 119 content-md5: oX2vUQoa+8sc65ouCpRQqg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /Backup1/metadata.json / / 119 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000041 2025-12-23T18:52:51.193645Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:51.193721Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2025-12-23T18:52:51.193995Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:51.194038Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:209:2210], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 40 2025-12-23T18:52:51.194902Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-12-23T18:52:51.194969Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-12-23T18:52:51.196141Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-12-23T18:52:51.196230Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-12-23T18:52:51.196272Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-12-23T18:52:51.196320Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2025-12-23T18:52:51.196374Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2025-12-23T18:52:51.196507Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true 2025-12-23T18:52:51.197150Z node 6 :DATASHARD_BACKUP ERROR: export_scan.cpp:208: [Export] [scanner] Error read data from table: Invalid Decimal binary representation REQUEST: PUT /Backup1/metadata.json.sha256 HTTP/1.1 HEADERS: Host: localhost:24724 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E25F6DD7-0ED2-420D-9550-16CD5F9149B9 amz-sdk-request: attempt=1 content-length: 78 content-md5: Vj/U8z9C9WvNxxNxUoW5sg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /Backup1/metadata.json.sha256 / / 78 2025-12-23T18:52:51.209572Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2025-12-23T18:52:51.222239Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 846 RawX2: 25769806589 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2025-12-23T18:52:51.222316Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-12-23T18:52:51.222495Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 846 RawX2: 25769806589 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2025-12-23T18:52:51.222641Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 846 RawX2: 25769806589 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { 
Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2025-12-23T18:52:51.222754Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:51.222823Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-12-23T18:52:51.222877Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:52:51.222959Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710759:0 129 -> 240 2025-12-23T18:52:51.223194Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:51.225356Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-12-23T18:52:51.225664Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-12-23T18:52:51.225716Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-12-23T18:52:51.225843Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710759:0 progress is 1/1 2025-12-23T18:52:51.225879Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-12-23T18:52:51.225918Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710759:0 progress is 1/1 2025-12-23T18:52:51.225950Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-12-23T18:52:51.225986Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-12-23T18:52:51.226053Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:127:2152] message: TxId: 281474976710759 2025-12-23T18:52:51.226100Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-12-23T18:52:51.226144Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710759:0 2025-12-23T18:52:51.226174Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710759:0 
2025-12-23T18:52:51.226282Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2025-12-23T18:52:51.228174Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-12-23T18:52:51.228237Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710759 2025-12-23T18:52:51.228450Z node 6 :EXPORT NOTICE: schemeshard_export__create.cpp:710: TExport::TTxProgress: issues during backing up, cancelling, info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Transferring WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/MyRoot/Table1' SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38] SourcePathType: EPathTypeTable State: Done SubState: Proposed WaitTxId: 0 Issue: 'shard: 72057594046678944:2, error: Invalid Decimal binary representation' } 2025-12-23T18:52:51.230197Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:52:51.230262Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:875:2834] TestWaitNotification: OK eventTxId 102 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10595, MsgBus: 64857 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0030a4/r3tmp/tmpaZ5p3y/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10595, node 1 TClient is connected to server localhost:64857 TClient is connected to server localhost:64857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515969624 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
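Note on the export failure above: the datashard scanner aborts with "Invalid Decimal binary representation" (export_scan.cpp:208), the shard reports OpResult { Success: false ... } back to the schemeshard, and TExport::TTxProgress cancels export Id 102 with that issue recorded on the item instead of continuing the transfer to S3. The sketch below is not YDB's validation code; it is a minimal illustration under the assumption that a Decimal cell is stored as a signed 128-bit little-endian integer whose absolute value must stay below 10^precision, showing how an out-of-range bit pattern would be rejected. FitsDecimal is a hypothetical helper name.

    // Assumption-based illustration, not YDB source. __int128 is a GCC/Clang extension.
    #include <cstring>
    #include <iostream>

    // Hypothetical check: does a 16-byte little-endian value fit Decimal(precision, scale)?
    bool FitsDecimal(const unsigned char raw[16], int precision) {
        __int128 value;
        std::memcpy(&value, raw, sizeof(value));   // assumes a little-endian host
        __int128 limit = 1;
        for (int i = 0; i < precision; ++i) {
            limit *= 10;                           // 10^precision fits in 128 bits for precision <= 22
        }
        return value > -limit && value < limit;
    }

    int main() {
        unsigned char ok[16] = {};
        ok[0] = 42;                                // small value: valid for Decimal(22, 9)
        unsigned char bad[16];
        std::memset(bad, 0x7F, sizeof(bad));       // ~1.06e38: far outside Decimal(22, 9)
        std::cout << FitsDecimal(ok, 22) << " " << FitsDecimal(bad, 22) << "\n";  // prints: 1 0
    }

Under that assumption, a cell whose 16 bytes decode to a value outside the declared precision would be refused at scan time, which matches what the log shows: the metadata.json objects are still PUT to the S3 mock, but the data transfer stops and the export is cancelled.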
|97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless >> TSchemeShardTopicSplitMergeTest::SetBoundWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12609, MsgBus: 11985 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0030a5/r3tmp/tmpMO5oSe/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12609, node 1 TClient is connected to server localhost:11985 TClient is connected to server localhost:11985 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515969624 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 6849, MsgBus: 12526 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0030a2/r3tmp/tmpGIuAj5/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6849, node 1 TClient is connected to server localhost:12526 TClient is connected to server localhost:12526 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515969617 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
|97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 3657, MsgBus: 4772 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003099/r3tmp/tmphLm9ba/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3657, node 1 TClient is connected to server localhost:4772 TClient is connected to server localhost:4772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515969617 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
|97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> BasicStatistics::TwoTables [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:54.470752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:54.470904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.470958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:54.470996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:54.471043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:54.471093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:54.471161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.471234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:54.472151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:54.473969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:54.569074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:54.569129Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:54.578968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:54.579168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:54.579327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:54.589267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2025-12-23T18:52:54.589478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:54.590214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:54.590425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:54.594577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.594760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:54.598220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:54.598571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:54.598624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:54.599343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:54.743268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:52:54.744665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.744878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.453178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:52:55.453311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2025-12-23T18:52:55.453429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:52:55.453472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:52:55.455663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.456186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:55.456227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:52:55.456328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:52:55.456443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:55.456468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 104, path id: 38 2025-12-23T18:52:55.456497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 104, path id: 39 2025-12-23T18:52:55.457036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.457079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:52:55.457167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:52:55.457188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:52:55.457219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:52:55.457240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:52:55.457265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 
2025-12-23T18:52:55.457307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:52:55.457338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:52:55.457360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:52:55.457471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:52:55.457513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-12-23T18:52:55.457541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2025-12-23T18:52:55.457562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2025-12-23T18:52:55.458378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:52:55.458444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:52:55.458483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:52:55.458527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:52:55.458562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:52:55.459162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:52:55.459213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:52:55.459230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:52:55.459258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-12-23T18:52:55.459293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 
2025-12-23T18:52:55.459344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:52:55.462856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:52:55.463030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:52:55.463241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:52:55.463318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:52:55.463727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:52:55.463849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:52:55.463890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:999:2918] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } TestModificationResults wait txId: 106 2025-12-23T18:52:55.468657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:52:55.468945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.469075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split and merge operations disabled, at schemeshard: 72057594046678944 2025-12-23T18:52:55.473427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split and merge operations 
disabled" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:55.473663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split and merge operations disabled, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-12-23T18:52:55.473966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-12-23T18:52:55.474013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-12-23T18:52:55.474379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-12-23T18:52:55.474476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:52:55.474509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1006:2925] TestWaitNotification: OK eventTxId 106 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:54.470728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:54.470894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.470947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:54.470988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:54.471033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:54.471066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:54.471129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.471199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-12-23T18:52:54.472123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:54.473967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:54.569407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:54.569460Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:54.580330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:54.580529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:54.580702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:54.591872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:54.592073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:54.592774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:54.592965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:54.596735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.596888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:54.598133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:54.598444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:54.598495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:54.599317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:54.769474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.770461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.770614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.770710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.770819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.770886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.770986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ecute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2025-12-23T18:52:55.972135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2025-12-23T18:52:55.972183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.972213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:52:55.972375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2025-12-23T18:52:55.972534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:52:55.975259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.975924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:55.975977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:52:55.976218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:55.976254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 105, path id: 39 2025-12-23T18:52:55.976571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.976607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2025-12-23T18:52:55.976707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:52:55.976743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:52:55.976774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2025-12-23T18:52:55.976818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:52:55.976847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-12-23T18:52:55.976878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 105 ready parts: 1/1 2025-12-23T18:52:55.976928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2025-12-23T18:52:55.976955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 105:0 2025-12-23T18:52:55.977051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:52:55.977085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-12-23T18:52:55.977122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2025-12-23T18:52:55.977912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:52:55.977998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-12-23T18:52:55.978026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-12-23T18:52:55.978062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2025-12-23T18:52:55.978098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:52:55.978195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-12-23T18:52:55.978226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:821:2787] 2025-12-23T18:52:55.980462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-12-23T18:52:55.980536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:52:55.980573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1028:2951] TestWaitNotification: OK eventTxId 105 2025-12-23T18:52:55.981017Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:52:55.981242Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/USER_1/Topic1" took 227us result status StatusSuccess 2025-12-23T18:52:55.981957Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Inactive ChildPartitionIds: 2 ChildPartitionIds: 3 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Inactive ChildPartitionIds: 4 ChildPartitionIds: 5 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { ToBound: "?" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "?" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 5 TabletId: 72075186233409548 KeyRange { FromBound: "\277" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 6 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 6 NextPartitionId: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "?" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "?" 
ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 5 GroupId: 6 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\277" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:54.470731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:54.470859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.470901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:54.470932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:54.470964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:54.470993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:54.471041Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.471095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:54.471865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:54.473956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:54.551002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:54.551050Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:54.561147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:54.561279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:54.563865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:54.574665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:54.574944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:54.576860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:54.579312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:54.588534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.589454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:54.598217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:54.598531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:54.598592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:54.599317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:54.763605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.764629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.764763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.764859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.764930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.764996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.765101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.765172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.765265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.765335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.765402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.765484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.765570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.765679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.765770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
at schemeshard: 72057594046678944 2025-12-23T18:52:55.770864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.770920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.770975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.771094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.771129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.771177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.775057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:55.778734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:55.778824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:55.779153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:55.779196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:55.779245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:55.781486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1135:3040] sender: [1:1195:2058] recipient: [1:15:2062] 2025-12-23T18:52:55.870479Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:52:55.870753Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 283us result status StatusSuccess 2025-12-23T18:52:55.871283Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 
PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } TestModificationResults wait txId: 109 2025-12-23T18:52:55.874420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-12-23T18:52:55.874694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.874888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, at schemeshard: 72057594046678944 2025-12-23T18:52:55.877498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "KeyRange must be specified for root partition bounds" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:55.877725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-12-23T18:52:55.878033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-12-23T18:52:55.878071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-12-23T18:52:55.878420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-12-23T18:52:55.878490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-12-23T18:52:55.878524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:1200:3092] TestWaitNotification: OK eventTxId 109 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } TestModificationResults wait txId: 111 2025-12-23T18:52:55.881355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } } } TxId: 111 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:52:55.881528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 111:0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.881728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 111:1, propose status:StatusInvalidParameter, reason: Unable to change bounds of non-root partition: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:55.883787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 111, response: Status: StatusInvalidParameter Reason: "Unable to change bounds of 
non-root partition: 1" TxId: 111 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:55.884009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 111, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Unable to change bounds of non-root partition: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 111, wait until txId: 111 TestWaitNotification wait txId: 111 2025-12-23T18:52:55.884330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 111: send EvNotifyTxCompletion 2025-12-23T18:52:55.884371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 111 2025-12-23T18:52:55.884780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 111, at schemeshard: 72057594046678944 2025-12-23T18:52:55.884855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2025-12-23T18:52:55.884890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [1:1207:3099] TestWaitNotification: OK eventTxId 111 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:54.470774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:54.470899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.470933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:54.470957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:54.470987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:54.471007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:54.471051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.471100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:54.471752Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:54.473927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:54.552024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:54.552062Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:54.560737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:54.560892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:54.563844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:54.573431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:54.573649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:54.576836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:54.579223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:54.587922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.589411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:54.598112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:54.598365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:54.598466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:54.599299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:54.770589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.771956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.772021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.772103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.772192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.772252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.772344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.772417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.772510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.772606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 0 2025-12-23T18:52:55.799690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 0 2025-12-23T18:52:55.799710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 0 2025-12-23T18:52:55.799747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2025-12-23T18:52:55.799782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2025-12-23T18:52:55.799806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2025-12-23T18:52:55.799831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2025-12-23T18:52:55.799872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2025-12-23T18:52:55.799898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2025-12-23T18:52:55.799921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2025-12-23T18:52:55.799943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2025-12-23T18:52:55.799966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2025-12-23T18:52:55.800039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2025-12-23T18:52:55.800063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2025-12-23T18:52:55.800106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2025-12-23T18:52:55.800139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2025-12-23T18:52:55.800175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2025-12-23T18:52:55.800196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2025-12-23T18:52:55.800228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2025-12-23T18:52:55.800258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason 
for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2025-12-23T18:52:55.800280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2025-12-23T18:52:55.800305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2025-12-23T18:52:55.800325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2025-12-23T18:52:55.800364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2025-12-23T18:52:55.800396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2025-12-23T18:52:55.800431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2025-12-23T18:52:55.800452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2025-12-23T18:52:55.800475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2025-12-23T18:52:55.800494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2025-12-23T18:52:55.800518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2025-12-23T18:52:55.800538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2025-12-23T18:52:55.800558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2025-12-23T18:52:55.800586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2025-12-23T18:52:55.800747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.800846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.800973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-12-23T18:52:55.801029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:52:55.801054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:52:55.801075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2025-12-23T18:52:55.801113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:52:55.801227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.801300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.801525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-12-23T18:52:55.801692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:52:55.802091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.802230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.802645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.802727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.803072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.803240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.803292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.803406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.803602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.803678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.804117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.804420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.804505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.804552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.804709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.804763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.804808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:55.808684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:55.811331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:55.811375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:55.811750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:55.811793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:55.811829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:55.812055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> TExportToS3Tests::AuditCancelledExport [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition [GOOD] >> BasicStatistics::SimpleGlobalIndex [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] Test command err: Trying to start YDB, gRPC: 20615, MsgBus: 11943 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003092/r3tmp/tmpESjgEW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20615, node 1 TClient is connected to server localhost:11943 TClient is connected to server localhost:11943 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515971143 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TExportToS3Tests::AutoDropping ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:52:18.956212Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:19.061655Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:19.068243Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:19.068696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:19.068803Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:19.447736Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:19.544080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:19.544189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:19.602947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:19.718877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003233/r3tmp/tmpRU5WIH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8123, node 1 2025-12-23T18:52:20.365828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:20.366986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:20.367039Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:20.367080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:20.367332Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:20.435198Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:16118 2025-12-23T18:52:20.986722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:23.745428Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:23.752405Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:23.755433Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:23.784415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:23.784546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:23.822407Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:23.824326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:24.059905Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.060498Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.061360Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.061842Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.062156Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.062275Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.062376Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.062464Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.062606Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.175587Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:52:24.186943Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:24.187037Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:24.199911Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:24.325005Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:24.364345Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:24.364453Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:24.387487Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:24.388793Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:24.388965Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:24.389020Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:52:24.389069Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:24.389113Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:24.389155Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:24.389192Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:24.389516Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:24.430577Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:24.430697Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:24.443879Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:52:24.453250Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:52:24.454423Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:52:24.459988Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:52:24.493911Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:52:24.493974Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:52:24.494045Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:52:24.519306Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:24.530390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:24.539403Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:24.539543Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:24.551727Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:24.680977Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:25.080794Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:25.195102Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:25.195196Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:26 ... lyToActorId = [2:3834:3401], StatRequests.size() = 1 2025-12-23T18:52:42.911262Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:3873:3419]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:42.911443Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2025-12-23T18:52:42.911470Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:3873:3419], StatRequests.size() = 1 2025-12-23T18:52:43.698898Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:3900:3429]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:43.699173Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2025-12-23T18:52:43.699214Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:3900:3429], StatRequests.size() = 1 2025-12-23T18:52:44.178559Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:44.543979Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:3931:3443]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:44.544223Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2025-12-23T18:52:44.544262Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:3931:3443], StatRequests.size() = 1 2025-12-23T18:52:45.302336Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:3958:3453]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:45.302614Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2025-12-23T18:52:45.302656Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:3958:3453], StatRequests.size() = 1 2025-12-23T18:52:45.730039Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:45.730189Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2025-12-23T18:52:45.730560Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:52:45.730663Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-12-23T18:52:46.104031Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] 
RequestId[ 25 ], ReplyToActorId[ [2:3987:3465]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:46.104334Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2025-12-23T18:52:46.104378Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:3987:3465], StatRequests.size() = 1 2025-12-23T18:52:46.872832Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4014:3475]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:46.873118Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2025-12-23T18:52:46.873149Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4014:3475], StatRequests.size() = 1 2025-12-23T18:52:47.689363Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4047:3487]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:47.689633Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2025-12-23T18:52:47.689674Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4047:3487], StatRequests.size() = 1 2025-12-23T18:52:48.307466Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:52:48.307529Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:48.307559Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:48.307588Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:52:48.424395Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:48.799056Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4081:3500]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:48.799327Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2025-12-23T18:52:48.799365Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4081:3500], StatRequests.size() = 1 2025-12-23T18:52:49.639407Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4114:3513]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:49.639806Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-12-23T18:52:49.639883Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4114:3513], StatRequests.size() = 1 2025-12-23T18:52:50.263782Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:50.264317Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2025-12-23T18:52:50.264890Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 
2025-12-23T18:52:50.265039Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-12-23T18:52:50.297706Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:52:50.297791Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:52:50.298011Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2025-12-23T18:52:50.311392Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:50.732897Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4143:3525]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:50.733223Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-12-23T18:52:50.733258Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4143:3525], StatRequests.size() = 1 2025-12-23T18:52:51.676641Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4172:3537]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:51.676859Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-12-23T18:52:51.676917Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4172:3537], StatRequests.size() = 1 2025-12-23T18:52:52.469094Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4199:3547]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:52.469427Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-12-23T18:52:52.469471Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4199:3547], StatRequests.size() = 1 2025-12-23T18:52:53.049479Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:53.419443Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4234:3561]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:53.419813Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2025-12-23T18:52:53.419860Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4234:3561], StatRequests.size() = 1 2025-12-23T18:52:54.219158Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4261:3571]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:54.219415Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2025-12-23T18:52:54.219447Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4261:3571], StatRequests.size() = 1 2025-12-23T18:52:54.685526Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] 
PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:54.686075Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2025-12-23T18:52:54.686452Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-12-23T18:52:54.686507Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:52:54.719159Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:52:54.719249Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:52:54.719478Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2025-12-23T18:52:54.733238Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:55.068361Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4292:3585]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:55.068702Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2025-12-23T18:52:55.068750Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4292:3585], StatRequests.size() = 1 2025-12-23T18:52:55.069224Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 36 ], ReplyToActorId[ [2:4294:3587]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:55.073094Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 36 ] 2025-12-23T18:52:55.073156Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 36, ReplyToActorId = [2:4294:3587], StatRequests.size() = 1 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:54.470763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:54.470872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.470914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:54.470943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:54.470978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:54.471004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:54.471042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.471115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:54.471841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:54.473958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:54.550868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:54.550939Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:54.561233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:54.561363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:54.563862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:54.574883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:54.575055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:54.576849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:54.579298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:54.587961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.589431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:54.598177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:54.598456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:54.598498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:54.599270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:54.733780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.734525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.734630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.734681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.734719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.734784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.734859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.734932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.734979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.735043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.735095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.735140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.735213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.735278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:52:54.735348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ionPlan, step: 300, at tablet: 72057594046678944 2025-12-23T18:52:56.796204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 107:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-12-23T18:52:56.809028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2025-12-23T18:52:56.809184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 2025-12-23T18:52:56.809253Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 107:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 2025-12-23T18:52:56.809299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 107:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.809338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 107:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:52:56.809482Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 128 -> 240 2025-12-23T18:52:56.809646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:52:56.812753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.812954Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:56.812996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:52:56.813216Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:56.813254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 107, path id: 39 2025-12-23T18:52:56.813623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.813667Z node 2 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 107:0 ProgressState 2025-12-23T18:52:56.813762Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2025-12-23T18:52:56.813793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-12-23T18:52:56.813829Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2025-12-23T18:52:56.813858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-12-23T18:52:56.813887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: false 2025-12-23T18:52:56.813919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-12-23T18:52:56.813962Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2025-12-23T18:52:56.814013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 107:0 2025-12-23T18:52:56.814131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:52:56.814165Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 107, publications: 1, subscribers: 1 2025-12-23T18:52:56.814193Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 107, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2025-12-23T18:52:56.814853Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 107 2025-12-23T18:52:56.814932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 107 2025-12-23T18:52:56.814966Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 107 2025-12-23T18:52:56.815013Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2025-12-23T18:52:56.815058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:52:56.815144Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 107, subscribers: 1 2025-12-23T18:52:56.815181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:815:2782] 2025-12-23T18:52:56.817440Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-12-23T18:52:56.817517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-12-23T18:52:56.817553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:1091:3005] TestWaitNotification: OK eventTxId 107 2025-12-23T18:52:56.818038Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:52:56.818220Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 222us result status StatusSuccess 2025-12-23T18:52:56.818881Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } 
Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ResourcePoolsDdl::TestPoolSwitchToLimitedState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:54.470760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:54.470869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.470908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:54.470943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:54.471015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:54.471042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:54.471100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.471156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:54.471957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:54.473971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:54.563500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:54.563549Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:54.573388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:54.573571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:54.573747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:54.583522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:54.583671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:54.584240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:54.584447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:54.588350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.589431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:54.598144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:54.598442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:54.598492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:54.599304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:54.725908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.727575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.727684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.727777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.727856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.727901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.727981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:52:54.728082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:52:56.774616Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2025-12-23T18:52:56.774803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:52:56.774884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:52:56.777472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.777849Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:56.777891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:52:56.778026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:52:56.778231Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:56.778266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2025-12-23T18:52:56.778302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2025-12-23T18:52:56.778366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.778405Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:52:56.778500Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:52:56.778538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:52:56.778578Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:52:56.778610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:52:56.778645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-12-23T18:52:56.778708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:52:56.778778Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 
2025-12-23T18:52:56.778828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:52:56.778960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2025-12-23T18:52:56.779001Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-12-23T18:52:56.779036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2025-12-23T18:52:56.779067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2025-12-23T18:52:56.780689Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:52:56.780777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:52:56.780808Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:52:56.780849Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:52:56.780916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2025-12-23T18:52:56.781768Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:52:56.781839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:52:56.781866Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:52:56.781912Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2025-12-23T18:52:56.781943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:52:56.782025Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:52:56.784957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:52:56.785045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:52:56.785225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:52:56.785267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:52:56.785601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:52:56.785682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:52:56.785720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:981:2911] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ChildPartitionIds: 3 ChildPartitionIds: 4 } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } TestModificationResults wait txId: 105 2025-12-23T18:52:56.789344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ChildPartitionIds: 3 ChildPartitionIds: 4 } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:52:56.789596Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.789803Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:56.791730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 1" TxId: 105 
SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:52:56.791953Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-12-23T18:52:56.792245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-12-23T18:52:56.792286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-12-23T18:52:56.792604Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:52:56.792684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:52:56.792719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:988:2918] TestWaitNotification: OK eventTxId 105 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> KqpWorkloadServiceActors::TestPoolFetcher ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:54.470726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:54.470846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.470882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:54.470912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:54.470957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:54.470988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:54.471046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.471128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:54.472031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:54.473944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:54.551080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:54.551133Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:54.561046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:54.561163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:54.563836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:54.573573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:54.573729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:54.576884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:54.579247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:54.588413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.589436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:54.598123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:54.598515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:54.598600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:54.599303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:54.772333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:52:54.773303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.773442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.773541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.773644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.773714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.773803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.773886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.773969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.774037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.774103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.774173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.774271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.774384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.774527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
lPathId: 27] was 0 2025-12-23T18:52:56.776350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2025-12-23T18:52:56.776386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2025-12-23T18:52:56.776429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2025-12-23T18:52:56.776456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2025-12-23T18:52:56.776485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2025-12-23T18:52:56.776513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2025-12-23T18:52:56.776550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2025-12-23T18:52:56.776584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2025-12-23T18:52:56.776634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2025-12-23T18:52:56.776662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2025-12-23T18:52:56.776846Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.776958Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.777046Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-12-23T18:52:56.777089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:52:56.777113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:52:56.777128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2025-12-23T18:52:56.777145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:52:56.777237Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.777325Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.777596Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, 
read records: 14, at schemeshard: 72057594046678944 2025-12-23T18:52:56.777806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:52:56.778228Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.778351Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.778816Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.778928Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.779274Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.779450Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.779503Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.779615Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.779965Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.780061Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.780634Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.780934Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.781026Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.781089Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.781301Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.781351Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.781406Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:56.786557Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:56.792749Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:56.792821Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:56.793917Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:56.793984Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:56.794035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:56.795062Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:1141:3045] sender: [2:1201:2058] recipient: [2:15:2062] 2025-12-23T18:52:56.874653Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:52:56.874959Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 351us result status StatusSuccess 2025-12-23T18:52:56.875708Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "\325UUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } Status: Active } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "\325UUUUUUUUUUUUUUT" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 
72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:54.470757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:54.470862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.470901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:54.470937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:54.470971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:54.471000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:54.471070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:54.471150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:54.472044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:54.473963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:54.567455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:54.567509Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:54.576269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:54.576396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:54.576537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:54.584013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:54.584162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:54.584678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:54.584821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:54.588529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.589429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:54.598128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:54.598362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:54.598418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:54.598464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:54.599296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:54.727835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.728986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.729034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.729097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.729178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:54.729269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2025-12-23T18:52:57.040022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2025-12-23T18:52:57.040039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2025-12-23T18:52:57.040161Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.040229Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.040323Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-12-23T18:52:57.040353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:52:57.040395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:52:57.040410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2025-12-23T18:52:57.040425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:52:57.040487Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.040545Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.040696Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-12-23T18:52:57.040835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:52:57.041147Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.041236Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.041521Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.041577Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.041779Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.041885Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.041923Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at 
schemeshard: 72057594046678944 2025-12-23T18:52:57.041989Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.042114Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.042228Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.042577Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.042760Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.042831Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.042887Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.042999Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.043038Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.043076Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:52:57.048920Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:57.052713Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:57.052787Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:57.053464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:57.053542Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:57.053615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:57.054099Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:1089:2984] sender: [2:1147:2058] recipient: [2:15:2062] 2025-12-23T18:52:57.100709Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:52:57.100964Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 312us result status StatusSuccess 2025-12-23T18:52:57.101653Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" ToBound: "\325UUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 5 TabletId: 72075186233409548 KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 6 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 6 NextPartitionId: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" ToBound: "\325UUUUUUUUUUUUUUT" } } Partitions { PartitionId: 5 GroupId: 6 TabletId: 72075186233409548 
OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::SimpleGlobalIndex [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:16.347536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:16.441358Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:16.447322Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:16.447703Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:16.447796Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:16.809783Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:16.899098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:16.899221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:16.957372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:17.059765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003244/r3tmp/tmpLT6v5S/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10049, node 1 2025-12-23T18:52:17.710945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:17.712087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:17.712138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:17.712176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:17.712433Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:17.779203Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6964 2025-12-23T18:52:18.355713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:21.340774Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:21.346361Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:21.349181Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:21.375441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:21.375530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:21.414016Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:21.415936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:21.653350Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:52:21.665134Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.665776Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.666386Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.667001Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.667159Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.667207Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.667469Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.667569Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.667633Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.782409Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:21.782515Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:21.825264Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:21.979901Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:22.028418Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:22.028523Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:22.060880Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:22.062529Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:22.062864Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:22.062939Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:52:22.062996Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:22.063061Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:22.063126Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:22.063184Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:22.063603Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:22.109853Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:22.109954Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2188:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:22.113662Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2185:2602] 2025-12-23T18:52:22.136221Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:52:22.140847Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2258:2658] 2025-12-23T18:52:22.141146Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2258:2658], schemeshard id = 72075186224037897 2025-12-23T18:52:22.172315Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Describe result: PathErrorUnknown 2025-12-23T18:52:22.172377Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Creating table 2025-12-23T18:52:22.172459Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:52:22.201288Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2356:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:22.208806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:22.223651Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:22.223815Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:22.237632Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:22.370499Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:22.748526Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:22.862329Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:22.862425Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:26 ... 6224037894] EvPropagateStatisticsResponse, cookie: 4 2025-12-23T18:52:41.236353Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [2:3858:3397]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:41.236619Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2025-12-23T18:52:41.236661Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:3858:3397], StatRequests.size() = 1 2025-12-23T18:52:42.101019Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:3895:3413]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:42.101343Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2025-12-23T18:52:42.101397Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:3895:3413], StatRequests.size() = 1 2025-12-23T18:52:42.897259Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:3924:3425]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:42.897523Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2025-12-23T18:52:42.897557Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:3924:3425], StatRequests.size() = 1 2025-12-23T18:52:43.399541Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:43.740219Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:3957:3439]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:43.740450Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2025-12-23T18:52:43.740482Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:3957:3439], StatRequests.size() = 1 2025-12-23T18:52:44.472700Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:3984:3449]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:44.473095Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2025-12-23T18:52:44.473140Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:3984:3449], StatRequests.size() = 1 2025-12-23T18:52:44.921919Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 
2025-12-23T18:52:44.922091Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2025-12-23T18:52:44.922675Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-12-23T18:52:44.922780Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:52:45.349380Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4011:3459]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:45.349635Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2025-12-23T18:52:45.349669Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4011:3459], StatRequests.size() = 1 2025-12-23T18:52:46.347759Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4040:3471]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:46.348013Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2025-12-23T18:52:46.348059Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4040:3471], StatRequests.size() = 1 2025-12-23T18:52:47.339185Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4071:3481]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:47.339484Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2025-12-23T18:52:47.339516Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4071:3481], StatRequests.size() = 1 2025-12-23T18:52:48.076589Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:52:48.076655Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:48.076689Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:48.076724Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:52:48.182360Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:48.628991Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4107:3496]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:48.629366Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2025-12-23T18:52:48.629414Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4107:3496], StatRequests.size() = 1 2025-12-23T18:52:49.701483Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4140:3508]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:49.701699Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-12-23T18:52:49.701730Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), 
request id = 29, ReplyToActorId = [2:4140:3508], StatRequests.size() = 1 2025-12-23T18:52:50.434261Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:50.434766Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2025-12-23T18:52:50.435040Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:52:50.435101Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-12-23T18:52:50.467571Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:52:50.467634Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:52:50.467863Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2025-12-23T18:52:50.481109Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:51.008554Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4171:3522]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:51.008782Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-12-23T18:52:51.008812Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4171:3522], StatRequests.size() = 1 2025-12-23T18:52:52.108970Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4200:3534]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:52.109369Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-12-23T18:52:52.109424Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4200:3534], StatRequests.size() = 1 2025-12-23T18:52:53.067973Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4227:3544]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:53.068315Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-12-23T18:52:53.068365Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4227:3544], StatRequests.size() = 1 2025-12-23T18:52:53.706817Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:54.146761Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4260:3556]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:54.147084Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2025-12-23T18:52:54.147127Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4260:3556], StatRequests.size() = 1 2025-12-23T18:52:55.157011Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], 
ReplyToActorId[ [2:4289:3568]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:55.157238Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2025-12-23T18:52:55.157272Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4289:3568], StatRequests.size() = 1 2025-12-23T18:52:55.767491Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:55.768051Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2025-12-23T18:52:55.768528Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:52:55.768622Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-12-23T18:52:55.801019Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:52:55.801094Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:52:55.801302Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2025-12-23T18:52:55.814627Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:56.170300Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4322:3582]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:56.170526Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2025-12-23T18:52:56.170566Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4322:3582], StatRequests.size() = 1 >> BasicStatistics::Simple [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool >> TExportToS3Tests::AutoDropping [GOOD] >> BasicStatistics::TwoServerlessDbs [GOOD] |97.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop |97.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpWorkloadService::TestQueueSizeSimple [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:52:16.236432Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:16.321940Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:16.325772Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:16.326027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:16.326090Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:16.735318Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:16.835762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:16.835888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:16.894373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:16.944269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003246/r3tmp/tmpEZuUyv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28167, node 1 2025-12-23T18:52:17.660831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:17.661823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:17.661874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:17.661907Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:17.662233Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:17.728519Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:17905 2025-12-23T18:52:18.267216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:21.212454Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:21.220273Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:21.224046Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:21.260560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:21.260676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:21.300660Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:21.302399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:21.533764Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.534497Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.535164Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.535829Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.536087Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.536199Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.536342Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.536557Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.536699Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.647728Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:52:21.659332Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:21.659431Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:21.672829Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:21.826356Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:21.864760Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:21.864845Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:21.897012Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:21.897220Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:21.897407Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:21.897462Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:52:21.897509Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:21.897551Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:21.897590Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:21.897640Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:21.897963Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:21.930106Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2171:2593] 2025-12-23T18:52:21.935014Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:52:21.947736Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2186:2606] Owner: [2:2185:2605]. Describe result: PathErrorUnknown 2025-12-23T18:52:21.947812Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2186:2606] Owner: [2:2185:2605]. Creating table 2025-12-23T18:52:21.947899Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2186:2606] Owner: [2:2185:2605]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:52:21.948573Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:21.948653Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2221:2624], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:21.983373Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2310:2679] 2025-12-23T18:52:21.983926Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2310:2679], schemeshard id = 72075186224037897 2025-12-23T18:52:22.015714Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2353:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:22.030061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:22.041674Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2186:2606] Owner: [2:2185:2605]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:22.041788Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2186:2606] Owner: [2:2185:2605]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:22.048354Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2186:2606] Owner: [2:2185:2605]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:22.172243Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:22.555494Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2186:2606] Owner: [2:2185:2605]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:22.612210Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2186:2606] Owner: [2:2185:2605]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:22.612295Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2186:2606] Owner: [2:2185:2 ... 6224037894] EvPropagateStatisticsResponse, cookie: 4 2025-12-23T18:52:41.363536Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [2:3657:3310]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:41.363820Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2025-12-23T18:52:41.363854Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:3657:3310], StatRequests.size() = 1 2025-12-23T18:52:42.414152Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:3692:3324]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:42.414455Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2025-12-23T18:52:42.414500Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:3692:3324], StatRequests.size() = 1 2025-12-23T18:52:43.425256Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:3721:3336]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:43.425480Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2025-12-23T18:52:43.425509Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:3721:3336], StatRequests.size() = 1 2025-12-23T18:52:44.048464Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:44.472553Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:3752:3350]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:44.472873Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2025-12-23T18:52:44.472918Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:3752:3350], StatRequests.size() = 1 2025-12-23T18:52:45.445339Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:3779:3360]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:45.445585Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2025-12-23T18:52:45.445619Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:3779:3360], StatRequests.size() = 1 2025-12-23T18:52:46.029843Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 
2025-12-23T18:52:46.030177Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2025-12-23T18:52:46.030481Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:52:46.030539Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-12-23T18:52:46.437025Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:3806:3370]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:46.437238Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2025-12-23T18:52:46.437269Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:3806:3370], StatRequests.size() = 1 2025-12-23T18:52:47.433273Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:3835:3382]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:47.433549Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2025-12-23T18:52:47.433592Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:3835:3382], StatRequests.size() = 1 2025-12-23T18:52:48.449830Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:3866:3392]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:48.450126Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2025-12-23T18:52:48.450159Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:3866:3392], StatRequests.size() = 1 2025-12-23T18:52:49.217088Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:52:49.217169Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:49.217210Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:49.217242Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:52:49.344942Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:49.784495Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:3902:3407]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:49.784711Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2025-12-23T18:52:49.784741Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:3902:3407], StatRequests.size() = 1 2025-12-23T18:52:50.836304Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:3933:3419]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:50.836659Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-12-23T18:52:50.836707Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), 
request id = 29, ReplyToActorId = [2:3933:3419], StatRequests.size() = 1 2025-12-23T18:52:51.638228Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:51.638795Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2025-12-23T18:52:51.639161Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:52:51.639255Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-12-23T18:52:51.671280Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:52:51.671356Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:52:51.671542Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:52:51.684469Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:52.175936Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:3962:3431]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:52.176250Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-12-23T18:52:52.176292Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:3962:3431], StatRequests.size() = 1 2025-12-23T18:52:53.249720Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:3991:3443]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:53.250045Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-12-23T18:52:53.250091Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:3991:3443], StatRequests.size() = 1 2025-12-23T18:52:54.242165Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4018:3453]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:54.242480Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-12-23T18:52:54.242522Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4018:3453], StatRequests.size() = 1 2025-12-23T18:52:54.888224Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:55.317749Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4051:3465]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:55.318089Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2025-12-23T18:52:55.318131Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4051:3465], StatRequests.size() = 1 2025-12-23T18:52:56.333664Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], 
ReplyToActorId[ [2:4080:3477]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:56.333970Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2025-12-23T18:52:56.334014Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4080:3477], StatRequests.size() = 1 2025-12-23T18:52:56.938925Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:56.939410Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2025-12-23T18:52:56.939904Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:52:56.939991Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-12-23T18:52:56.961742Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:52:56.961826Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:52:56.962070Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:52:56.975388Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:57.363114Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4111:3491]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:57.363438Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2025-12-23T18:52:57.363486Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4111:3491], StatRequests.size() = 1 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:19.914387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:20.034307Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:20.041917Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:20.042225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:20.042298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:20.423120Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:20.517193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:20.517293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:20.576348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:20.694999Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003229/r3tmp/tmp4l3jmY/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3475, node 1 2025-12-23T18:52:21.332085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:21.333175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:21.333227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:21.333259Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:21.333478Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:21.398886Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:9202 2025-12-23T18:52:21.931150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:24.463378Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:24.470920Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:24.473989Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:24.499573Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:24.499695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:24.537767Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:24.540111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:24.793122Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.793677Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.794399Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.794954Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.795347Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.795496Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.795631Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.795748Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.795864Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.823507Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:52:24.920905Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:24.921030Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:24.934340Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:25.111942Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:25.144739Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:25.144834Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:25.171392Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:25.171625Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:25.171788Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:25.171832Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column 
count# 0 2025-12-23T18:52:25.171890Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:25.171934Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:25.171968Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:25.172004Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:25.172414Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:25.208672Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:25.208769Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2185:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:25.219998Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2193:2608] 2025-12-23T18:52:25.230956Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2251:2652] 2025-12-23T18:52:25.231139Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2251:2652], schemeshard id = 72075186224037897 2025-12-23T18:52:25.239290Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:52:25.269900Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2275:2665] Owner: [2:2274:2664]. Describe result: PathErrorUnknown 2025-12-23T18:52:25.269951Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2275:2665] Owner: [2:2274:2664]. Creating table 2025-12-23T18:52:25.270017Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2275:2665] Owner: [2:2274:2664]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:52:25.284650Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2353:2698], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:25.293095Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:25.307628Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2275:2665] Owner: [2:2274:2664]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:25.307771Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2275:2665] Owner: [2:2274:2664]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:25.321183Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2275:2665] Owner: [2:2274:2664]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:25.428874Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:25.784873Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2275:2665] Owner: [2:2274:2664]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:25.872374Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2275:2665] Owner: [2:2274:2664]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:25.872461Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2275:2665] Owner: [2:2274:2664]. Co ... ce_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2025-12-23T18:52:45.087105Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:5344:4256], StatRequests.size() = 1 2025-12-23T18:52:45.862730Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:5373:4268]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:45.863182Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2025-12-23T18:52:45.863236Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:5373:4268], StatRequests.size() = 1 2025-12-23T18:52:46.294959Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-12-23T18:52:46.295340Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2025-12-23T18:52:46.295790Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:52:46.295874Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-12-23T18:52:46.732222Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:5404:4281]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:46.732537Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2025-12-23T18:52:46.732572Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:5404:4281], StatRequests.size() = 1 2025-12-23T18:52:47.778463Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:5441:4300]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:47.778733Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2025-12-23T18:52:47.778798Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:5441:4300], StatRequests.size() = 1 2025-12-23T18:52:48.769293Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:5476:4314]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:48.769538Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2025-12-23T18:52:48.769572Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:5476:4314], StatRequests.size() = 1 2025-12-23T18:52:49.444083Z node 1 
:STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:52:49.444181Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:49.444230Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:49.444275Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:52:49.602064Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:50.081546Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:5516:4333]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:50.081890Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2025-12-23T18:52:50.081940Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:5516:4333], StatRequests.size() = 1 2025-12-23T18:52:51.129778Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:5549:4347]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:51.130160Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2025-12-23T18:52:51.130210Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:5549:4347], StatRequests.size() = 1 2025-12-23T18:52:51.897645Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-12-23T18:52:51.898234Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2025-12-23T18:52:51.898755Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:52:51.898919Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-12-23T18:52:51.931931Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:52:51.931999Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:52:51.932219Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:52:51.945441Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:52.469776Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:5580:4361]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:52.470014Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2025-12-23T18:52:52.470048Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:5580:4361], StatRequests.size() = 1 2025-12-23T18:52:53.473454Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: 
SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2025-12-23T18:52:53.473525Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.952000s, at schemeshard: 72075186224037899 2025-12-23T18:52:53.473721Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2025-12-23T18:52:53.487617Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:53.623276Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:5617:4381]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:53.623597Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2025-12-23T18:52:53.623638Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:5617:4381], StatRequests.size() = 1 2025-12-23T18:52:54.592545Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:5648:4394]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:54.592821Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-12-23T18:52:54.592858Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:5648:4394], StatRequests.size() = 1 2025-12-23T18:52:55.086282Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037905 2025-12-23T18:52:55.086358Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.039000s, at schemeshard: 72075186224037905 2025-12-23T18:52:55.086543Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 26, entries count: 1, are all stats full: 1 2025-12-23T18:52:55.100750Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:55.346356Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:55.801286Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:5691:4416]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:55.801689Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-12-23T18:52:55.801747Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:5691:4416], StatRequests.size() = 1 2025-12-23T18:52:56.878567Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:5722:4429]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:56.879008Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-12-23T18:52:56.879059Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:5722:4429], StatRequests.size() = 1 2025-12-23T18:52:57.435121Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node 
count = 2, schemeshard count = 2 2025-12-23T18:52:57.435514Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2025-12-23T18:52:57.436318Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:52:57.436425Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-12-23T18:52:57.469937Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:52:57.470011Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:52:57.470347Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:52:57.485657Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:57.990910Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:5755:4445]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:57.991280Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-12-23T18:52:57.991333Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:5755:4445], StatRequests.size() = 1 2025-12-23T18:52:57.991919Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:5757:4447]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:57.996238Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2025-12-23T18:52:57.996302Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:5757:4447], StatRequests.size() = 1 |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:14.383250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:14.383337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:14.383425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:14.383481Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:14.383530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:14.383602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:14.383701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:14.384576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:14.386771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:14.481394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:14.481452Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:14.493051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:14.493238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:14.493430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:14.505177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:14.505431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:14.506099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:14.506321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:14.510337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.511211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:14.517922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:14.517991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:14.518140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:14.518183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:14.518313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:14.518414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:14.695715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.699989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.700994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.701064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.701140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:14.701237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ode 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710761 2025-12-23T18:52:58.758124Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:351: TExport::TTxProgress: DoExecute 2025-12-23T18:52:58.758165Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1314: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-12-23T18:52:58.758204Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1345: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-12-23T18:52:58.759962Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete 2025-12-23T18:52:58.760087Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:52:58.760141Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:873:2832] TestWaitNotification: OK eventTxId 102 2025-12-23T18:52:58.761196Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:52:58.761401Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 251us result status StatusSuccess 2025-12-23T18:52:58.761974Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 desc: 2 2025-12-23T18:52:58.762683Z node 6 :EXPORT DEBUG: schemeshard_export__forget.cpp:79: TExport::TTxForget, dropping export tables, info: { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-12-23T18:52:58.765082Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:351: TExport::TTxProgress: DoExecute 2025-12-23T18:52:58.765162Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:798: TExport::TTxProgress: Resume: id# 102 2025-12-23T18:52:58.765254Z node 6 :EXPORT INFO: schemeshard_export__create.cpp:579: TExport::TTxProgress: Allocate txId: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2025-12-23T18:52:58.765333Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete 2025-12-23T18:52:58.765440Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2025-12-23T18:52:58.765499Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:351: TExport::TTxProgress: DoExecute 2025-12-23T18:52:58.765548Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:919: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 102 2025-12-23T18:52:58.765622Z node 6 :EXPORT INFO: schemeshard_export__create.cpp:571: TExport::TTxProgress: Drop propose: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2025-12-23T18:52:58.765735Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete 2025-12-23T18:52:58.768109Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-102" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2025-12-23T18:52:58.768230Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/export-102, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2025-12-23T18:52:58.768357Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: 
'/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761, at schemeshard: 72057594046678944 2025-12-23T18:52:58.770138Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2025-12-23T18:52:58.770356Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761, operation: DROP DIRECTORY, path: /MyRoot/export-102 2025-12-23T18:52:58.770481Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7144: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-12-23T18:52:58.770577Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7146: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761 2025-12-23T18:52:58.770640Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:351: TExport::TTxProgress: DoExecute 2025-12-23T18:52:58.770681Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:980: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2025-12-23T18:52:58.770760Z node 6 :EXPORT TRACE: schemeshard_export__create.cpp:981: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761 2025-12-23T18:52:58.770896Z node 6 :EXPORT INFO: schemeshard_export__create.cpp:1162: TExport::TTxProgress: Wait for completion: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2025-12-23T18:52:58.772187Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete 2025-12-23T18:52:58.772297Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2025-12-23T18:52:58.772366Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 
281474976710761 2025-12-23T18:52:58.772415Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710761 2025-12-23T18:52:58.772455Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:351: TExport::TTxProgress: DoExecute 2025-12-23T18:52:58.772508Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1314: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-12-23T18:52:58.772544Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1345: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-12-23T18:52:58.773840Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:371: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 102 2025-12-23T18:52:58.774012Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:52:58.774049Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:52:58.774410Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:52:58.774479Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:52:58.774525Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:1089:3043] TestWaitNotification: OK eventTxId 102 |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcher [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> KqpSystemView::QueryStatsSimple [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |97.3%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 30650, MsgBus: 7844 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003093/r3tmp/tmpj5O0Gt/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30650, node 1 TClient is connected to server localhost:7844 TClient is connected to server localhost:7844 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515969624 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |97.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_evict_vdisk_grpc [FAIL] |97.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor >> BasicStatistics::ServerlessGlobalIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 1647, MsgBus: 7250 2025-12-23T18:52:09.226350Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128144962056748:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:09.226439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:52:09.263041Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128143470137991:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:09.274978Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:52:09.275481Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587128143138376900:2087];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:09.275560Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004e30/r3tmp/tmp2Pi3jk/pdisk_1.dat 
2025-12-23T18:52:09.462902Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:09.463617Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:09.476081Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:09.508555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:09.508664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:09.508848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:09.508963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:09.509672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:09.509726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:09.541236Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:09.541280Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:52:09.543718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:09.544000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:09.546939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:09.596134Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1647, node 1 2025-12-23T18:52:09.664262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:09.664295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:09.664305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:09.664415Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:09.680381Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:09.750476Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions 2025-12-23T18:52:09.751908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7250 TClient is connected to server localhost:7250 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515929619 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:10.134391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:10.154734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:10.239585Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:10.280617Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:10.287224Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:10.317617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:10.434971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:52:10.495419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:12.307246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128157846960917:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:12.307346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:12.307652Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128157846960927:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:12.307707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:12.619148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:12.659716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:12.700096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:12.744286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:12.789811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 720575940466444 ... 876 TClient is connected to server localhost:29876 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515970576 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:51.092687Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:52:51.112349Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:51.185574Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:51.286263Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:51.295835Z node 17 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:51.301469Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:51.307763Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:51.368901Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:52:54.316158Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587128336323524290:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:54.316254Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:54.316468Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587128336323524299:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:54.316527Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:54.396269Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:54.447561Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:54.501047Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:54.552258Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:54.599643Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:54.746989Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:54.799812Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:54.860429Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:55.063608Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587128340618492677:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:55.063713Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:55.063776Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587128340618492682:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:55.063951Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7587128340618492684:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:55.064013Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:52:55.068904Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:55.091368Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7587128340618492686:2414], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:52:55.191720Z node 16 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [16:7587128340618492769:4720] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:55.277662Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7587128319143652859:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:55.277731Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:55.287404Z node 17 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[17:7587128321139137988:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:55.287489Z node 17 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:55.294537Z node 18 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7587128320993557756:2152];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:55.294619Z node 18 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:52:58.236781Z node 16 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766515978228, txId: 281474976710675] shutting down |97.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |97.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> DefaultPoolSettings::TestResourcePoolsSysViewFilters [GOOD] >> KqpWorkloadService::TestLargeConcurrentQueryLimit >> BasicStatistics::TwoNodes [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState |97.3%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} >> test_canonical_records.py::test_replace_config [GOOD] |97.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} >> TStreamingQueryTest::CreateStreamingQueryOrReplace >> TStreamingQueryTest::ParallelAlterStreamingQuery >> TStreamingQueryTest::CreateStreamingQueryWithProperties >> TStreamingQueryTest::CreateStreamingQueryOrReplaceFailNameConflict |97.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:13.980738Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:14.114734Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:14.126310Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:14.127253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:14.127367Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:14.633624Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:14.744045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:14.744194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:14.805900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:14.914424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00324f/r3tmp/tmpwCVhns/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25972, node 1 2025-12-23T18:52:15.668235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:15.670169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:15.670222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:15.670253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:15.670502Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:15.736568Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:15067 2025-12-23T18:52:16.266610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:19.082148Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:19.089660Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:19.095364Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:19.138192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:19.138357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:19.183041Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:19.185546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:19.449478Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:19.451556Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:19.452533Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:19.453370Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:19.453694Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:19.453825Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:19.454003Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:19.454169Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:19.454312Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:19.578883Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:52:19.590826Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:19.590981Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:19.604719Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:19.820526Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:19.881297Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:19.881426Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:19.914417Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:19.919884Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:19.920137Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:19.920210Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column 
count# 0 2025-12-23T18:52:19.920272Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:19.920333Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:19.920392Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:19.920451Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:19.920942Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:19.966490Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:19.966592Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:19.978234Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:52:19.986090Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:52:19.988064Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:52:19.992877Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:52:20.027960Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:52:20.028030Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:52:20.028101Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:52:20.052737Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:20.067025Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:20.074821Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:20.074971Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:20.087189Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:20.232052Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:20.633295Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:20.738341Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:20.738465Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. ... atType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:45.893031Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2025-12-23T18:52:45.893059Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:4597:3809], StatRequests.size() = 1 2025-12-23T18:52:46.944357Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:4633:3827]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:46.944642Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2025-12-23T18:52:46.944672Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:4633:3827], StatRequests.size() = 1 2025-12-23T18:52:47.020651Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:47.881838Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:4667:3843]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:47.882096Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2025-12-23T18:52:47.882135Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:4667:3843], StatRequests.size() = 1 2025-12-23T18:52:48.782395Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:4695:3854]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:48.782698Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2025-12-23T18:52:48.782729Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:4695:3854], StatRequests.size() = 1 2025-12-23T18:52:48.814825Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:48.815039Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2025-12-23T18:52:48.815341Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:52:48.815468Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-12-23T18:52:49.885075Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4727:3869]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:49.885452Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 
] 2025-12-23T18:52:49.885503Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4727:3869], StatRequests.size() = 1 2025-12-23T18:52:50.907937Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4759:3881]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:50.908200Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2025-12-23T18:52:50.908243Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4759:3881], StatRequests.size() = 1 2025-12-23T18:52:52.166132Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:52:52.166215Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:52.166254Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:52.166294Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:52:52.242483Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4800:3898]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:52.242904Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2025-12-23T18:52:52.242952Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4800:3898], StatRequests.size() = 1 2025-12-23T18:52:52.297126Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:53.186843Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4834:3912]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:53.187088Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2025-12-23T18:52:53.187129Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4834:3912], StatRequests.size() = 1 2025-12-23T18:52:54.233287Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4865:3924]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:54.233570Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2025-12-23T18:52:54.233614Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4865:3924], StatRequests.size() = 1 2025-12-23T18:52:54.356262Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:54.356425Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2025-12-23T18:52:54.356908Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:52:54.357059Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-12-23T18:52:54.389957Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: 
SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:52:54.390034Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:52:54.390282Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:52:54.403848Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:55.521943Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4901:3942]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:55.522278Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-12-23T18:52:55.522316Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4901:3942], StatRequests.size() = 1 2025-12-23T18:52:56.004812Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2025-12-23T18:52:56.004897Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.713000s, at schemeshard: 72075186224037899 2025-12-23T18:52:56.005099Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 50, entries count: 2, are all stats full: 1 2025-12-23T18:52:56.018890Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:56.688139Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4933:3957]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:56.688468Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-12-23T18:52:56.688516Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4933:3957], StatRequests.size() = 1 2025-12-23T18:52:57.888759Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4969:3973]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:57.889174Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-12-23T18:52:57.889226Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4969:3973], StatRequests.size() = 1 2025-12-23T18:52:57.932893Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:58.979695Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:5001:3986]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:58.980011Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-12-23T18:52:58.980058Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:5001:3986], StatRequests.size() = 1 2025-12-23T18:53:00.009184Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], 
ReplyToActorId[ [2:5029:3997]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:00.009437Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2025-12-23T18:53:00.009473Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:5029:3997], StatRequests.size() = 1 2025-12-23T18:53:00.052868Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:53:00.053323Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2025-12-23T18:53:00.053671Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:53:00.053734Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-12-23T18:53:00.086784Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:00.086864Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:00.087108Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:00.100949Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:01.106265Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:5063:4014]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:01.106593Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2025-12-23T18:53:01.106630Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:5063:4014], StatRequests.size() = 1 >> TStreamingQueryTest::CreateStreamingQuery |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:15.762117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:15.836002Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:15.840456Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:529:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:15.840799Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:15.840867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:16.163749Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:16.280951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:16.281061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:16.315021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:16.386835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003249/r3tmp/tmp5xZiMD/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11260, node 1 2025-12-23T18:52:17.058240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:17.058658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:17.058702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:17.058733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:17.059267Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:17.124582Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13955 2025-12-23T18:52:17.683874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:23.131636Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:23.131911Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme 
cache: ActorUnknown 2025-12-23T18:52:23.141439Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:23.141537Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2025-12-23T18:52:23.146395Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:23.146787Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:23.186778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:23.186889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:23.187216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:23.187262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:23.216592Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:23.216733Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:52:23.219371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:23.219740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:23.415187Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:23.415279Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:23.416304Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:23.416384Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:23.416940Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.417622Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.418705Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.418845Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.418979Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.419109Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.419326Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.419511Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-12-23T18:52:23.419670Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.434731Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:23.435464Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:23.435754Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:23.616029Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:23.637385Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:23.637463Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:23.670211Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:23.672206Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:23.672431Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:23.672497Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:52:23.672552Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:23.672612Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:23.672668Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:23.672725Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:23.673737Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:23.683025Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2352:2408] 2025-12-23T18:52:23.688382Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:52:23.694510Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:23.694592Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2524:2451], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:23.699317Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2511:2448] Owner: [2:2510:2447]. Describe result: PathErrorUnknown 2025-12-23T18:52:23.699381Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2511:2448] Owner: [2:2510:2447]. Creating table 2025-12-23T18:52:23.699462Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2511:2448] Owner: [2:2510:2447]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:52:23.720401Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2593:2485] 2025-12-23T18:52:23.721076Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2593:2485], schemeshard id = 72075186224037897 2025-12-23T18:52:23.759089Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2650:2499], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:23.783167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemesha ... ] RequestId[ 23 ], ReplyToActorId[ [2:4309:2987]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:48.778550Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2025-12-23T18:52:48.778584Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:4309:2987], StatRequests.size() = 1 2025-12-23T18:52:49.750295Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4340:2993]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:49.750598Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2025-12-23T18:52:49.750637Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4340:2993], StatRequests.size() = 1 2025-12-23T18:52:50.185036Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:50.185412Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2025-12-23T18:52:50.185586Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:52:50.185649Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-12-23T18:52:50.756666Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4371:2999]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:50.756978Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2025-12-23T18:52:50.757021Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4371:2999], StatRequests.size() = 1 2025-12-23T18:52:51.787419Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4406:3007]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:51.787758Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2025-12-23T18:52:51.787789Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4406:3007], StatRequests.size() = 1 2025-12-23T18:52:52.800987Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] 
RequestId[ 27 ], ReplyToActorId[ [2:4439:3013]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:52.801269Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2025-12-23T18:52:52.801305Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4439:3013], StatRequests.size() = 1 2025-12-23T18:52:53.384662Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:53.564438Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:52:53.564539Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:53.564581Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:53.564640Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:52:54.124364Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4482:3022]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:54.124798Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2025-12-23T18:52:54.124852Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4482:3022], StatRequests.size() = 1 2025-12-23T18:52:55.181890Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4517:3028]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:55.182170Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-12-23T18:52:55.182208Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4517:3028], StatRequests.size() = 1 2025-12-23T18:52:55.807980Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:55.808642Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2025-12-23T18:52:55.808876Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-12-23T18:52:55.809147Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:52:55.862764Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:52:55.862861Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:52:55.863099Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:52:55.876195Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:56.556226Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4550:3036]], 
StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:56.556637Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-12-23T18:52:56.556686Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4550:3036], StatRequests.size() = 1 2025-12-23T18:52:57.664534Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4583:3044]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:57.664897Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-12-23T18:52:57.664951Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4583:3044], StatRequests.size() = 1 2025-12-23T18:52:58.672545Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4616:3050]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:58.672940Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-12-23T18:52:58.672985Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4616:3050], StatRequests.size() = 1 2025-12-23T18:52:59.179441Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:59.748322Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4651:3058]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:59.748651Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2025-12-23T18:52:59.748697Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4651:3058], StatRequests.size() = 1 2025-12-23T18:53:00.759115Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4686:3064]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:00.759407Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2025-12-23T18:53:00.759437Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4686:3064], StatRequests.size() = 1 2025-12-23T18:53:01.240184Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:53:01.240319Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2025-12-23T18:53:01.240439Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-12-23T18:53:01.240606Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:01.283658Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:01.283732Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:01.283975Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 
2025-12-23T18:53:01.297515Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:01.826007Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4721:3072]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:01.826423Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2025-12-23T18:53:01.826472Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4721:3072], StatRequests.size() = 1 2025-12-23T18:53:01.826978Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:4723:2963]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:01.829345Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:53:01.829422Z node 3 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [3:4733:2967] 2025-12-23T18:53:01.829473Z node 3 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [3:4733:2967] 2025-12-23T18:53:01.832217Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4741:3074] 2025-12-23T18:53:01.832865Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:4733:2967], server id = [2:4741:3074], tablet id = 72075186224037894, status = OK 2025-12-23T18:53:01.833135Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4741:3074], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:53:01.833210Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2025-12-23T18:53:01.833446Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 18446744073709551615 2025-12-23T18:53:01.833561Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [3:4723:2963], StatRequests.size() = 1 2025-12-23T18:53:01.833835Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] >> TStreamingQueryTest::CreateStreamingQueryOrReplaceFailNameConflict [GOOD] >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors >> TStreamingQueryTest::CreateStreamingQuery [GOOD] >> TStreamingQueryTest::CreateStreamingQueryOrReplace [GOOD] >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists >> TStreamingQueryTest::CreateStreamingQueryWithProperties [GOOD] >> TStreamingQueryTest::DropStreamingQuery >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath >> TStreamingQueryTest::ParallelAlterStreamingQuery [GOOD] >> Yq_1::DescribeJob >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists [GOOD] >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists [GOOD] >> TStreamingQueryTest::DropStreamingQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelAlterStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:53:03.907331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:03.907420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:03.907470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:53:03.907508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:53:03.907552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:03.907581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:03.907636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:03.907698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:53:03.908749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:03.910982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:04.002630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:04.002682Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:04.013423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:04.013597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:04.013755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:04.024444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:04.024646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:04.025843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:04.028338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:04.038025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:04.038172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:04.055650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:04.055720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:04.055892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:04.055940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:04.056049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:04.056849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:04.236745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.237717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.237803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.237867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.237919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.237965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.238020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.238078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.238131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.238174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.238206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.238242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 
281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.238296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.238353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.238420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... transaction, txId: 105, at schemeshard: 72057594046678944 2025-12-23T18:53:04.865065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.865096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.865209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-12-23T18:53:04.865332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-12-23T18:53:04.865385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.865409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.865528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-12-23T18:53:04.865630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.865666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.865725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-12-23T18:53:04.865882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2025-12-23T18:53:04.865950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 
2025-12-23T18:53:04.865976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.866138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 111, at schemeshard: 72057594046678944 2025-12-23T18:53:04.866196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.866228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.866274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-12-23T18:53:04.866376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.866405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.866529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.866554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.866610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 113, at schemeshard: 72057594046678944 2025-12-23T18:53:04.866710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.866766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.866881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.866921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.867033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 114, at schemeshard: 72057594046678944 2025-12-23T18:53:04.867131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 115, at schemeshard: 72057594046678944 2025-12-23T18:53:04.867182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.867203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.867297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.867324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.867412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 116, at schemeshard: 72057594046678944 2025-12-23T18:53:04.867503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- 
TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.867532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.867653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 117, at schemeshard: 72057594046678944 2025-12-23T18:53:04.867723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.867747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.867827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.867862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.868001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.868061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.868205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.868229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.868312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.868333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.868463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.868497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:797:2786] 2025-12-23T18:53:04.868618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2025-12-23T18:53:04.868643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:797:2786] TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 
TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 2025-12-23T18:53:04.872321Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:53:04.872587Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 289us result status StatusSuccess 2025-12-23T18:53:04.873095Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool >> ColumnBuildTest::Rejecting >> ColumnBuildTest::Applying_Failed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:52:20.618305Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:20.739189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:20.745019Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:20.745392Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:20.745504Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:21.136339Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:21.227173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:21.227291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:21.285384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:21.392386Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00321b/r3tmp/tmpSf7uzk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5471, node 1 2025-12-23T18:52:22.010112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:22.011211Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:22.011261Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:22.011299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:22.011514Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:22.076170Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12319 2025-12-23T18:52:22.605636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:24.942680Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:24.949617Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:24.953142Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:24.976072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:24.976167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:25.013422Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:25.015044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:25.243640Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:25.244128Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:25.244827Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:25.245287Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:25.245463Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:25.245540Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:25.245726Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:25.245852Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:25.245971Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:25.357003Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:52:25.368265Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:25.368368Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:25.381047Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:25.556570Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:25.603416Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:25.603504Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:25.631375Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:25.631546Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:25.631745Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:25.631808Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column 
count# 0 2025-12-23T18:52:25.631875Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:25.631934Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:25.631987Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:25.632038Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:25.632471Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:25.671084Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:25.671173Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2185:2603], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:25.673611Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2601] 2025-12-23T18:52:25.692537Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:52:25.694527Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2249:2651] 2025-12-23T18:52:25.694796Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2249:2651], schemeshard id = 72075186224037897 2025-12-23T18:52:25.721114Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Describe result: PathErrorUnknown 2025-12-23T18:52:25.721184Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Creating table 2025-12-23T18:52:25.721261Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:52:25.746676Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2348:2695], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:25.753936Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:25.766383Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:25.766518Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:25.778926Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:25.909200Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:26.267626Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:26.365287Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:26.365347Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. C ... atType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:48.149132Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2025-12-23T18:52:48.149164Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:4410:3734], StatRequests.size() = 1 2025-12-23T18:52:49.223442Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:4442:3749]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:49.223635Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2025-12-23T18:52:49.223663Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:4442:3749], StatRequests.size() = 1 2025-12-23T18:52:49.311941Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:50.308034Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:4474:3764]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:50.308283Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2025-12-23T18:52:50.308325Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:4474:3764], StatRequests.size() = 1 2025-12-23T18:52:51.348439Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:4502:3775]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:51.348792Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2025-12-23T18:52:51.348844Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:4502:3775], StatRequests.size() = 1 2025-12-23T18:52:51.403381Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:51.403883Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2025-12-23T18:52:51.404363Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:52:51.404484Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-12-23T18:52:52.430539Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4536:3791]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:52.430804Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 
24 ] 2025-12-23T18:52:52.430853Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4536:3791], StatRequests.size() = 1 2025-12-23T18:52:53.458265Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4568:3803]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:53.458551Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2025-12-23T18:52:53.458599Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4568:3803], StatRequests.size() = 1 2025-12-23T18:52:54.740534Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4609:3820]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:54.740830Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2025-12-23T18:52:54.740873Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4609:3820], StatRequests.size() = 1 2025-12-23T18:52:54.783900Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:52:54.783987Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:54.784026Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:52:54.784063Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:52:54.902321Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:52:55.900815Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4642:3834]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:55.901044Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2025-12-23T18:52:55.901072Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4642:3834], StatRequests.size() = 1 2025-12-23T18:52:57.067539Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4673:3846]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:57.067751Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2025-12-23T18:52:57.067776Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4673:3846], StatRequests.size() = 1 2025-12-23T18:52:57.199096Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:52:57.199724Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2025-12-23T18:52:57.200205Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:52:57.200281Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-12-23T18:52:57.222455Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: 
SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:52:57.222533Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:52:57.222867Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:52:57.236727Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:58.302483Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4709:3864]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:58.302968Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-12-23T18:52:58.303016Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4709:3864], StatRequests.size() = 1 2025-12-23T18:52:58.828776Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2025-12-23T18:52:58.828835Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.882000s, at schemeshard: 72075186224037899 2025-12-23T18:52:58.828948Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2025-12-23T18:52:58.842091Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:52:59.482680Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4739:3877]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:59.482914Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-12-23T18:52:59.482946Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4739:3877], StatRequests.size() = 1 2025-12-23T18:53:00.630424Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4773:3891]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:00.630635Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-12-23T18:53:00.630666Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4773:3891], StatRequests.size() = 1 2025-12-23T18:53:00.694763Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:01.764791Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4809:3907]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:01.765089Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-12-23T18:53:01.765140Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4809:3907], StatRequests.size() = 1 2025-12-23T18:53:02.796435Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], 
ReplyToActorId[ [2:4839:3919]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:02.796685Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2025-12-23T18:53:02.796732Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4839:3919], StatRequests.size() = 1 2025-12-23T18:53:02.851637Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:53:02.851826Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2025-12-23T18:53:02.852435Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:02.852665Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-12-23T18:53:02.874841Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:02.874922Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:02.875194Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:02.889134Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:03.817658Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4875:3937]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:03.817848Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2025-12-23T18:53:03.817880Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4875:3937], StatRequests.size() = 1 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> ColumnBuildTest::BaseCase >> ColumnBuildTest::Filling_Failed >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams >> TestKinesisHttpProxy::TestPing ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:53:02.243817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:02.243953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:02.244024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2025-12-23T18:53:02.244076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:53:02.244141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:02.244192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:02.244274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:02.259448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:53:02.260486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:02.261895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:02.353683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:02.353737Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:02.362579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:02.362750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:02.362910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:02.373752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:02.373948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:02.374507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:02.380539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:02.389574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:02.390491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:02.410216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:02.410319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:02.410542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:02.410608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:02.410670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:02.410825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:02.559807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.563040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.563124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.563221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.563386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.563475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.047641Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2025-12-23T18:53:05.088919Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:53:05.088990Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.089031Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:53:05.089085Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.089122Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][1][StateIdle] Try persist 2025-12-23T18:53:05.089195Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:53:05.089222Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.089246Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:53:05.089277Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.089300Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][2][StateIdle] Try persist 2025-12-23T18:53:05.089343Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:53:05.089368Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.089393Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:53:05.089419Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.089444Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2025-12-23T18:53:05.120400Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:53:05.120508Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.120539Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:53:05.120571Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.120596Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186233409546][Partition][1][StateIdle] Try persist 2025-12-23T18:53:05.120674Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:53:05.120709Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.120733Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:53:05.120759Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.120783Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][2][StateIdle] Try persist 2025-12-23T18:53:05.120835Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:53:05.120863Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.120888Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:53:05.120912Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.120935Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2025-12-23T18:53:05.151905Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-12-23T18:53:05.152010Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-12-23T18:53:05.152546Z node 1 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186233409546][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-12-23T18:53:05.152710Z node 1 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186233409546][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-12-23T18:53:05.152804Z node 1 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186233409546][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-12-23T18:53:05.153368Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2025-12-23T18:53:05.153430Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2025-12-23T18:53:05.153714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 16975298 2025-12-23T18:53:05.167874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:53:05.188682Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:53:05.188751Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.188789Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:53:05.188827Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.188860Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][1][StateIdle] Try persist 2025-12-23T18:53:05.188982Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:53:05.189015Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.189041Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:53:05.189112Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.189137Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][2][StateIdle] Try persist 2025-12-23T18:53:05.189189Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:53:05.189215Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.189237Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:53:05.189262Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:53:05.189285Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2025-12-23T18:53:05.210437Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:53:05.210676Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 289us result status StatusSuccess 2025-12-23T18:53:05.211197Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup 
CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:53:03.907392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:03.907507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:03.907554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:53:03.907589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:53:03.907635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:03.907667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:03.907732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:03.907805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:53:03.908830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:03.910962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:04.006658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:04.006717Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:04.017333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:04.017512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:04.017682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:04.026577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:04.026734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:04.027305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:04.028605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:04.037720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:04.037903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:04.055555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:04.055627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:04.055778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:04.055843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:04.056002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2025-12-23T18:53:04.056821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:04.210524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.211275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.211381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.211464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.211515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.211556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.211623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.211683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.211768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.211846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.211884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.211933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.211997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.212068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.212136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :53:05.627000Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:53:05.628114Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:05.628161Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:53:05.628191Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:53:05.629283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:05.629315Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:05.629360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:53:05.629402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:53:05.629493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:53:05.630540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:53:05.630636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:53:05.630881Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:05.630960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:53:05.630992Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:53:05.631176Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:53:05.631210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-12-23T18:53:05.631342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:53:05.631392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:05.632580Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:05.632624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:05.632761Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:05.632793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:53:05.633022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:05.633054Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:53:05.633125Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:53:05.633151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:53:05.633179Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:53:05.633207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:53:05.633235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:53:05.633294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:53:05.633337Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:53:05.633359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:53:05.633404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:53:05.633431Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:53:05.633454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:53:05.633813Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:53:05.633880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:53:05.633908Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:53:05.633950Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:53:05.633986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:53:05.634050Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:53:05.635882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:53:05.636232Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:53:05.636954Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:675:2665] Bootstrap 2025-12-23T18:53:05.637713Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:675:2665] Become StateWork (SchemeCache [2:680:2670]) 2025-12-23T18:53:05.639771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterStreamingQuery CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:53:05.639938Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_streaming_query.cpp:208: [72057594046678944] TAlterStreamingQuery Propose: opId# 101:0, path# /MyRoot/MyStreamingQuery 2025-12-23T18:53:05.640034Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-12-23T18:53:05.640698Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:675:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:53:05.642573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:53:05.642771Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: ALTER STREAMING QUERY, path: MyStreamingQuery 2025-12-23T18:53:05.643191Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:53:05.643350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:53:05.643378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:53:05.643639Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:53:05.643715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:53:05.643740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:690:2680] TestWaitNotification: OK eventTxId 101 >> TestKinesisHttpProxy::CreateStreamInIncorrectDb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:53:03.907321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:03.907421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:03.907457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:53:03.907484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:53:03.907816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:03.907858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:03.907923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:03.907969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:53:03.908713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:03.910972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:04.004135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:04.004186Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:04.013934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:04.014100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:04.014249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:04.024305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:04.024511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:04.025818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:04.028369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:04.037729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:04.037903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:04.055589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:04.055658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:04.055802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:04.055867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:04.055961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:04.056802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:04.191844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:53:04.193748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.193858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.193921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.193980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.194026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.194081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.194119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.194181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.194222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.194258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.194302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.194360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.194420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.194494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:53:05.701006Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:53:05.701062Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-12-23T18:53:05.701113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:53:05.702261Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:53:05.702357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:53:05.702402Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:53:05.702430Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:53:05.702457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:53:05.702524Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:53:05.704563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:53:05.705350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:53:05.705541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:53:05.705585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:53:05.705904Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:53:05.705982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:53:05.706018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:705:2695] TestWaitNotification: OK eventTxId 101 2025-12-23T18:53:05.706412Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:53:05.706632Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 271us result status StatusSuccess 2025-12-23T18:53:05.707059Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO Output SELECT * FROM Input" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-12-23T18:53:05.710248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:53:05.710590Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 102:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery FailOnExist: false CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } } } 2025-12-23T18:53:05.710673Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] TCreateStreamingQuery Propose: opId# 102:0, path# /MyRoot/MyStreamingQuery 2025-12-23T18:53:05.710847Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:53:05.713018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-12-23T18:53:05.713206Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), operation: CREATE STREAMING QUERY, path: MyStreamingQuery TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:53:05.713442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:53:05.713482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:53:05.713749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:53:05.713816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:53:05.713848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:713:2703] TestWaitNotification: OK eventTxId 102 2025-12-23T18:53:05.714155Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:53:05.714281Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 151us result status StatusSuccess 2025-12-23T18:53:05.714581Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO Output SELECT * FROM Input" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::DropStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:53:03.907328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:03.907428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:03.907466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:53:03.907496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:53:03.907542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:03.907576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:03.907637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:03.907693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-12-23T18:53:03.908489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:03.910959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:03.991316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:03.991369Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:04.006327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:04.006477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:04.008517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:04.021760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:04.021972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:04.025818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:04.028387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:04.038083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:04.038241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:04.055580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:04.055647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:04.055825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:04.055888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:04.055984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:04.056819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:04.197679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.198480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.198589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.198668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.198716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.198780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.198865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.198907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.198983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.199038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.199072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.199117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.199178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.199260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.199365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2025-12-23T18:53:05.624505Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:05.624619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:53:05.624680Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_streaming_query.cpp:22: [72057594046678944] TDropStreamingQuery TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2025-12-23T18:53:05.624765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:53:05.624848Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2025-12-23T18:53:05.625006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:53:05.625073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:53:05.626389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:53:05.627274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:53:05.628336Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:05.628375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:05.628493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:53:05.628614Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:05.628647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-12-23T18:53:05.628699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:53:05.628949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:53:05.628990Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:53:05.629104Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:53:05.629141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:53:05.629182Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:53:05.629220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:53:05.629263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:53:05.629301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:53:05.629340Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:53:05.629384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:53:05.629464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:53:05.629510Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:53:05.629545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-12-23T18:53:05.629579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2025-12-23T18:53:05.630014Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:53:05.630115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:53:05.630155Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:53:05.630196Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2025-12-23T18:53:05.630241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:53:05.630641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:53:05.630707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:53:05.630805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:53:05.631028Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:53:05.631097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:53:05.631125Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:53:05.631154Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-12-23T18:53:05.631183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:53:05.631254Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:53:05.634477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:53:05.634618Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:53:05.634684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:53:05.634900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:53:05.634943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:53:05.635318Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:53:05.635399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:53:05.635434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:733:2723] TestWaitNotification: OK eventTxId 102 2025-12-23T18:53:05.635884Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:53:05.636044Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 197us result status StatusPathDoesNotExist 2025-12-23T18:53:05.636203Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyStreamingQuery" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:53:03.907357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:03.907467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:03.907518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:53:03.907556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:53:03.907613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:03.907655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:03.907721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:03.907811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:53:03.908760Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:03.910958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:03.998007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:03.998057Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:04.006264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:04.006417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:04.008520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:04.021253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:04.021522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:04.025819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:04.028371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:04.038153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:04.038330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:04.055655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:04.055740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:04.055933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:04.056013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:04.056151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:04.056851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:04.220982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.221664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.221746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.221813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.221866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.221914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.221971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.222011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.222068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour 
} } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.222111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.222146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.222196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.222247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.222309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:04.222381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
emeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:53:05.614999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:05.615115Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:53:05.615227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:05.615265Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:53:05.615294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:53:05.615321Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:53:05.616349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:05.616407Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:53:05.616442Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:53:05.617403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:05.617437Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:05.617472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:53:05.617512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:53:05.617600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:53:05.618644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:53:05.618778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:53:05.619106Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 
5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:05.619205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:53:05.619238Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:53:05.619428Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:53:05.619466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:53:05.619602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:53:05.619665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:05.620821Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:05.620857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:05.620973Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:05.621009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:53:05.621243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:05.621277Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:53:05.621367Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:53:05.621397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:53:05.621426Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:53:05.621453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:53:05.621483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:53:05.621517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:53:05.621552Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:53:05.621580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:53:05.621623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:53:05.621653Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:53:05.621680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:53:05.622026Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:53:05.622107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:53:05.622135Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:53:05.622167Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:53:05.622198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:53:05.622263Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:53:05.624508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:53:05.624905Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:53:05.625363Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:675:2665] Bootstrap 2025-12-23T18:53:05.626529Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:675:2665] Become StateWork (SchemeCache [2:680:2670]) 2025-12-23T18:53:05.629399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:53:05.629593Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 101:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "" } 2025-12-23T18:53:05.629685Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] 
TCreateStreamingQuery Propose: opId# 101:0, path# /MyRoot/ 2025-12-23T18:53:05.629781Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-12-23T18:53:05.630536Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:675:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:53:05.632980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:53:05.633180Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE STREAMING QUERY, path: 2025-12-23T18:53:05.633692Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless [GOOD] >> ResourcePoolsDdl::TestCreateResourcePool >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD] >> KqpWorkloadServiceActors::TestCreateDefaultPool >> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier |97.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithPreparedQuery >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath [GOOD] >> KqpWorkloadService::TestZeroQueueSize >> ColumnBuildTest::Applying_Failed [GOOD] >> ColumnBuildTest::Applying_DisableFlag |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> ColumnBuildTest::Filling_Failed [GOOD] >> ColumnBuildTest::Filling_DisableFlag >> KqpCompileFallback::FallbackToVersion1Success >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet >> BasicStatistics::TwoDatabases [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:24.547954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:24.664880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:24.670947Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:527:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:24.671206Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:24.671319Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:25.020766Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:25.125883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:25.126039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:25.185308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:25.281245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0031df/r3tmp/tmpktoW1H/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13715, node 1 2025-12-23T18:52:25.909843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:25.909896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:25.909947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:25.912681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:25.913076Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:25.977786Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:64477 2025-12-23T18:52:26.529755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:28.926338Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:28.933507Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2025-12-23T18:52:28.936271Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:28.960368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:28.960461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:29.008370Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:52:29.009709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:29.154263Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:29.154358Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:29.168448Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:29.247260Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.247771Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.248274Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.248750Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.248900Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.248965Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.249142Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.249284Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.249416Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.427988Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:29.451944Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:29.452020Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:29.484805Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:29.486196Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:29.486448Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:29.486519Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:52:29.486571Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:29.486636Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: 
table count# 0 2025-12-23T18:52:29.486698Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:29.486794Z node 3 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:29.487446Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:29.488721Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2000:2458] 2025-12-23T18:52:29.492383Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2025-12-23T18:52:29.498948Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [3:2215:2590] Owner: [3:2214:2589]. Describe result: PathErrorUnknown 2025-12-23T18:52:29.499006Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2215:2590] Owner: [3:2214:2589]. Creating table 2025-12-23T18:52:29.499110Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2215:2590] Owner: [3:2214:2589]. Created ESchemeOpCreateTable transaction for path: /Root/Database1/.metadata/_statistics 2025-12-23T18:52:29.503963Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:29.504057Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [3:2239:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:29.531186Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2313:2655] 2025-12-23T18:52:29.531972Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2313:2655], schemeshard id = 72075186224037897 2025-12-23T18:52:29.536970Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2320:2659], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:29.564237Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:29.575702Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2215:2590] Owner: [3:2214:2589]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:29.575818Z node 3 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [3:2215:2590] Owner: [3:2214:2589]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:29.581769Z node 3 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [3:2215:2590] Owner: [3:2214:2589]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:29.662689Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database1/.metadata/script_executions 2025-12-23T18:52:29.784338Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:30.133493Z node 3 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [3:2215:2590] Owner: [3:2214:2589]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:30.231302Z node 3 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [3:2215:2590] Owner: [3:2214:2589]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:30.231406Z node 3 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [3:2215:2590] Owner: [3:221 ... ] RequestId[ 21 ], ReplyToActorId[ [3:5735:3345]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:55.921309Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2025-12-23T18:52:55.921341Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [3:5735:3345], StatRequests.size() = 1 2025-12-23T18:52:56.865759Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [3:5780:3358]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:56.866121Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2025-12-23T18:52:56.866160Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [3:5780:3358], StatRequests.size() = 1 2025-12-23T18:52:57.751867Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-12-23T18:52:57.752045Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 5 2025-12-23T18:52:57.752116Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-12-23T18:52:57.752463Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:52:57.775369Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [3:5821:3368]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:57.775629Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2025-12-23T18:52:57.775674Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [3:5821:3368], StatRequests.size() = 1 2025-12-23T18:52:58.706362Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [3:5864:3380]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:58.706661Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2025-12-23T18:52:58.706696Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [3:5864:3380], StatRequests.size() = 1 2025-12-23T18:52:59.617137Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], 
ReplyToActorId[ [3:5907:3390]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:59.617365Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2025-12-23T18:52:59.617395Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:5907:3390], StatRequests.size() = 1 2025-12-23T18:53:00.737301Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:53:00.737379Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:53:00.737411Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:53:00.737457Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:53:00.792386Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:00.815546Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:5954:3403]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:00.815824Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2025-12-23T18:53:00.815860Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:5954:3403], StatRequests.size() = 1 2025-12-23T18:53:01.855000Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:6002:3415]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:01.855365Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2025-12-23T18:53:01.855405Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:6002:3415], StatRequests.size() = 1 2025-12-23T18:53:02.935578Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-12-23T18:53:02.936141Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2025-12-23T18:53:02.936559Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2025-12-23T18:53:02.936630Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-12-23T18:53:02.960319Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:6042:3425]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:02.960826Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2025-12-23T18:53:02.960888Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:6042:3425], StatRequests.size() = 1 2025-12-23T18:53:02.983507Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:02.983593Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:02.983843Z node 3 :STATISTICS 
DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:53:02.997755Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:04.050080Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6090:3439]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:04.050470Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-12-23T18:53:04.050523Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:6090:3439], StatRequests.size() = 1 2025-12-23T18:53:04.667402Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2025-12-23T18:53:04.667462Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224038898 2025-12-23T18:53:04.667685Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:53:04.681394Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2025-12-23T18:53:05.288177Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6131:3449]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:05.288481Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-12-23T18:53:05.288516Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:6131:3449], StatRequests.size() = 1 2025-12-23T18:53:06.370771Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:06.405580Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6174:3459]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:06.405976Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-12-23T18:53:06.406030Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [3:6174:3459], StatRequests.size() = 1 2025-12-23T18:53:07.398051Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [3:6219:3473]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:07.398412Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-12-23T18:53:07.398460Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [3:6219:3473], StatRequests.size() = 1 2025-12-23T18:53:08.327670Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-12-23T18:53:08.328017Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 7 2025-12-23T18:53:08.328179Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 
2025-12-23T18:53:08.328613Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:08.352375Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [3:6262:3483]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:08.352773Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2025-12-23T18:53:08.352818Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [3:6262:3483], StatRequests.size() = 1 2025-12-23T18:53:08.353324Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6264:3394]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:08.357514Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:53:08.357614Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:6274:3398] 2025-12-23T18:53:08.357675Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:6274:3398] 2025-12-23T18:53:08.358953Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6274:3398], server id = [2:6279:3399], tablet id = 72075186224038895, status = OK 2025-12-23T18:53:08.359025Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224038895] EvServerConnected, pipe server id = [2:6279:3399] 2025-12-23T18:53:08.359342Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224038895] EvConnectNode, pipe server id = [2:6279:3399], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:53:08.359403Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:53:08.359681Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:53:08.359778Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6264:3394], StatRequests.size() = 1 2025-12-23T18:53:08.359856Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:53:02.243776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:02.243916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:02.243977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:53:02.244024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:53:02.244076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:02.244113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:02.244192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:02.259446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:53:02.260503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:02.261894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:02.338131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:02.338204Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:02.353684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:02.353925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:02.354132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:02.367144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:02.369274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:02.372155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:02.380605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:02.389569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:02.390508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:02.410220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:02.410324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:02.410541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:02.410590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:02.410671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:02.410824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:02.574608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.575822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.575981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.576097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.576183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.576285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.576364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.576461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.576559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.576639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.576727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.576813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.576912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.577012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:53:02.577095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... T18:53:10.266458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-12-23T18:53:10.266864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.267000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.267394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.267480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.267752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.267902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.267952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.268098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.268308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.268396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.268802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.269086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.269170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.269221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.269462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.269520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.269570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.269945Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 
72057594046678944 2025-12-23T18:53:10.276570Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:53:10.276774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:10.280337Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435083, Sender [1:1420:3363], Recipient [1:1420:3363]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-12-23T18:53:10.280394Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-12-23T18:53:10.281322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:10.281400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:10.282019Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:1420:3363], Recipient [1:1420:3363]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:53:10.282062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:53:10.282662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:10.282725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:10.282795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:10.282842Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:53:10.284273Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 274399233, Sender [1:1456:3363], Recipient [1:1420:3363]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-12-23T18:53:10.284322Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5420: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-12-23T18:53:10.284353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1420:3363] sender: [1:1477:2058] recipient: [1:15:2062] 2025-12-23T18:53:10.322803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:1476:3408], Recipient [1:1420:3363]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-12-23T18:53:10.322883Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:53:10.323015Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { 
ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:53:10.323327Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 313us result status StatusSuccess 2025-12-23T18:53:10.324189Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: 
false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82624 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:53:10.515055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:10.515153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:10.515195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:53:10.515255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 
2025-12-23T18:53:10.515297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:10.515328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:10.515383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:10.515468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:53:10.516390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:10.516725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:10.603179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:10.603248Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:10.609365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:10.609601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:10.609728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:10.613136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:10.613347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:10.614137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:10.614395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:10.616573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:10.616755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:10.617958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:10.618018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:10.618162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:10.618214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:10.618267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:10.618423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.625954Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:53:10.755513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:53:10.755782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.756053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:53:10.756102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:53:10.756368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:53:10.756441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:53:10.759397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:10.759671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:53:10.759921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.759990Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:53:10.760031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:53:10.760067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:53:10.762495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.762568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-12-23T18:53:10.762612Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:53:10.764765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.764827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:53:10.764891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:53:10.764940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:53:10.774795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:53:10.777399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:53:10.777624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:53:10.778891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:10.779033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:53:10.779088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:53:10.779448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:53:10.779510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:53:10.779697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:53:10.779780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:53:10.782258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:10.782312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... blishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:53:11.020603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:11.020659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:343:2319], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-12-23T18:53:11.020711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:343:2319], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-12-23T18:53:11.020781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-12-23T18:53:11.020830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2025-12-23T18:53:11.020955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:53:11.021006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:53:11.021059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2025-12-23T18:53:11.021095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:53:11.021136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-12-23T18:53:11.021177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-12-23T18:53:11.021231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2025-12-23T18:53:11.021281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 100:0 2025-12-23T18:53:11.021447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-12-23T18:53:11.021492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-12-23T18:53:11.021542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-12-23T18:53:11.021578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-12-23T18:53:11.022273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:53:11.022381Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:53:11.022427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:53:11.022480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-12-23T18:53:11.022527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:53:11.023217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:53:11.023304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-12-23T18:53:11.023336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-12-23T18:53:11.023379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-12-23T18:53:11.023408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-12-23T18:53:11.023471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-12-23T18:53:11.026622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-12-23T18:53:11.026716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-12-23T18:53:11.027003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-12-23T18:53:11.027058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-12-23T18:53:11.027491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-12-23T18:53:11.027562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-12-23T18:53:11.027599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:488:2437] TestWaitNotification: 
OK eventTxId 100 2025-12-23T18:53:11.028017Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:53:11.028198Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 175us result status StatusSuccess 2025-12-23T18:53:11.028603Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 3 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 3 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 300 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:53:11.029146Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:53:11.029373Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 234us result status StatusSuccess 2025-12-23T18:53:11.029800Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 
ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 3 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 3 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 300 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> ColumnBuildTest::Applying_DisableFlag [GOOD] >> ColumnBuildTest::BaseCase [GOOD] >> ColumnBuildTest::Cancellation_Applying_DisableFlag |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> ColumnBuildTest::Rejecting [GOOD] >> ColumnBuildTest::Unlocking_DisableFlag |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TestKinesisHttpProxy::TestPing [GOOD] >> ColumnBuildTest::Filling_DisableFlag [GOOD] >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> KqpCompileFallback::NoFallbackWhenSqlVersion1 >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> ResourcePoolsDdl::TestCreateResourcePool [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Applying_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:53:06.631993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:06.632109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:06.632157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:53:06.632194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:53:06.632298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:06.632325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:06.632386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:06.632465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:53:06.633333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:06.637422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:06.728259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:06.728347Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:06.741841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:06.741947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:06.742397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:06.756134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:06.756339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:06.756976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:06.760510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:06.771027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:06.771193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:06.777910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:06.777992Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:06.778196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:06.778294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:06.778356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:06.779806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:06.953463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.954487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.954632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.954717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.954803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.954911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.955020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.955111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.955196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.955264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.955320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.955384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.955473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.955574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.955675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... BUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2025-12-23T18:53:12.059366Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2025-12-23T18:53:12.059395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2025-12-23T18:53:12.059427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2025-12-23T18:53:12.059515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1826:3691] message: TxId: 281474976730757 2025-12-23T18:53:12.059570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2025-12-23T18:53:12.059610Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2025-12-23T18:53:12.059638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976730757:0 2025-12-23T18:53:12.059709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2025-12-23T18:53:12.062578Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2025-12-23T18:53:12.062659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976730757 2025-12-23T18:53:12.062725Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730757 2025-12-23T18:53:12.062882Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, 
InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2025-12-23T18:53:12.064735Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2025-12-23T18:53:12.064867Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:53:12.064921Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-12-23T18:53:12.066264Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2025-12-23T18:53:12.066411Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:53:12.066455Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2025-12-23T18:53:12.066587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:53:12.066636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1884:3738] TestWaitNotification: OK eventTxId 105 2025-12-23T18:53:12.067388Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2025-12-23T18:53:12.067779Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } 2025-12-23T18:53:12.068576Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:53:12.068813Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 258us result status StatusSuccess 2025-12-23T18:53:12.069328Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> TestKinesisHttpProxy::TestRequestBadJson >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Filling_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:53:06.631994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:06.632114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:06.632163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:53:06.632196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:53:06.632305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:06.632331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:06.632383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:06.632467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:53:06.633319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:06.637421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:06.728906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:06.728941Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2025-12-23T18:53:06.741065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:06.741272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:06.742399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:06.754162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:06.754480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:06.757041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:06.760439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:06.771704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:06.771869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:06.777917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:06.777994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:06.778188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:06.778267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:06.778388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:06.779804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:06.951301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.952264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.952400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.952480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.952541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.952622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.952717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.952800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.952895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.952964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.953016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.953073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.953149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.953238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.953328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
12.450318Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976730759:0 ProgressState 2025-12-23T18:53:12.450383Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730759:0 progress is 1/1 2025-12-23T18:53:12.450411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2025-12-23T18:53:12.450437Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730759:0 progress is 1/1 2025-12-23T18:53:12.450467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2025-12-23T18:53:12.450495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730759, ready parts: 1/1, is published: true 2025-12-23T18:53:12.450545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1811:3677] message: TxId: 281474976730759 2025-12-23T18:53:12.450588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2025-12-23T18:53:12.450617Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730759:0 2025-12-23T18:53:12.450645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976730759:0 2025-12-23T18:53:12.450700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2025-12-23T18:53:12.453249Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976730759 2025-12-23T18:53:12.453325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976730759 2025-12-23T18:53:12.453384Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730759 2025-12-23T18:53:12.453522Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 
CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730759 2025-12-23T18:53:12.455016Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2025-12-23T18:53:12.455139Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:53:12.455213Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2025-12-23T18:53:12.456428Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2025-12-23T18:53:12.456513Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:53:12.456554Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2025-12-23T18:53:12.456666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for 
txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:53:12.456700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1956:3810] TestWaitNotification: OK eventTxId 105 2025-12-23T18:53:12.457225Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2025-12-23T18:53:12.457471Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2025-12-23T18:53:12.458199Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:53:12.458452Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 257us result status StatusSuccess 2025-12-23T18:53:12.458858Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> KqpCompileFallback::FallbackMechanismWorks |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] >> ResourcePoolsDdl::TestWorkloadConfigOnServerless |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> ColumnBuildTest::Unlocking_DisableFlag [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet [GOOD] >> KqpCompileFallback::FallbackWithPreparedQuery [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpWorkloadService::TestZeroQueueSize [GOOD] >> KqpWorkloadService::TestZeroQueueSizeManyQueries >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_replace_config [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> ColumnBuildTest::Cancellation_Applying_DisableFlag [GOOD] >> KqpCompileFallback::FallbackToVersion1Success [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Unlocking_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:53:06.631998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:06.632161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:06.632221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:53:06.632255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using 
default configuration 2025-12-23T18:53:06.632294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:06.632326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:06.632381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:06.632498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:53:06.633320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:06.637414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:06.728768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:06.728818Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:06.741314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:06.741513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:06.742421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:06.755624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:06.755845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:06.757085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:06.760456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:06.772189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:06.772384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:06.777918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:06.778006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:06.778205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:06.778282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:06.778341Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:06.779822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:06.945132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.948944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.949148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.949235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.949301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.949389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.949506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.949578Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.949662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.949756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.949821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.949887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.949971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.950080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.950185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... BUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-12-23T18:53:15.070195Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976725761:0 progress is 1/1 2025-12-23T18:53:15.070230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-12-23T18:53:15.070268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-12-23T18:53:15.070341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1882:3747] message: TxId: 281474976725761 2025-12-23T18:53:15.070396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-12-23T18:53:15.070435Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:0 2025-12-23T18:53:15.070468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976725761:0 2025-12-23T18:53:15.070539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2025-12-23T18:53:15.072786Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-12-23T18:53:15.072865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976725761 2025-12-23T18:53:15.072942Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976725761 2025-12-23T18:53:15.073076Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725761 2025-12-23T18:53:15.074596Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2025-12-23T18:53:15.074703Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:53:15.074776Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-12-23T18:53:15.075981Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2025-12-23T18:53:15.076062Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:53:15.076098Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2025-12-23T18:53:15.076186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-12-23T18:53:15.076228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1941:3795] TestWaitNotification: OK eventTxId 105 2025-12-23T18:53:15.076800Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: 
DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2025-12-23T18:53:15.077076Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } 2025-12-23T18:53:15.077709Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:53:15.077905Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 217us result status StatusSuccess 2025-12-23T18:53:15.078244Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet [GOOD] Test command err: Trying to start YDB, gRPC: 4583, MsgBus: 24852 2025-12-23T18:53:10.029843Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128406030193520:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:10.030909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bd5/r3tmp/tmpkeGvdz/pdisk_1.dat 2025-12-23T18:53:10.209885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:10.210018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:10.269293Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:10.272295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:10.272572Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:10.274226Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128406030193416:2081] 1766515990025924 != 1766515990025927 TServer::EnableGrpc on GrpcPort 4583, node 1 2025-12-23T18:53:10.360723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:10.360756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:10.360769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:10.360875Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:10.487250Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24852 TClient is connected to server localhost:24852 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515990351 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:10.835172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:10.853379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:10.999207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:11.094050Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:11.144465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:11.204477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:12.691891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128414620129880:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:12.692007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:12.692375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128414620129890:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:12.692435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:12.975354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.000741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.025450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.049257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.076049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.106095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.149150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.185780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.253523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128418915098057:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.253592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128418915098062:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.253607Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.253770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128418915098064:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.253816Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.257394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:13.268990Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128418915098065:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:53:13.359487Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128418915098117:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:14.737135Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:14.737288Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007CB22ED0ACE8 2025-12-23T18:53:14.737337Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7587128423210065717:2529], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kd68xw3g5hnjjzy9z4fdfagd, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzRlMTliOTAtMzg1NjJlYTUtMWVjYmFmNzYtZTE2ODNhMTE=, PoolId: default, IsStreamingQuery: 0} 2025-12-23T18:53:14.737500Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:14.737552Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7587128423210065717:2529], queueSize: 1 2025-12-23T18:53:14.738134Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7587128423210065717:2529], compileActor: [1:7587128423210065725:2534] 2025-12-23T18:53:14.738168Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2025-12-23T18:53:14.738226Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7587128423210065725:2534], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2025-12-23T18:53:14.738138Z 2025-12-23T18:53:14.887079Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7587128423210065725:2534]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1766515994","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"3db31375-7c7be34a-1259110e-df90bbe7","version":"1.0"} 2025-12-23T18:53:14.887606Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7587128423210065725:2534], duration: 0.149442s 2025-12-23T18:53:14.887645Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7587128423210065725:2534], owner: [1:7587128414620129841:2383], status: SUCCESS, issues: , uid: 3db31375-7c7be34a-1259110e-df90bbe7 2025-12-23T18:53:14.887755Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7587128423210065717:2529], status: SUCCESS, compileActor: [1:7587128423210065725:2534] 2025-12-23T18:53:14.887831Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7587128423210065717:2529], queryUid: 3db31375-7c7be34a-1259110e-df90bbe7, status:SUCCESS 2025-12-23T18:53:15.029108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128406030193520:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:15.029198Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithPreparedQuery [GOOD] Test command err: Trying to start YDB, gRPC: 25554, MsgBus: 23757 2025-12-23T18:53:09.108637Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128400833222615:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:09.108726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bd7/r3tmp/tmptyuc8r/pdisk_1.dat 2025-12-23T18:53:09.376331Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:09.411187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:09.411265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:09.446878Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128400833222578:2081] 1766515989104024 != 1766515989104027 2025-12-23T18:53:09.453222Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:09.454967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25554, node 1 2025-12-23T18:53:09.565941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:09.689984Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:09.690069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:09.690081Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:09.690152Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23757 2025-12-23T18:53:10.116099Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:23757 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515989525 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:10.363533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:10.381932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:10.528599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:10.667277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:10.736217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:11.907226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128409423159039:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:11.907346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:11.907709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128409423159048:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:11.907763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:12.495459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:12.525954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:12.548298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:12.570903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:12.599573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:12.628741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:12.659940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:12.706790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:12.789402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128413718127216:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:12.789532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:12.789778Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128413718127222:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:12.789781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128413718127221:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:12.789829Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:12.796072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:12.805275Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128413718127225:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:12.879860Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128413718127278:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:14.108946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128400833222615:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:14.109024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:14.541518Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:14.543603Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C2560CBA888 2025-12-23T18:53:14.543664Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7587128422308062175:2530], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n ", keepInCache: 1, split: 0{ TraceId: 01kd68xvxb9xh6b96ge3thvfbr, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmRhMjk5NjktZmFiZTk1MjItNTA1Zjk4ZGEtNmQyZWFhN2M=, PoolId: default, IsStreamingQuery: 0} 2025-12-23T18:53:14.543868Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:14.543934Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7587128422308062175:2530], queueSize: 1 2025-12-23T18:53:14.544516Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n 2025-12-23T18:53:14.544561Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7587128422308062175:2530], compileActor: [1:7587128422308062183:2535] 2025-12-23T18:53:14.544591Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2025-12-23T18:53:14.544629Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7587128422308062183:2535], cluster: db, database: /Root, text: "\n 
SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n ", startTime: 2025-12-23T18:53:14.544567Z 2025-12-23T18:53:14.581078Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with SqlVersion = 1 failed, retrying with SqlVersion = 0, self: [1:7587128422308062183:2535], database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n " 2025-12-23T18:53:14.855608Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7587128422308062183:2535]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1766515994","query_text":"\\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"b11f6b3f-5cb690e-fdc7143d-94eac1f2","version":"1.0"} 2025-12-23T18:53:14.856010Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7587128422308062183:2535], duration: 0.311418s 2025-12-23T18:53:14.856035Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7587128422308062183:2535], owner: [1:7587128409423159004:2383], status: SUCCESS, issues: , uid: b11f6b3f-5cb690e-fdc7143d-94eac1f2 2025-12-23T18:53:14.856157Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7587128422308062175:2530], status: SUCCESS, compileActor: [1:7587128422308062183:2535] 2025-12-23T18:53:14.856349Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, 
QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:14.856469Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7587128422308062175:2530], queryUid: b11f6b3f-5cb690e-fdc7143d-94eac1f2, status:SUCCESS 2025-12-23T18:53:14.864561Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1223: Served query from cache by uid, sender: [1:7587128422308062175:2530], queryUid: b11f6b3f-5cb690e-fdc7143d-94eac1f2 |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Cancellation_Applying_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:53:06.632016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:06.632176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:06.632241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:53:06.632283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:53:06.632334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:06.632366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:06.632436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:06.632572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:53:06.633504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:06.637442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:06.736424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:06.736489Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:06.748406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:06.748627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:06.748837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:06.760651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:06.760887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:06.761669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:06.761883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:06.771810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:06.771985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:06.777931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:06.778017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:06.778228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:06.778310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:06.778381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:06.779823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:06.940943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.943296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.943472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.943550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.943626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.943697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.943780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.943846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.943926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.943991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.944061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.944145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.944260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.944342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:06.944441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
UG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976730757:0, at schemeshard: 72075186233409549 2025-12-23T18:53:15.915033Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976730757:0 ProgressState 2025-12-23T18:53:15.915130Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2025-12-23T18:53:15.915161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2025-12-23T18:53:15.915196Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2025-12-23T18:53:15.915220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2025-12-23T18:53:15.915249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2025-12-23T18:53:15.915323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2594:4355] message: TxId: 281474976730757 2025-12-23T18:53:15.915368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2025-12-23T18:53:15.915422Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2025-12-23T18:53:15.915450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976730757:0 2025-12-23T18:53:15.915515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 12 2025-12-23T18:53:15.919079Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2025-12-23T18:53:15.919156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976730757 2025-12-23T18:53:15.919220Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976730757 2025-12-23T18:53:15.919370Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, 
UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2025-12-23T18:53:15.921474Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking 2025-12-23T18:53:15.921653Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:53:15.921737Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-12-23T18:53:15.923319Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled 2025-12-23T18:53:15.923435Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:53:15.923476Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-12-23T18:53:15.923631Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:53:15.923698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:2664:4414] TestWaitNotification: OK eventTxId 106 2025-12-23T18:53:15.924362Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-12-23T18:53:15.924661Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2025-12-23T18:53:15.925416Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-12-23T18:53:15.925640Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 255us result status StatusSuccess 2025-12-23T18:53:15.926102Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 
Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackToVersion1Success [GOOD] Test command err: Trying to start YDB, gRPC: 4374, MsgBus: 4217 2025-12-23T18:53:10.010764Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128409102974644:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:10.011213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bd4/r3tmp/tmp9V9NaM/pdisk_1.dat 2025-12-23T18:53:10.197945Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:10.205750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:10.205843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:10.250390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:10.284163Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:10.284681Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128409102974541:2081] 1766515990006951 != 1766515990006954 TServer::EnableGrpc on GrpcPort 4374, node 1 2025-12-23T18:53:10.334087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:10.334109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2025-12-23T18:53:10.334116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:10.334234Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:10.490819Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4217 TClient is connected to server localhost:4217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515990337 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:10.789307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:10.809175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:10.929596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:11.034550Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:11.092559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:11.152524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:13.200052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128421987878313:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.200153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.200478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128421987878323:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.200551Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.489157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.519622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.553572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.583060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.610225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.641014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.671902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.716933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:13.790954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128421987879192:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.791034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.791075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128421987879197:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.791523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128421987879199:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.791620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.795239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:13.807408Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128421987879200:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:53:13.893200Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128421987879252:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:15.009983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128409102974644:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:15.010052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:15.482175Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:15.482346Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C8D2009DB48 2025-12-23T18:53:15.482406Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7587128430577814152:2530], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kd68xwtsav82btb0avarcsdq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTI2OGNhYjItM2NmMTVkM2MtMmRiMzE3ODEtNDA1MGZiNDQ=, PoolId: default, IsStreamingQuery: 0} 2025-12-23T18:53:15.482551Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:15.482610Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7587128430577814152:2530], queueSize: 1 2025-12-23T18:53:15.483241Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n 2025-12-23T18:53:15.483313Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7587128430577814152:2530], compileActor: [1:7587128430577814160:2535] 2025-12-23T18:53:15.483347Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2025-12-23T18:53:15.483391Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7587128430577814160:2535], cluster: db, database: /Root, text: "\n 
SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2025-12-23T18:53:15.483320Z 2025-12-23T18:53:15.619654Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7587128430577814160:2535]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1766515995","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"40257b7a-99a0113d-c4fd5a97-94d0c355","version":"1.0"} 2025-12-23T18:53:15.620211Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7587128430577814160:2535], duration: 0.136854s 2025-12-23T18:53:15.620250Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7587128430577814160:2535], owner: [1:7587128421987878273:2383], status: SUCCESS, issues: , uid: 40257b7a-99a0113d-c4fd5a97-94d0c355 2025-12-23T18:53:15.620370Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7587128430577814152:2530], status: SUCCESS, compileActor: [1:7587128430577814160:2535] 2025-12-23T18:53:15.620441Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7587128430577814152:2530], queryUid: 40257b7a-99a0113d-c4fd5a97-94d0c355, status:SUCCESS |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False [GOOD] Test command err: Trying to start YDB, gRPC: 11146, MsgBus: 64550 2025-12-23T18:53:10.855257Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128406398625888:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:10.855646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bc8/r3tmp/tmpfcLr8N/pdisk_1.dat 2025-12-23T18:53:11.088759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:11.092064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:11.092192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:11.099692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:11.140909Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:11.143814Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128406398625786:2081] 1766515990851046 != 1766515990851049 TServer::EnableGrpc on GrpcPort 11146, node 1 2025-12-23T18:53:11.210661Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:11.210689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:11.210695Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:11.210803Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:11.283329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:64550 TClient is connected to server localhost:64550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515991205 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:53:11.577718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:11.591410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:11.713157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:11.861333Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:11.870686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:11.934698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:13.924306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128419283529548:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.924465Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.924851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128419283529558:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:13.924932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:14.258366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:14.290564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:14.320624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:14.346992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:14.376152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:14.408713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:14.469641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:14.507244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:14.572486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128423578497725:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:14.572595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:14.572630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128423578497730:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:14.572822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128423578497732:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:14.572894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:14.576176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:14.586523Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128423578497734:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:14.690152Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128423578497785:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:15.854125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128406398625888:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:15.854178Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:16.161004Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:16.161160Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C5C4A12F048 2025-12-23T18:53:16.161208Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7587128432168432685:2530], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kd68xxfzek02m4g87etaq56n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OGQyYWM5ZTctMWUyY2VlNWYtYTUzNzExZWYtOGNlMTFiZWY=, PoolId: default, IsStreamingQuery: 0} 2025-12-23T18:53:16.161352Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:16.161407Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7587128432168432685:2530], queueSize: 1 2025-12-23T18:53:16.161979Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7587128432168432685:2530], compileActor: [1:7587128432168432693:2535] 2025-12-23T18:53:16.162010Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2025-12-23T18:53:16.162050Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7587128432168432693:2535], cluster: db, database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", startTime: 2025-12-23T18:53:16.161985Z 2025-12-23T18:53:16.328175Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7587128432168432693:2535]]: Built the 
replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1766515996","query_text":"\\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (-∞, +∞)\"],\"limit\":\"1\",\"type\":\"FullScan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"8ffb8458-71cdf285-acc7fb49-6a481b3e","version":"1.0"} 2025-12-23T18:53:16.329010Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7587128432168432693:2535], duration: 0.166985s 2025-12-23T18:53:16.329042Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7587128432168432693:2535], owner: [1:7587128419283529507:2382], status: SUCCESS, issues: , uid: 8ffb8458-71cdf285-acc7fb49-6a481b3e 2025-12-23T18:53:16.331140Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7587128432168432685:2530], status: SUCCESS, compileActor: [1:7587128432168432693:2535] 2025-12-23T18:53:16.331224Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7587128432168432685:2530], queryUid: 8ffb8458-71cdf285-acc7fb49-6a481b3e, status:SUCCESS |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TestKinesisHttpProxy::TestRequestBadJson 
[GOOD] >> TKesusTest::TestSessionTimeoutAfterDetach >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] |97.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> TKesusTest::TestAcquireLocks >> TKesusTest::TestAttachNewSessions >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> TKesusTest::TestAcquireUpgrade >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> TKesusTest::TestUnregisterProxy >> TKesusTest::TestAttachOutOfSequence >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestRegisterProxy >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> TKesusTest::TestQuoterResourceDescribe >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] >> KqpCompileFallback::NoFallbackWhenSqlVersion1 [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] >> TKesusTest::TestQuoterHDRRParametersValidation |97.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestReleaseLockFailure >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> TestKinesisHttpProxy::TestConsumersEmptyNames |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] |97.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestKesusConfig |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> KqpCompileFallback::FallbackMechanismWorks [GOOD] >> TKesusTest::TestUnregisterProxy [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAcquireTimeout >> TKesusTest::TestAttachOutOfSequenceInTx >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachMissingSession >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> TKesusTest::TestQuoterResourceCreation >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor >> TKesusTest::TestAttachMissingSession [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> TKesusTest::TestAttachOldGeneration >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersion1 [GOOD] Test command err: Trying to start YDB, gRPC: 1136, MsgBus: 12703 2025-12-23T18:53:12.959010Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128416599308187:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:12.959773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bbf/r3tmp/tmpIgMrb3/pdisk_1.dat 2025-12-23T18:53:13.165821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:13.193794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:13.193944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:13.215541Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:13.274927Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128416599308159:2081] 1766515992957188 != 1766515992957191 2025-12-23T18:53:13.277783Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1136, node 1 2025-12-23T18:53:13.316494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:13.316524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:13.316538Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:13.316644Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:13.372901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12703 TClient is connected to server localhost:12703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515993291 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:13.839108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:13.854026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:13.966538Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 
2025-12-23T18:53:14.010475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:14.143859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:14.200433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:15.890615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128429484211917:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:15.890733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:15.891023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128429484211927:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:15.891073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.235090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.263106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.292695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.323312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.351413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.381304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.429399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.473761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.545962Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128433779180095:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.546046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.546138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128433779180100:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.546218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128433779180102:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.546265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.549394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:16.558258Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128433779180104:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:16.650172Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128433779180155:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:17.721498Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:17.721646Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C7769AFB568 2025-12-23T18:53:17.721689Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7587128438074147752:2528], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kd68xz0r2a9arhth2vsc7bw7, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWVhM2ViN2MtZTA4NjUxOTUtODg1Yjk5MWItMTVlZDQzNjk=, PoolId: default, IsStreamingQuery: 0} 2025-12-23T18:53:17.721843Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:17.721900Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7587128438074147752:2528], queueSize: 1 2025-12-23T18:53:17.722546Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7587128438074147752:2528], compileActor: [1:7587128438074147760:2533] 2025-12-23T18:53:17.722563Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2025-12-23T18:53:17.722614Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7587128438074147760:2533], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2025-12-23T18:53:17.722539Z 2025-12-23T18:53:17.860856Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7587128438074147760:2533]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1766515997","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"b59fc069-c261b36b-6c9e8045-c0ec5c51","version":"1.0"} 2025-12-23T18:53:17.861382Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7587128438074147760:2533], duration: 0.138817s 2025-12-23T18:53:17.861421Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7587128438074147760:2533], owner: [1:7587128429484211879:2383], status: SUCCESS, issues: , uid: b59fc069-c261b36b-6c9e8045-c0ec5c51 2025-12-23T18:53:17.861546Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7587128438074147752:2528], status: SUCCESS, compileActor: [1:7587128438074147760:2533] 2025-12-23T18:53:17.861629Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7587128438074147752:2528], queryUid: b59fc069-c261b36b-6c9e8045-c0ec5c51, status:SUCCESS 2025-12-23T18:53:17.959128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128416599308187:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:17.959216Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD] >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits >> TKesusTest::TestAttachThenReRegister 
[GOOD] >> TKesusTest::TestAttachTimeoutTooBig >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterResourceModification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] Test command err: 2025-12-23T18:52:57.776715Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128349166905834:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:57.776780Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003938/r3tmp/tmpLqfvrD/pdisk_1.dat 2025-12-23T18:52:57.999121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:58.046296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:58.046528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:58.049071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:58.082442Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128349166905808:2081] 1766515977775476 != 1766515977775479 2025-12-23T18:52:58.087417Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20319, node 1 2025-12-23T18:52:58.128258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:58.128280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:58.128286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:58.128376Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:58.171873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2814 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515978108 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:58.345360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:58.783467Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:59.823235Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:52:59.823492Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128357756841262:2319], Start check tables existence, number paths: 2 2025-12-23T18:52:59.828459Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:52:59.828483Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:52:59.828586Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128357756841262:2319], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:52:59.828633Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128357756841262:2319], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:52:59.828654Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128357756841262:2319], Successfully finished 2025-12-23T18:52:59.828736Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-12-23T18:52:59.828950Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-12-23T18:52:59.830667Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=Y2ViNzYzOWMtNWYzMzNmNTEtZWIyMDQzMjktYTVjNmRkMjY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Y2ViNzYzOWMtNWYzMzNmNTEtZWIyMDQzMjktYTVjNmRkMjY= (tmp dir name: 6fa969f3-4b30-859d-1179-a280fd5e72f8) 2025-12-23T18:52:59.830755Z node 1 
:KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=Y2ViNzYzOWMtNWYzMzNmNTEtZWIyMDQzMjktYTVjNmRkMjY=, ActorId: [1:7587128357756841282:2323], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:52:59.840440Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128357756841284:2498], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:52:59.843253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:59.844289Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128357756841284:2498], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-12-23T18:52:59.844449Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128357756841284:2498], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-12-23T18:52:59.851070Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128357756841284:2498], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:52:59.935528Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128357756841284:2498], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:52:59.939029Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128357756841335:2530] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:59.939127Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128357756841284:2498], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-12-23T18:52:59.939364Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128357756841342:2536], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-12-23T18:52:59.940457Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128357756841342:2536], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-12-23T18:52:59.947361Z node 1 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=1&id=Y2ViNzYzOWMtNWYzMzNmNTEtZWIyMDQzMjktYTVjNmRkMjY=, ActorId: [1:7587128357756841282:2323], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:52:59.947414Z node 1 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=1&id=Y2ViNzYzOWMtNWYzMzNmNTEtZWIyMDQzMjktYTVjNmRkMjY=, ActorId: [1:7587128357756841282:2323], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:52:59.947432Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=1&id=Y2ViNzYzOWMtNWYzMzNmNTEtZWIyMDQzMjktYTVjNmRkMjY=, ActorId: [1:7587128357756841282:2323], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:52:59.947445Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=1&id=Y2ViNzYzOWMtNWYzMzNmNTEtZWIyMDQzMjktYTVjNmRkMjY=, ActorId: [1:7587128357756841282:2323], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:52:59.947516Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=1&id=Y2ViNzYzOWMtNWYzMzNmNTEtZWIyMDQzMjktYTVjNmRkMjY=, ActorId: [1:7587128357756841282:2323], ActorState: unknown state, Session actor destroyed 2025-12-23T18:53:00.517128Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128364187735970:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:00.517190Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003938/r3tmp/tmppRg8fM/pdisk_1.dat 2025-12-23T18:53:00.531526Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:00.586677Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:00.588487Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587128364187735944:2081] 1766515980516302 != 1766515980516305 TServer::EnableGrpc on GrpcPort 7526, node 2 2025-12-23T18:53:00.635190Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:00.635282Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileS ... ctorId: [6:7587128442535277150:2341], ActorState: ReadyState, TraceId: 01kd68xzf0cke0f0g60cdmq6b7, received request, proxyRequestId: 5 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: ALTER RESOURCE POOL default SET ( QUERY_MEMORY_LIMIT_PERCENT_PER_NODE=1 ); rpcActor: [6:7587128442535277149:2573] database: Root databaseId: /Root pool id: default 2025-12-23T18:53:18.176356Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:268: SessionId: ydb://session/3?node_id=6&id=Y2QyZDcyNDUtZmVlZDU0NWMtNGY4MTA1NGQtM2MzYzU5Njk=, ActorId: [6:7587128442535277150:2341], ActorState: ReadyState, TraceId: 01kd68xzf0cke0f0g60cdmq6b7, request placed into pool from cache: default 2025-12-23T18:53:18.176437Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:630: SessionId: ydb://session/3?node_id=6&id=Y2QyZDcyNDUtZmVlZDU0NWMtNGY4MTA1NGQtM2MzYzU5Njk=, ActorId: [6:7587128442535277150:2341], ActorState: ExecuteState, TraceId: 01kd68xzf0cke0f0g60cdmq6b7, Sending CompileQuery request 2025-12-23T18:53:18.222667Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp:155) 2025-12-23T18:53:18.225046Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7587128442535277115:2334], DatabaseId: /Root, PoolId: default, Got watch notification 2025-12-23T18:53:18.225180Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:476: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7587128442535277115:2334], DatabaseId: /Root, PoolId: default, Pool config has changed, queue size: -1, in flight limit: -1 2025-12-23T18:53:18.228275Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2090: SessionId: ydb://session/3?node_id=6&id=Y2QyZDcyNDUtZmVlZDU0NWMtNGY4MTA1NGQtM2MzYzU5Njk=, ActorId: [6:7587128442535277150:2341], ActorState: ExecuteState, TraceId: 01kd68xzf0cke0f0g60cdmq6b7, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-12-23T18:53:18.228428Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2383: SessionId: ydb://session/3?node_id=6&id=Y2QyZDcyNDUtZmVlZDU0NWMtNGY4MTA1NGQtM2MzYzU5Njk=, ActorId: [6:7587128442535277150:2341], ActorState: ExecuteState, TraceId: 01kd68xzf0cke0f0g60cdmq6b7, txInfo Status: Committed Kind: Pure TotalDuration: 7.866 ServerDuration: 7.809 QueriesCount: 2 2025-12-23T18:53:18.228505Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2543: SessionId: ydb://session/3?node_id=6&id=Y2QyZDcyNDUtZmVlZDU0NWMtNGY4MTA1NGQtM2MzYzU5Njk=, ActorId: [6:7587128442535277150:2341], ActorState: ExecuteState, TraceId: 
01kd68xzf0cke0f0g60cdmq6b7, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-12-23T18:53:18.228649Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=6&id=Y2QyZDcyNDUtZmVlZDU0NWMtNGY4MTA1NGQtM2MzYzU5Njk=, ActorId: [6:7587128442535277150:2341], ActorState: ExecuteState, TraceId: 01kd68xzf0cke0f0g60cdmq6b7, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:18.228699Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=6&id=Y2QyZDcyNDUtZmVlZDU0NWMtNGY4MTA1NGQtM2MzYzU5Njk=, ActorId: [6:7587128442535277150:2341], ActorState: ExecuteState, TraceId: 01kd68xzf0cke0f0g60cdmq6b7, EndCleanup, isFinal: 1 2025-12-23T18:53:18.228757Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=6&id=Y2QyZDcyNDUtZmVlZDU0NWMtNGY4MTA1NGQtM2MzYzU5Njk=, ActorId: [6:7587128442535277150:2341], ActorState: ExecuteState, TraceId: 01kd68xzf0cke0f0g60cdmq6b7, Sent query response back to proxy, proxyRequestId: 5, proxyId: [6:7587128429650374502:2264] 2025-12-23T18:53:18.228802Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=6&id=Y2QyZDcyNDUtZmVlZDU0NWMtNGY4MTA1NGQtM2MzYzU5Njk=, ActorId: [6:7587128442535277150:2341], ActorState: unknown state, TraceId: 01kd68xzf0cke0f0g60cdmq6b7, Cleanup temp tables: 0 2025-12-23T18:53:18.228940Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=6&id=Y2QyZDcyNDUtZmVlZDU0NWMtNGY4MTA1NGQtM2MzYzU5Njk=, ActorId: [6:7587128442535277150:2341], ActorState: unknown state, TraceId: 01kd68xzf0cke0f0g60cdmq6b7, Session actor destroyed 2025-12-23T18:53:18.231659Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg== (tmp dir name: 7d094194-4b10-208f-493e-f890bfa5ef67) 2025-12-23T18:53:18.231805Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [6:7587128442535277175:2343], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:53:18.232102Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [6:7587128442535277175:2343], ActorState: ReadyState, TraceId: 01kd68xzgra7z5wr88xgv097d3, received request, proxyRequestId: 6 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: DROP RESOURCE POOL default; rpcActor: [6:7587128442535277174:2593] database: Root databaseId: /Root pool id: default 2025-12-23T18:53:18.232128Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:268: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [6:7587128442535277175:2343], ActorState: ReadyState, TraceId: 01kd68xzgra7z5wr88xgv097d3, request placed into pool from cache: default 2025-12-23T18:53:18.232193Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:630: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [6:7587128442535277175:2343], ActorState: ExecuteState, TraceId: 01kd68xzgra7z5wr88xgv097d3, Sending CompileQuery request 2025-12-23T18:53:18.255480Z node 6 
:KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:294: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7587128442535277115:2334], DatabaseId: /Root, PoolId: default, Got delete notification 2025-12-23T18:53:18.255572Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-12-23T18:53:18.255608Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-12-23T18:53:18.255649Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7587128442535277189:2345], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-12-23T18:53:18.256092Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7587128442535277189:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:18.256227Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:18.258896Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2090: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [6:7587128442535277175:2343], ActorState: ExecuteState, TraceId: 01kd68xzgra7z5wr88xgv097d3, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-12-23T18:53:18.259036Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2383: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [6:7587128442535277175:2343], ActorState: ExecuteState, TraceId: 01kd68xzgra7z5wr88xgv097d3, txInfo Status: Committed Kind: Pure TotalDuration: 7.838 ServerDuration: 7.775 QueriesCount: 2 2025-12-23T18:53:18.259121Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2543: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [6:7587128442535277175:2343], ActorState: ExecuteState, TraceId: 01kd68xzgra7z5wr88xgv097d3, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-12-23T18:53:18.259278Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [6:7587128442535277175:2343], ActorState: ExecuteState, TraceId: 01kd68xzgra7z5wr88xgv097d3, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:18.259309Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [6:7587128442535277175:2343], ActorState: ExecuteState, TraceId: 01kd68xzgra7z5wr88xgv097d3, EndCleanup, isFinal: 1 2025-12-23T18:53:18.259367Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [6:7587128442535277175:2343], ActorState: ExecuteState, TraceId: 01kd68xzgra7z5wr88xgv097d3, Sent query response back to proxy, proxyRequestId: 6, proxyId: [6:7587128429650374502:2264] 2025-12-23T18:53:18.259386Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [6:7587128442535277175:2343], ActorState: unknown state, TraceId: 01kd68xzgra7z5wr88xgv097d3, Cleanup temp tables: 0 2025-12-23T18:53:18.259547Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=6&id=YTBlZDdmYTctYzkwYzRlOC01ZTZjYmU3Yi1jNzA5YTgxNg==, ActorId: [6:7587128442535277175:2343], ActorState: unknown state, TraceId: 01kd68xzgra7z5wr88xgv097d3, Session actor destroyed 2025-12-23T18:53:18.267371Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=6&id=ODQwZTc3ZjctNWZmM2VjNDYtZWI1NDgyNDItOWNhNzY5ZmQ=, ActorId: [6:7587128438240309705:2323], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:53:18.267448Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=6&id=ODQwZTc3ZjctNWZmM2VjNDYtZWI1NDgyNDItOWNhNzY5ZmQ=, ActorId: [6:7587128438240309705:2323], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:18.267478Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: 
ydb://session/3?node_id=6&id=ODQwZTc3ZjctNWZmM2VjNDYtZWI1NDgyNDItOWNhNzY5ZmQ=, ActorId: [6:7587128438240309705:2323], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:53:18.267503Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=6&id=ODQwZTc3ZjctNWZmM2VjNDYtZWI1NDgyNDItOWNhNzY5ZmQ=, ActorId: [6:7587128438240309705:2323], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:53:18.267594Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=6&id=ODQwZTc3ZjctNWZmM2VjNDYtZWI1NDgyNDItOWNhNzY5ZmQ=, ActorId: [6:7587128438240309705:2323], ActorState: unknown state, Session actor destroyed >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackMechanismWorks [GOOD] Test command err: Trying to start YDB, gRPC: 10548, MsgBus: 13367 2025-12-23T18:53:13.719585Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128419059941634:2142];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:13.720016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bbd/r3tmp/tmphaWFiw/pdisk_1.dat 2025-12-23T18:53:13.899575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:13.902300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:13.902384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:13.957227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:13.968347Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:13.993637Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128419059941530:2081] 1766515993715745 != 1766515993715748 TServer::EnableGrpc on GrpcPort 10548, node 1 2025-12-23T18:53:14.052533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:14.052564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:14.052572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:14.052695Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:14.087454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13367 TClient is connected to server localhost:13367 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515994047 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:14.476066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:14.494315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:14.621077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:14.727450Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:14.754941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:14.816624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:16.303998Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128431944845295:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.304124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.304461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128431944845305:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.304516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.610876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.642329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.672110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.701863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.732137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.790640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.816986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.852526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:16.906979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128431944846176:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.907070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.907096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128431944846181:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.907234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128431944846183:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.907273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:16.910459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:16.919678Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128431944846184:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:53:16.989308Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128431944846236:3779] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:18.212529Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:18.212721Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C81873B3448 2025-12-23T18:53:18.212775Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7587128440534781129:2528], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", keepInCache: 0, split: 0{ TraceId: 01kd68xzg382q87z1thqk7qx3h, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA0MDFhYWMtNjFhZGZlZTctODQ3NWZlOWQtZDIwYWJmZjk=, PoolId: default, IsStreamingQuery: 0} 2025-12-23T18:53:18.212910Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-12-23T18:53:18.212970Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7587128440534781129:2528], queueSize: 1 2025-12-23T18:53:18.213424Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n 2025-12-23T18:53:18.213469Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7587128440534781129:2528], compileActor: [1:7587128440534781137:2533] 2025-12-23T18:53:18.213501Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2025-12-23T18:53:18.213540Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7587128440534781137:2533], cluster: db, database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", startTime: 2025-12-23T18:53:18.213477Z 2025-12-23T18:53:18.232038Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with SqlVersion = 1 failed, retrying with SqlVersion = 0, self: [1:7587128440534781137:2533], database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n " 2025-12-23T18:53:18.419953Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: 
[[1:7587128440534781137:2533]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1766515998","query_text":"\\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (-∞, +∞)\"],\"limit\":\"1\",\"type\":\"FullScan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"5a10570a-bea67492-f5f9b49d-fab45a54","version":"1.0"} 2025-12-23T18:53:18.420697Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7587128440534781137:2533], duration: 0.207191s 2025-12-23T18:53:18.420739Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7587128440534781137:2533], owner: [1:7587128431944845255:2382], status: SUCCESS, issues: , uid: 5a10570a-bea67492-f5f9b49d-fab45a54 2025-12-23T18:53:18.420879Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7587128440534781129:2528], status: SUCCESS, compileActor: [1:7587128440534781137:2533] 2025-12-23T18:53:18.420956Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7587128440534781129:2528], queryUid: 5a10570a-bea67492-f5f9b49d-fab45a54, status:SUCCESS ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2025-12-23T18:53:18.900428Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:18.900573Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:18.922331Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:18.922453Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:18.960784Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:18.961609Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=12010028002293596923, session=0, seqNo=0) 2025-12-23T18:53:18.962799Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:18.974672Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=12010028002293596923, session=1) 2025-12-23T18:53:18.975121Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=10831734014618276248, session=0, seqNo=0) 2025-12-23T18:53:18.975231Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:53:18.987195Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=10831734014618276248, session=2) 2025-12-23T18:53:18.988357Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:53:18.988538Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:53:18.988641Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:53:19.000824Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2025-12-23T18:53:19.001165Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=112, session=1, semaphore="Lock2" count=1) 2025-12-23T18:53:19.001305Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-12-23T18:53:19.001389Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-12-23T18:53:19.013139Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=112) 2025-12-23T18:53:19.013477Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:136:2161], cookie=333, name="Lock1") 2025-12-23T18:53:19.013592Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-12-23T18:53:19.013762Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] 
TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2025-12-23T18:53:19.013846Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2025-12-23T18:53:19.013906Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-12-23T18:53:19.013998Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-12-23T18:53:19.025908Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:136:2161], cookie=333) 2025-12-23T18:53:19.026000Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=222) 2025-12-23T18:53:19.026047Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=223) 2025-12-23T18:53:19.026379Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:136:2161], cookie=334, name="Lock2") 2025-12-23T18:53:19.026480Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-12-23T18:53:19.026567Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-12-23T18:53:19.038424Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:136:2161], cookie=334) 2025-12-23T18:53:19.038984Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:166:2188], cookie=4404532704255584836, name="Lock1") 2025-12-23T18:53:19.039078Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:166:2188], cookie=4404532704255584836) 2025-12-23T18:53:19.039551Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:169:2191], cookie=12377762329759513376, name="Lock2") 2025-12-23T18:53:19.039623Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:169:2191], cookie=12377762329759513376) 2025-12-23T18:53:19.054017Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.054137Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.054641Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.055438Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.071921Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.072059Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-12-23T18:53:19.072124Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-12-23T18:53:19.072486Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: 
[72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:208:2221], cookie=11463857078655460609, name="Lock1") 2025-12-23T18:53:19.072559Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:208:2221], cookie=11463857078655460609) 2025-12-23T18:53:19.073113Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:216:2228], cookie=1265348253577856710, name="Lock2") 2025-12-23T18:53:19.073211Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:216:2228], cookie=1265348253577856710) 2025-12-23T18:53:19.404221Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.404321Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.421119Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.421227Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.447116Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.447944Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:135:2159], cookie=11108405341267491936, session=0, seqNo=0) 2025-12-23T18:53:19.448080Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:19.459902Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:135:2159], cookie=11108405341267491936, session=1) 2025-12-23T18:53:19.460163Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2160], cookie=2698588945513017216, session=0, seqNo=0) 2025-12-23T18:53:19.460291Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:53:19.471950Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2160], cookie=2698588945513017216, session=2) 2025-12-23T18:53:19.472921Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:135:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:53:19.473060Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:53:19.473147Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:53:19.484900Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:135:2159], cookie=111) 2025-12-23T18:53:19.485196Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:135:2159], cookie=112, session=1, semaphore="Lock2" count=1) 2025-12-23T18:53:19.485340Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-12-23T18:53:19.485416Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-12-23T18:53:19.497217Z node 2 
:KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:135:2159], cookie=112) 2025-12-23T18:53:19.497564Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:135:2159], cookie=333, session=1, semaphore="Lock1" count=1) 2025-12-23T18:53:19.497760Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2160], cookie=222, session=2, semaphore="Lock1" count=1) 2025-12-23T18:53:19.497856Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-12-23T18:53:19.497949Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2160], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-12-23T18:53:19.509713Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:135:2159], cookie=333) 2025-12-23T18:53:19.509791Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2160], cookie=222) 2025-12-23T18:53:19.509820Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2160], cookie=223) 2025-12-23T18:53:19.510285Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:162:2184], cookie=3282688187466121753, name="Lock1") 2025-12-23T18:53:19.510369Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:162:2184], cookie=3282688187466121753) 2025-12-23T18:53:19.510782Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:165:2187], cookie=5970241673785772001, name="Lock2") 2025-12-23T18:53:19.510870Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:165:2187], cookie=5970241673785772001) 2025-12-23T18:53:19.511284Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:168:2190], cookie=12011362405596396094, name="Lock1") 2025-12-23T18:53:19.511350Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:168:2190], cookie=12011362405596396094) 2025-12-23T18:53:19.511839Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:171:2193], cookie=808669915567450076, name="Lock2") 2025-12-23T18:53:19.511904Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:171:2193], cookie=808669915567450076) 2025-12-23T18:53:19.512159Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2160], cookie=444, session=2, semaphore="Lock2" count=1) 2025-12-23T18:53:19.512271Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-12-23T18:53:19.524144Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2160], cookie=444) 2025-12-23T18:53:19.524692Z 
node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:176:2198], cookie=8023743606550919712, name="Lock2") 2025-12-23T18:53:19.524780Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:176:2198], cookie=8023743606550919712) 2025-12-23T18:53:19.525237Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:179:2201], cookie=16586538208260075064, name="Lock2") 2025-12-23T18:53:19.525347Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:179:2201], cookie=16586538208260075064) 2025-12-23T18:53:19.537971Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.538063Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.538508Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.539247Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.576080Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.576180Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:53:19.576214Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-12-23T18:53:19.576231Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-12-23T18:53:19.576247Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-12-23T18:53:19.576501Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:218:2231], cookie=9337305749270028418, name="Lock1") 2025-12-23T18:53:19.576557Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:218:2231], cookie=9337305749270028418) 2025-12-23T18:53:19.576924Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:226:2238], cookie=452688949467339639, name="Lock2") 2025-12-23T18:53:19.576971Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:226:2238], cookie=452688949467339639) |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> 
BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:22.204388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:22.317769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:22.323788Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:527:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:22.324219Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:22.324327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:22.677903Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:22.789055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:22.789187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:22.823932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:22.899694Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003205/r3tmp/tmpuPTjAu/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61410, node 1 2025-12-23T18:52:23.553779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:23.554235Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:23.554290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:23.554319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:23.554650Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:23.618000Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:64535 2025-12-23T18:52:24.155553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:26.720801Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:26.725614Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2025-12-23T18:52:26.728231Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:26.749538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:26.749616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:26.787583Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:52:26.789452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:26.953316Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:26.953388Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:26.954613Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.955339Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.955860Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.956684Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.956952Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.957159Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.957237Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.957382Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.957592Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.982517Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:27.138352Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:27.195166Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:27.195252Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:27.231845Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:27.232387Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:27.232625Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:27.232694Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:52:27.232747Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:27.232807Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: 
table count# 0 2025-12-23T18:52:27.232878Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:27.232936Z node 3 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:27.233540Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:27.246297Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:27.246424Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [3:2208:2591], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:27.275062Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2281:2644] 2025-12-23T18:52:27.275312Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2281:2644], schemeshard id = 72075186224037897 2025-12-23T18:52:27.320299Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2331:2667] 2025-12-23T18:52:27.322733Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared1 2025-12-23T18:52:27.333359Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [3:2337:2671] Owner: [3:2336:2670]. Describe result: PathErrorUnknown 2025-12-23T18:52:27.333427Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2337:2671] Owner: [3:2336:2670]. Creating table 2025-12-23T18:52:27.333499Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2337:2671] Owner: [3:2336:2670]. Created ESchemeOpCreateTable transaction for path: /Root/Shared1/.metadata/_statistics 2025-12-23T18:52:27.337569Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2388:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:27.341114Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:27.347095Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2337:2671] Owner: [3:2336:2670]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:27.347200Z node 3 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [3:2337:2671] Owner: [3:2336:2670]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:27.356071Z node 3 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [3:2337:2671] Owner: [3:2336:2670]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:27.496492Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared1/.metadata/script_executions 2025-12-23T18:52:27.557761Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:27.825620Z node 3 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [3:2337:2671] Owner: [3:2336:2670]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:27.935125Z node 3 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [3:2337:2671] Owner: [3:2336:2670]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:27.935201Z node 3 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [3:2337:2671] Owner: [3:2336:2670 ... node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [3:7290:3775], StatRequests.size() = 1 2025-12-23T18:53:05.263449Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [3:7335:3786]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:05.263811Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2025-12-23T18:53:05.263853Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [3:7335:3786], StatRequests.size() = 1 2025-12-23T18:53:06.648453Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:06.764925Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:53:06.765014Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:53:06.765055Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:53:06.765098Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:53:06.910104Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [3:7384:3800]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:06.910454Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2025-12-23T18:53:06.910489Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [3:7384:3800], StatRequests.size() = 1 2025-12-23T18:53:08.438368Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [3:7442:3816]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:08.438865Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2025-12-23T18:53:08.438922Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [3:7442:3816], StatRequests.size() = 1 2025-12-23T18:53:09.901907Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, 
schemeshard count = 2 2025-12-23T18:53:09.902190Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 5 2025-12-23T18:53:09.902305Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-12-23T18:53:09.902821Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:09.926541Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:09.926636Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:09.927086Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:09.942679Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:10.192433Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [3:7490:3830]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:10.193060Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2025-12-23T18:53:10.193115Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:7490:3830], StatRequests.size() = 1 2025-12-23T18:53:11.735184Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:7542:3845]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:11.735555Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2025-12-23T18:53:11.735606Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:7542:3845], StatRequests.size() = 1 2025-12-23T18:53:12.700405Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2025-12-23T18:53:12.700488Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224038898 2025-12-23T18:53:12.700980Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:12.716261Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2025-12-23T18:53:13.493443Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:7591:3857]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:13.493813Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2025-12-23T18:53:13.493854Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:7591:3857], StatRequests.size() = 1 2025-12-23T18:53:14.680833Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2025-12-23T18:53:14.680919Z node 3 :STATISTICS DEBUG: 
schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 4.867000s, at schemeshard: 72075186224037899 2025-12-23T18:53:14.681277Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2025-12-23T18:53:14.700181Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:14.831851Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:15.027684Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:7642:3871]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:15.028423Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2025-12-23T18:53:15.028469Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:7642:3871], StatRequests.size() = 1 2025-12-23T18:53:16.625467Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:7695:3889]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:16.625837Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-12-23T18:53:16.625892Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:7695:3889], StatRequests.size() = 1 2025-12-23T18:53:17.230447Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224038900 2025-12-23T18:53:17.230534Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.057000s, at schemeshard: 72075186224038900 2025-12-23T18:53:17.230901Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038900, stats byte size: 26, entries count: 1, are all stats full: 1 2025-12-23T18:53:17.246100Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2025-12-23T18:53:17.999585Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-12-23T18:53:18.000150Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2025-12-23T18:53:18.000755Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2025-12-23T18:53:18.000876Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-12-23T18:53:18.023978Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:18.024062Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:18.024411Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:18.040524Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 
2025-12-23T18:53:18.198164Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:7748:3903]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:18.198558Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-12-23T18:53:18.198614Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:7748:3903], StatRequests.size() = 1 2025-12-23T18:53:18.199312Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7750:3834]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:18.204720Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:53:18.205582Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-12-23T18:53:18.205670Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:7765:3839] 2025-12-23T18:53:18.205742Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:7765:3839] 2025-12-23T18:53:18.206979Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224038895] EvServerConnected, pipe server id = [2:7768:3840] 2025-12-23T18:53:18.207398Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:7765:3839], server id = [2:7768:3840], tablet id = 72075186224038895, status = OK 2025-12-23T18:53:18.207486Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224038895] EvConnectNode, pipe server id = [2:7768:3840], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:53:18.207550Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:53:18.207931Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:53:18.208084Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:7750:3834], StatRequests.size() = 1 2025-12-23T18:53:18.208251Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> KqpWorkloadService::TestLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced Test command err: 2025-12-23T18:53:18.900490Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:18.900621Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:18.923692Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:18.923828Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:18.960856Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 
2025-12-23T18:53:18.961366Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=13450141927843377554, session=0, seqNo=0) 2025-12-23T18:53:18.962754Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:18.974679Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=13450141927843377554, session=1) 2025-12-23T18:53:18.975083Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=10777818535910569576, session=0, seqNo=0) 2025-12-23T18:53:18.975239Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:53:18.987286Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=10777818535910569576, session=2) 2025-12-23T18:53:19.407968Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.408063Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.425255Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.425354Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.449907Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.450383Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:135:2159], cookie=9930480580177120475, session=1, seqNo=0) 2025-12-23T18:53:19.462131Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:135:2159], cookie=9930480580177120475, session=1) 2025-12-23T18:53:19.837844Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.837975Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.852913Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.853289Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.886729Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.887393Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=6496982542360247735, session=0, seqNo=0) 2025-12-23T18:53:19.887553Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:19.899127Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=6496982542360247735, session=1) 2025-12-23T18:53:20.270337Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:20.270451Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:20.290236Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:20.290356Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] 
TTxInit::Execute 2025-12-23T18:53:20.315469Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:20.315859Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[4:134:2159], cookie=14586335737016845592, path="") 2025-12-23T18:53:20.338950Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[4:134:2159], cookie=14586335737016845592, status=SUCCESS) 2025-12-23T18:53:20.340089Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:144:2166], cookie=7642307982027162116, session=0, seqNo=0) 2025-12-23T18:53:20.340244Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:20.352678Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:144:2166], cookie=7642307982027162116, session=1) 2025-12-23T18:53:20.353500Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:145:2167], cookie=111, session=0, seqNo=0) 2025-12-23T18:53:20.353636Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:53:20.353804Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:262: [72057594037927937] Fast-path attach session=1 to sender=[4:145:2167], cookie=222, seqNo=0 2025-12-23T18:53:20.366006Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:145:2167], cookie=111, session=2) 2025-12-23T18:53:20.693778Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:20.693882Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:20.710392Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:20.710495Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:20.745029Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:20.745428Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[5:136:2161], cookie=10805901989200854062, path="") 2025-12-23T18:53:20.757611Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[5:136:2161], cookie=10805901989200854062, status=SUCCESS) 2025-12-23T18:53:20.758671Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:146:2168], cookie=1679663480465178245, session=0, seqNo=0) 2025-12-23T18:53:20.758852Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:20.770911Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:146:2168], cookie=1679663480465178245, session=1) 2025-12-23T18:53:20.771680Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:146:2168], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:53:20.771846Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:53:20.771954Z node 5 :KESUS_TABLET DEBUG: 
tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:53:20.772353Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:147:2169], cookie=111, session=0, seqNo=0) 2025-12-23T18:53:20.772443Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:53:20.772564Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:147:2169], cookie=222, session=1, seqNo=0) 2025-12-23T18:53:20.784462Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:146:2168], cookie=123) 2025-12-23T18:53:20.784571Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:147:2169], cookie=111, session=2) 2025-12-23T18:53:20.784631Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:147:2169], cookie=222, session=1) |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesBurst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2025-12-23T18:53:18.900443Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:18.900588Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:18.922929Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:18.923024Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:18.961007Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:18.961921Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=13681832566741479336, session=0, seqNo=222) 2025-12-23T18:53:18.962768Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:18.975048Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=13681832566741479336, session=1) 2025-12-23T18:53:18.975384Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=12399217232345567276, session=1, seqNo=111) 2025-12-23T18:53:18.987506Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=12399217232345567276, session=1) 2025-12-23T18:53:19.398829Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.398949Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.416966Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.417073Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.444319Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.444911Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] 
TTxSessionAttach::Execute (sender=[2:135:2159], cookie=111, session=0, seqNo=42) 2025-12-23T18:53:19.445119Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:19.445341Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:135:2159], cookie=222, session=1, seqNo=41) 2025-12-23T18:53:19.457681Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:135:2159], cookie=111, session=1) 2025-12-23T18:53:19.457772Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:135:2159], cookie=222, session=1) 2025-12-23T18:53:19.819763Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.819881Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.835515Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.835876Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.870374Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.870963Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=3663245790570926999, session=0, seqNo=0) 2025-12-23T18:53:19.871127Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:19.883159Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=3663245790570926999, session=1) 2025-12-23T18:53:19.884724Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:154:2176], cookie=12299175794953897992) 2025-12-23T18:53:19.884827Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:154:2176], cookie=12299175794953897992) 2025-12-23T18:53:20.225776Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:20.225894Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:20.240091Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:20.240193Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:20.264829Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:20.636736Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:20.636853Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:20.657655Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:20.657787Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:20.692145Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:20.692626Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=17013450435946541535, session=0, 
seqNo=0) 2025-12-23T18:53:20.692795Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:20.704738Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=17013450435946541535, session=1) 2025-12-23T18:53:20.705106Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:53:20.705271Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:53:20.705370Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:53:20.717553Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=111) 2025-12-23T18:53:20.718531Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:151:2173], cookie=17005823319217853400, name="Sem1", limit=42) 2025-12-23T18:53:20.718690Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2025-12-23T18:53:20.730973Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:151:2173], cookie=17005823319217853400) 2025-12-23T18:53:20.731562Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:156:2178], cookie=11351022700291777546, name="Sem1", limit=42) 2025-12-23T18:53:20.743949Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:156:2178], cookie=11351022700291777546) 2025-12-23T18:53:20.744614Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:161:2183], cookie=17641723729800716668, name="Sem1", limit=51) 2025-12-23T18:53:20.756856Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:161:2183], cookie=17641723729800716668) 2025-12-23T18:53:20.757377Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:166:2188], cookie=17172312278335164532, name="Lock1", limit=42) 2025-12-23T18:53:20.769738Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:166:2188], cookie=17172312278335164532) 2025-12-23T18:53:20.770305Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:171:2193], cookie=3609255961946997789, name="Lock1", limit=18446744073709551615) 2025-12-23T18:53:20.782696Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:171:2193], cookie=3609255961946997789) 2025-12-23T18:53:20.783324Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:176:2198], cookie=10366697940881979496, name="Sem1") 2025-12-23T18:53:20.783408Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:176:2198], cookie=10366697940881979496) 2025-12-23T18:53:20.783850Z 
node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:179:2201], cookie=2842269351318996642, name="Sem2") 2025-12-23T18:53:20.783922Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:179:2201], cookie=2842269351318996642) 2025-12-23T18:53:20.799214Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:20.799323Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:20.799783Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:20.800392Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:20.847674Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:20.847801Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:53:20.848188Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:218:2231], cookie=16996311964096969796, name="Sem1") 2025-12-23T18:53:20.848265Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:218:2231], cookie=16996311964096969796) 2025-12-23T18:53:20.848809Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:225:2237], cookie=9046032310571608223, name="Sem2") 2025-12-23T18:53:20.848882Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:225:2237], cookie=9046032310571608223) |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource >> TKesusTest::TestSemaphoreSessionFailures [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2025-12-23T18:53:19.073979Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.074061Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.087350Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.087447Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.122021Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.122626Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=14595996944018301564, session=0, seqNo=0) 2025-12-23T18:53:19.122837Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:19.134781Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=14595996944018301564, session=1) 2025-12-23T18:53:19.135146Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] 
TTxSessionAttach::Execute (sender=[1:136:2161], cookie=18423409912375381867, session=0, seqNo=0) 2025-12-23T18:53:19.135294Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:53:19.147518Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=18423409912375381867, session=2) 2025-12-23T18:53:19.147878Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:136:2161], cookie=111, name="Lock1") 2025-12-23T18:53:19.160082Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:136:2161], cookie=111) 2025-12-23T18:53:19.160450Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:53:19.160602Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:53:19.160701Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:53:19.172987Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2025-12-23T18:53:19.173320Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:136:2161], cookie=333, name="Lock1") 2025-12-23T18:53:19.185630Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:136:2161], cookie=333) 2025-12-23T18:53:19.485285Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.485365Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.496417Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.496498Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.520105Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.520498Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:135:2159], cookie=8540306694354383915, session=0, seqNo=0) 2025-12-23T18:53:19.520628Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:19.532025Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:135:2159], cookie=8540306694354383915, session=1) 2025-12-23T18:53:19.532246Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:135:2159], cookie=15769965225748815810, session=0, seqNo=0) 2025-12-23T18:53:19.532334Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:53:19.544335Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:135:2159], cookie=15769965225748815810, session=2) 2025-12-23T18:53:19.544984Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] 
TTxSemaphoreCreate::Execute (sender=[2:146:2168], cookie=7616897871576954952, name="Sem1", limit=1) 2025-12-23T18:53:19.545167Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-12-23T18:53:19.557067Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:146:2168], cookie=7616897871576954952) 2025-12-23T18:53:19.557349Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:135:2159], cookie=111, session=1, semaphore="Sem1" count=1) 2025-12-23T18:53:19.557469Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-12-23T18:53:19.557618Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:135:2159], cookie=222, session=2, semaphore="Sem1" count=1) 2025-12-23T18:53:19.569809Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:135:2159], cookie=111) 2025-12-23T18:53:19.569901Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:135:2159], cookie=222) 2025-12-23T18:53:19.570343Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:154:2176], cookie=17130749865816898331, name="Sem1") 2025-12-23T18:53:19.570435Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:154:2176], cookie=17130749865816898331) 2025-12-23T18:53:19.570838Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:157:2179], cookie=11302887350752011005, name="Sem1") 2025-12-23T18:53:19.570901Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:157:2179], cookie=11302887350752011005) 2025-12-23T18:53:19.571085Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:135:2159], cookie=333, name="Sem1") 2025-12-23T18:53:19.571179Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2025-12-23T18:53:19.582855Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:135:2159], cookie=333) 2025-12-23T18:53:19.583274Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:162:2184], cookie=15565311356358791823, name="Sem1") 2025-12-23T18:53:19.583337Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:162:2184], cookie=15565311356358791823) 2025-12-23T18:53:19.583656Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:165:2187], cookie=11881783433359103465, name="Sem1") 2025-12-23T18:53:19.583698Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:165:2187], cookie=11881783433359103465) 2025-12-23T18:53:19.583898Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:135:2159], cookie=444, 
name="Sem1") 2025-12-23T18:53:19.583980Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-12-23T18:53:19.596081Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:135:2159], cookie=444) 2025-12-23T18:53:19.596705Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:170:2192], cookie=5275777439899851624, name="Sem1") 2025-12-23T18:53:19.596799Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:170:2192], cookie=5275777439899851624) 2025-12-23T18:53:19.597314Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:173:2195], cookie=9812785321036627850, name="Sem1") 2025-12-23T18:53:19.597382Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:173:2195], cookie=9812785321036627850) 2025-12-23T18:53:19.931465Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.931553Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.942695Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.943330Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.977358Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.977747Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:136:2161], cookie=10488166668838964288, name="Sem1", limit=1) 2025-12-23T18:53:19.977898Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-12-23T18:53:19.989953Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:136:2161], cookie=10488166668838964288) 2025-12-23T18:53:19.990488Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:146:2168], cookie=10333996418396508466, name="Sem2", limit=1) 2025-12-23T18:53:19.990652Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem2" 2025-12-23T18:53:20.002801Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:146:2168], cookie=10333996418396508466) 2025-12-23T18:53:20.003407Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:151:2173], cookie=2519983156507301, name="Sem1") 2025-12-23T18:53:20.003514Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:151:2173], cookie=2519983156507301) 2025-12-23T18:53:20.003919Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:154:2176], cookie=17738492968831092582, name="Sem2") 2025-12-23T18:53:20.003988Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:154:2176], cookie=17738492968831092582) 2025-12-23T18:53:20.016436Z node 3 :KESUS_TABLET 
INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:20.016541Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::E ... TTxSemaphoreCreate::Complete (sender=[4:247:2268], cookie=15402518913228046742) 2025-12-23T18:53:20.908448Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:134:2159], cookie=111, session=1, semaphore="Sem1" count=1) 2025-12-23T18:53:20.908606Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2025-12-23T18:53:20.920646Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:134:2159], cookie=111) 2025-12-23T18:53:20.921279Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:134:2159], cookie=222, session=2, semaphore="Sem1" count=1) 2025-12-23T18:53:20.933540Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:134:2159], cookie=222) 2025-12-23T18:53:20.934087Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:134:2159], cookie=333, name="Sem1") 2025-12-23T18:53:20.934215Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2025-12-23T18:53:20.946305Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:134:2159], cookie=333) 2025-12-23T18:53:20.946988Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:134:2159], cookie=444, session=2, semaphore="Sem1" count=1) 2025-12-23T18:53:20.959288Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:134:2159], cookie=444) 2025-12-23T18:53:20.959855Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:134:2159], cookie=555, name="Sem1") 2025-12-23T18:53:20.959984Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2025-12-23T18:53:20.960050Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2025-12-23T18:53:20.972021Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:134:2159], cookie=555) 2025-12-23T18:53:21.329715Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:21.329795Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:21.344363Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:21.344460Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:21.378291Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:21.378719Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=4105583458487548254, session=0, seqNo=0) 2025-12-23T18:53:21.378890Z node 5 :KESUS_TABLET DEBUG: 
tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:21.390619Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=4105583458487548254, session=1) 2025-12-23T18:53:21.390967Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:136:2161], cookie=112, name="Sem1", limit=5) 2025-12-23T18:53:21.391103Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-12-23T18:53:21.402916Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:136:2161], cookie=112) 2025-12-23T18:53:21.403267Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:136:2161], cookie=113, name="Sem1") 2025-12-23T18:53:21.415221Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:136:2161], cookie=113) 2025-12-23T18:53:21.415515Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:136:2161], cookie=114, name="Sem1", force=0) 2025-12-23T18:53:21.415607Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-12-23T18:53:21.427457Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:136:2161], cookie=114) 2025-12-23T18:53:21.427790Z node 5 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[5:136:2161], cookie=2981367412979829695 2025-12-23T18:53:21.428139Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:136:2161], cookie=115, name="Sem1", limit=5) 2025-12-23T18:53:21.439950Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:136:2161], cookie=115) 2025-12-23T18:53:21.440247Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:136:2161], cookie=116, name="Sem1") 2025-12-23T18:53:21.451989Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:136:2161], cookie=116) 2025-12-23T18:53:21.452269Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:136:2161], cookie=117, name="Sem1", force=0) 2025-12-23T18:53:21.464451Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:136:2161], cookie=117) 2025-12-23T18:53:21.464854Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=118, session=1, semaphore="Sem1" count=1) 2025-12-23T18:53:21.477060Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=118) 2025-12-23T18:53:21.477381Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2161], cookie=119, name="Sem1") 2025-12-23T18:53:21.489644Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2161], cookie=119) 
2025-12-23T18:53:21.489999Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:136:2161], cookie=120, name="Sem1") 2025-12-23T18:53:21.490089Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:136:2161], cookie=120) 2025-12-23T18:53:21.490316Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:136:2161], cookie=9887365342822686395, session=1) 2025-12-23T18:53:21.490415Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-12-23T18:53:21.502506Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:136:2161], cookie=9887365342822686395) 2025-12-23T18:53:21.502897Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:136:2161], cookie=121, name="Sem1", limit=5) 2025-12-23T18:53:21.514846Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:136:2161], cookie=121) 2025-12-23T18:53:21.515148Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:136:2161], cookie=122, name="Sem1") 2025-12-23T18:53:21.527222Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:136:2161], cookie=122) 2025-12-23T18:53:21.527562Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:136:2161], cookie=123, name="Sem1", force=0) 2025-12-23T18:53:21.539603Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:136:2161], cookie=123) 2025-12-23T18:53:21.539950Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=124, session=1, semaphore="Sem1" count=1) 2025-12-23T18:53:21.552222Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=124) 2025-12-23T18:53:21.552496Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2161], cookie=125, name="Sem1") 2025-12-23T18:53:21.575169Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2161], cookie=125) 2025-12-23T18:53:21.575427Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:136:2161], cookie=126, name="Sem1") 2025-12-23T18:53:21.575513Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:136:2161], cookie=126) 2025-12-23T18:53:21.576010Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:136:2161], cookie=127, name="Sem1", limit=5) 2025-12-23T18:53:21.576064Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:136:2161], cookie=127) 2025-12-23T18:53:21.576265Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:136:2161], cookie=128, name="Sem1") 2025-12-23T18:53:21.576314Z node 5 
:KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:136:2161], cookie=128) 2025-12-23T18:53:21.576508Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:136:2161], cookie=129, name="Sem1", force=0) 2025-12-23T18:53:21.576556Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:136:2161], cookie=129) 2025-12-23T18:53:21.576794Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=130, session=1, semaphore="Sem1" count=1) 2025-12-23T18:53:21.576855Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=130) 2025-12-23T18:53:21.577073Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2161], cookie=131, name="Sem1") 2025-12-23T18:53:21.577131Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2161], cookie=131) 2025-12-23T18:53:21.577347Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:136:2161], cookie=132, name="Sem1") 2025-12-23T18:53:21.577403Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:136:2161], cookie=132) |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2025-12-23T18:53:18.900421Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:18.900576Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:18.921875Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:18.921997Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:18.961274Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:18.968395Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:136:2161], cookie=13745623103931520374, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-12-23T18:53:18.968718Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-12-23T18:53:18.980639Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:136:2161], cookie=13745623103931520374) 2025-12-23T18:53:18.981295Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:146:2168], cookie=2998201529529324816, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-12-23T18:53:18.981576Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2025-12-23T18:53:18.993397Z node 1 :KESUS_TABLET DEBUG: 
tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:146:2168], cookie=2998201529529324816) 2025-12-23T18:53:18.993918Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:151:2173], cookie=1476926670697373153, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-12-23T18:53:18.994090Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2025-12-23T18:53:19.006185Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:151:2173], cookie=1476926670697373153) 2025-12-23T18:53:19.006708Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:156:2178], cookie=3436939377282759301, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-12-23T18:53:19.006909Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-12-23T18:53:19.018629Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:156:2178], cookie=3436939377282759301) 2025-12-23T18:53:19.019239Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:161:2183], cookie=9302042036344010586, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-12-23T18:53:19.019423Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2025-12-23T18:53:19.031279Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:161:2183], cookie=9302042036344010586) 2025-12-23T18:53:19.031955Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:166:2188], cookie=9263235744027781949, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2025-12-23T18:53:19.032176Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2025-12-23T18:53:19.043932Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:166:2188], cookie=9263235744027781949) 2025-12-23T18:53:19.044467Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:171:2193], cookie=15904729723100774378, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-12-23T18:53:19.044618Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 7 "Root2" 2025-12-23T18:53:19.056598Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:171:2193], cookie=15904729723100774378) 2025-12-23T18:53:19.057293Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:176:2198], cookie=14103766323735625707, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2025-12-23T18:53:19.057511Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2025-12-23T18:53:19.069604Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: 
[72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:176:2198], cookie=14103766323735625707) 2025-12-23T18:53:19.070258Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:181:2203], cookie=9354020682843322458, ids=[100], paths=[], recursive=0) 2025-12-23T18:53:19.070416Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:181:2203], cookie=9354020682843322458) 2025-12-23T18:53:19.071004Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:184:2206], cookie=11428350778357243596, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-12-23T18:53:19.071092Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:184:2206], cookie=11428350778357243596) 2025-12-23T18:53:19.071591Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:187:2209], cookie=8545273966072358703, ids=[], paths=[/Root, ], recursive=0) 2025-12-23T18:53:19.071685Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:187:2209], cookie=8545273966072358703) 2025-12-23T18:53:19.072191Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:190:2212], cookie=6847917583693336493, ids=[1, 1], paths=[], recursive=0) 2025-12-23T18:53:19.072248Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:190:2212], cookie=6847917583693336493) 2025-12-23T18:53:19.072799Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:193:2215], cookie=1155035945550168935, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-12-23T18:53:19.072898Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:193:2215], cookie=1155035945550168935) 2025-12-23T18:53:19.073381Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:196:2218], cookie=17217963170449679983, ids=[], paths=[], recursive=1) 2025-12-23T18:53:19.073452Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:196:2218], cookie=17217963170449679983) 2025-12-23T18:53:19.075000Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:199:2221], cookie=12312846821529582488, ids=[], paths=[], recursive=0) 2025-12-23T18:53:19.075066Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:199:2221], cookie=12312846821529582488) 2025-12-23T18:53:19.075597Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:202:2224], cookie=1371424156671738774, ids=[3, 2], paths=[], recursive=1) 2025-12-23T18:53:19.075669Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:202:2224], 
cookie=1371424156671738774) 2025-12-23T18:53:19.076229Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:205:2227], cookie=7166022009811135203, ids=[3, 2], paths=[], recursive=0) 2025-12-23T18:53:19.076290Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:205:2227], cookie=7166022009811135203) 2025-12-23T18:53:19.076848Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:208:2230], cookie=5893068219451113353, ids=[], paths=[Root2/], recursive=1) 2025-12-23T18:53:19.076917Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:208:2230], cookie=5893068219451113353) 2025-12-23T18:53:19.077487Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:211:2233], cookie=12552426746531394434, ids=[], paths=[Root2/], recursive=0) 2025-12-23T18:53:19.077547Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:211:2233], cookie=12552426746531394434) 2025-12-23T18:53:19.092004Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.092121Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.092663Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.093270Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.110018Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.110402Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:250:2263], cookie=13672216643689554296, ids=[100], paths=[], recursive=0) 2025-12-23T18:53:19.110497Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:250:2263], cookie=13672216643689554296) 2025-12-23T18:53:19.111307Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:256:2268], cookie=13768943407010361007, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-12-23T18:53:19.111396Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:256:2268], cookie=13768943407010361007) 2025-12-23T18:53:19.112072Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:259:2271], cookie=7676512461599700411, ids=[], paths=[/Root, ], recursive=0) 2025-12-23T18:53:19.112159Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:259:2271], cookie=7676512461599700411) 2025-12-23T18:53:19.112835Z nod ... 
_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-12-23T18:53:21.297725Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:154:2176], cookie=4009059333931150560) 2025-12-23T18:53:21.298225Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:159:2181], cookie=15569112752762967031, ids=[], paths=[], recursive=1) 2025-12-23T18:53:21.298332Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:159:2181], cookie=15569112752762967031) 2025-12-23T18:53:21.299156Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:165:2187], cookie=12714054769655350165, ids=[], paths=[], recursive=1) 2025-12-23T18:53:21.299236Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:165:2187], cookie=12714054769655350165) 2025-12-23T18:53:21.300000Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:171:2193], cookie=9217369792193875492, ids=[], paths=[], recursive=1) 2025-12-23T18:53:21.300066Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:171:2193], cookie=9217369792193875492) 2025-12-23T18:53:21.300479Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:174:2196], cookie=12247847440264416159, id=0, path="/Root/Folder/NonexistingRes") 2025-12-23T18:53:21.300553Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:174:2196], cookie=12247847440264416159) 2025-12-23T18:53:21.300964Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:177:2199], cookie=4268614027318529232, ids=[], paths=[], recursive=1) 2025-12-23T18:53:21.301027Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:177:2199], cookie=4268614027318529232) 2025-12-23T18:53:21.301472Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:180:2202], cookie=9522138765003880342, id=100, path="") 2025-12-23T18:53:21.301535Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:180:2202], cookie=9522138765003880342) 2025-12-23T18:53:21.301969Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:183:2205], cookie=17146929630739083937, ids=[], paths=[], recursive=1) 2025-12-23T18:53:21.302032Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:183:2205], cookie=17146929630739083937) 2025-12-23T18:53:21.302466Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:186:2208], cookie=16691179988291632724, id=3, path="") 2025-12-23T18:53:21.302524Z node 4 :KESUS_TABLET DEBUG: 
tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:186:2208], cookie=16691179988291632724) 2025-12-23T18:53:21.303015Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:189:2211], cookie=9435422063150543365, ids=[], paths=[], recursive=1) 2025-12-23T18:53:21.303079Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:189:2211], cookie=9435422063150543365) 2025-12-23T18:53:21.303596Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:192:2214], cookie=13267145215863458757, id=0, path="/Root/Folder/Q1") 2025-12-23T18:53:21.303729Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2025-12-23T18:53:21.315722Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:192:2214], cookie=13267145215863458757) 2025-12-23T18:53:21.316338Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:197:2219], cookie=17727385624053848768, ids=[], paths=[], recursive=1) 2025-12-23T18:53:21.316418Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:197:2219], cookie=17727385624053848768) 2025-12-23T18:53:21.328338Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:21.328430Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:21.328872Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:21.329380Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:21.375802Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:21.376083Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:236:2249], cookie=1946426484734583136, ids=[], paths=[], recursive=1) 2025-12-23T18:53:21.376150Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:236:2249], cookie=1946426484734583136) 2025-12-23T18:53:21.376646Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:242:2254], cookie=9714069309829036492, id=3, path="") 2025-12-23T18:53:21.376786Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2025-12-23T18:53:21.388635Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:242:2254], cookie=9714069309829036492) 2025-12-23T18:53:21.389230Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:247:2259], cookie=4575145090537931976, ids=[], paths=[], recursive=1) 2025-12-23T18:53:21.389309Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:247:2259], cookie=4575145090537931976) 
2025-12-23T18:53:21.397712Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:21.397786Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:21.398124Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:21.398534Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:21.444491Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:21.444856Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:286:2289], cookie=483246484309399830, ids=[], paths=[], recursive=1) 2025-12-23T18:53:21.444937Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:286:2289], cookie=483246484309399830) 2025-12-23T18:53:21.798565Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:21.798671Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:21.819318Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:21.819451Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:21.854274Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:21.854645Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=6707777252793756604, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-12-23T18:53:21.854874Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Q1" 2025-12-23T18:53:21.867061Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=6707777252793756604) 2025-12-23T18:53:21.867739Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=8800454813932010379, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-12-23T18:53:21.867954Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Q2" 2025-12-23T18:53:21.880072Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=8800454813932010379) 2025-12-23T18:53:21.881570Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 11936169034997332308. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:21.881628Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=11936169034997332308) 2025-12-23T18:53:21.882323Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 4397158101137836502. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." 
} } } ProtocolVersion: 1 } 2025-12-23T18:53:21.882371Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=4397158101137836502) |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] Test command err: 2025-12-23T18:53:18.900432Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:18.900561Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:18.924320Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:18.924422Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:18.960871Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.415027Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.415122Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.432660Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.432753Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.456688Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.834178Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.834293Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.849025Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.849319Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.883140Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:20.264768Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:20.264886Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:20.286061Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:20.286196Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:20.333265Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:20.335735Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 5 2025-12-23T18:53:20.336323Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([4:193:2162]) 2025-12-23T18:53:20.892740Z node 6 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:20.892846Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:20.913352Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:20.913591Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute ... 
waiting for register request 2025-12-23T18:53:20.970328Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR cookie 7695389272266762152 ... waiting for register request (done) ... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR 2025-12-23T18:53:20.971186Z node 6 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 7 2025-12-23T18:53:20.971560Z node 6 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([6:193:2162]) |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-12-23T18:53:19.328869Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.328973Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.346037Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.346142Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.382887Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.383262Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:136:2161], cookie=7133513628867598213, path="/foo/bar/baz") 2025-12-23T18:53:19.396804Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:136:2161], cookie=7133513628867598213, status=SUCCESS) 2025-12-23T18:53:19.397383Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:146:2168], cookie=9216200880939634970) 2025-12-23T18:53:19.410768Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:146:2168], cookie=9216200880939634970) 2025-12-23T18:53:19.411337Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:151:2173], cookie=203741524920664844, path="/foo/bar/baz") 2025-12-23T18:53:19.423252Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:151:2173], cookie=203741524920664844, status=SUCCESS) 2025-12-23T18:53:19.423746Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:156:2178], cookie=795606342717045485) 2025-12-23T18:53:19.436799Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:156:2178], cookie=795606342717045485) 2025-12-23T18:53:19.448481Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.448553Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.448894Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.449105Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.484304Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.484665Z 
node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:197:2210], cookie=1934366744632840080) 2025-12-23T18:53:19.506429Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:197:2210], cookie=1934366744632840080) 2025-12-23T18:53:19.506970Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:205:2217], cookie=12608814071621516161, path="/foo/bar/baz") 2025-12-23T18:53:19.518571Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:205:2217], cookie=12608814071621516161, status=SUCCESS) 2025-12-23T18:53:19.519018Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:210:2222], cookie=6209697116797246007, path="/foo/bar/baz") 2025-12-23T18:53:19.519100Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:210:2222], cookie=6209697116797246007, status=PRECONDITION_FAILED) 2025-12-23T18:53:19.908319Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.908417Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.919890Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.919969Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.943537Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.943819Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:135:2159], cookie=9005464659499410718, name="Lock1") 2025-12-23T18:53:19.943870Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:135:2159], cookie=9005464659499410718) 2025-12-23T18:53:20.304217Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:20.304322Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:20.321227Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:20.321611Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:20.356805Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:20.357294Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=5096510312097358878, session=0, seqNo=0) 2025-12-23T18:53:20.357431Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:20.368939Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=5096510312097358878, session=1) 2025-12-23T18:53:20.369226Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:53:20.369374Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:53:20.369466Z node 3 
:KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:53:20.381338Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=111) 2025-12-23T18:53:20.381876Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:148:2170], cookie=8266299586753809186, name="Lock1", force=0) 2025-12-23T18:53:20.393911Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:148:2170], cookie=8266299586753809186) 2025-12-23T18:53:20.394401Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:153:2175], cookie=8031623056062395806, name="Sem1", force=0) 2025-12-23T18:53:20.406431Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:153:2175], cookie=8031623056062395806) 2025-12-23T18:53:20.406951Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:158:2180], cookie=17618747101252007072, name="Sem1", limit=42) 2025-12-23T18:53:20.407098Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2025-12-23T18:53:20.419175Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:158:2180], cookie=17618747101252007072) 2025-12-23T18:53:20.419677Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:163:2185], cookie=1613879055125071632, name="Sem1", force=0) 2025-12-23T18:53:20.419771Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-12-23T18:53:20.431638Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:163:2185], cookie=1613879055125071632) 2025-12-23T18:53:20.432121Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:168:2190], cookie=8579849489285920777, name="Sem1", force=0) 2025-12-23T18:53:20.444259Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:168:2190], cookie=8579849489285920777) 2025-12-23T18:53:20.679388Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:20.679502Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:20.698113Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:20.698227Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:20.723414Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:20.723999Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:134:2159], cookie=7672610950936031828, session=0, seqNo=0) 2025-12-23T18:53:20.724161Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:20.746439Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:134:2159], 
cookie=7672610950936031828, session=1) 2025-12-23T18:53:20.746779Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:134:2159], cookie=6904051842184202494, session=0, seqNo=0) 2025-12-23T18:53:20.746908Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:53:20.758938Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:134:2159], cookie=6904051842184202494, session=2) 2025-12-23T18:53:20.759290Z node 4 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=2 from sender=[4:134:2159], cookie=241729277877247079 2025-12-23T18:53:20.759775Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:147:2169], cookie=10516997243943163558, name="Sem1", limit=3) 2025-12-23T18:53:20.759923Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-12-23T18:53:20.771894Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:147:2169], cookie=10516997243943163558) 2025-12-23T18:53:20.772180Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:134:2159], cookie=112, name="Sem1") 2025-12-23T18:53:20.772260Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:134:2159], cookie=112) 2025-12-23T18:53:20.772482Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:134:2159], cookie=113, name="Sem1") 2025-12-23T18:53:20.772549Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:134:2159], cookie=113) 2025-12-23T18:53:20.772760Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:134:2159], cookie=10491924165034376450, session=2, seqNo=0) 2025-12-23T18:53:20.784785Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach: ... 
ode 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:22.172098Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:22.182697Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:134:2159], cookie=129, session=1, semaphore="Sem2" count=2) 2025-12-23T18:53:22.195054Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:134:2159], cookie=129) 2025-12-23T18:53:22.195494Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:134:2159], cookie=130, name="Sem2") 2025-12-23T18:53:22.195577Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:134:2159], cookie=130) 2025-12-23T18:53:22.195788Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:134:2159], cookie=131, session=1, semaphore="Sem2" count=1) 2025-12-23T18:53:22.207767Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:134:2159], cookie=131) 2025-12-23T18:53:22.208131Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:134:2159], cookie=132, name="Sem2") 2025-12-23T18:53:22.208213Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:134:2159], cookie=132) 2025-12-23T18:53:22.208442Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:134:2159], cookie=133, name="Sem2") 2025-12-23T18:53:22.208495Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:134:2159], cookie=133) 2025-12-23T18:53:22.453021Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:22.453098Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:22.465961Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:22.466082Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:22.500252Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:22.504338Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=15585634199423282889, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-12-23T18:53:22.504506Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root1" 2025-12-23T18:53:22.515961Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=15585634199423282889) 2025-12-23T18:53:22.516455Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=735172497472464048, path="/Root1/Res", config={ }) 2025-12-23T18:53:22.516654Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root1/Res" 
2025-12-23T18:53:22.528382Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=735172497472464048) 2025-12-23T18:53:22.528867Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:151:2173], cookie=13319007185360758531, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-12-23T18:53:22.529011Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root2" 2025-12-23T18:53:22.540823Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:151:2173], cookie=13319007185360758531) 2025-12-23T18:53:22.541355Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:156:2178], cookie=15091778585053559873, path="/Root2/Res", config={ }) 2025-12-23T18:53:22.541526Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-12-23T18:53:22.553107Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:156:2178], cookie=15091778585053559873) 2025-12-23T18:53:22.553527Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:161:2183], cookie=9434494329355752065, path="/Root2/Res/Subres", config={ }) 2025-12-23T18:53:22.553721Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-12-23T18:53:22.565454Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:161:2183], cookie=9434494329355752065) 2025-12-23T18:53:22.566412Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:166:2188]. Cookie: 16166576944264015141. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:22.566466Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:166:2188], cookie=16166576944264015141) 2025-12-23T18:53:22.607996Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-12-23T18:53:22.659590Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-12-23T18:53:22.690385Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-12-23T18:53:22.690923Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:174:2192]. Cookie: 3485749752242264173. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-12-23T18:53:22.691529Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:177:2195]. Cookie: 625858617433908759. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:22.691564Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:177:2195], cookie=625858617433908759) 2025-12-23T18:53:22.732867Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:177:2195]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-12-23T18:53:22.774068Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:177:2195]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-12-23T18:53:22.774532Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:183:2199]. Cookie: 8578336218577649641. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-12-23T18:53:22.775124Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:166:2188]. Cookie: 3244090912678678436. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:22.775178Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:166:2188], cookie=3244090912678678436) 2025-12-23T18:53:22.775670Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:177:2195]. Cookie: 13959279061013019694. 
Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:22.775706Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:177:2195], cookie=13959279061013019694) 2025-12-23T18:53:22.806592Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:166:2188]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-12-23T18:53:22.806678Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:177:2195]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-12-23T18:53:22.807259Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:190:2206]. Cookie: 13725582896563600867. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } |97.4%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> KqpWorkloadService::TestZeroQueueSizeManyQueries [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables |97.4%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |97.4%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |97.5%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |97.5%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |97.5%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |97.5%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> TestYmqHttpProxy::TestCreateQueueWithWrongBody 
[GOOD] >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration >> TestKinesisHttpProxy::TestListStreamConsumers |97.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD] >> ResourcePoolsDdl::TestDefaultPoolRestrictions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:53:02.243759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:53:02.243865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:02.243923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:53:02.243968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:53:02.244023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:53:02.244075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:53:02.244150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:53:02.259412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-12-23T18:53:02.260316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:53:02.261930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:53:02.345596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:53:02.345641Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:02.353909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:53:02.354047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:53:02.354168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:53:02.367397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:53:02.369286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:53:02.372116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:53:02.380550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:53:02.389731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:02.390535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:02.410217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:02.410320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:02.410513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:02.410575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:02.410672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:02.410836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:53:02.559821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.562949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.563037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.563107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.563206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.563334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:53:02.563474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
3:23.901318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-12-23T18:53:23.901601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.901707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.902039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.902106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.902335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.902467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.923438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.923583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.923772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.923859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.924292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.924533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.924609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.924683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.924820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.924873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.924923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:53:23.925237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-12-23T18:53:23.931710Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:53:23.931887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:53:23.935889Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435083, 
Sender [1:2156:4079], Recipient [1:2156:4079]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-12-23T18:53:23.935937Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5310: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-12-23T18:53:23.937522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:53:23.937599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:53:23.938010Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [1:2156:4079], Recipient [1:2156:4079]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:53:23.938062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:53:23.938625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:53:23.938685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:53:23.938760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:53:23.938795Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:53:23.940250Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 274399233, Sender [1:2194:4079], Recipient [1:2156:4079]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-12-23T18:53:23.940298Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5420: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-12-23T18:53:23.940333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:2156:4079] sender: [1:2215:2058] recipient: [1:15:2062] 2025-12-23T18:53:23.988669Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271122945, Sender [1:2214:4126], Recipient [1:2156:4079]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-12-23T18:53:23.988739Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-12-23T18:53:23.988844Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:53:23.989124Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 269us result status StatusSuccess 2025-12-23T18:53:23.989874Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 27456 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { 
} ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 17701 Memory: 156864 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 27456 DataSize: 27456 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> TestKinesisHttpProxy::CreateDeleteStream >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> ResourcePoolsDdl::TestWorkloadConfigOnServerless [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TableCreator::CreateTables [GOOD] >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_SomeBuckets [GOOD] |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountLabels |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_NoBuckets [GOOD] |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_SomeBuckets [GOOD] |97.5%| [TS] 
{BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2025-12-23T18:53:24.787699Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128467950076193:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:24.787800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003617/r3tmp/tmpncEw0d/pdisk_1.dat 2025-12-23T18:53:25.058722Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:53:25.110806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:25.110919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:25.143112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:25.159565Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:25.292609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:20573 TServer::EnableGrpc on GrpcPort 25966, node 1 2025-12-23T18:53:25.514205Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:25.514314Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:25.514324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:25.514446Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: 2025-12-23T18:53:25.796743Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516005219 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T18:53:25.835471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:25.850612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:25.852743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |97.5%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_NoBuckets [GOOD] |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_AllBuckets [GOOD] |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_AllBuckets [GOOD] |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::GetLatestMaxCpuUsagePercent [GOOD] |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TKesusTest::TestQuoterAccountLabels [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession >> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless [GOOD] >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::GetLatestMaxCpuUsagePercent [GOOD] |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TestKinesisHttpProxy::CreateDeleteStream [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2025-12-23T18:53:21.769303Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:21.769404Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:21.782592Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:21.782686Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:21.817307Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:21.823712Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:136:2161], cookie=13872652242094062643, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-12-23T18:53:21.824146Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-12-23T18:53:21.836391Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:136:2161], cookie=13872652242094062643) 2025-12-23T18:53:21.837038Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:147:2169], cookie=14459513744942199826, path="/Root/Res", config={ }) 2025-12-23T18:53:21.837293Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-12-23T18:53:21.849231Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:147:2169], cookie=14459513744942199826) 2025-12-23T18:53:21.850690Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:152:2174]. Cookie: 1361944618340040399. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:21.850774Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[1:152:2174], cookie=1361944618340040399) 2025-12-23T18:53:21.851282Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [1:152:2174]. Cookie: 16032722847532149261. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-12-23T18:53:21.851334Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[1:152:2174], cookie=16032722847532149261) 2025-12-23T18:53:23.957854Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:23.957963Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:23.975097Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:23.975217Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:23.999693Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:24.000176Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:135:2159], cookie=17092526016550009354, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-12-23T18:53:24.000476Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-12-23T18:53:24.012331Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:135:2159], cookie=17092526016550009354) 2025-12-23T18:53:24.012961Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:145:2167]. Cookie: 16707714744816544208. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:24.013004Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:145:2167], cookie=16707714744816544208) 2025-12-23T18:53:24.013406Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:145:2167]. Cookie: 1919860679077595969. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:24.013445Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:145:2167], cookie=1919860679077595969) 2025-12-23T18:53:24.013759Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:145:2167]. Cookie: 845288066987160513. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-12-23T18:53:24.013798Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:145:2167], cookie=845288066987160513) 2025-12-23T18:53:24.014070Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:145:2167]. Cookie: 2029545927350819410. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-12-23T18:53:24.014102Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:145:2167], cookie=2029545927350819410) 2025-12-23T18:53:26.167974Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:26.168091Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:26.187145Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:26.187625Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:26.222708Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:26.223267Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:136:2161], cookie=13759347527487994872, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-12-23T18:53:26.223602Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-12-23T18:53:26.235762Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:136:2161], cookie=13759347527487994872) 2025-12-23T18:53:26.236396Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:147:2169], cookie=12485383202367247570, path="/Root/Res1", config={ }) 2025-12-23T18:53:26.236660Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2025-12-23T18:53:26.249007Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:147:2169], cookie=12485383202367247570) 2025-12-23T18:53:26.249632Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:152:2174], cookie=2396822882226921906, path="/Root/Res2", config={ }) 2025-12-23T18:53:26.249868Z node 
3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2025-12-23T18:53:26.262156Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:152:2174], cookie=2396822882226921906) 2025-12-23T18:53:26.263022Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:157:2179]. Cookie: 10512453175833499845. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:26.263085Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:157:2179], cookie=10512453175833499845) 2025-12-23T18:53:26.263702Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:157:2179]. Cookie: 5247875801928660498. Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:26.263754Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:157:2179], cookie=5247875801928660498) 2025-12-23T18:53:26.264244Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:157:2179]. Cookie: 6283761032405366608. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2025-12-23T18:53:26.264299Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:157:2179], cookie=6283761032405366608) 2025-12-23T18:53:28.387001Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:28.387123Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:28.405699Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:28.405833Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:28.431049Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:28.431553Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:134:2159], cookie=11856789487484213721, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-12-23T18:53:28.431937Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-12-23T18:53:28.454645Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:134:2159], cookie=11856789487484213721) 2025-12-23T18:53:28.455855Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:145:2167]. Cookie: 14551913409156688029. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:28.455924Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:145:2167], cookie=14551913409156688029) 2025-12-23T18:53:28.456318Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:145:2167]. Cookie: 2642443021426041762. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 26500 } } 2025-12-23T18:53:28.456355Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:145:2167], cookie=2642443021426041762) 2025-12-23T18:53:30.585456Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:30.585570Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:30.606188Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:30.606325Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:30.641272Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:30.641705Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=460856607602984379, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-12-23T18:53:30.641916Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-12-23T18:53:30.653949Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=460856607602984379) 2025-12-23T18:53:30.654526Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2168], cookie=9975349957193642575, path="/Root/Res", config={ }) 2025-12-23T18:53:30.654791Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-12-23T18:53:30.667156Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2168], cookie=9975349957193642575) 2025-12-23T18:53:30.668020Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:151:2173]. Cookie: 10753041364892796444. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:30.668081Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:151:2173], cookie=10753041364892796444) 2025-12-23T18:53:30.668568Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:34: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:155:2177], cookie=14992869914986692623, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2025-12-23T18:53:30.668750Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:61: [72057594037927937] Updated quoter resource 1 "Root" 2025-12-23T18:53:30.668938Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:151:2173]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2025-12-23T18:53:30.681189Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:75: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:155:2177], cookie=14992869914986692623) 2025-12-23T18:53:30.681809Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:151:2173]. Cookie: 14096489938992146637. Data: { } 2025-12-23T18:53:30.681886Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:151:2173], cookie=14096489938992146637) |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer >> TSchemeShardPartitionStatsTopCpuUsageTest::Update [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithTags |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::Update [GOOD] |97.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] Test command err: 2025-12-23T18:52:53.964439Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128333130896159:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:53.964496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00393e/r3tmp/tmp3fyIcg/pdisk_1.dat 2025-12-23T18:52:54.146543Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:54.154133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:54.154318Z node 
1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:54.206032Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:54.207216Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128333130896131:2081] 1766515973963064 != 1766515973963067 2025-12-23T18:52:54.209987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19594, node 1 2025-12-23T18:52:54.288481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:54.288514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:54.288522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:54.288620Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:54.306608Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15854 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515974265 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:54.491437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
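The Kesus quoter exchanges above follow one protocol shape: a consumer subscribes to a resource (TEvSubscribeOnResourcesResult returns the EffectiveProps, including HierarchicalDRRResourceConfig with MaxUnitsPerSecond), then periodically reports usage as AcceptedUs, which the tablet acknowledges with TEvAccountResourcesAck and logs as "Account quoter resources". As a rough illustration of that bookkeeping only, the Python sketch below splits a reported amount into within-limit and overshoot parts; the field names (AcceptedUs, MaxUnitsPerSecond) come from the log, but the reading of AcceptedUs as accepted microseconds and the arithmetic itself are assumptions, not the tablet's actual hierarchical DRR algorithm.

# Hypothetical bookkeeping sketch for the quoter accounting exchange above.
# Assumptions (not taken from YDB source): AcceptedUs is microseconds of
# accepted consumption reported per accounting period, and MaxUnitsPerSecond
# caps how much of it counts as within-limit for a one-second window.
def split_accepted(accepted_us: int, max_units_per_second: float) -> tuple[float, float]:
    """Return (within_limit, overshoot) for one accounting second."""
    accepted_units = accepted_us / 1_000_000.0  # treat 1s of accepted usage as 1 unit
    within = min(accepted_units, max_units_per_second)
    overshoot = max(0.0, accepted_units - max_units_per_second)
    return within, overshoot

# Values seen in the log: AcceptedUs: 1020500 against MaxUnitsPerSecond: 100.
print(split_accepted(1_020_500, 100.0))  # -> (1.0205, 0.0): well under the cap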
2025-12-23T18:52:54.970410Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:56.522295Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:52:56.522343Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:116: [WorkloadService] [Service] Resource pools was disabled 2025-12-23T18:52:56.531664Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=OGI1MGUzYWUtZDg4YmQ5NzktNmIyZjY0YmUtYjkyNGE3MzQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OGI1MGUzYWUtZDg4YmQ5NzktNmIyZjY0YmUtYjkyNGE3MzQ= (tmp dir name: 2b576f76-4153-906b-d36d-14b83443f444) 2025-12-23T18:52:56.531843Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=OGI1MGUzYWUtZDg4YmQ5NzktNmIyZjY0YmUtYjkyNGE3MzQ=, ActorId: [1:7587128346015798871:2318], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:52:56.545022Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc= (tmp dir name: 37c65dac-4e22-56f5-4d10-5291a2f23dbf) 2025-12-23T18:52:56.545109Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:52:56.545481Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: ReadyState, TraceId: 01kd68xab19k61mhm7pnx89gh6, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7587128346015798872:2489] database: Root databaseId: /Root pool id: 2025-12-23T18:52:56.545582Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:630: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: ExecuteState, TraceId: 01kd68xab19k61mhm7pnx89gh6, Sending CompileQuery request 2025-12-23T18:52:56.856687Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1659: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: ExecuteState, TraceId: 01kd68xab19k61mhm7pnx89gh6, ExecutePhyTx, tx: 0x00007C33D9082618 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-12-23T18:52:56.856782Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1816: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: ExecuteState, TraceId: 01kd68xab19k61mhm7pnx89gh6, Sending to Executer TraceId: 0 8 2025-12-23T18:52:56.857102Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1880: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: ExecuteState, TraceId: 01kd68xab19k61mhm7pnx89gh6, Created new KQP executer: [1:7587128346015798877:2319] isRollback: 0 2025-12-23T18:52:56.879310Z node 1 
:KQP_SESSION DEBUG: kqp_session_actor.cpp:2191: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: ExecuteState, TraceId: 01kd68xab19k61mhm7pnx89gh6, Forwarded TEvStreamData to [1:7587128346015798872:2489] 2025-12-23T18:52:56.888693Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2090: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: ExecuteState, TraceId: 01kd68xab19k61mhm7pnx89gh6, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-12-23T18:52:56.888880Z node 1 :KQP_SESSION INFO: kqp_session_actor.cpp:2383: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: ExecuteState, TraceId: 01kd68xab19k61mhm7pnx89gh6, txInfo Status: Committed Kind: Pure TotalDuration: 32.337 ServerDuration: 32.252 QueriesCount: 2 2025-12-23T18:52:56.888948Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2543: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: ExecuteState, TraceId: 01kd68xab19k61mhm7pnx89gh6, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-12-23T18:52:56.889738Z node 1 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: ExecuteState, TraceId: 01kd68xab19k61mhm7pnx89gh6, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:52:56.889773Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: ExecuteState, TraceId: 01kd68xab19k61mhm7pnx89gh6, EndCleanup, isFinal: 1 2025-12-23T18:52:56.889836Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: ExecuteState, TraceId: 01kd68xab19k61mhm7pnx89gh6, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7587128333130896391:2264] 2025-12-23T18:52:56.889857Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: unknown state, TraceId: 01kd68xab19k61mhm7pnx89gh6, Cleanup temp tables: 0 2025-12-23T18:52:56.890235Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=1&id=YWQwZmFlNzQtNzNhNjk2YTItYzFiMGM1Y2MtMjAwYzg1NTc=, ActorId: [1:7587128346015798873:2319], ActorState: unknown state, TraceId: 01kd68xab19k61mhm7pnx89gh6, Session actor destroyed 2025-12-23T18:52:56.902085Z node 1 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=1&id=OGI1MGUzYWUtZDg4YmQ5NzktNmIyZjY0YmUtYjkyNGE3MzQ=, ActorId: [1:7587128346015798871:2318], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:52:56.902144Z node 1 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=1&id=OGI1MGUzYWUtZDg4YmQ5NzktNmIyZjY0YmUtYjkyNGE3MzQ=, ActorId: [1:7587128346015798871:2318], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 
TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:52:56.902187Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=1&id=OGI1MGUzYWUtZDg4YmQ5NzktNmIyZjY0YmUtYjkyNGE3MzQ=, ActorId: [1:7587128346015798871:2318], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:52:56.902241Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=1&id=OGI1MGUzYWUtZDg4YmQ5NzktNmIyZjY0YmUtYjkyNGE3MzQ=, ActorId: [1:7587128346015798871:2318], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:52:56.902351Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=1&id=OGI1MGUzYWUtZDg4YmQ5NzktNmIyZjY0YmUtYjkyNGE3MzQ=, ActorId: [1:7587128346015798871:2318], ActorState: unknown state, Session actor destroyed 2025-12-23T18:52:57.829047Z node 2 :METADATA_PROVIDER WARN: log.cpp:841 ... 14: SessionId: ydb://session/3?node_id=8&id=NTNlYzljMGMtZmQxMjQ5NjAtNDVhMDIyZjAtOWM3YjAwYWE=, ActorId: [8:7587128479727095358:2330], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:53:28.592738Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=8&id=NTNlYzljMGMtZmQxMjQ5NjAtNDVhMDIyZjAtOWM3YjAwYWE=, ActorId: [8:7587128479727095358:2330], ActorState: unknown state, Session actor destroyed 2025-12-23T18:53:28.594442Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:292: [TQueryRetryActor] [TCpuLoadFetcherActor] OwnerId: [8:7587128484022062693:2887], ActorId: [8:7587128484022062694:2888], Starting query actor #1 [8:7587128484022062695:2889] 2025-12-23T18:53:28.594446Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:135: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7587128484022062694:2888], ActorId: [8:7587128484022062695:2889], Bootstrap. 
Database: /Root, IsSystemUser: 0, run create session 2025-12-23T18:53:28.597659Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM= (tmp dir name: 71ca62c9-4ce4-bd76-6dc0-fe916f9582fe) 2025-12-23T18:53:28.597801Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:53:28.598137Z node 8 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:156: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7587128484022062694:2888], ActorId: [8:7587128484022062695:2889], Successfully created session: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, run query 2025-12-23T18:53:28.598163Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:201: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7587128484022062694:2888], ActorId: [8:7587128484022062695:2889], RunDataQuery with SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, TxId: , text: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; 2025-12-23T18:53:28.598454Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ReadyState, TraceId: 01kd68y9mpa3jve3ex1c73c7jj, received request, proxyRequestId: 6 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; rpcActor: [8:7587128484022062698:2344] database: /Root databaseId: /Root pool id: 2025-12-23T18:53:28.598865Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1659: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ExecuteState, TraceId: 01kd68y9mpa3jve3ex1c73c7jj, ExecutePhyTx, tx: 0x00007C33D90604D8 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-12-23T18:53:28.598905Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1816: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ExecuteState, TraceId: 01kd68y9mpa3jve3ex1c73c7jj, Sending to Executer TraceId: 0 8 2025-12-23T18:53:28.599053Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1880: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ExecuteState, TraceId: 01kd68y9mpa3jve3ex1c73c7jj, Created new KQP executer: [8:7587128484022062701:2343] isRollback: 0 2025-12-23T18:53:28.602986Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2090: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ExecuteState, TraceId: 01kd68y9mpa3jve3ex1c73c7jj, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-12-23T18:53:28.603137Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2383: SessionId: 
ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ExecuteState, TraceId: 01kd68y9mpa3jve3ex1c73c7jj, txInfo Status: Committed Kind: ReadOnly TotalDuration: 4.356 ServerDuration: 4.287 QueriesCount: 2 2025-12-23T18:53:28.603245Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2543: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ExecuteState, TraceId: 01kd68y9mpa3jve3ex1c73c7jj, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-12-23T18:53:28.603315Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ExecuteState, TraceId: 01kd68y9mpa3jve3ex1c73c7jj, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:28.603353Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ExecuteState, TraceId: 01kd68y9mpa3jve3ex1c73c7jj, EndCleanup, isFinal: 0 2025-12-23T18:53:28.603416Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ExecuteState, TraceId: 01kd68y9mpa3jve3ex1c73c7jj, Sent query response back to proxy, proxyRequestId: 6, proxyId: [8:7587128466842192445:2267] 2025-12-23T18:53:28.603680Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:244: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7587128484022062694:2888], ActorId: [8:7587128484022062695:2889], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, TxId: 2025-12-23T18:53:28.603751Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:371: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7587128484022062694:2888], ActorId: [8:7587128484022062695:2889], Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, TxId: 2025-12-23T18:53:28.603807Z node 8 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:171: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7587128484022062694:2888], ActorId: [8:7587128484022062695:2889], Delete session: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM= 2025-12-23T18:53:28.603822Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:311: [TQueryRetryActor] [TCpuLoadFetcherActor] OwnerId: [8:7587128484022062693:2887], ActorId: [8:7587128484022062694:2888], Got response [8:7587128484022062695:2889] SUCCESS 2025-12-23T18:53:28.603955Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:53:28.603988Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:28.604005Z node 8 
:KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:53:28.604025Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:53:28.604078Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=8&id=NjRhMDAzOGItZmFmY2U3YjYtYjJjYzI4MGUtZjI1MGUwZWM=, ActorId: [8:7587128484022062697:2343], ActorState: unknown state, Session actor destroyed 2025-12-23T18:53:28.612309Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=8&id=MzQ4ODc3MzgtZmZhNzAzYzYtMWJkMGUyMjEtNzk5ZTk2OWI=, ActorId: [8:7587128479727095275:2328], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:53:28.612371Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=8&id=MzQ4ODc3MzgtZmZhNzAzYzYtMWJkMGUyMjEtNzk5ZTk2OWI=, ActorId: [8:7587128479727095275:2328], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:28.612403Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=8&id=MzQ4ODc3MzgtZmZhNzAzYzYtMWJkMGUyMjEtNzk5ZTk2OWI=, ActorId: [8:7587128479727095275:2328], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:53:28.612436Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=8&id=MzQ4ODc3MzgtZmZhNzAzYzYtMWJkMGUyMjEtNzk5ZTk2OWI=, ActorId: [8:7587128479727095275:2328], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:53:28.612515Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=8&id=MzQ4ODc3MzgtZmZhNzAzYzYtMWJkMGUyMjEtNzk5ZTk2OWI=, ActorId: [8:7587128479727095275:2328], ActorState: unknown state, Session actor destroyed 2025-12-23T18:53:29.117752Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7587128468808851046:2153];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:29.117838Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:29.124416Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7587128468824225922:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:29.124479Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:29.133132Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7587128467201516858:2152];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:29.133219Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:29.140297Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7587128467446208938:2094];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:29.140374Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |97.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |97.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2025-12-23T18:53:19.049497Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.049601Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.067822Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.067925Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.102779Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.108142Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:136:2161], cookie=7056905312436053080, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2025-12-23T18:53:19.108296Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:136:2161], cookie=7056905312436053080) 2025-12-23T18:53:19.108828Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:144:2166], cookie=12996300908573510970, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2025-12-23T18:53:19.108931Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:144:2166], cookie=12996300908573510970) 2025-12-23T18:53:19.109354Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:147:2169], cookie=4731872577108642136, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2025-12-23T18:53:19.109560Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2025-12-23T18:53:19.121504Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:147:2169], cookie=4731872577108642136) 2025-12-23T18:53:19.122066Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:152:2174], cookie=4935005601968312779, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2025-12-23T18:53:19.122325Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2025-12-23T18:53:19.134270Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:152:2174], cookie=4935005601968312779) 2025-12-23T18:53:19.468399Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 
72057594037927937 2025-12-23T18:53:19.468490Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.485983Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.486087Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.510267Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.510692Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:135:2159], cookie=4337180754279639218, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-12-23T18:53:19.513533Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-12-23T18:53:19.525131Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:135:2159], cookie=4337180754279639218) 2025-12-23T18:53:19.525647Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:145:2167], cookie=15089699235002788874, path="/Root/Res", config={ }) 2025-12-23T18:53:19.525841Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-12-23T18:53:19.537667Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:145:2167], cookie=15089699235002788874) 2025-12-23T18:53:19.541070Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:150:2172]. Cookie: 9937709270465144943. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:19.541135Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:150:2172], cookie=9937709270465144943) 2025-12-23T18:53:19.541650Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:150:2172]. Cookie: 4892019932893566073. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 28000 } } 2025-12-23T18:53:19.541696Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:150:2172], cookie=4892019932893566073) 2025-12-23T18:53:21.677261Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:21.677373Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:21.696949Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:21.697429Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:21.732429Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:21.732806Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:136:2161], cookie=10343139947248929578, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-12-23T18:53:21.733057Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-12-23T18:53:21.744742Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:136:2161], cookie=10343139947248929578) 2025-12-23T18:53:21.745242Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:147:2169], cookie=15267409377272985415, path="/Root/Res", config={ }) 2025-12-23T18:53:21.745449Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-12-23T18:53:21.757571Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:147:2169], cookie=15267409377272985415) 2025-12-23T18:53:21.758436Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:152:2174]. Cookie: 3966734267448529173. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:21.758503Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:152:2174], cookie=3966734267448529173) 2025-12-23T18:53:21.759027Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:152:2174]. Cookie: 2655650120559318053. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2025-12-23T18:53:21.759080Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:152:2174], cookie=2655650120559318053) 2025-12-23T18:53:23.853939Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:23.854029Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:23.865822Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:23.865939Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:23.890214Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:23.890611Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:134:2159], cookie=13210555278297520453, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-12-23T18:53:23.890939Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-12-23T18:53:23.913363Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:134:2159], cookie=13210555278297520453) 2025-12-23T18:53:23.914282Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:145:2167]. Cookie: 10921501856341244813. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:23.914339Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:145:2167], cookie=10921501856341244813) 2025-12-23T18:53:23.914777Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:145:2167]. Cookie: 467866403483671246. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-12-23T18:53:23.914838Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:145:2167], cookie=467866403483671246) 2025-12-23T18:53:23.915230Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:145:2167]. Cookie: 18178880250347407965. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1016500 } } 2025-12-23T18:53:23.915280Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:145:2167], cookie=18178880250347407965) 2025-12-23T18:53:26.115434Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:26.115548Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:26.134701Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:26.134860Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:26.169458Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:26.169886Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:136:2161], cookie=6589743627887440162, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-12-23T18:53:26.170174Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-12-23T18:53:26.182105Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:136:2161], cookie=6589743627887440162) 2025-12-23T18:53:26.182991Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:147:2169]. Cookie: 14587279754305987720. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:26.183057Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:147:2169], cookie=14587279754305987720) 2025-12-23T18:53:26.183599Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:147:2169]. Cookie: 4636077255341957161. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2025-12-23T18:53:26.183657Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:147:2169], cookie=4636077255341957161) 2025-12-23T18:53:28.624664Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:186:2193]. Cookie: 10155588314191570026. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:28.624741Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:186:2193], cookie=10155588314191570026) 2025-12-23T18:53:28.625161Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:186:2193]. Cookie: 1411523799428822391. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2025-12-23T18:53:28.625198Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:186:2193], cookie=1411523799428822391) 2025-12-23T18:53:30.686289Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:219:2219]. Cookie: 1606239513091371097. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:30.686368Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:219:2219], cookie=1606239513091371097) 2025-12-23T18:53:30.686875Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:219:2219]. Cookie: 10132276785439640398. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2025-12-23T18:53:30.686939Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:219:2219], cookie=10132276785439640398) |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError >> TDatabaseResolverTests::Greenplum_MasterNode >> TDatabaseResolverTests::ClickHouseNative >> TDatabaseResolverTests::Ydb_Serverless >> TDatabaseResolverTests::MySQL >> TDatabaseResolverTests::Ydb_Dedicated >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> TDatabaseResolverTests::DataStreams_Serverless |97.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp >> TDatabaseResolverTests::Greenplum_PermissionDenied >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] >> KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2025-12-23T18:53:33.808466Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2025-12-23T18:53:33.808505Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2025-12-23T18:53:33.890939Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. 
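The TDatabaseResolverTests errors above all report the same failure shape: an HTTP request to a resolver endpoint built from a database id ('/ydbc/cloud-prod/database?databaseId=...', '/managed-greenplum/v1/clusters/<id>/master-hosts', '/managed-mysql/v1/clusters/<id>/hosts') that comes back as a connection timeout, a 404, or a permission error. The snippet below is only a hedged sketch of how such resolver URLs are composed: the endpoints and paths are copied from the errors in this log, while the helper, the mapping, and the https scheme are illustrative assumptions, not the actual database_resolver.cpp implementation.

# Illustrative only: rebuilds the resolver URLs seen in the errors above.
# Endpoint/path templates are copied from the log; the helper itself is
# hypothetical and not part of YDB's database_resolver.cpp.
RESOLVER_PATHS = {
    "ydb": ("ydbc.ydb.cloud.yandex.net:8789",
            "/ydbc/cloud-prod/database?databaseId={db_id}"),
    "datastreams": ("ydbc.ydb.cloud.yandex.net:8789",
                    "/ydbc/cloud-prod/database?databaseId={db_id}"),
    "greenplum": ("mdb.api.cloud.yandex.net:443",
                  "/managed-greenplum/v1/clusters/{db_id}/master-hosts"),
    "mysql": ("mdb.api.cloud.yandex.net:443",
              "/managed-mysql/v1/clusters/{db_id}/hosts"),
}

def resolver_url(kind: str, db_id: str) -> str:
    """Compose the resolver request URL for a managed database id."""
    endpoint, path = RESOLVER_PATHS[kind]
    return "https://" + endpoint + path.format(db_id=db_id)  # scheme is an assumption

# Example with the database id that appears in the log:
print(resolver_url("greenplum", "etn021us5r9rhld1vgbh"))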
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2025-12-23T18:53:33.886971Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2025-12-23T18:53:33.896834Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |97.6%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] >> KqpScripting::QueryStats >> KqpScripting::LimitOnShard >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced >> KqpYql::NonStrictDml >> KqpYql::UpdateBadType >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce >> KqpYql::UuidPrimaryKeyDisabled >> KqpYql::DdlDmlMix >> KqpScripting::StreamScanQuery >> KqpScripting::StreamExecuteYqlScriptSeveralQueries >> KqpPragma::OrderedColumns >> KqpYql::BinaryJsonOffsetNormal >> KqpScripting::UnsafeTimestampCast >> KqpYql::EvaluateExpr3 >> KqpScripting::SelectNullType >> KqpScripting::StreamExecuteYqlScriptScanCancelation >> KqpYql::TableRange >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced >> KqpYql::EvaluateExpr1 >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> TestYmqHttpProxy::TestCreateQueueWithTags [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag >> TestYmqHttpProxy::TestDeleteMessage >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] >> ResourcePoolsDdl::TestDropResourcePool >> KqpYql::UuidPrimaryKeyDisabled [GOOD] >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 9967, MsgBus: 10030 2025-12-23T18:53:36.954710Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128517252819659:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.954814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ed3/r3tmp/tmpws9zAu/pdisk_1.dat 2025-12-23T18:53:37.334766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.363660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.363794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.383539Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.513954Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128517252819627:2081] 1766516016949413 != 1766516016949416 2025-12-23T18:53:37.516796Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9967, node 1 2025-12-23T18:53:37.563556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.619116Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.619143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.619164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.619257Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:37.961780Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10030 TClient is connected to server localhost:10030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516017574 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.301725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:40.065036Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534432689699:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.065163Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.065560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534432689708:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.065620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.458588Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128534432689722:2506] txid# 281474976710658, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2025-12-23T18:53:40.501394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534432689730:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.501491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.501767Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534432689732:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.501811Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.519292Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128534432689739:2515] txid# 281474976710659, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2025-12-23T18:53:40.528850Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534432689747:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.528946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.529280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534432689749:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.529364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.556664Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128534432689756:2524] txid# 281474976710660, issues: { message: "Uuid as primary key is forbiden by configuration: val" severity: 1 } 2025-12-23T18:53:40.569378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534432689764:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.569469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.569776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534432689766:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.569815Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.589088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.730439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534432689853:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.730547Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.735106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534432689856:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.735195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> HttpRequest::ProbeBaseStats [GOOD] >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> HttpRequest::ProbeBaseStatsServerless [GOOD] >> KqpYql::EvaluateExpr3 [GOOD] >> KqpYql::EvaluateExprPgNull >> KqpYql::UpdateBadType [GOOD] >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure >> KqpYql::EvaluateExpr1 [GOOD] >> KqpYql::EvaluateExpr2 >> KqpYql::NonStrictDml [GOOD] >> KqpYql::PgIntPrimaryKey >> KqpScripting::SelectNullType [GOOD] >> KqpScripting::StreamDdlAndDml >> KqpYql::TableRange [GOOD] >> KqpYql::TableNameConflict >> KqpScripting::LimitOnShard [GOOD] >> KqpScripting::NoAstSizeLimit >> KqpYql::DdlDmlMix [GOOD] >> KqpYql::CreateUseTable >> KqpPragma::OrderedColumns [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] >> KqpScripting::UnsafeTimestampCast [GOOD] >> KqpScripting::SystemTables >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex >> KqpScripting::StreamScanQuery [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeBaseStats [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:21.941957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:22.027723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:22.032718Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:22.033031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:22.033114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:22.405375Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:22.493055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:22.493185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:22.552122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:22.642560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003204/r3tmp/tmpBXBXb0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6854, node 1 2025-12-23T18:52:23.287901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:23.288807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:23.288846Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:23.288872Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:23.289074Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:23.353830Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:9331 2025-12-23T18:52:23.874382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:26.351876Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:26.357367Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:26.360229Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:26.385838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:26.385933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:26.423472Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:26.425352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:26.659376Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:52:26.671053Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.671629Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.672167Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.672605Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.672788Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.672863Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.673098Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.673280Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.673371Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.780929Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:26.781026Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:26.818140Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:26.940720Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:26.980221Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:26.980319Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:27.003107Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:27.004353Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:27.004501Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:27.004543Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:52:27.004584Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:27.004621Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:27.004664Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:27.004708Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:27.005002Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:27.042883Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:27.042977Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:27.052794Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:52:27.060029Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:52:27.061208Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:52:27.065501Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:52:27.091535Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:52:27.091582Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:52:27.091647Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:52:27.110112Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:27.118589Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:27.127341Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:27.127479Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:27.140228Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:27.267673Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:27.571092Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:27.639623Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:27.639705Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:265 ... request id = 55, ReplyToActorId = [2:5610:4618], StatRequests.size() = 1 2025-12-23T18:53:24.735001Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 56 ], ReplyToActorId[ [2:5645:4632]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:24.735301Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 56 ] 2025-12-23T18:53:24.735338Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 56, ReplyToActorId = [2:5645:4632], StatRequests.size() = 1 2025-12-23T18:53:25.719175Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:53:25.719235Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:53:25.719260Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:53:25.719284Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:53:26.041260Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:26.251831Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 57 ], ReplyToActorId[ [2:5683:4649]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:26.252179Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 57 ] 2025-12-23T18:53:26.252246Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 57, ReplyToActorId = [2:5683:4649], StatRequests.size() = 1 2025-12-23T18:53:27.637918Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 58 ], ReplyToActorId[ [2:5719:4667]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:27.638304Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 58 ] 2025-12-23T18:53:27.638355Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 58, ReplyToActorId = [2:5719:4667], StatRequests.size() = 1 2025-12-23T18:53:28.954509Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:53:28.954701Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2025-12-23T18:53:28.954900Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2025-12-23T18:53:28.998432Z node 2 :STATISTICS DEBUG: 
schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2025-12-23T18:53:28.998489Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:28.998633Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2025-12-23T18:53:29.012831Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:29.869641Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 59 ], ReplyToActorId[ [2:5753:4683]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:29.869954Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 59 ] 2025-12-23T18:53:29.870001Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 59, ReplyToActorId = [2:5753:4683], StatRequests.size() = 1 2025-12-23T18:53:31.493698Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 60 ], ReplyToActorId[ [2:5796:4709]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:31.493905Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 60 ] 2025-12-23T18:53:31.493935Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 60, ReplyToActorId = [2:5796:4709], StatRequests.size() = 1 2025-12-23T18:53:32.535264Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 61 ], ReplyToActorId[ [2:5829:4723]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:32.535495Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 61 ] 2025-12-23T18:53:32.535527Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 61, ReplyToActorId = [2:5829:4723], StatRequests.size() = 1 2025-12-23T18:53:33.490256Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:33.696066Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 62 ], ReplyToActorId[ [2:5864:4737]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:33.696411Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 62 ] 2025-12-23T18:53:33.696461Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 62, ReplyToActorId = [2:5864:4737], StatRequests.size() = 1 2025-12-23T18:53:34.707925Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 63 ], ReplyToActorId[ [2:5899:4755]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:34.708210Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 63 ] 2025-12-23T18:53:34.708256Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 63, ReplyToActorId = [2:5899:4755], StatRequests.size() = 1 2025-12-23T18:53:35.541613Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:53:35.541803Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 13 2025-12-23T18:53:35.541897Z node 
2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 13 2025-12-23T18:53:35.552771Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:35.552844Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:35.553083Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 50, entries count: 2, are all stats full: 1 2025-12-23T18:53:35.566490Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:35.751508Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 64 ], ReplyToActorId[ [2:5934:4773]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:35.751852Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 64 ] 2025-12-23T18:53:35.751899Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 64, ReplyToActorId = [2:5934:4773], StatRequests.size() = 1 2025-12-23T18:53:36.748409Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 65 ], ReplyToActorId[ [2:5967:4789]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:36.748749Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 65 ] 2025-12-23T18:53:36.748803Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 65, ReplyToActorId = [2:5967:4789], StatRequests.size() = 1 2025-12-23T18:53:37.673899Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 66 ], ReplyToActorId[ [2:5998:4803]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:37.674215Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 66 ] 2025-12-23T18:53:37.674261Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 66, ReplyToActorId = [2:5998:4803], StatRequests.size() = 1 2025-12-23T18:53:38.613852Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:38.875480Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 67 ], ReplyToActorId[ [2:6038:4820]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:38.875790Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 67 ] 2025-12-23T18:53:38.875837Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 67, ReplyToActorId = [2:6038:4820], StatRequests.size() = 1 2025-12-23T18:53:40.020980Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 68 ], ReplyToActorId[ [2:6074:4834]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:40.021322Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 68 ] 2025-12-23T18:53:40.021371Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 68, ReplyToActorId = [2:6074:4834], StatRequests.size() = 1 2025-12-23T18:53:40.934363Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard 
count = 1 2025-12-23T18:53:40.934611Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 14 2025-12-23T18:53:40.934704Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 14 2025-12-23T18:53:40.945442Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:40.945500Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:40.945724Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 50, entries count: 2, are all stats full: 1 2025-12-23T18:53:40.959269Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:41.198289Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 69 ], ReplyToActorId[ [2:6110:4852]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:41.198536Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 69 ] 2025-12-23T18:53:41.198568Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 69, ReplyToActorId = [2:6110:4852], StatRequests.size() = 1 2025-12-23T18:53:41.199342Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 70 ], ReplyToActorId[ [2:6113:4855]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:41.199499Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 70 ] 2025-12-23T18:53:41.199536Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 70, ReplyToActorId = [2:6113:4855], StatRequests.size() = 1 Answer: 'HTTP/1.1 200 Ok Content-Type: application/json Connection: Close { "row_count":1000, "bytes_size":11272 }' |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2025-12-23T18:53:18.900450Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:18.900568Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:18.922649Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:18.922778Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:18.960753Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.408368Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.408512Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.427920Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.428047Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.453012Z node 2 :KESUS_TABLET DEBUG: 
tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.828204Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.828305Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.846857Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.847233Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.881635Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.882136Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=10767165044609308272, session=0, seqNo=0) 2025-12-23T18:53:19.882309Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:19.894155Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=10767165044609308272, session=1) 2025-12-23T18:53:19.894670Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:146:2168], cookie=4802965186568227282) 2025-12-23T18:53:19.894785Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:146:2168], cookie=4802965186568227282) 2025-12-23T18:53:20.318705Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:20.331264Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:20.680253Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:20.692121Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:21.041239Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:21.053567Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:21.403468Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:21.415463Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:21.776657Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:21.788650Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:22.138552Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:22.150796Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:22.490719Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:22.502409Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:22.851396Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:22.863329Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:23.212747Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] 
TTxSelfCheck::Execute 2025-12-23T18:53:23.224723Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:23.605529Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:23.617253Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:23.987783Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:23.999730Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:24.360677Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:24.373140Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:24.734787Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:24.747676Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:25.108416Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:25.120343Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:25.512871Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:25.525302Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:25.900761Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:25.912948Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:26.273775Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:26.285845Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:26.646168Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:26.658412Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:27.018767Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:27.030996Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:27.402608Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:27.414795Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:27.775157Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:27.787163Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:28.147062Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:28.158894Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:28.519471Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:28.531308Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:28.891460Z node 3 
:KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:28.903229Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:29.263554Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:29.275720Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:29.645644Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:29.657634Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:30.016699Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:30.028799Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:30.388577Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:30.400556Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:30.761386Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:30.773348Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:31.176833Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:31.188924Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:31.548221Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:31.560267Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:31.930399Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:31.942038Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:32.301260Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:32.313004Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:32.662722Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:32.674579Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:33.034421Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:33.045961Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:33.404990Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:33.416653Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:33.776153Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:33.788519Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:34.148447Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:34.160347Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: 
[72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:34.521595Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:34.533768Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:34.915871Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:34.927828Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:35.289208Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:35.301546Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:35.672621Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:35.684796Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:36.044774Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:36.056494Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:36.417100Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:36.429284Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:36.832088Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:36.844287Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:37.229965Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:37.251646Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:37.656594Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:37.668806Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:38.039099Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:38.055875Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:38.422658Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:38.447077Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:38.855068Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:38.868216Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:39.230496Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:39.242706Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:39.604776Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:39.616809Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:39.967238Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 
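Most of this section is repetitive background output rather than failures: the TTxSelfCheck Execute/Complete pairs above repeat roughly every 360 ms, and the "Resource pool default not found" warning recurs across the KQP test blocks even though those tests finish [GOOD]. When triaging a run like this, a frequency count of WARN/ERROR source locations is usually enough to separate that noise from a genuine problem. A minimal sketch, assuming the log has been saved to a local file (the ya_log.txt name below is only a placeholder for wherever the run's output was captured):

```python
import re
from collections import Counter

# Count WARN/ERROR occurrences per source location in a ya test log.
# "ya_log.txt" is a placeholder file name, not a path taken from this run.
pattern = re.compile(r"\b(WARN|ERROR):\s+([\w/.]+:\d+)")

counts = Counter()
with open("ya_log.txt", encoding="utf-8", errors="replace") as log:
    for line in log:
        for severity, location in pattern.findall(line):
            counts[(severity, location)] += 1

# Print the ten noisiest log sites; in this section scheme_actors.cpp:225
# (the "Resource pool default not found" warning) would dominate.
for (severity, location), n in counts.most_common(10):
    print(f"{n:6d}  {severity:5s}  {location}")
```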
2025-12-23T18:53:39.979307Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:40.319507Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:40.331209Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:40.683640Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:40.696113Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:41.050732Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:41.063639Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:41.406024Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:41.418410Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:41.782167Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:41.794627Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:42.147556Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:42.159913Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:42.564349Z node 3 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-12-23T18:53:42.564449Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-12-23T18:53:42.577365Z node 3 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-12-23T18:53:42.588568Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:535:2482], cookie=8922021516225520646) 2025-12-23T18:53:42.588686Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:535:2482], cookie=8922021516225520646) 2025-12-23T18:53:42.971494Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:42.971611Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:42.990762Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:42.990934Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:43.016354Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:43.022865Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:134:2159], cookie=826383613629698107, path="Root", config={ MaxUnitsPerSecond: 100 }) 2025-12-23T18:53:43.023128Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-12-23T18:53:43.045869Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:134:2159], cookie=826383613629698107) 2025-12-23T18:53:43.047906Z node 4 :KESUS_TABLET TRACE: 
quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:144:2166]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:43.047979Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:144:2166], cookie=0) 2025-12-23T18:53:43.048312Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:146:2168]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-12-23T18:53:43.048358Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:146:2168], cookie=0) 2025-12-23T18:53:43.090323Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:144:2166]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-12-23T18:53:43.090422Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:146:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-12-23T18:53:43.090783Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([4:149:2171]) 2025-12-23T18:53:43.090988Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:37: [72057594037927937] Send TEvResourcesAllocated to [4:146:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2025-12-23T18:53:43.143476Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:144:2166]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] >> KqpScripting::QueryStats [GOOD] >> KqpScripting::Pure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] Test command err: Trying to start YDB, gRPC: 9878, MsgBus: 21407 2025-12-23T18:53:36.967049Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128519705292373:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.967570Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ec6/r3tmp/tmprIGIOQ/pdisk_1.dat 2025-12-23T18:53:37.270927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.305354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.305469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.363941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.366193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:37.374881Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128519705292261:2081] 1766516016950106 != 1766516016950109 TServer::EnableGrpc on GrpcPort 9878, node 1 2025-12-23T18:53:37.529696Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.619234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.619260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.619273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.619353Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21407 2025-12-23T18:53:37.968972Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21407 WaitRootIsUp 'Root'... 
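In the TKesusTest::TestStopResourceAllocationWhenPipeDestroyed trace above, the quoter grants Amount: 5 to each of the two subscribers of the "Root" resource (MaxUnitsPerSecond: 100), and once one pipe is destroyed and its session expires it grants Amount: 10 to the surviving subscriber. Those amounts are what an equal per-tick split of the resource would produce. The snippet below reproduces only that arithmetic, under an assumed 0.1 s allocation interval; it is an illustration of the observed numbers, not the Kesus scheduler itself.

```python
# Toy reconstruction of the allocation pattern in the Kesus trace above.
# Assumptions (not taken from the test code): a 0.1 s allocation interval and
# an equal split of the "Root" resource (MaxUnitsPerSecond: 100) between all
# currently subscribed consumers.
RATE_UNITS_PER_SEC = 100.0
TICK_SECONDS = 0.1


def allocate_tick(subscribers):
    """Return the per-subscriber grant for one allocation tick."""
    if not subscribers:
        return {}
    share = RATE_UNITS_PER_SEC * TICK_SECONDS / len(subscribers)
    return {sub: share for sub in subscribers}


# Two live subscribers -> Amount: 5 each, as in the first pair of
# TEvResourcesAllocated events.
print(allocate_tick(["session-144", "session-146"]))
# After one subscriber's pipe is destroyed -> Amount: 10 for the survivor.
print(allocate_tick(["session-144"]))
```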
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516017427 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.293543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.309111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.480034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.611546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.684560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:40.091698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536885163318:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.091784Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.091951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536885163328:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.091975Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.482678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.511005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.536484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.562003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.589686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.621515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.656978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.701581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.766069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536885164195:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.766159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.766380Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536885164201:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.766644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536885164200:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.766717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.769730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.780034Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128536885164204:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:40.844785Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128536885164255:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:41.959415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128519705292373:2144];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:41.959513Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:4:26: Error: At function: KiUpdateTable!
:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
:3:20: Error: Failed to convert 'Amount': Optional to Optional
:3:20: Error: Row type mismatch for table: db.[/Root/Test] |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeBaseStatsServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:19.279873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:19.398758Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:19.406435Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:19.406849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:19.406955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:19.820664Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:19.916615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:19.916720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:19.974360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:20.034996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00322b/r3tmp/tmpeXb9t0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9655, node 1 2025-12-23T18:52:20.720255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:20.721306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:20.721352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:20.721381Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:20.721606Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:20.786628Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:15463 2025-12-23T18:52:21.319454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:23.823130Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:23.828525Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:23.831146Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:23.854481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:23.854564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:23.891463Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:23.893182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:24.113729Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.114277Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.114837Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.115215Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.115445Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.115511Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.115581Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.115686Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.115899Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:24.223473Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:52:24.234667Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:24.234766Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:24.271614Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:24.387794Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:24.440545Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:24.440633Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:24.467395Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:24.467562Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:24.467814Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:24.467879Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column 
count# 0 2025-12-23T18:52:24.467947Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:24.468009Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:24.468069Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:24.468123Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:24.468605Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:24.512300Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:24.512397Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2187:2604], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:24.514683Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2184:2601] 2025-12-23T18:52:24.535658Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:52:24.540869Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2272:2660] 2025-12-23T18:52:24.541009Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2272:2660], schemeshard id = 72075186224037897 2025-12-23T18:52:24.560173Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2257:2653] Owner: [2:2256:2652]. Describe result: PathErrorUnknown 2025-12-23T18:52:24.560229Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2257:2653] Owner: [2:2256:2652]. Creating table 2025-12-23T18:52:24.560288Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2257:2653] Owner: [2:2256:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:52:24.581105Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2696], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:24.586985Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:24.595473Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2257:2653] Owner: [2:2256:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:24.595587Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2257:2653] Owner: [2:2256:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:24.602541Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2257:2653] Owner: [2:2256:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:24.718973Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:25.043039Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2257:2653] Owner: [2:2256:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:25.159282Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2257:2653] Owner: [2:2256:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:25.159346Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2257:2653] Owner: [2:2256:2652]. C ... sult] RequestId[ 51 ] 2025-12-23T18:53:24.070582Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 51, ReplyToActorId = [2:6259:5027], StatRequests.size() = 1 2025-12-23T18:53:25.278676Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 52 ], ReplyToActorId[ [2:6291:5042]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:25.279072Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 52 ] 2025-12-23T18:53:25.279124Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 52, ReplyToActorId = [2:6291:5042], StatRequests.size() = 1 2025-12-23T18:53:25.729024Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:25.729098Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:25.729368Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:25.744753Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:26.528768Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 53 ], ReplyToActorId[ [2:6329:5063]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:26.529113Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 53 ] 2025-12-23T18:53:26.529162Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 53, ReplyToActorId = [2:6329:5063], StatRequests.size() = 1 2025-12-23T18:53:27.594509Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:27.714474Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 54 ], ReplyToActorId[ [2:6361:5078]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:27.714969Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 54 ] 2025-12-23T18:53:27.715019Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 54, ReplyToActorId = [2:6361:5078], StatRequests.size() = 1 2025-12-23T18:53:28.665577Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2025-12-23T18:53:28.665656Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 
5.307000s, at schemeshard: 72075186224037899 2025-12-23T18:53:28.665987Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2025-12-23T18:53:28.680907Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:28.900932Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 55 ], ReplyToActorId[ [2:6399:5096]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:28.901280Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 55 ] 2025-12-23T18:53:28.901319Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 55, ReplyToActorId = [2:6399:5096], StatRequests.size() = 1 2025-12-23T18:53:29.571092Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:53:29.571156Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:53:29.571186Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:53:29.571214Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:53:30.759575Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 56 ], ReplyToActorId[ [2:6444:5117]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:30.759998Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 56 ] 2025-12-23T18:53:30.760049Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 56, ReplyToActorId = [2:6444:5117], StatRequests.size() = 1 2025-12-23T18:53:31.473696Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:53:31.473915Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 11 2025-12-23T18:53:31.474419Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2025-12-23T18:53:32.367216Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 57 ], ReplyToActorId[ [2:6480:5134]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:32.367562Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 57 ] 2025-12-23T18:53:32.367594Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 57, ReplyToActorId = [2:6480:5134], StatRequests.size() = 1 2025-12-23T18:53:33.279797Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:33.279857Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:33.280029Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 
2025-12-23T18:53:33.293288Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:34.767964Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 58 ], ReplyToActorId[ [2:6518:5155]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:34.768244Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 58 ] 2025-12-23T18:53:34.768275Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 58, ReplyToActorId = [2:6518:5155], StatRequests.size() = 1 2025-12-23T18:53:36.483895Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 59 ], ReplyToActorId[ [2:6550:5170]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:36.484249Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 59 ] 2025-12-23T18:53:36.484295Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 59, ReplyToActorId = [2:6550:5170], StatRequests.size() = 1 2025-12-23T18:53:38.038393Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:38.152309Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 60 ], ReplyToActorId[ [2:6594:5196]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:38.152696Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 60 ] 2025-12-23T18:53:38.152752Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 60, ReplyToActorId = [2:6594:5196], StatRequests.size() = 1 2025-12-23T18:53:38.239542Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2025-12-23T18:53:38.239623Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.913000s, at schemeshard: 72075186224037899 2025-12-23T18:53:38.239855Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 27, entries count: 1, are all stats full: 1 2025-12-23T18:53:38.261891Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:39.458807Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 61 ], ReplyToActorId[ [2:6634:5217]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:39.459145Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 61 ] 2025-12-23T18:53:39.459193Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 61, ReplyToActorId = [2:6634:5217], StatRequests.size() = 1 2025-12-23T18:53:40.666773Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 62 ], ReplyToActorId[ [2:6668:5232]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:40.667173Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 62 ] 2025-12-23T18:53:40.667221Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 62, ReplyToActorId = [2:6668:5232], StatRequests.size() = 1 2025-12-23T18:53:41.151646Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), 
node count = 1, schemeshard count = 1 2025-12-23T18:53:41.151821Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2025-12-23T18:53:41.152228Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2025-12-23T18:53:41.174021Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:41.174086Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:41.174327Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:41.187479Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:41.899231Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 63 ], ReplyToActorId[ [2:6710:5256]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:41.899540Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 63 ] 2025-12-23T18:53:41.899584Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 63, ReplyToActorId = [2:6710:5256], StatRequests.size() = 1 2025-12-23T18:53:41.900479Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 64 ], ReplyToActorId[ [2:6713:5259]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:41.900688Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 64 ] 2025-12-23T18:53:41.900736Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 64, ReplyToActorId = [2:6713:5259], StatRequests.size() = 1 Answer: 'HTTP/1.1 200 Ok Content-Type: application/json Connection: Close { "row_count":1000, "bytes_size":11296 }' >> TestKinesisHttpProxy::TestCounters >> KqpYql::TestUuidPrimaryKeyPrefixSearch |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TestYmqHttpProxy::TestDeleteMessage [GOOD] >> TestKinesisHttpProxy::BadRequestUnknownMethod >> KqpScripting::StreamExecuteYqlScriptScanCancelation [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce >> TestYmqHttpProxy::TestDeleteMessageBatch >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat >> KqpScripting::StreamExecuteYqlScriptScan >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade >> KqpPragma::Auth >> BasicStatistics::ServerlessTimeIntervals [GOOD] >> KqpYql::TableUseBeforeCreate >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> KqpScripting::StreamExecuteYqlScriptData >> Yq_1::DescribeQuery [GOOD] >> KqpScripting::NoAstSizeLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessTimeIntervals [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:52:21.434564Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:21.505311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:21.509055Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:21.509302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:21.509371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:21.831633Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:21.923398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:21.923522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:21.981494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:22.006133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003208/r3tmp/tmp324uAJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2018, node 1 2025-12-23T18:52:22.710070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:22.710988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:22.711029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:22.711055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:22.711263Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:22.775624Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:29559 2025-12-23T18:52:23.291556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:25.736579Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:25.741699Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:25.744226Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:25.768584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:25.768674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:25.806160Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:25.807756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:26.036615Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.037342Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.038098Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.038700Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.038932Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.039014Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.039124Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.039238Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.039337Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.155535Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:52:26.167142Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:26.167246Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:26.180288Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:26.327587Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:26.369525Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:26.369608Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:26.394443Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:26.395810Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:26.395982Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:26.396027Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column 
count# 0 2025-12-23T18:52:26.396080Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:26.396135Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:26.396179Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:26.396220Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:26.396575Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:26.438186Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:26.438266Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2188:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:26.441332Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2185:2602] 2025-12-23T18:52:26.461539Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:52:26.466411Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2258:2658] 2025-12-23T18:52:26.466785Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2258:2658], schemeshard id = 72075186224037897 2025-12-23T18:52:26.491716Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Describe result: PathErrorUnknown 2025-12-23T18:52:26.491782Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Creating table 2025-12-23T18:52:26.491852Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:52:26.513074Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2356:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:26.518842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:26.526627Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:26.526812Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:26.538492Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:26.637369Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:26.938166Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:27.031150Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:27.031230Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. C ... 5Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-12-23T18:52:59.328589Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:5628:4382], StatRequests.size() = 1 2025-12-23T18:52:59.783763Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037905 2025-12-23T18:52:59.783820Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 23.896000s, at schemeshard: 72075186224037905 2025-12-23T18:52:59.784063Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 26, entries count: 1, are all stats full: 1 2025-12-23T18:52:59.797322Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:00.119721Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:00.590624Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:5671:4404]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:00.591141Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-12-23T18:53:00.591192Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:5671:4404], StatRequests.size() = 1 2025-12-23T18:53:01.733305Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:5700:4416]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:01.733651Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-12-23T18:53:01.733696Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:5700:4416], StatRequests.size() = 1 2025-12-23T18:53:02.349777Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-12-23T18:53:02.350002Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2025-12-23T18:53:02.350574Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:02.350876Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-12-23T18:53:02.394209Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 
72075186224037897 2025-12-23T18:53:02.394266Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:02.394475Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:02.408503Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:02.835296Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:5733:4432]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:02.835578Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-12-23T18:53:02.835611Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:5733:4432], StatRequests.size() = 1 2025-12-23T18:53:02.835994Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:5735:4434]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:02.839340Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2025-12-23T18:53:02.839406Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:5735:4434], StatRequests.size() = 1 2025-12-23T18:53:07.632951Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:07.633024Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:07.633256Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:07.659934Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:13.161509Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:13.161566Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:13.161980Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:13.176019Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:18.699209Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:18.699267Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:18.699503Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:18.713929Z node 2 
:STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:21.871150Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:23.983408Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:23.983483Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:23.983812Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:23.998260Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:25.799946Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037905 2025-12-23T18:53:25.800021Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 23.954000s, at schemeshard: 72075186224037905 2025-12-23T18:53:25.800226Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 26, entries count: 1, are all stats full: 1 2025-12-23T18:53:25.820421Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:27.419533Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:53:27.419610Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:53:27.419649Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:53:27.419710Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:53:30.222442Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2025-12-23T18:53:30.222525Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 22.708000s, at schemeshard: 72075186224037899 2025-12-23T18:53:30.222899Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2025-12-23T18:53:30.238432Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:31.068449Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:31.068530Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:31.068827Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all 
stats full: 1 2025-12-23T18:53:31.083678Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:39.379577Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-12-23T18:53:39.380165Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 8 2025-12-23T18:53:39.380563Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:53:39.380651Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 8 2025-12-23T18:53:39.401981Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:39.402041Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:39.402215Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:39.415573Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:45.012043Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:45.012123Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:45.012354Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:53:45.025726Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> ResourcePoolsDdl::TestDropResourcePool [GOOD] >> KqpYql::PgIntPrimaryKey [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] >> KqpYql::TableNameConflict [GOOD] >> BasicStatistics::NotFullStatisticsDatashard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2025-12-23T18:53:05.728712Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128385106599788:2147];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:05.728940Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E1223 18:53:05.953051184 1520728 dns_resolver_ares.cc:452] no server name supplied in dns URI E1223 18:53:05.953202245 1520728 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-12-23T18:53:05.955705Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:05.955793Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:05.960253Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:05.960608Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:05.961929Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:05.962780Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:05.972465Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:05.979894Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:05.979986Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:05.980120Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:05.980126Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:05.980189Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:05.984960Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.003777Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.003867Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.009489Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.009984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:06.012123Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.013846Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.015564Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.020688Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.020997Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.021088Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:14542 2025-12-23T18:53:06.021414Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.021977Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.022091Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.027002Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.030486Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.033759Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12-23T18:53:06.034836Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:14542: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:14542 } ] 2025-12- ... . DatabaseId : /Root. }. Update input channelId: 1, peer: [4:7587128558702988527:2958] 2025-12-23T18:53:45.046059Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587128558702988528:2959], TxId: 281474976715738, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2025-12-23T18:53:45.046116Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715738, task: 1, CA Id [4:7587128558702988527:2958]. Recv TEvReadResult from ShardID=72075186224037889, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-12-23T18:53:45.046140Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715738, task: 1, CA Id [4:7587128558702988527:2958]. Taken 0 locks 2025-12-23T18:53:45.046154Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715738, task: 1, CA Id [4:7587128558702988527:2958]. new data for read #0 seqno = 1 finished = 1 2025-12-23T18:53:45.046174Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587128558702988527:2958], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2025-12-23T18:53:45.046180Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1107: SelfId: [4:7587128558702988528:2959], TxId: 281474976715738, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7587128558702988527 RawX2: 4503616807242638 } } DstEndpoint { ActorId { RawX1: 7587128558702988528 RawX2: 4503616807242639 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7587128558702988528 RawX2: 4503616807242639 } } DstEndpoint { ActorId { RawX1: 7587128558702988523 RawX2: 4503616807242024 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2025-12-23T18:53:45.046193Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587128558702988527:2958], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:53:45.046209Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715738, task: 1, CA Id [4:7587128558702988527:2958]. 
enter getasyncinputdata results size 1, freeSpace 8388608 2025-12-23T18:53:45.046212Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587128558702988528:2959], TxId: 281474976715738, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:53:45.046226Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715738, task: 1, CA Id [4:7587128558702988527:2958]. enter pack cells method shardId: 72075186224037889 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-12-23T18:53:45.046251Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715738, task: 1, CA Id [4:7587128558702988527:2958]. exit pack cells method shardId: 72075186224037889 processedRows: 0 packed rows: 1 freeSpace: 8386350 2025-12-23T18:53:45.046271Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715738, task: 1, CA Id [4:7587128558702988527:2958]. returned 1 rows; processed 1 rows 2025-12-23T18:53:45.046308Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715738, task: 1, CA Id [4:7587128558702988527:2958]. dropping batch for read #0 2025-12-23T18:53:45.046319Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715738, task: 1, CA Id [4:7587128558702988527:2958]. effective maxinflight 1024 sorted 0 2025-12-23T18:53:45.046328Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715738, task: 1, CA Id [4:7587128558702988527:2958]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-12-23T18:53:45.046342Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715738, task: 1, CA Id [4:7587128558702988527:2958]. returned async data processed rows 1 left freeSpace 8386350 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-12-23T18:53:45.046505Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7587128558702988527:2958], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:53:45.046523Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587128558702988527:2958], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:53:45.046533Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587128558702988528:2959], TxId: 281474976715738, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
CA StateFunc 271646923 2025-12-23T18:53:45.046562Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715738, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-12-23T18:53:45.046573Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715738, task: 2. Finish input channelId: 1, from: [4:7587128558702988527:2958] 2025-12-23T18:53:45.046625Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587128558702988528:2959], TxId: 281474976715738, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:53:45.046689Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587128558702988527:2958], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2025-12-23T18:53:45.046718Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587128558702988527:2958], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:53:45.046756Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715738, task: 1. Tasks execution finished 2025-12-23T18:53:45.046769Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [4:7587128558702988527:2958], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:53:45.046834Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7587128558702988528:2959], TxId: 281474976715738, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-12-23T18:53:45.046889Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715738, task: 1. pass away 2025-12-23T18:53:45.046978Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715738;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-12-23T18:53:45.047115Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [4:7587128558702988528:2959], TxId: 281474976715738, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2025-12-23T18:53:45.047159Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715738, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-12-23T18:53:45.047170Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715738, task: 2. Tasks execution finished 2025-12-23T18:53:45.047184Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:543: SelfId: [4:7587128558702988528:2959], TxId: 281474976715738, task: 2. Ctx: { CheckpointId : . TraceId : 01kd68ysb042zcc6ewmytwednd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODAwYzQxY2ItNjBmNDczNjktNDViMTRlODYtNTE2NThhNmQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2025-12-23T18:53:45.047259Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715738, task: 2. pass away 2025-12-23T18:53:45.047337Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715738;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; |97.6%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD] Test command err: Trying to start YDB, gRPC: 61978, MsgBus: 2280 2025-12-23T18:53:36.966128Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128518636793745:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.966172Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:53:37.004654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ece/r3tmp/tmpTDNFNt/pdisk_1.dat 2025-12-23T18:53:37.366863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.469223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.469342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.509326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.545767Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61978, node 1 2025-12-23T18:53:37.594660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.624761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.624785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.624796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.624875Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2280 2025-12-23T18:53:37.983076Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:2280 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516017560 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.314650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.343403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.508947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.677232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.758723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:40.091287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535816664757:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.091391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.091669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535816664767:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.091734Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.455426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.482160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.508186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.534498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.559772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.594297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.627463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.687621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.762049Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535816665634:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.762159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.762243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535816665639:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.762292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535816665641:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.762376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.766000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.780289Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128535816665643:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:53:40.863426Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128535816665694:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:41.965944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128518636793745:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:41.966016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:42.457512Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516022481, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 32538, MsgBus: 10293 2025-12-23T18:53:43.308308Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128550092816944:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:43.310814Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ece/r3tmp/tmpxD53V2/pdisk_1.dat 2025-12-23T18:53:43.321015Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:43.387587Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:43.421580Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:43.421655Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:43.426030Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32538, node 2 2025-12-23T18:53:43.475267Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:43.475290Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:43.475297Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:43.475365Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:43.480068Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10293 TClient is connected to server localhost:10293 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516023468 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:53:43.860130Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:53:44.315515Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:46.489175Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128562977719648:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.489267Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.489525Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128562977719657:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.489581Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.517750Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.568043Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128562977719754:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.568148Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.570094Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128562977719757:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.570181Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.658198Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128562977719767:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.658277Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128562977719772:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.658296Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.658471Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128562977719775:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.658518Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.661831Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:46.672323Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128562977719774:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:53:46.753316Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128562977719827:2600] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprPgNull [GOOD] >> KqpYql::EvaluateExpr2 [GOOD] >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg >> KqpYql::Closure [GOOD] >> KqpYql::CreateUseTable [GOOD] >> KqpScripting::StreamDdlAndDml [GOOD] >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 61410, MsgBus: 32251 2025-12-23T18:53:36.954583Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128518792191732:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.954646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ed4/r3tmp/tmpk8b8s2/pdisk_1.dat 2025-12-23T18:53:37.251793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.258042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.258151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.346311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.363802Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:37.367263Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128518792191697:2081] 1766516016950021 != 1766516016950024 TServer::EnableGrpc on GrpcPort 61410, node 1 2025-12-23T18:53:37.496533Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.620098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.620188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.620209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.620307Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:37.961475Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:32251 TClient is connected to server localhost:32251 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516017399 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.349880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:53:38.373260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:38.550288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.715638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:38.786285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.061886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535972062752:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.061995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.062236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535972062762:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.062343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.482173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.510685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.535313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.563370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.593724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.626064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.659560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.731808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.798943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535972063632:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.799014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.799044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535972063637:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.799189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535972063639:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.799230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.801973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.809782Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128535972063640:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:40.880567Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128535972063692:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:41.955050Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128518792191732:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:41.956417Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16148, MsgBus: 16975 2025-12-23T18:53:43.228106Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128547877269386:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:43.228214Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ed4/r3tmp/tmppJMPtO/pdisk_1.dat 2025-12-23T18:53:43.240843Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:43.330462Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:43.346475Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:43.346570Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:43.351232Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16148, node 2 2025-12-23T18:53:43.416672Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:43.416686Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:43.416691Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:43.416748Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:43.460170Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16975 TClient is connected to server localhost:16975 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516023398 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:43.789300Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:44.235033Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:46.347670Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128560762172121:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.347764Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.348040Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128560762172130:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.348112Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.374167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.415431Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128560762172223:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.415520Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.415532Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128560762172228:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.415674Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128560762172230:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.415717Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.419235Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:46.427491Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128560762172231:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:53:46.480968Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128560762172283:2590] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestDropResourcePool [GOOD] Test command err: 2025-12-23T18:52:53.263903Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128335010147562:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:53.263962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00395e/r3tmp/tmpj8i6sC/pdisk_1.dat 2025-12-23T18:52:53.464851Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:53.488578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:53.489218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:53.522662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:53.568101Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31688, node 1 2025-12-23T18:52:53.673371Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:52:53.688345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:53.688365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:53.688372Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:53.688444Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16809 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515973607 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:53.957444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:54.273332Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:55.729942Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:52:55.732987Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:52:55.733026Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:52:55.733143Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128343600083369:2325], Start check tables existence, number paths: 2 2025-12-23T18:52:55.735135Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128343600083369:2325], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:52:55.735239Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128343600083369:2325], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:52:55.735281Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128343600083369:2325], Successfully finished 2025-12-23T18:52:55.735372Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-12-23T18:52:55.735646Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-12-23T18:52:55.737289Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=ZTY5M2UzYS1mNjk0M2U1NC1mN2Y1MjcxZC03ODVhYzNmNw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZTY5M2UzYS1mNjk0M2U1NC1mN2Y1MjcxZC03ODVhYzNmNw== (tmp dir name: b6b71dc3-490d-cea4-1361-26bcad2c7225) 2025-12-23T18:52:55.737398Z node 1 
:KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=ZTY5M2UzYS1mNjk0M2U1NC1mN2Y1MjcxZC03ODVhYzNmNw==, ActorId: [1:7587128343600083388:2329], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:52:55.768489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:55.783733Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587128343030038237:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:55.783784Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:52:55.792109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:55.792247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:55.795326Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:52:55.795587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:55.800637Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-dedicated/.metadata/script_executions 2025-12-23T18:52:55.829754Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:55.829854Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:55.830080Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:55.831428Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:55.831546Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:55.831624Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:55.831691Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:55.831787Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:55.831971Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:55.832090Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:55.832169Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: 
HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:55.835073Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:55.922950Z node 3 :STATISTICS WARN: tx_init.cpp:298: [72075186224037894] TTxInit::Complete. EnableColumnStatistics=false 2025-12-23T18:52:55.925289Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11757, node 3 2025-12-23T18:52:56.030802Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:56.030844Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:56.030850Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:56.030931Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:56.057214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:56.071670Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128346824251444:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:56.071727Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:52:56.080076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:56.080161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:56.081923Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:56.082679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileSt ... 
7587128564713094598:2426], TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Delete session: ydb://session/3?node_id=10&id=YzllYjQwMTItN2U5YzFiNC1jNmU3OTQxYy1hOGIzZmY2 2025-12-23T18:53:47.327755Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:311: [TQueryRetryActor] [TRefreshPoolStateQuery] OwnerId: [10:7587128547533225027:2333], ActorId: [10:7587128564713094596:2425], TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , Got response [10:7587128564713094598:2426] SUCCESS 2025-12-23T18:53:47.327803Z node 10 :KQP_WORKLOAD_SERVICE TRACE: pool_handlers_actors.cpp:746: [WorkloadService] [TPoolHandlerActorBase] ActorId: [10:7587128547533225027:2333], DatabaseId: /Root, PoolId: my_pool, succefully refreshed pool state, in flight: 0, delayed: 0 2025-12-23T18:53:47.327945Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=10&id=YzllYjQwMTItN2U5YzFiNC1jNmU3OTQxYy1hOGIzZmY2, ActorId: [10:7587128564713094601:2428], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:53:47.327977Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=10&id=YzllYjQwMTItN2U5YzFiNC1jNmU3OTQxYy1hOGIzZmY2, ActorId: [10:7587128564713094601:2428], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:47.328003Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=10&id=YzllYjQwMTItN2U5YzFiNC1jNmU3OTQxYy1hOGIzZmY2, ActorId: [10:7587128564713094601:2428], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:53:47.328027Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=10&id=YzllYjQwMTItN2U5YzFiNC1jNmU3OTQxYy1hOGIzZmY2, ActorId: [10:7587128564713094601:2428], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:53:47.328093Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=10&id=YzllYjQwMTItN2U5YzFiNC1jNmU3OTQxYy1hOGIzZmY2, ActorId: [10:7587128564713094601:2428], ActorState: unknown state, Session actor destroyed 2025-12-23T18:53:47.331847Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI= (tmp dir name: b1b25a80-4afc-c876-f562-05b0205ee5fa) 2025-12-23T18:53:47.332306Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-12-23T18:53:47.332345Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-12-23T18:53:47.332378Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI=, ActorId: [10:7587128564713094684:2446], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:53:47.332488Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI=, ActorId: [10:7587128564713094684:2446], ActorState: ReadyState, TraceId: 01kd68yvy4aty1gqpghf34agmf, received request, proxyRequestId: 19 prepared: 0 tx_control: 0 action: 
QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [10:7587128564713094683:2790] database: Root databaseId: /Root pool id: my_pool 2025-12-23T18:53:47.332533Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Recieved new request from [10:7587128564713094684:2446], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI= 2025-12-23T18:53:47.332587Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587128564713094686:2447], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-12-23T18:53:47.332664Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [10:7587128564713094687:2448], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI=, Start pool fetching 2025-12-23T18:53:47.332694Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587128564713094688:2449], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-12-23T18:53:47.332872Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587128564713094686:2447], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-12-23T18:53:47.332945Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587128564713094688:2449], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-12-23T18:53:47.333003Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-12-23T18:53:47.333101Z node 10 :KQP_WORKLOAD_SERVICE ERROR: scheme_actors.cpp:56: [WorkloadService] [TPoolResolverActor] ActorId: [10:7587128564713094687:2448], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-12-23T18:53:47.333218Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:114: [WorkloadService] [TPoolResolverActor] ActorId: [10:7587128564713094687:2448], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2025-12-23T18:53:47.333359Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:553: [WorkloadService] [Service] Reply continue error NOT_FOUND to [10:7587128564713094684:2446]: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2025-12-23T18:53:47.333397Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-12-23T18:53:47.333435Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587128564713094691:2450], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-12-23T18:53:47.333673Z node 10 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI=, ActorId: [10:7587128564713094684:2446], ActorState: ExecuteState, TraceId: 01kd68yvy4aty1gqpghf34agmf, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool , status: NOT_FOUND, issues: { message: "Failed to resolve pool id my_pool" severity: 1 issues { message: "Resource pool my_pool not found or you don\'t have access permissions" severity: 1 } } 2025-12-23T18:53:47.333798Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI=, ActorId: [10:7587128564713094684:2446], ActorState: ExecuteState, TraceId: 01kd68yvy4aty1gqpghf34agmf, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-12-23T18:53:47.333889Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7587128564713094691:2450], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-12-23T18:53:47.334016Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:196: [WorkloadService] [Service] Finished request with worker actor [10:7587128564713094684:2446], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI= 2025-12-23T18:53:47.334061Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-12-23T18:53:47.334130Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI=, ActorId: [10:7587128564713094684:2446], ActorState: CleanupState, TraceId: 01kd68yvy4aty1gqpghf34agmf, EndCleanup, isFinal: 1 2025-12-23T18:53:47.334231Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI=, ActorId: [10:7587128564713094684:2446], ActorState: CleanupState, TraceId: 01kd68yvy4aty1gqpghf34agmf, Sent query response back to proxy, proxyRequestId: 19, proxyId: [10:7587128530353355109:2264] 2025-12-23T18:53:47.334264Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI=, ActorId: [10:7587128564713094684:2446], ActorState: unknown state, TraceId: 01kd68yvy4aty1gqpghf34agmf, Cleanup temp tables: 0 2025-12-23T18:53:47.334365Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=10&id=YmM5NWQyOWEtNTc1NGVkZTYtNDEyOTNlNDItNDBhNzk3NDI=, ActorId: [10:7587128564713094684:2446], ActorState: unknown state, TraceId: 01kd68yvy4aty1gqpghf34agmf, Session actor destroyed 2025-12-23T18:53:47.344334Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=10&id=OTM2NGY0ZjMtYmY4MzliYmEtZWY0YTFhNzEtMjQ0NjZiNzU=, ActorId: [10:7587128547533224891:2321], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:53:47.344393Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=10&id=OTM2NGY0ZjMtYmY4MzliYmEtZWY0YTFhNzEtMjQ0NjZiNzU=, ActorId: [10:7587128547533224891:2321], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:47.344419Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=10&id=OTM2NGY0ZjMtYmY4MzliYmEtZWY0YTFhNzEtMjQ0NjZiNzU=, ActorId: [10:7587128547533224891:2321], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:53:47.344444Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=10&id=OTM2NGY0ZjMtYmY4MzliYmEtZWY0YTFhNzEtMjQ0NjZiNzU=, ActorId: [10:7587128547533224891:2321], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:53:47.344515Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=10&id=OTM2NGY0ZjMtYmY4MzliYmEtZWY0YTFhNzEtMjQ0NjZiNzU=, ActorId: [10:7587128547533224891:2321], ActorState: unknown state, Session actor destroyed |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> KqpScripting::Pure [GOOD] >> KqpYql::EvaluateExprYsonAndType ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableNameConflict [GOOD] Test command err: Trying to start YDB, gRPC: 31751, MsgBus: 12033 2025-12-23T18:53:36.965741Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128520593001112:2146];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.965801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ec8/r3tmp/tmptrQp7F/pdisk_1.dat 2025-12-23T18:53:37.262436Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.276060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.276158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.282553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.394913Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128520593001003:2081] 1766516016949209 != 1766516016949212 2025-12-23T18:53:37.398363Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31751, node 1 2025-12-23T18:53:37.525894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.619606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.619634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.619655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.619778Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12033 2025-12-23T18:53:37.973887Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516017462 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:53:38.286729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.305006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.478693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.654409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:38.734942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.153138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128537772872056:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.153283Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.153609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128537772872066:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.153714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.471085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.498540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.524989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.551195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.579277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.613728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.647371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.690161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.756110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128537772872936:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.756167Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.756246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128537772872941:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.756382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128537772872943:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.756429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.759203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.769300Z node 1 ... Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:43.338779Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 17449, node 2 2025-12-23T18:53:43.342256Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:43.399712Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:43.399732Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:43.399738Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:43.399805Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:43.474525Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26565 TClient is connected to server localhost:26565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516023384 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:43.799234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:43.816407Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:43.875073Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.017317Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.072287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.221408Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:46.140236Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561737485833:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.140303Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.140536Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561737485842:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.140573Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.207543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.232420Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.261870Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.290500Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.320473Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.356151Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.389324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.462005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.540576Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561737486717:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.540681Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.540922Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561737486722:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.540980Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561737486723:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.541035Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.545019Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:46.555219Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128561737486726:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:53:46.644669Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128561737486777:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiCreateTable!
:12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables. 2025-12-23T18:53:48.216631Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128548852582129:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:48.216702Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:25.028951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:25.107831Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:25.116754Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:25.117137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:25.117225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:25.454975Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:25.537523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:25.537661Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:25.594904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:25.642116Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0031d9/r3tmp/tmpywkBp0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5506, node 1 2025-12-23T18:52:26.313880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:26.314597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:26.314647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:26.314668Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:26.314844Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:26.376916Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6771 2025-12-23T18:52:26.881984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:29.174361Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:29.179446Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:29.181947Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:29.205508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:29.205593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:29.242962Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:29.244674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:29.470198Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.470912Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.471608Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.472070Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.472359Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.472458Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.472587Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.472678Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.472763Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:29.587625Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:52:29.598939Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:29.599013Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:29.612069Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:29.790571Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:29.838953Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:29.839061Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:29.865796Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:29.867325Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:29.867547Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:29.867614Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:52:29.867678Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:29.867738Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:29.867788Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:29.867836Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:29.868738Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:29.910574Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:29.910684Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2188:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:29.913961Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2185:2602] 2025-12-23T18:52:29.930585Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:52:29.934163Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2258:2658] 2025-12-23T18:52:29.934424Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2258:2658], schemeshard id = 72075186224037897 2025-12-23T18:52:29.959072Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Describe result: PathErrorUnknown 2025-12-23T18:52:29.959136Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Creating table 2025-12-23T18:52:29.959212Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:52:29.984351Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2356:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:29.991599Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:30.004274Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:30.004412Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:30.016603Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:30.133403Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:30.468843Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:30.574766Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:30.574874Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:265 ... 99Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4162:3563], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:52:58.538140Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:52:58.538304Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:52:58.538391Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4145:3558], StatRequests.size() = 1 2025-12-23T18:52:58.538507Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 ... unblocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR 2025-12-23T18:52:58.713621Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:52:58.713726Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:52:58.757114Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4158:3562], schemeshard count = 1 ... waiting for stats update from SchemeShard 2025-12-23T18:53:01.823176Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:03.795565Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:53:03.795780Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2025-12-23T18:53:03.796282Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:03.796539Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-12-23T18:53:03.828448Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:03.828549Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:03.828817Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:53:03.841963Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... 
waiting for stats update from SchemeShard (done) ... waiting for TEvPropagateStatistics 2025-12-23T18:53:06.900985Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:08.975007Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:53:08.975652Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 8 2025-12-23T18:53:08.976026Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 ... waiting for TEvPropagateStatistics (done) 2025-12-23T18:53:08.977225Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 8 2025-12-23T18:53:08.984398Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:104: [72075186224037894] EvServerDisconnected, pipe server id = [2:2258:2658] 2025-12-23T18:53:09.129420Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:09.153215Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:53:09.153337Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:4505:3690], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:53:09.159999Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4510:3695] 2025-12-23T18:53:09.160953Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:4510:3695], schemeshard id = 72075186224037897 ... waiting for TEvPeriodicTableStats2 ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 ... waiting for TEvPeriodicTableStats2 (done) ... waiting for stats update from SchemeShard 2025-12-23T18:53:11.913409Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:13.891247Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:53:13.891913Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 9 2025-12-23T18:53:13.892292Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:53:13.892431Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 9 2025-12-23T18:53:17.125411Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:19.191147Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:53:19.191604Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 10 2025-12-23T18:53:19.191906Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:53:19.191969Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 10 ... 
blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 2025-12-23T18:53:22.323336Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:24.260655Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:53:24.260816Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 11 2025-12-23T18:53:24.261343Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:24.261633Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2025-12-23T18:53:24.272436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:53:24.272532Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:27.489354Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:53:27.489444Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:53:27.489489Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:53:27.489534Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:53:27.761191Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:30.713288Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:53:30.713411Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2025-12-23T18:53:30.713797Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:30.713970Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 ... 
blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 2025-12-23T18:53:35.042971Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:37.014004Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:53:37.014493Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 13 2025-12-23T18:53:37.014903Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:53:37.014996Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 13 2025-12-23T18:53:40.223956Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:42.281370Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:53:42.281635Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 14 2025-12-23T18:53:42.281992Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:42.282099Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 14 2025-12-23T18:53:42.314430Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2025-12-23T18:53:42.314530Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:42.314771Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2025-12-23T18:53:42.338153Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... waiting for stats update from SchemeShard (done) ... waiting for TEvPropagateStatistics ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 2025-12-23T18:53:45.721061Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:47.871390Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-12-23T18:53:47.871838Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 15 2025-12-23T18:53:47.872197Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 ... 
waiting for TEvPropagateStatistics (done) 2025-12-23T18:53:47.872458Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 15 2025-12-23T18:53:47.872527Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5551:4068]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:47.872789Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:53:47.872841Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5551:4068], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprPgNull [GOOD] Test command err: Trying to start YDB, gRPC: 23210, MsgBus: 9313 2025-12-23T18:53:36.960290Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128519403616642:2146];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.964760Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ec4/r3tmp/tmpeHVQLJ/pdisk_1.dat 2025-12-23T18:53:37.244485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.244620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.293159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.296800Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.327930Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23210, node 1 2025-12-23T18:53:37.568508Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.619435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.619459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.619475Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.619627Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9313 2025-12-23T18:53:37.978932Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:9313 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516017392 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.295981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.301744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:53:38.314436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.449463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.626927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.700580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:40.296545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536583487579:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.296667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.296974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536583487589:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.297038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.567267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.598690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.631987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.658621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.683976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.715360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.749938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.793505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.869064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536583488456:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.869149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.869341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536583488462:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.869358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536583488461:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.869395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.872400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.880271Z node 1 :KQP_WORKLOAD_SE ... ons 2025-12-23T18:53:43.100923Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:43.145125Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:43.145209Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22089, node 2 2025-12-23T18:53:43.147579Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:43.188655Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:43.188681Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:43.188702Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:43.188776Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:43.240642Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30171 TClient is connected to server localhost:30171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516023188 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-12-23T18:53:43.567779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:53:43.580404Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:43.634849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:43.772482Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:43.828798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.043270Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:46.174593Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561186629647:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.174699Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.174967Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561186629656:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.175021Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.241016Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.268805Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.295349Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.325410Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.381482Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.412976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.440593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.483049Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.552681Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561186630524:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.552769Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.552969Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561186630529:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.553010Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561186630530:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.553063Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.556429Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:46.566467Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128561186630533:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:53:46.623847Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128561186630584:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:48.038897Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128548301725936:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:48.038987Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] Test command err: 2025-12-23T18:52:57.702446Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128349238487620:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:57.703182Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003936/r3tmp/tmpXzJaYD/pdisk_1.dat 2025-12-23T18:52:57.933741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:57.933828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:57.953240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:57.976303Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:57.991118Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:58.014201Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128349238487593:2081] 1766515977700617 != 1766515977700620 TServer::EnableGrpc on GrpcPort 8427, node 1 2025-12-23T18:52:58.064409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:58.064429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:58.064440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:58.064524Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:58.192644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions TClient is connected to server localhost:62131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515978066 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:58.292392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:58.708564Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:59.793464Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:52:59.796497Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128357828423044:2319], Start check tables existence, number paths: 2 2025-12-23T18:52:59.796958Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:52:59.796981Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:52:59.798607Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128357828423044:2319], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:52:59.798663Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128357828423044:2319], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:52:59.798682Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128357828423044:2319], Successfully finished 2025-12-23T18:52:59.798797Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-12-23T18:52:59.798928Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-12-23T18:52:59.801049Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=OGI2MzBlOWMtNWY1MWI2ZmMtYWQzOTVkZmUtMjUxN2M1OGY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id 
OGI2MzBlOWMtNWY1MWI2ZmMtYWQzOTVkZmUtMjUxN2M1OGY= (tmp dir name: ff8ac395-4a22-2889-f97a-618c3f08b5ad) 2025-12-23T18:52:59.801152Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=OGI2MzBlOWMtNWY1MWI2ZmMtYWQzOTVkZmUtMjUxN2M1OGY=, ActorId: [1:7587128357828423064:2323], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:52:59.810276Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128357828423066:2494], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:52:59.813566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:59.814413Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128357828423066:2494], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-12-23T18:52:59.814568Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128357828423066:2494], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-12-23T18:52:59.821083Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128357828423066:2494], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:52:59.898954Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128357828423066:2494], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:52:59.902194Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128357828423117:2526] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:59.902266Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128357828423066:2494], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-12-23T18:52:59.904867Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=MzUzODQ1ZWItOThjMGU1NGMtZjZlYjM5YjktNjBlYTQzZGE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MzUzODQ1ZWItOThjMGU1NGMtZjZlYjM5YjktNjBlYTQzZGE= (tmp dir name: 3576de70-4051-7674-fa32-b0a7d42aebe3) 2025-12-23T18:52:59.904982Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=MzUzODQ1ZWItOThjMGU1NGMtZjZlYjM5YjktNjBlYTQzZGE=, ActorId: [1:7587128357828423124:2324], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:52:59.905198Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-12-23T18:52:59.905220Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2025-12-23T18:52:59.905287Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128357828423126:2325], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-12-23T18:52:59.905382Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-12-23T18:52:59.905499Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=1&id=MzUzODQ1ZWItOThjMGU1NGMtZjZlYjM5YjktNjBlYTQzZGE=, ActorId: [1:7587128357828423124:2324], ActorState: ReadyState, TraceId: 01kd68xdm19spbpgkyympanjaz, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7587128357828423123:2531] database: Root databaseId: /Root pool id: sample_pool_id 2025-12-23T18:52:59.905612Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Recieved new request from [1:7587128357828423124:2324], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MzUzODQ1ZWItOThjMGU1NGMtZjZlYjM5YjktNjBlYTQzZGE= 2025-12-23T18:52:59.905691Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7587128357828423133:2326], Database: /Root, Start database fetching 2025-12-23T18:52:59.905913Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7587128357828423133:2326], Database: /Root, Database info successfully 
fetched, serverless: 0 2025-12-23T18:52:59.905996Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-12-23T18:52:59.906043Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [1:7587128357828423136:2327], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MzUzODQ1ZWItOThjMGU1NGMtZjZlYjM5YjktNjBlYTQzZGE=, Start pool fetching 2025-12-23T18:52:59.906074Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:758712835782 ... ctorId: [12:7587128558736206128:2391], ActorState: ExecuteState, TraceId: 01kd68ytfd1g1gtjpp7gx9t021, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-12-23T18:53:46.040374Z node 12 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=12&id=N2MxYzczZjItYTQyYzEzY2QtNmJiZWY1OTAtZTgzMGViMmY=, ActorId: [12:7587128558736206128:2391], ActorState: ExecuteState, TraceId: 01kd68ytfd1g1gtjpp7gx9t021, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:46.040414Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=12&id=N2MxYzczZjItYTQyYzEzY2QtNmJiZWY1OTAtZTgzMGViMmY=, ActorId: [12:7587128558736206128:2391], ActorState: ExecuteState, TraceId: 01kd68ytfd1g1gtjpp7gx9t021, EndCleanup, isFinal: 1 2025-12-23T18:53:46.040470Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=12&id=N2MxYzczZjItYTQyYzEzY2QtNmJiZWY1OTAtZTgzMGViMmY=, ActorId: [12:7587128558736206128:2391], ActorState: ExecuteState, TraceId: 01kd68ytfd1g1gtjpp7gx9t021, Sent query response back to proxy, proxyRequestId: 5, proxyId: [12:7587128537261368452:2265] 2025-12-23T18:53:46.040494Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=12&id=N2MxYzczZjItYTQyYzEzY2QtNmJiZWY1OTAtZTgzMGViMmY=, ActorId: [12:7587128558736206128:2391], ActorState: unknown state, TraceId: 01kd68ytfd1g1gtjpp7gx9t021, Cleanup temp tables: 0 2025-12-23T18:53:46.040960Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=12&id=N2MxYzczZjItYTQyYzEzY2QtNmJiZWY1OTAtZTgzMGViMmY=, ActorId: [12:7587128558736206128:2391], ActorState: unknown state, TraceId: 01kd68ytfd1g1gtjpp7gx9t021, Session actor destroyed 2025-12-23T18:53:46.046930Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg= (tmp dir name: ad62a6cb-4de6-e4ec-8bc7-8ea6eb656e0e) 2025-12-23T18:53:46.047059Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:53:46.047536Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ReadyState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, received request, proxyRequestId: 6 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY 
text: SELECT * FROM `.sys/resource_pools` WHERE Name >= "default" rpcActor: [10:7587128562297701961:3984] database: /Root/test-dedicated databaseId: /Root/test-dedicated pool id: default 2025-12-23T18:53:46.047571Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:268: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ReadyState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, request placed into pool from cache: default 2025-12-23T18:53:46.047644Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:630: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ExecuteState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, Sending CompileQuery request 2025-12-23T18:53:46.214797Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1659: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ExecuteState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, ExecutePhyTx, tx: 0x00007CA08309B498 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-12-23T18:53:46.214875Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1816: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ExecuteState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, Sending to Executer TraceId: 0 8 2025-12-23T18:53:46.215016Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1880: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ExecuteState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, Created new KQP executer: [12:7587128563031173464:2400] isRollback: 0 2025-12-23T18:53:46.224715Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2191: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ExecuteState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, Forwarded TEvStreamData to [10:7587128562297701961:3984] 2025-12-23T18:53:46.225927Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2090: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ExecuteState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-12-23T18:53:46.226096Z node 12 :KQP_SESSION INFO: kqp_session_actor.cpp:2383: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ExecuteState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, txInfo Status: Committed Kind: ReadOnly TotalDuration: 11.419 ServerDuration: 11.33 QueriesCount: 2 2025-12-23T18:53:46.226171Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2543: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ExecuteState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-12-23T18:53:46.226588Z node 12 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ExecuteState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, Cleanup start, isFinal: 1 CleanupCtx: 0 
TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:46.226623Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ExecuteState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, EndCleanup, isFinal: 1 2025-12-23T18:53:46.226678Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: ExecuteState, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, Sent query response back to proxy, proxyRequestId: 6, proxyId: [12:7587128537261368452:2265] 2025-12-23T18:53:46.226696Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: unknown state, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, Cleanup temp tables: 0 2025-12-23T18:53:46.227215Z node 12 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=12&id=NzY4NTAwZjgtZmEwMDhhMDUtZjA5ODVmOTEtZGU1ZWU3MTg=, ActorId: [12:7587128563031173456:2400], ActorState: unknown state, TraceId: 01kd68ytnz7wwj6zsrxy5ytp3q, Session actor destroyed 2025-12-23T18:53:46.234678Z node 10 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-12-23T18:53:46.235220Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:53:46.235374Z node 10 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-12-23T18:53:46.235504Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:53:46.241734Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=10&id=MTU2YWQ5ZGYtNWYyMzcyNDItNzIyZjQ0ODktNWYwMjUxZmQ=, ActorId: [10:7587128536527896875:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:53:46.241783Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=10&id=MTU2YWQ5ZGYtNWYyMzcyNDItNzIyZjQ0ODktNWYwMjUxZmQ=, ActorId: [10:7587128536527896875:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:46.241812Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=10&id=MTU2YWQ5ZGYtNWYyMzcyNDItNzIyZjQ0ODktNWYwMjUxZmQ=, ActorId: [10:7587128536527896875:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:53:46.241837Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=10&id=MTU2YWQ5ZGYtNWYyMzcyNDItNzIyZjQ0ODktNWYwMjUxZmQ=, ActorId: [10:7587128536527896875:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:53:46.241927Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=10&id=MTU2YWQ5ZGYtNWYyMzcyNDItNzIyZjQ0ODktNWYwMjUxZmQ=, ActorId: [10:7587128536527896875:2331], ActorState: unknown state, Session actor destroyed 2025-12-23T18:53:46.337390Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7587128541180640865:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:46.337527Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:46.382674Z node 11 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:53:46.383260Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:53:46.383287Z node 11 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:53:46.383457Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7587128562655479022:2389], Start check tables existence, number paths: 2 2025-12-23T18:53:46.385352Z node 11 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-12-23T18:53:46.386063Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7587128562655479022:2389], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:53:46.386160Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7587128562655479022:2389], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:53:46.386220Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7587128562655479022:2389], Successfully finished 2025-12-23T18:53:46.386283Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr2 [GOOD] Test command err: Trying to start YDB, gRPC: 21071, MsgBus: 25051 2025-12-23T18:53:37.043523Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128521000751183:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:37.044823Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ed8/r3tmp/tmp0uSemh/pdisk_1.dat 2025-12-23T18:53:37.343086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.396860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.396979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.454479Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.487794Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21071, node 1 2025-12-23T18:53:37.561450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.619696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.619726Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.619743Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.619815Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25051 2025-12-23T18:53:38.054880Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25051 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516017518 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.294448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.309380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:53:38.327691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
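For reference alongside the trace above: the session actor handles a plain `SELECT 42;` issued as QUERY_ACTION_EXECUTE / QUERY_TYPE_SQL_GENERIC_QUERY against database /Root and routed through the sample_pool_id resource pool. Below is a minimal client-side sketch of the same kind of request, assuming the ydb Python SDK; the endpoint, port and timeout are placeholders, and only the query text and database path are taken from the log.

```python
import ydb

# Placeholder endpoint: each test above listens on an ephemeral gRPC port.
ENDPOINT = "grpc://localhost:21071"
DATABASE = "/Root"  # database path as seen in the session trace

driver = ydb.Driver(endpoint=ENDPOINT, database=DATABASE)
driver.wait(timeout=5)  # block until discovery finishes (or raise on timeout)

pool = ydb.SessionPool(driver)

def select_42(session):
    # One autocommitted read, mirroring the single QUERY_ACTION_EXECUTE
    # request that the session actor logs above.
    return session.transaction(ydb.SerializableReadWrite()).execute(
        "SELECT 42;", commit_tx=True
    )

result_sets = pool.retry_operation_sync(select_42)
print(result_sets[0].rows)

pool.stop()
driver.stop()
```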
2025-12-23T18:53:38.542855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.692667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.767999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:39.990583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128529590687597:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:39.990707Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:39.991025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128529590687606:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:39.991077Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.494279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.520536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.548191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.575619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.605642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.639647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.671201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.726859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.793161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128533885655772:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.793205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.793351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128533885655778:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.793428Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128533885655777:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.793500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.796421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.806614Z node 1 :KQP_WORKLOAD ... 4: Notification cookie mismatch for subscription [2:7587128548349828838:2081] 1766516023154919 != 1766516023154922 2025-12-23T18:53:43.284461Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:43.284548Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:43.288165Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4406, node 2 2025-12-23T18:53:43.365753Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:43.365781Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:43.365788Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:43.365858Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:43.398840Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7041 TClient is connected to server localhost:7041 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516023335 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:43.746286Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
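The pattern above (repeated NOT_FOUND for the default pool, then ESchemeOpCreateResourcePool, a "Transaction ... completed, doublechecking" retry, and finally the TX_PROXY "path exist, request accepts it" response) is the lazy, first-use creation of the default resource pool; the earlier `SELECT * FROM `.sys/resource_pools`` query shows how its presence can be observed. A hedged sketch of that check from a client follows, assuming the same ydb Python SDK API as the previous sketch; the polling loop and its timings are illustrative, while the system-view query and pool name come from the log.

```python
import time
import ydb

ENDPOINT = "grpc://localhost:2136"  # placeholder endpoint
DATABASE = "/Root"

# Same system view the test reads above; only the Name column is needed here.
POOLS_QUERY = 'SELECT Name FROM `.sys/resource_pools` WHERE Name >= "default";'

def wait_for_default_pool(pool, attempts=10, delay=0.5):
    """Poll .sys/resource_pools until the lazily created pool is visible."""
    for _ in range(attempts):
        def read(session):
            return session.transaction(ydb.SerializableReadWrite()).execute(
                POOLS_QUERY, commit_tx=True
            )
        rows = pool.retry_operation_sync(read)[0].rows
        if any(row.Name == "default" for row in rows):
            return True
        time.sleep(delay)  # pool creation is asynchronous; try again
    return False

driver = ydb.Driver(endpoint=ENDPOINT, database=DATABASE)
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)
print("default pool visible:", wait_for_default_pool(pool))
pool.stop()
driver.stop()
```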
2025-12-23T18:53:43.763183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:43.812835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:43.974503Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.045948Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.165317Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:46.253282Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561234732589:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.253359Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.253797Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561234732599:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.253851Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.325190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.351989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.378067Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.406164Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.462190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.493740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.524499Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.567523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.653364Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561234733475:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.653436Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.653440Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561234733480:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.653574Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561234733482:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.653610Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.656241Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:46.665248Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128561234733483:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:46.756179Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128561234733535:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:48.155968Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128548349828874:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:48.156032Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] Test command err: Trying to start YDB, gRPC: 64062, MsgBus: 3775 2025-12-23T18:53:44.708603Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128554999438425:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:44.711129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ebf/r3tmp/tmp5gp0uT/pdisk_1.dat 2025-12-23T18:53:44.917070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:44.934259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:44.934381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:44.966979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:45.030167Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:45.031594Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128554999438323:2081] 1766516024702934 != 1766516024702937 TServer::EnableGrpc on GrpcPort 64062, node 1 2025-12-23T18:53:45.084934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:45.084954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:45.084962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:45.085054Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:45.187051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3775 TClient is connected to server 
localhost:3775 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516025043 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:53:45.572193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:53:45.711318Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:47.611795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128567884341096:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.611920Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.612329Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128567884341106:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.612402Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.848439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.974984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128567884341200:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.975042Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.975117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128567884341205:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.975196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128567884341207:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.975250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.978680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:47.989286Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128567884341209:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:53:48.057966Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128572179308556:2595] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |97.6%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::CreateUseTable [GOOD] Test command err: Trying to start YDB, gRPC: 3489, MsgBus: 29399 2025-12-23T18:53:36.959178Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128518424349535:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.959231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ed0/r3tmp/tmpFlgpqt/pdisk_1.dat 2025-12-23T18:53:37.310884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.353094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.353193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.381050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.424122Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:37.426026Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128518424349497:2081] 1766516016949238 != 1766516016949241 TServer::EnableGrpc on GrpcPort 3489, node 1 2025-12-23T18:53:37.503050Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.619909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.619934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.619946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.620031Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29399 2025-12-23T18:53:37.981002Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:29399 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516017455 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.299544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.319306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.471389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.637489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.721465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:39.990537Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128531309253267:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:39.990633Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:39.990943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128531309253276:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:39.991002Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.468355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.496632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.521436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.547979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.574443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.605141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.634851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.704164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.793108Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535604221447:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.793172Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.793217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535604221452:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.793435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535604221454:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.793472Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.796440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.804767Z node 1 : ... _script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8578 TClient is connected to server localhost:8578 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516023468 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:53:43.915633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:53:43.930795Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:43.988333Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.163323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
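The KqpYql::CreateUseTable output above is dominated by ESchemeOpCreateTable proposals followed by queries that go through the lazily created default pool. A rough sketch of the same create-then-use flow from a client, under the same ydb Python SDK assumption; the table name, columns and values are purely illustrative, and the naive retry wrappers would need an existence check in real code.

```python
import ydb

ENDPOINT = "grpc://localhost:3489"  # placeholder: gRPC port of the test server
DATABASE = "/Root"

driver = ydb.Driver(endpoint=ENDPOINT, database=DATABASE)
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def create_table(session):
    # DDL travels the scheme path (the ESchemeOpCreateTable warnings above);
    # the table layout here is illustrative only.
    session.execute_scheme(
        "CREATE TABLE demo (Key Uint64, Value String, PRIMARY KEY (Key));"
    )

def use_table(session):
    tx = session.transaction(ydb.SerializableReadWrite())
    tx.execute('UPSERT INTO demo (Key, Value) VALUES (CAST(1 AS Uint64), "one");')
    return tx.execute("SELECT Key, Value FROM demo;", commit_tx=True)[0].rows

pool.retry_operation_sync(create_table)
print(pool.retry_operation_sync(use_table))
pool.stop()
driver.stop()
```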
2025-12-23T18:53:44.226769Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.300903Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:46.124871Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128563358944408:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.124974Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.125507Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128563358944418:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.125564Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.125703Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128563358944420:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.125740Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.170173Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.202257Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.225527Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.255062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.282209Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.312993Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.343119Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.420269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.482469Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128563358945287:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.482553Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128563358945292:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.482581Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.482874Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128563358945295:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.483007Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.486178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:46.497609Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128563358945294:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:46.556148Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128563358945347:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:48.199641Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:48.292446Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128550474040695:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:48.292503Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:48.395068Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516028431, txId: 281474976710675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 16714, MsgBus: 19809 2025-12-23T18:53:36.954730Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128516840292470:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.954832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ecc/r3tmp/tmpIEhceF/pdisk_1.dat 2025-12-23T18:53:37.275717Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.280670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.280767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.379882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.465378Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128516840292438:2081] 1766516016949093 != 1766516016949096 2025-12-23T18:53:37.471888Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16714, node 1 2025-12-23T18:53:37.573036Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.619191Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.619209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.619217Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.619305Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19809 2025-12-23T18:53:37.964228Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19809 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516017469 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.283444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.290944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:53:38.297816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.467516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:53:38.619310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:38.697719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:40.372810Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534020163499:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.372953Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.373263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534020163509:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.373318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.639047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.667181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.699356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.726104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.760072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.790368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.815609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.846434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.910099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534020164374:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.910202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.910257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534020164379:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.910366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534020164381:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.910426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.913914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sc ... ions 2025-12-23T18:53:43.244167Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:43.277100Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:43.277191Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:43.283542Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9359, node 2 2025-12-23T18:53:43.322370Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:43.323737Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:43.323757Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:43.323764Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:43.323823Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16441 TClient is connected to server localhost:16441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516023328 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:43.673948Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:43.693654Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:43.742606Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:43.861857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:43.915379Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.145657Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:46.430067Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561421904504:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.430172Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.430388Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561421904513:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.430441Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.495956Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.522896Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.552042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.579703Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.644169Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.676488Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.706604Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.747569Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.823038Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561421905386:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.823127Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.823297Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561421905391:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.823358Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561421905392:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.823422Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.826851Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:46.836149Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128561421905395:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:46.925781Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128561421905446:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:48.141828Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128548537000780:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:48.141890Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] Test command err: Trying to start YDB, gRPC: 24837, MsgBus: 10415 2025-12-23T18:53:36.963324Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128517362663589:2170];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.963557Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ed2/r3tmp/tmpUhB4My/pdisk_1.dat 2025-12-23T18:53:37.202858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.255325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.255418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.335394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.337951Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:37.363841Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128517362663455:2081] 1766516016951173 != 1766516016951176 TServer::EnableGrpc on GrpcPort 24837, node 1 2025-12-23T18:53:37.445177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.623390Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.623422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.623445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.623575Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to 
server localhost:10415 2025-12-23T18:53:37.966056Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10415 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516017427 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.273765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.294191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:38.458144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:38.646863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.726965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:40.028149Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534542534507:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.028270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.028595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534542534517:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.028641Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.455195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.482175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.507585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.534069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.557119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.589753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.624165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.694182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.770577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534542535390:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.770660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.770677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534542535395:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.770895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534542535397:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.770939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.773837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.784025Z node 1 ... node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 3161, node 2 2025-12-23T18:53:43.475334Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:43.475358Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:43.475367Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:43.475449Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17705 TClient is connected to server localhost:17705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516023433 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:43.883489Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:43.902436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:43.977899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:44.129483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:44.195147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:44.351952Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:46.220257Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561727748464:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.220335Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.220509Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561727748474:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.220559Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.281676Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.316037Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.347714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.381108Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.412341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.440274Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.469092Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.512633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.581957Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561727749347:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.582040Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.582211Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561727749352:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.582499Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128561727749354:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.582541Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.585190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:46.592995Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128561727749356:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:46.677878Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128561727749407:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:48.157343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:48.270548Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128548842844740:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:48.270596Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:48.646101Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516028683, txId: 281474976710675] shutting down |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] >> KqpScripting::EndOfQueryCommit >> KqpScripting::ExecuteYqlScriptScanScalar |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] Test command err: Trying to start YDB, gRPC: 3121, MsgBus: 19216 2025-12-23T18:53:36.956531Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128519553224784:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.956683Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ed5/r3tmp/tmpMLVMRZ/pdisk_1.dat 2025-12-23T18:53:37.291431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.291518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.317975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.352365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.420159Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:37.422886Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128519553224747:2081] 1766516016949189 != 1766516016949192 TServer::EnableGrpc on GrpcPort 3121, node 1 2025-12-23T18:53:37.599666Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.621140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.621170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.621192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.621282Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19216 2025-12-23T18:53:37.974936Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516017490 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.302010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.315289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.469836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.635582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:38.704399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:40.246929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536733095803:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.247073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.247393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536733095813:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.247472Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.564983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.594922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.626945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.656666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.688989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.724525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.759750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.801516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.868548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536733096681:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.868610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.868778Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536733096687:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.868790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536733096686:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.868830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.871652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.880422Z node 1 : ... import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:43.928766Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:43.928844Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:43.954591Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63856, node 2 2025-12-23T18:53:44.009087Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:44.009113Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:44.009120Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:44.009189Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:44.078382Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23285 TClient is connected to server localhost:23285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516024007 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:44.429048Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:53:44.449171Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.503611Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.633578Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.734430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.851910Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:46.782296Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128559617336442:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.782386Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.782728Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128559617336452:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.782834Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.860464Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.888834Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.921408Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.949394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.979857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.014686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.045250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.082695Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.186004Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128563912304617:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.186143Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.186278Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128563912304622:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.186308Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128563912304623:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.186360Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.189852Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:47.198907Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128563912304626:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:53:47.289946Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128563912304677:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:48.812968Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128546732432743:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:48.813037Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]] >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit |97.6%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 28145, MsgBus: 10419 2025-12-23T18:53:36.953988Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128518005727943:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.954057Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ec0/r3tmp/tmpI71MyK/pdisk_1.dat 2025-12-23T18:53:37.350877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.399152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.399243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.463382Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:37.465197Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128518005727909:2081] 1766516016949170 != 1766516016949173 2025-12-23T18:53:37.475695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28145, node 1 2025-12-23T18:53:37.539546Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.622391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.622417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try 
to initialize from file: (empty maybe) 2025-12-23T18:53:37.622430Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.622495Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10419 2025-12-23T18:53:37.984556Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10419 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516017525 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.291290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.309691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.497300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.654281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.747034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:40.093280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535185598970:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.093418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.093678Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535185598980:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.093743Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.455106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.483087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.507478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.531624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.557259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.588683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.620093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.682713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.753501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535185599848:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.753616Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.753622Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535185599853:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.753759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535185599855:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.753789Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.756684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.765943Z node 1 ... 594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:43.955917Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:43.962557Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23449, node 2 2025-12-23T18:53:44.063330Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:44.063357Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:44.063365Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:44.063440Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:44.095790Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8758 TClient is connected to server localhost:8758 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516024007 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:44.464737Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:44.472207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:53:44.481220Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:44.543833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.726550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.789321Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.909002Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:47.165917Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128567643937816:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.165985Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.166201Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128567643937825:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.166238Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.233400Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.269877Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.300461Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.332563Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.370256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.408675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.441749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.491207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.565707Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128567643938695:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.565802Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.565901Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128567643938700:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.565967Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128567643938702:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.566008Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.569565Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:47.581240Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128567643938704:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:53:47.679447Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128567643938755:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:48.830817Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128550464066830:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:48.830920Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 19297, MsgBus: 15104 2025-12-23T18:53:36.954885Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128516690823943:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.954946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ecd/r3tmp/tmpVRFZTV/pdisk_1.dat 2025-12-23T18:53:37.214062Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.325633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.325746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.340684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.364716Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:37.398851Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128516690823910:2081] 1766516016949162 != 1766516016949165 TServer::EnableGrpc on GrpcPort 19297, node 1 2025-12-23T18:53:37.491889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.619297Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.619325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.619360Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.619594Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15104 2025-12-23T18:53:37.970917Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:15104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516017406 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.257246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.265270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:53:38.291537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:38.449359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:38.615352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.690323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:39.990468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128529575727675:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:39.990624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:39.990980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128529575727684:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:39.991052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.455448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.482336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.510457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.537221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.561657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.594069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.628382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.694340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.761858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128533870695850:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.761984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.762074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128533870695855:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.762239Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128533870695857:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.762275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.765584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sc ... 94037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:44.510049Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7409, node 2 2025-12-23T18:53:44.515751Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:44.569373Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:44.569393Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:44.569401Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:44.569510Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:44.581985Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20002 TClient is connected to server localhost:20002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516024560 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:44.977740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:44.983197Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:53:44.996913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:45.048152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:45.205949Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:45.262729Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:45.404473Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:47.262851Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128564885056890:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.262987Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.263365Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128564885056900:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.263446Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.322784Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.353974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.385097Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.414467Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.446128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.476163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.536166Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.580118Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.647950Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128564885057768:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.648020Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.648021Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128564885057773:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.648230Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128564885057775:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.648277Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.651737Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:47.661919Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128564885057776:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:47.758849Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128564885057828:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:49.388844Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128552000153184:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:49.388971Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TestKinesisHttpProxy::TestCounters [GOOD] |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |97.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 7269, MsgBus: 2525 2025-12-23T18:53:36.953550Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128519219565794:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.953616Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ec5/r3tmp/tmpyQgpBt/pdisk_1.dat 2025-12-23T18:53:37.262948Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.380104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.380201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.384073Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:37.405995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7269, node 1 2025-12-23T18:53:37.506834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.620484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.620503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.620516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.620584Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:2525 2025-12-23T18:53:37.966041Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:2525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516017455 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.355093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.363441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:53:38.374422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.536721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.696911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.767316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:40.112983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536399436819:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.113078Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.113286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536399436828:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.113354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.499267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.525172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.550161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.576059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.608205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.644707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.678067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.720495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.792910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536399437697:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.792985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.792992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536399437702:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.793164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128536399437704:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.793212Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.795993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.803400Z node 1 :KQP_WORKLOAD_SERV ... 18:53:43.631354Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:43.631361Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:43.631429Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:43.738603Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:62954 TClient is connected to server localhost:62954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516023615 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:44.042350Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:44.049497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:53:44.056501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.137813Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:44.257872Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.308125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.470992Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:46.606812Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128560478243637:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.606903Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.607123Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128560478243647:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.607163Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.669432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.697453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.724841Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.751332Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.779681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.807975Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.835750Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.870266Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.939121Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128560478244514:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.939209Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.939239Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128560478244519:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.939351Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128560478244521:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.939408Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.942546Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:46.952095Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128560478244522:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:47.033324Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128564773211870:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:48.432308Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128547593339910:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:48.433251Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:48.745997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.538602Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516029558, txId: 281474976710675] shutting down |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpWorkloadService::TestLessConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold >> KqpYql::UuidPrimaryKeyBulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] Test command err: Trying to start YDB, gRPC: 1234, MsgBus: 30094 2025-12-23T18:53:36.954026Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128518711633898:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.960535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ecb/r3tmp/tmpJ0ihV8/pdisk_1.dat 2025-12-23T18:53:37.266837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.268328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.268410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.351356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.382718Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128518711633877:2081] 1766516016948441 != 1766516016948444 2025-12-23T18:53:37.386907Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:37.426478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 1234, node 1 2025-12-23T18:53:37.619743Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.619761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.619773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.619858Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:37.979312Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:30094 TClient is connected to server localhost:30094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516017455 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.286470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.292341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:53:38.306367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.457825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:38.625375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.698568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:40.154192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535891504932:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.154349Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.154582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535891504942:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.154658Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.455432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.482187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.507674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.534309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.560252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.591900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.624192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.682601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.753711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535891505811:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.753803Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.753913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535891505816:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.754031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535891505818:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.754072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.757635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sche ... WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:43.682935Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:43.683013Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:43.740836Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30053 TClient is connected to server localhost:30053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516023685 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:44.083729Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:44.089412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:53:44.096361Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:44.144313Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:44.286838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.348629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:44.535875Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:46.821075Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128563266340586:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.821151Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.821369Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128563266340595:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.821428Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:46.916642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.943528Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:46.982067Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.010147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.037134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.065237Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.094241Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.137436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.234566Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128567561308759:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.234661Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.234732Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128567561308764:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.235275Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128567561308766:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.235378Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:47.238467Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:47.249477Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128567561308767:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:53:47.340896Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128567561308819:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:48.521672Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128550381436865:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:48.521744Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:49.028083Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516029016, txId: 281474976715673] shutting down 2025-12-23T18:53:49.202491Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516029189, txId: 281474976715675] shutting down 2025-12-23T18:53:50.112398Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516030125, txId: 281474976715677] shutting down >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] >> KqpYql::UuidPrimaryKey |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Auth [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce >> TestKinesisHttpProxy::TestEmptyHttpBody >> KqpYql::TableUseBeforeCreate [GOOD] >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2025-12-23T18:53:06.618819Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128391981011975:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:06.619025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d29/r3tmp/tmpEs0ftK/pdisk_1.dat 2025-12-23T18:53:06.890443Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:06.905846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:06.906008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:06.945442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:07.021181Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3923, node 1 2025-12-23T18:53:07.103135Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:07.254488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:07.254525Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:07.254537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:07.254620Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:07.623072Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:16136 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515987019 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:07.695438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:16136 2025-12-23T18:53:07.906166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:53:07.912796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 
2025-12-23T18:53:07.935918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.023397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:08.061132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:08.103516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.134357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.164774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.199293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.232373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.258694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:08.289644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:09.711018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128404865915481:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.711018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128404865915473:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.711179Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.711550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128404865915488:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.711624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.714649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:09.724251Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128404865915487:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:53:09.819947Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128404865915540:3098] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:10.409198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:10.440105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__op ... GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 17ms 2025-12-23T18:53:50.501222Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.501466Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:53:50.501497Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-12-23T18:53:50.501629Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 19ms 2025-12-23T18:53:50.502134Z node 7 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } 
} } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:53:50.502439Z node 7 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:53:50.502457Z node 7 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 23ms 2025-12-23T18:53:50.502704Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:53:50.502735Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-12-23T18:53:50.502951Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 23ms 2025-12-23T18:53:50.503245Z node 7 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 
4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:53:50.674460Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7587128580434531938:2434]: Pool not found 2025-12-23T18:53:50.674677Z node 7 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-12-23T18:53:51.004344Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7587128580434531957:2438]: Pool not found 2025-12-23T18:53:51.004718Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-12-23T18:53:51.007973Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7587128584729499357:2454], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:51.008021Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7587128584729499358:2455], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:53:51.008043Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:51.008312Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7587128584729499361:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:51.008390Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:51.262230Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7587128584729499355:2453]: Pool not found 2025-12-23T18:53:51.262564Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2025-12-23T18:53:51.473717Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:36810) incoming connection opened 2025-12-23T18:53:51.473805Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:36810) -> (POST /Root, 3 bytes) 2025-12-23T18:53:51.473952Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [58df:ab24:d67b:0:40df:ab24:d67b:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 13f876d4-84d7c9d7-e2635705-1c3dafeb 2025-12-23T18:53:51.474199Z node 7 :HTTP_PROXY INFO: http_req.cpp:1602: http request [UnknownMethodName] requestId [13f876d4-84d7c9d7-e2635705-1c3dafeb] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2025-12-23T18:53:51.474381Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:36810) <- (400 InvalidAction, 76 bytes) 2025-12-23T18:53:51.474477Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:36810) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { } 2025-12-23T18:53:51.474519Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:36810) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 13f876d4-84d7c9d7-e2635705-1c3dafeb Content-Type: application/x-amz-json-1.1 Content-Length: 76 2025-12-23T18:53:51.474614Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:36810) connection closed Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] Test command err: Trying to start YDB, gRPC: 13218, MsgBus: 12303 2025-12-23T18:53:46.904050Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128559573241625:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:46.904145Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ebb/r3tmp/tmpKtVdAs/pdisk_1.dat 2025-12-23T18:53:47.135866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:47.140605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:47.140716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:47.252793Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:47.253093Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128559573241600:2081] 1766516026902611 != 1766516026902614 2025-12-23T18:53:47.260658Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13218, node 1 2025-12-23T18:53:47.349272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:47.349312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:47.349324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:47.349413Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:47.419080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12303 TClient is connected to server localhost:12303 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516027311 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:47.815176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:47.830911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:47.915544Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:47.985973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:48.127017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:48.184080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:50.193959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128576753112664:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.194085Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.194384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128576753112674:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.194458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.546274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.574923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.605922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.633897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.662255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.698076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.733832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.776256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.842581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128576753113546:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.842668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.842806Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128576753113551:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.843000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128576753113553:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.843053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.846113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:50.856618Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128576753113555:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:50.914084Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128576753113606:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:51.904509Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128559573241625:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:51.904574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:3:13: Error: At function: KiReadTable!
:3:13: Error: Cannot find table 'db.[/Root/NewTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] >> KqpScripting::ScriptExplainCreatedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2025-12-23T18:53:06.619606Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128390157054260:2159];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:06.619865Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d2a/r3tmp/tmpn3nD3M/pdisk_1.dat 2025-12-23T18:53:06.874633Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:06.918339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:06.918439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:06.937756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:07.016257Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13617, node 1 2025-12-23T18:53:07.144109Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:07.254348Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:07.254413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:07.254423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:07.254493Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:07.623657Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:64390 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515987012 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:07.720073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:64390 2025-12-23T18:53:07.892280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:53:07.898177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:53:07.920830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.054691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:08.097995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:53:08.144463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.175385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.207791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.241866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.271297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.304298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.340819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:09.533637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128403041957673:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.533648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128403041957685:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.533814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.534242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128403041957688:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.534351Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.540111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:09.550412Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128403041957687:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-12-23T18:53:09.618261Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128403041957740:3101] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:10.405841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:10.437577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__o ... 55741Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [6593ded1-eb8d5d47-675bc98c-76fc13f1] Sending reply from proxy actor: { DeleteMessageBatch { RequestId: "6593ded1-eb8d5d47-675bc98c-76fc13f1" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "6593ded1-eb8d5d47-675bc98c-76fc13f1" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-12-23T18:53:52.455972Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [DeleteMessageBatch] requestId [6593ded1-eb8d5d47-675bc98c-76fc13f1] Got succesfult GRPC response. 2025-12-23T18:53:52.456075Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DeleteMessageBatch] requestId [6593ded1-eb8d5d47-675bc98c-76fc13f1] reply ok 2025-12-23T18:53:52.456193Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [DeleteMessageBatch] requestId [6593ded1-eb8d5d47-675bc98c-76fc13f1] Send metering event. HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 716 ResponseSizeInBytes: 197 SourceAddress: d8cb:5b3b:a97b:0:c0cb:5b3b:a97b:0 ResourceId: 000000000000000101v0 Action: DeleteMessageBatch 2025-12-23T18:53:52.456304Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:45976) <- (200 , 44 bytes) 2025-12-23T18:53:52.456376Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:45976) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2025-12-23T18:53:52.457213Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:45988) incoming connection opened 2025-12-23T18:53:52.457292Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:45988) -> (POST /Root, 106 bytes) 2025-12-23T18:53:52.457399Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [38a8:6a3b:a97b:0:20a8:6a3b:a97b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: c6a30a5d-fa5e5963-e7a1c72-bf76de65 2025-12-23T18:53:52.457776Z node 7 :HTTP_PROXY INFO: http_req.cpp:543: http request [ReceiveMessage] requestId [c6a30a5d-fa5e5963-e7a1c72-bf76de65] got new request from [38a8:6a3b:a97b:0:20a8:6a3b:a97b:0] 2025-12-23T18:53:52.458144Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:477: http request [ReceiveMessage] requestId [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Got cloud auth response. 
FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-12-23T18:53:52.458165Z node 7 :HTTP_PROXY INFO: http_req.cpp:301: http request [ReceiveMessage] requestId [c6a30a5d-fa5e5963-e7a1c72-bf76de65] sending grpc request to '' database: '/Root' iam token size: 0 2025-12-23T18:53:52.458252Z node 7 :SQS DEBUG: ymq_proxy.cpp:148: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: c6a30a5d-fa5e5963-e7a1c72-bf76de65 2025-12-23T18:53:52.458333Z node 7 :SQS DEBUG: proxy_actor.cpp:263: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Proxy actor: used user_name='cloud4', queue_name='000000000000000101v0', folder_id='folder4' 2025-12-23T18:53:52.458345Z node 7 :SQS DEBUG: proxy_actor.cpp:78: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Request proxy started 2025-12-23T18:53:52.458386Z node 7 :SQS DEBUG: service.cpp:761: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Answer configuration for queue [cloud4/000000000000000101v0] without leader 2025-12-23T18:53:52.458411Z node 7 :SQS DEBUG: proxy_actor.cpp:97: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Get configuration duration: 0ms 2025-12-23T18:53:52.458466Z node 7 :SQS DEBUG: proxy_service.cpp:246: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Send get leader node request to sqs service for cloud4/000000000000000101v0 2025-12-23T18:53:52.458485Z node 7 :SQS DEBUG: service.cpp:581: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Leader node for queue [cloud4/000000000000000101v0] is 7 2025-12-23T18:53:52.458503Z node 7 :SQS DEBUG: proxy_service.cpp:170: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Got leader node for queue response. Node id: 7. Status: 0 2025-12-23T18:53:52.458577Z node 7 :SQS TRACE: proxy_service.cpp:303: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Sending request from proxy to leader node 7: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" 2025-12-23T18:53:52.458646Z node 7 :SQS DEBUG: proxy_service.cpp:70: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Received Sqs Request: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" 2025-12-23T18:53:52.458685Z node 7 :SQS DEBUG: action.h:133: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Request started. Actor: [7:7587128586207419791:3931] 2025-12-23T18:53:52.458719Z node 7 :SQS TRACE: service.cpp:1472: Inc local leader ref for actor [7:7587128586207419791:3931] 2025-12-23T18:53:52.458733Z node 7 :SQS DEBUG: service.cpp:754: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Forward configuration request to queue [cloud4/000000000000000101v0] leader 2025-12-23T18:53:52.458773Z node 7 :SQS DEBUG: action.h:627: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Get configuration duration: 0ms 2025-12-23T18:53:52.458787Z node 7 :SQS TRACE: action.h:647: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Got configuration. Root url: http://ghrun-ou4bsyjxme.auto.internal:8771, Shards: 4, Fail: 0 2025-12-23T18:53:52.458809Z node 7 :SQS TRACE: action.h:662: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Got configuration. 
Attributes: { ContentBasedDeduplication: 0 DelaySeconds: 0.000000s FifoQueue: 0 MaximumMessageSize: 262144 MessageRetentionPeriod: 345600.000000s ReceiveMessageWaitTime: 0.000000s VisibilityTimeout: 30.000000s } 2025-12-23T18:53:52.458821Z node 7 :SQS TRACE: action.h:427: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] DoRoutine 2025-12-23T18:53:52.458868Z node 7 :SQS TRACE: queue_leader.cpp:2426: Increment active message requests for [cloud4/000000000000000101v0/3]. ActiveMessageRequests: 1 2025-12-23T18:53:52.458882Z node 7 :SQS DEBUG: queue_leader.cpp:938: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Received empty result from shard 3 infly. Infly capacity: 0. Messages count: 0 2025-12-23T18:53:52.458891Z node 7 :SQS DEBUG: queue_leader.cpp:1164: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] No known messages in this shard. Skip attempt to add messages to infly 2025-12-23T18:53:52.458900Z node 7 :SQS DEBUG: queue_leader.cpp:1170: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Already tried to add messages to infly 2025-12-23T18:53:52.458950Z node 7 :SQS TRACE: queue_leader.cpp:2436: Decrement active message requests for [[cloud4/000000000000000101v0/3]. ActiveMessageRequests: 0 2025-12-23T18:53:52.459000Z node 7 :SQS TRACE: action.h:264: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] SendReplyAndDie from action actor { ReceiveMessage { RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" } } 2025-12-23T18:53:52.459079Z node 7 :SQS TRACE: proxy_service.h:35: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Sending sqs response: { ReceiveMessage { RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" } RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-12-23T18:53:52.459151Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ReceiveMessage { RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" } RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-12-23T18:53:52.459185Z node 7 :SQS DEBUG: queue_leader.cpp:384: Request ReceiveMessage working duration: 0ms 2025-12-23T18:53:52.459193Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7587128586207419790:2539]: ReceiveMessage { RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" } RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-12-23T18:53:52.459229Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7587128586207419791:3931]. Found: 1 2025-12-23T18:53:52.459270Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] HandleResponse: { ReceiveMessage { RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" } RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2025-12-23T18:53:52.459322Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Sending reply from proxy actor: { ReceiveMessage { RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" } RequestId: "c6a30a5d-fa5e5963-e7a1c72-bf76de65" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-12-23T18:53:52.459397Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [ReceiveMessage] requestId [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Got succesfult GRPC response. 
2025-12-23T18:53:52.459420Z node 7 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976715712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-12-23T18:53:52.459426Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [c6a30a5d-fa5e5963-e7a1c72-bf76de65] reply ok 2025-12-23T18:53:52.459444Z node 7 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 3ms 2025-12-23T18:53:52.459501Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [ReceiveMessage] requestId [c6a30a5d-fa5e5963-e7a1c72-bf76de65] Send metering event. HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 526 ResponseSizeInBytes: 153 SourceAddress: 38a8:6a3b:a97b:0:20a8:6a3b:a97b:0 ResourceId: 000000000000000101v0 Action: ReceiveMessage 2025-12-23T18:53:52.459560Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:45988) <- (200 , 2 bytes) 2025-12-23T18:53:52.459615Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:45988) connection closed 2025-12-23T18:53:52.459629Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976715712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-12-23T18:53:52.459653Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"messages": []} Http output full {} 2025-12-23T18:53:52.459716Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] execution duration: 4ms 2025-12-23T18:53:52.459748Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [] Sending executed reply 2025-12-23T18:53:52.459834Z node 7 :SQS DEBUG: queue_leader.cpp:1915: Handle oldest timestamp metrics for [cloud4/000000000000000101v0/2] |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] >> KqpYql::EvaluateExprYsonAndType [GOOD] >> KqpYql::EvaluateFor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] Test command 
err: Trying to start YDB, gRPC: 27302, MsgBus: 17208 2025-12-23T18:53:36.954714Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128520001978642:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.954808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ec9/r3tmp/tmpQf189t/pdisk_1.dat 2025-12-23T18:53:37.281256Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.310967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.311100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.396225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.441346Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:37.445313Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128520001978612:2081] 1766516016950672 != 1766516016950675 TServer::EnableGrpc on GrpcPort 27302, node 1 2025-12-23T18:53:37.481070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.619095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.619121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.619140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.619249Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17208 2025-12-23T18:53:37.969525Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:17208 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516017469 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.333253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.361520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:38.491819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:38.630278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:38.704663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.000586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128532886882377:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.003230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.003584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128537181849683:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.003647Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.455332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.483839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.513457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.538707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.564376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.595988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.624648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.667400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.746010Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128537181850551:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.746104Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.746280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128537181850556:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.746326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128537181850558:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.746443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.756548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.766451Z node 1 ... 7128578952272326:2638] 2025-12-23T18:53:51.059304Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 67ms, session id ydb://session/3?node_id=2&id=NzdjYjk3ZDItMzY0ZDM4ZjUtOTdhNWQ0MjgtYTUxYjg1MTc= } 2025-12-23T18:53:51.126249Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128583247239651:2647] 2025-12-23T18:53:51.134346Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 70ms, session id ydb://session/3?node_id=2&id=YzhjNDZiN2YtY2EwYTg3ZGUtY2VhNTgwNmQtZTk4NzRlNzI= } 2025-12-23T18:53:51.202110Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128583247239668:2653] 2025-12-23T18:53:51.207752Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 73ms, session id ydb://session/3?node_id=2&id=NzEzMDY0MmYtNTBhMWU2Mi1kYzZjODU1ZC1mOTk1NTc3Nw== } 2025-12-23T18:53:51.280972Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128583247239689:2662] 2025-12-23T18:53:51.289153Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 76ms, session id ydb://session/3?node_id=2&id=ODk4YTlhZmMtZDg1OGNmZDEtNDZiODZiNzctNTFlMmJhYmY= } 2025-12-23T18:53:51.362981Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128583247239717:2670] 2025-12-23T18:53:51.369894Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 79ms, session id ydb://session/3?node_id=2&id=YmRjZTI2MTgtZTc4MjIyN2MtYTQxYTA1MmItODUxMWM0MDc= } 2025-12-23T18:53:51.446865Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128583247239738:2679] 2025-12-23T18:53:51.452005Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 82ms, session id ydb://session/3?node_id=2&id=NTFhOGQwZWYtOTA3ZDY2YmItOWI5OTA3OWEtNjdiNGNlOGE= } 2025-12-23T18:53:51.537799Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128583247239754:2686] 2025-12-23T18:53:51.541082Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 85ms, session id ydb://session/3?node_id=2&id=OWYzOGNkZjEtOTZlMzhlOWMtZmMwZjhkYzYtZjMzNWY3YmE= } 2025-12-23T18:53:51.686449Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 88ms, session id ydb://session/3?node_id=2&id=M2RjNDEwZC04M2Y0YjZjNy00YTI5MTdkNy02YzU1ZTM0Yw== } 2025-12-23T18:53:51.724718Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128583247239789:2701] 2025-12-23T18:53:51.824748Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 91ms, session id ydb://session/3?node_id=2&id=OTZmMDZlOS1mY2RkZmNhNC0yMmYxZDEyYi03ZmU0ZTFhZA== } 2025-12-23T18:53:51.827624Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128583247239810:2710] 2025-12-23T18:53:51.865216Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 94ms, session id ydb://session/3?node_id=2&id=NzIxNDcyNWQtNzg4MTIzNDktYmE3ZThlMjQtMWJjMmMxNzY= } 2025-12-23T18:53:51.922990Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128583247239824:2716] 2025-12-23T18:53:51.928698Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 97ms, session id ydb://session/3?node_id=2&id=Yzg0NjJkNzItM2UwYmRlMTYtYzk5YTA5YTctODQzNGVhMzc= } 2025-12-23T18:53:52.026143Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128583247239845:2725] 2025-12-23T18:53:52.026775Z node 2 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [2:7587128587542207180:2730] TxId: 281474976715674. Ctx: { TraceId: 01kd68z0dr0cxbtwwk1w3fn33y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWIwMTU1ZGItZmY1MWY1YWItNTc5YzQxMmUtYWIwYjQxNGM=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Client lost } 2025-12-23T18:53:52.027044Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=YWIwMTU1ZGItZmY1MWY1YWItNTc5YzQxMmUtYWIwYjQxNGM=, ActorId: [2:7587128583247239856:2730], ActorState: ExecuteState, TraceId: 01kd68z0dr0cxbtwwk1w3fn33y, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Client lost" severity: 1 } 2025-12-23T18:53:52.027563Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516032064, txId: 281474976715673] shutting down 2025-12-23T18:53:52.027747Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [2:7587128587542207190:2735], TxId: 281474976715674, task: 5. Ctx: { TraceId : 01kd68z0dr0cxbtwwk1w3fn33y. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=YWIwMTU1ZGItZmY1MWY1YWItNTc5YzQxMmUtYWIwYjQxNGM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [2:7587128587542207180:2730], status: ABORTED, reason: {
: Error: Terminate execution } 2025-12-23T18:53:52.033364Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 100ms, session id ydb://session/3?node_id=2&id=NzAxMGNiMWMtYjc3ZGExYzUtMzdiZTNhOGQtMTNlMTkwYWE= } 2025-12-23T18:53:52.132038Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128587542207249:2740] 2025-12-23T18:53:52.136830Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 103ms, session id ydb://session/3?node_id=2&id=ZTdiZjYxMTAtNzk4ZmZlNTgtZDJlZTMxMzgtZDE0MjEyOTQ= } 2025-12-23T18:53:52.235008Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516032260, txId: 281474976715676] shutting down 2025-12-23T18:53:52.240872Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128587542207270:2749] 2025-12-23T18:53:52.353068Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128587542207372:2764] 2025-12-23T18:53:52.358090Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 109ms, session id ydb://session/3?node_id=2&id=N2ZjZDIyNzItYzBmZGRlYTUtZTE0NjJkZTQtZjBmZWQ3ZmI= } 2025-12-23T18:53:52.437730Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516032477, txId: 281474976715678] shutting down 2025-12-23T18:53:52.561006Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128587542207501:2791] 2025-12-23T18:53:52.566143Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 115ms, session id ydb://session/3?node_id=2&id=OGU2ZTljMjQtZTA1NWQ4NzMtZDM0NjEzNTQtYWQzZWQwZjg= } 2025-12-23T18:53:52.681958Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128587542207522:2800] 2025-12-23T18:53:52.687513Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 118ms, session id ydb://session/3?node_id=2&id=YTRlZGEwZmItM2ZmMTA0ZC0zNWFlZjY1My0yOTMyNWJhZg== } 2025-12-23T18:53:52.800251Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516032834, txId: 281474976715680] shutting down 2025-12-23T18:53:52.933075Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128587542207645:2824] 2025-12-23T18:53:52.938153Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 124ms, session id ydb://session/3?node_id=2&id=ZTM1YTA4Yi02OGVmNGYtN2Q1ZmFjMTEtMmY2MGFhZDY= } 2025-12-23T18:53:53.062989Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128587542207666:2833] 2025-12-23T18:53:53.071096Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 127ms, session id ydb://session/3?node_id=2&id=Mzg4ZmU1NWYtZGZlZTIzMDYtZWI2OGJhOTEtNjNiMjYyNmE= } 2025-12-23T18:53:53.161001Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516033198, txId: 281474976715682] shutting down 2025-12-23T18:53:53.300961Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128591837175086:2857] 2025-12-23T18:53:53.307030Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 133ms, session id ydb://session/3?node_id=2&id=MTM2MjRkMTMtZjg3N2RkYTgtZDYzMzQ3MWMtOTc3ZjFmNTQ= } 2025-12-23T18:53:53.439858Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128591837175109:2867] 2025-12-23T18:53:53.445761Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 136ms, session id ydb://session/3?node_id=2&id=ZjFhMWI0YmItNGExODQ4OWQtMzRhOTc3ODEtMjU3NTBmN2E= } 2025-12-23T18:53:53.581271Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7587128591837175132:2877] 2025-12-23T18:53:53.587553Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 139ms, session id ydb://session/3?node_id=2&id=YTAzYzNhOTItYjU0NDNjYWItZDcxYzU5YjUtNjYyNzRiNGU= } 2025-12-23T18:53:53.666926Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516033702, txId: 281474976715684] shutting down >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] >> DstCreator::WithSyncIndex |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> DstCreator::ExistingDst >> DstCreator::ReplicationModeMismatch >> DstCreator::WithSyncIndexAndIntermediateDir >> DstCreator::Basic >> DstCreator::ColumnsSizeMismatch >> DstCreator::WithIntermediateDir >> DstCreator::NonExistentSrc >> DstCreator::SameOwner >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] >> DstCreator::GlobalConsistency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 22271, MsgBus: 29611 2025-12-23T18:53:36.967502Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128517521273141:2168];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.967659Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ec7/r3tmp/tmpkWshVi/pdisk_1.dat 2025-12-23T18:53:37.254839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.314122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.314222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.369778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.426216Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:37.428076Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128517521273007:2081] 1766516016950964 != 1766516016950967 TServer::EnableGrpc on GrpcPort 22271, node 1 2025-12-23T18:53:37.552083Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.620921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.620950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.620966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-12-23T18:53:37.621045Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29611 2025-12-23T18:53:37.971519Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:29611 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516017462 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.293662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:53:38.315381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:38.517653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:38.686895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:38.785992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:40.079074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534701144065:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.079190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.079465Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534701144075:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.079529Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.455663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.483903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.508600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.533406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.561043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.593743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.628152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.696077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.768061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534701144942:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.768131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.768393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534701144947:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.768433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128534701144948:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.768481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.772266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:40.781125Z node 1 ... Notification cookie mismatch for subscription [2:7587128576488964694:2081] 1766516029197448 != 1766516029197451 2025-12-23T18:53:49.331663Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:49.331741Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:49.333770Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:49.335500Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24446, node 2 2025-12-23T18:53:49.373346Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:49.373374Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:49.373381Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:49.373436Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27447 TClient is connected to server localhost:27447 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516029383 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:49.728484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:53:49.737007Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:49.806394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:49.920129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:49.981662Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:50.203516Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:52.640195Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128589373868451:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:52.640287Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:52.640492Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128589373868460:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:52.640562Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:52.708901Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:52.738921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:52.767121Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:52.795577Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:52.826528Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:52.860229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:52.892581Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:52.942066Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:53.044977Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128593668836627:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.045063Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.045129Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128593668836632:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.045196Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128593668836634:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.045230Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.048306Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:53.057849Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128593668836636:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:53.126415Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128593668836687:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:54.198641Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128576488964723:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:54.198693Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 20268, MsgBus: 5077 2025-12-23T18:53:52.077449Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128585583064918:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:52.077535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005eb3/r3tmp/tmpwaRqZK/pdisk_1.dat 2025-12-23T18:53:52.266461Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:52.290143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:52.290268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:52.349847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:52.352291Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:52.352920Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128585583064893:2081] 1766516032076829 != 1766516032076832 TServer::EnableGrpc on GrpcPort 20268, node 1 2025-12-23T18:53:52.429769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:52.429790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:52.429796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:52.429869Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:52.437512Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5077 TClient is connected to server 
localhost:5077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516032407 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:52.849716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:53.085243Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:54.477033Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128594173000369:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.477139Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.477442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128594173000379:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.477501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.719003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.834344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128594173000478:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.834428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.834436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128594173000483:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.834615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128594173000485:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.834659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.837807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:54.847165Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128594173000486:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:53:54.920386Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128594173000538:2598] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:42.126083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:42.126191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:42.126236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:42.126270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:42.126303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:42.126331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:42.126396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:42.126482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:42.127359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:42.128109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:42.215962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:42.216013Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:42.225101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:42.225230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:42.225365Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:42.232943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:42.233114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:42.233603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:42.233765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:42.239318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:42.240368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:42.249890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:42.249946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:42.250091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:42.250142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:42.250300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:42.250392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:42.371057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.372667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.372764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:52:42.372851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.372899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.372957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.373009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.373047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.373094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.373145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.373190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.373265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.373321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.373381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.373438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
:403: Unit PlanQueue has no ready operations at 72075186233409546 2025-12-23T18:53:55.690421Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:734:2714]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T18:53:55.690569Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 38 2025-12-23T18:53:55.690947Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269553162, Sender [3:734:2714], Recipient [3:128:2152]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 38 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 29 Memory: 124368 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 134 TableOwnerId: 72057594046678944 FollowerId: 0 2025-12-23T18:53:55.690999Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5284: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-12-23T18:53:55.691046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0029 2025-12-23T18:53:55.691165Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:53:55.691218Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-12-23T18:53:55.701644Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:735:2715]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-12-23T18:53:55.701723Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-12-23T18:53:55.701813Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409547 outdated step 5000038 last cleanup 0 2025-12-23T18:53:55.701866Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-12-23T18:53:55.701900Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409547 2025-12-23T18:53:55.701923Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409547 has no attached operations 2025-12-23T18:53:55.701950Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409547 2025-12-23T18:53:55.702077Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:735:2715]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T18:53:55.702174Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 38 2025-12-23T18:53:55.702455Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269553162, Sender [3:735:2715], Recipient [3:128:2152]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 38 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 24 Memory: 119488 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 134 TableOwnerId: 72057594046678944 FollowerId: 0 2025-12-23T18:53:55.702505Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5284: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-12-23T18:53:55.702554Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0024 2025-12-23T18:53:55.702639Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:53:55.744106Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:53:55.744189Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:53:55.744217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-12-23T18:53:55.744295Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, 
queue# 2 2025-12-23T18:53:55.744327Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-12-23T18:53:55.744426Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2025-12-23T18:53:55.744501Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-12-23T18:53:55.744536Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2025-12-23T18:53:55.744612Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2025-12-23T18:53:55.744679Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 2 out of 2 partitions 2025-12-23T18:53:55.744752Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 0 row count 0 2025-12-23T18:53:55.744808Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:53:55.744838Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2025-12-23T18:53:55.744898Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2025-12-23T18:53:55.744930Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409547 by load, its table already has 2 out of 2 partitions 2025-12-23T18:53:55.745000Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:53:55.755493Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:53:55.755566Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:53:55.755595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:53:55.778538Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:1740:3658], Recipient [3:734:2714]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:53:55.778622Z node 3 
:TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:53:55.778672Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409546, clientId# [3:1739:3657], serverId# [3:1740:3658], sessionId# [0:0:0] 2025-12-23T18:53:55.778946Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:1738:3656], Recipient [3:734:2714]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 38 } 2025-12-23T18:53:55.781190Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:1743:3661], Recipient [3:735:2715]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:53:55.781237Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:53:55.781282Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409547, clientId# [3:1742:3660], serverId# [3:1743:3661], sessionId# [0:0:0] 2025-12-23T18:53:55.781423Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:1741:3659], Recipient [3:735:2715]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 38 } |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> KqpYql::UuidPrimaryKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] Test command err: 2025-12-23T18:52:58.838998Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128353442773883:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:58.839056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003935/r3tmp/tmpPJAZNC/pdisk_1.dat 2025-12-23T18:52:59.022479Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:59.031526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:59.031671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:59.068645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:59.100964Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12963, node 1 2025-12-23T18:52:59.134829Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:59.134881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:59.134893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:59.134970Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: 
got bad distributable configuration 2025-12-23T18:52:59.203684Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:14882 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515979130 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:59.326191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:59.846367Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:00.996981Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:53:01.002020Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:53:01.002050Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:53:01.002107Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128362032709300:2319], Start check tables existence, number paths: 2 2025-12-23T18:53:01.002930Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-12-23T18:53:01.003195Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128362032709300:2319], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:53:01.003274Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128362032709300:2319], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:53:01.003300Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128362032709300:2319], Successfully finished 2025-12-23T18:53:01.003339Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 
2025-12-23T18:53:01.004114Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=ZjcxYWMzZmQtZTZiNWM5NmYtZGE1MzIwZDYtOTA0MTY1Yjg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjcxYWMzZmQtZTZiNWM5NmYtZGE1MzIwZDYtOTA0MTY1Yjg= (tmp dir name: ee98a473-4e92-8aa4-a02d-4abb2a326fc8) 2025-12-23T18:53:01.004215Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=ZjcxYWMzZmQtZTZiNWM5NmYtZGE1MzIwZDYtOTA0MTY1Yjg=, ActorId: [1:7587128366327676616:2323], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:53:01.012440Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128366327676618:2495], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:53:01.015382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:01.016267Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128366327676618:2495], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-12-23T18:53:01.016409Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128366327676618:2495], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-12-23T18:53:01.022715Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128366327676618:2495], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:53:01.119104Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128366327676618:2495], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:53:01.122117Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128366327676669:2527] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:01.122223Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128366327676618:2495], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-12-23T18:53:01.124514Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=M2QyMjQ3MjYtZDFmMWIzYmQtOTI1ZGY3MzEtN2Q1Mjk3YWQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id M2QyMjQ3MjYtZDFmMWIzYmQtOTI1ZGY3MzEtN2Q1Mjk3YWQ= (tmp dir name: a646f003-4016-a7cd-627d-e18349975f50) 2025-12-23T18:53:01.124612Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=M2QyMjQ3MjYtZDFmMWIzYmQtOTI1ZGY3MzEtN2Q1Mjk3YWQ=, ActorId: [1:7587128366327676676:2324], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:53:01.124874Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-12-23T18:53:01.124900Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2025-12-23T18:53:01.124940Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-12-23T18:53:01.124990Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128366327676678:2325], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-12-23T18:53:01.125067Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=1&id=M2QyMjQ3MjYtZDFmMWIzYmQtOTI1ZGY3MzEtN2Q1Mjk3YWQ=, ActorId: [1:7587128366327676676:2324], ActorState: ReadyState, TraceId: 01kd68xet50wbqfzkwxr2hspvv, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7587128366327676675:2532] database: Root databaseId: /Root pool id: sample_pool_id 2025-12-23T18:53:01.125162Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Recieved new request from [1:7587128366327676676:2324], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=M2QyMjQ3MjYtZDFmMWIzYmQtOTI1ZGY3MzEtN2Q1Mjk3YWQ= 2025-12-23T18:53:01.125246Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7587128366327676680:2326], Database: /Root, Start database fetching 2025-12-23T18:53:01.125410Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7587128366327676680:2326], Database: /Root, Database info successfully 
fetched, serverless: 0 2025-12-23T18:53:01.125485Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-12-23T18:53:01.125591Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [1:7587128366327676688:2327], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=M2QyMjQ3MjYtZDFmMWIzYmQtOTI1ZGY3MzEtN2Q1Mjk3YWQ=, Start pool fetching 2025-12-23T18:53:01.125628Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128366327676689:2328], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-12-23T18:53:01.125949Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ... : /Root pool id: default 2025-12-23T18:53:54.610697Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:294: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7587128596274717643:2625], DatabaseId: /Root, PoolId: my_pool, Got delete notification 2025-12-23T18:53:54.610811Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-12-23T18:53:54.610873Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-12-23T18:53:54.610912Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128596274717671:2630], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-12-23T18:53:54.611248Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128596274717671:2630], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-12-23T18:53:54.611316Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-12-23T18:53:54.615514Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=8&id=OTM0NzllNTUtZjdmNzUwYTUtOTYyMDE2OWItZjJlNjY4NTE=, ActorId: [8:7587128570504912643:2325], ActorState: ExecuteState, TraceId: 01kd68z311b2h9fwdrj1y3wfw6, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [8:7587128596274717661:2325] WorkloadServiceCleanup: 0 2025-12-23T18:53:54.618845Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=8&id=OTM0NzllNTUtZjdmNzUwYTUtOTYyMDE2OWItZjJlNjY4NTE=, ActorId: [8:7587128570504912643:2325], ActorState: CleanupState, TraceId: 01kd68z311b2h9fwdrj1y3wfw6, EndCleanup, isFinal: 0 2025-12-23T18:53:54.618927Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=8&id=OTM0NzllNTUtZjdmNzUwYTUtOTYyMDE2OWItZjJlNjY4NTE=, ActorId: [8:7587128570504912643:2325], ActorState: CleanupState, TraceId: 01kd68z311b2h9fwdrj1y3wfw6, Sent query response back to proxy, proxyRequestId: 49, proxyId: [8:7587128553325042835:2264] 2025-12-23T18:53:54.623153Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA== (tmp dir name: 3dedd9fe-4290-adb4-b82e-a5bc6939b23b) 2025-12-23T18:53:54.623290Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:53:54.623623Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-12-23T18:53:54.623704Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ReadyState, TraceId: 01kd68z31z2d83vdc15zek34rb, received request, proxyRequestId: 50 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [8:7587128596274717682:3110] database: Root databaseId: /Root pool id: default 2025-12-23T18:53:54.623707Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-12-23T18:53:54.623745Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:268: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ReadyState, TraceId: 01kd68z31z2d83vdc15zek34rb, request placed into pool from cache: default 2025-12-23T18:53:54.623747Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128596274717685:2632], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-12-23T18:53:54.623835Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:630: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ExecuteState, TraceId: 01kd68z31z2d83vdc15zek34rb, Sending CompileQuery request 2025-12-23T18:53:54.624050Z node 8 :KQP_WORKLOAD_SERVICE 
WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128596274717685:2632], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-12-23T18:53:54.624112Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-12-23T18:53:54.624167Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-12-23T18:53:54.624199Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128596274717687:2633], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-12-23T18:53:54.624339Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128596274717687:2633], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-12-23T18:53:54.624380Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-12-23T18:53:54.713828Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1659: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ExecuteState, TraceId: 01kd68z31z2d83vdc15zek34rb, ExecutePhyTx, tx: 0x00007CB89F11A318 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-12-23T18:53:54.713898Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1816: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ExecuteState, TraceId: 01kd68z31z2d83vdc15zek34rb, Sending to Executer TraceId: 0 8 2025-12-23T18:53:54.714040Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1880: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ExecuteState, TraceId: 01kd68z31z2d83vdc15zek34rb, Created new KQP executer: [8:7587128596274717691:2631] isRollback: 0 2025-12-23T18:53:54.715238Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2191: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ExecuteState, TraceId: 01kd68z31z2d83vdc15zek34rb, Forwarded TEvStreamData to [8:7587128596274717682:3110] 2025-12-23T18:53:54.715913Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2090: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ExecuteState, TraceId: 01kd68z31z2d83vdc15zek34rb, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-12-23T18:53:54.716069Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2383: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ExecuteState, TraceId: 01kd68z31z2d83vdc15zek34rb, txInfo Status: Committed Kind: Pure TotalDuration: 2.324 ServerDuration: 2.257 QueriesCount: 2 2025-12-23T18:53:54.716150Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2543: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ExecuteState, TraceId: 01kd68z31z2d83vdc15zek34rb, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-12-23T18:53:54.716348Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ExecuteState, TraceId: 01kd68z31z2d83vdc15zek34rb, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:54.716383Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ExecuteState, TraceId: 01kd68z31z2d83vdc15zek34rb, EndCleanup, isFinal: 1 2025-12-23T18:53:54.716432Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: ExecuteState, TraceId: 01kd68z31z2d83vdc15zek34rb, Sent query response back to proxy, proxyRequestId: 50, proxyId: [8:7587128553325042835:2264] 
2025-12-23T18:53:54.716463Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: unknown state, TraceId: 01kd68z31z2d83vdc15zek34rb, Cleanup temp tables: 0 2025-12-23T18:53:54.716866Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=8&id=ZDM2MTdhNDUtMjI5MzMyNTQtZmZhNzZlNDktYzM0MDczZA==, ActorId: [8:7587128596274717683:2631], ActorState: unknown state, TraceId: 01kd68z31z2d83vdc15zek34rb, Session actor destroyed 2025-12-23T18:53:54.725456Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=8&id=OTM0NzllNTUtZjdmNzUwYTUtOTYyMDE2OWItZjJlNjY4NTE=, ActorId: [8:7587128570504912643:2325], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:53:54.725509Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=8&id=OTM0NzllNTUtZjdmNzUwYTUtOTYyMDE2OWItZjJlNjY4NTE=, ActorId: [8:7587128570504912643:2325], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:54.725545Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=8&id=OTM0NzllNTUtZjdmNzUwYTUtOTYyMDE2OWItZjJlNjY4NTE=, ActorId: [8:7587128570504912643:2325], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:53:54.725575Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=8&id=OTM0NzllNTUtZjdmNzUwYTUtOTYyMDE2OWItZjJlNjY4NTE=, ActorId: [8:7587128570504912643:2325], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:53:54.725667Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=8&id=OTM0NzllNTUtZjdmNzUwYTUtOTYyMDE2OWItZjJlNjY4NTE=, ActorId: [8:7587128570504912643:2325], ActorState: unknown state, Session actor destroyed |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2025-12-23T18:52:53.173753Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128335944625984:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:53.173815Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00395c/r3tmp/tmpnL8uxD/pdisk_1.dat 2025-12-23T18:52:53.461314Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:53.553758Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:53.554837Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:53.554930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:53.558937Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128335944625959:2081] 1766515973170801 != 1766515973170804 2025-12-23T18:52:53.566288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22681, node 1 2025-12-23T18:52:53.688142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:53.688159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:53.688163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:53.688233Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:53.717493Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515973614 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:53.957877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:52:54.179811Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:55.560031Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:52:55.563899Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128344534561416:2319], Start check tables existence, number paths: 2 2025-12-23T18:52:55.564285Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:52:55.564329Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:52:55.565021Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-12-23T18:52:55.565102Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128344534561416:2319], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:52:55.565204Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128344534561416:2319], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:52:55.565244Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128344534561416:2319], Successfully finished 2025-12-23T18:52:55.565293Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-12-23T18:52:55.569045Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=M2Q3YjMyNTMtZmEwODRlOWYtYTIwNGE4YjItOGVmNTc5NGE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id M2Q3YjMyNTMtZmEwODRlOWYtYTIwNGE4YjItOGVmNTc5NGE= (tmp dir name: 822e2bf8-4353-0542-368a-2faeab770c43) 2025-12-23T18:52:55.569147Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=M2Q3YjMyNTMtZmEwODRlOWYtYTIwNGE4YjItOGVmNTc5NGE=, ActorId: [1:7587128344534561435:2323], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:52:55.597292Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128344534561437:2498], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:52:55.602693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:55.604465Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128344534561437:2498], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-12-23T18:52:55.606070Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128344534561437:2498], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe 
successfully connected 2025-12-23T18:52:55.610587Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128344534561437:2498], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:52:55.677422Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128344534561437:2498], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:52:55.689220Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128344534561488:2530] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:55.689303Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128344534561437:2498], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-12-23T18:52:55.692382Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=ZmM5YTdmOTItNzUwMDcxMzEtODgzNDFjMWEtY2U4YmY4MjQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmM5YTdmOTItNzUwMDcxMzEtODgzNDFjMWEtY2U4YmY4MjQ= (tmp dir name: 4cc5304d-4ef2-42c5-3ba0-38a31c21956d) 2025-12-23T18:52:55.692499Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=ZmM5YTdmOTItNzUwMDcxMzEtODgzNDFjMWEtY2U4YmY4MjQ=, ActorId: [1:7587128344534561496:2324], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:52:55.692810Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-12-23T18:52:55.692839Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2025-12-23T18:52:55.692883Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-12-23T18:52:55.692933Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128344534561498:2325], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-12-23T18:52:55.693656Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=1&id=ZmM5YTdmOTItNzUwMDcxMzEtODgzNDFjMWEtY2U4YmY4MjQ=, ActorId: [1:7587128344534561496:2324], ActorState: ReadyState, TraceId: 01kd68x9gdeexxgyk46kh66qnv, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7587128344534561495:2536] database: Root databaseId: /Root pool id: sample_pool_id 2025-12-23T18:52:55.694015Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128344534561498:2325], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-12-23T18:52:55.694049Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Recieved new request from [1:7587128344534561496:2324], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmM5YTdmOTItNzUwMDcxMzEtODgzNDFjMWEtY2U4YmY4MjQ= 2025-12-23T18:52:55.694092Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7587128344534561507:2326], Database: /Root, 
Start database fetching 2025-12-23T18:52:55.694221Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7587128344534561507:2326], Database: /Root, Database info successfully fetched, serverless: 0 2025-12-23T18:52:55.694619Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-12-23T18:52:55.694638Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-12-23T18:52:55.694859Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: ... ssion/3?node_id=6&id=ZDc4YTU3OGYtYTM1ZTg2M2QtNDc0ODQ2ZWEtMjU3N2Y3YzI=, ActorId: [6:7587128601688727245:2324], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:53:55.123857Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7587128601688727247:2496], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:53:55.126841Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:55.127767Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7587128601688727247:2496], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-12-23T18:53:55.127877Z node 6 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7587128601688727247:2496], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-12-23T18:53:55.134970Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7587128601688727247:2496], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:53:55.187194Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7587128601688727247:2496], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:53:55.190281Z node 6 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [6:7587128601688727298:2528] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:55.190366Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7587128601688727247:2496], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-12-23T18:53:55.193218Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y= (tmp dir name: 7b98d7b7-438e-9662-43a2-5c96ebd832f0) 2025-12-23T18:53:55.193337Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y=, ActorId: [6:7587128601688727305:2325], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:53:55.193575Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-12-23T18:53:55.193595Z node 6 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2025-12-23T18:53:55.193677Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y=, ActorId: [6:7587128601688727305:2325], ActorState: ReadyState, TraceId: 01kd68z3ks195w3y29wdjy9fhr, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [6:7587128601688727304:2533] database: Root databaseId: /Root pool id: sample_pool_id 2025-12-23T18:53:55.193724Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Recieved new request from [6:7587128601688727305:2325], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y= 2025-12-23T18:53:55.193765Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7587128601688727307:2326], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-12-23T18:53:55.193767Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-12-23T18:53:55.193822Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7587128601688727308:2327], Database: /Root, Start database fetching 2025-12-23T18:53:55.194108Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7587128601688727308:2327], Database: /Root, Database info successfully 
fetched, serverless: 0 2025-12-23T18:53:55.194234Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-12-23T18:53:55.194338Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [6:7587128601688727316:2328], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y=, Start pool fetching 2025-12-23T18:53:55.194389Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7587128601688727318:2329], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-12-23T18:53:55.195124Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7587128601688727318:2329], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-12-23T18:53:55.195127Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7587128601688727307:2326], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-12-23T18:53:55.195194Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-12-23T18:53:55.195209Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:107: [WorkloadService] [TPoolResolverActor] ActorId: [6:7587128601688727316:2328], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y=, Pool info successfully resolved 2025-12-23T18:53:55.195209Z node 6 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-12-23T18:53:55.195436Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:286: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y= 2025-12-23T18:53:55.195452Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7587128601688727321:2330], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2025-12-23T18:53:55.195544Z node 6 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:297: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y= 2025-12-23T18:53:55.195736Z node 6 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y=, ActorId: [6:7587128601688727305:2325], ActorState: ExecuteState, TraceId: 01kd68z3ks195w3y29wdjy9fhr, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id, status: PRECONDITION_FAILED, issues: { message: "Resource pool sample_pool_id was disabled due to zero concurrent query limit" severity: 1 } 2025-12-23T18:53:55.195879Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y=, ActorId: [6:7587128601688727305:2325], ActorState: ExecuteState, 
TraceId: 01kd68z3ks195w3y29wdjy9fhr, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-12-23T18:53:55.195988Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:196: [WorkloadService] [Service] Finished request with worker actor [6:7587128601688727305:2325], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y= 2025-12-23T18:53:55.196062Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y=, ActorId: [6:7587128601688727305:2325], ActorState: CleanupState, TraceId: 01kd68z3ks195w3y29wdjy9fhr, EndCleanup, isFinal: 1 2025-12-23T18:53:55.196173Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y=, ActorId: [6:7587128601688727305:2325], ActorState: CleanupState, TraceId: 01kd68z3ks195w3y29wdjy9fhr, Sent query response back to proxy, proxyRequestId: 3, proxyId: [6:7587128584508857438:2264] 2025-12-23T18:53:55.196205Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y=, ActorId: [6:7587128601688727305:2325], ActorState: unknown state, TraceId: 01kd68z3ks195w3y29wdjy9fhr, Cleanup temp tables: 0 2025-12-23T18:53:55.196325Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=6&id=NzM2MzJjMmQtYmFjNjMzNzctMjY2NTc1YjMtODNiNzFhY2Y=, ActorId: [6:7587128601688727305:2325], ActorState: unknown state, TraceId: 01kd68z3ks195w3y29wdjy9fhr, Session actor destroyed 2025-12-23T18:53:55.196435Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7587128601688727321:2330], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2025-12-23T18:53:55.204549Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=6&id=ZDc4YTU3OGYtYTM1ZTg2M2QtNDc0ODQ2ZWEtMjU3N2Y3YzI=, ActorId: [6:7587128601688727245:2324], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:53:55.204618Z node 6 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=6&id=ZDc4YTU3OGYtYTM1ZTg2M2QtNDc0ODQ2ZWEtMjU3N2Y3YzI=, ActorId: [6:7587128601688727245:2324], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:53:55.204648Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=6&id=ZDc4YTU3OGYtYTM1ZTg2M2QtNDc0ODQ2ZWEtMjU3N2Y3YzI=, ActorId: [6:7587128601688727245:2324], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:53:55.204671Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=6&id=ZDc4YTU3OGYtYTM1ZTg2M2QtNDc0ODQ2ZWEtMjU3N2Y3YzI=, ActorId: [6:7587128601688727245:2324], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:53:55.204766Z node 6 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=6&id=ZDc4YTU3OGYtYTM1ZTg2M2QtNDc0ODQ2ZWEtMjU3N2Y3YzI=, ActorId: [6:7587128601688727245:2324], ActorState: unknown state, Session actor destroyed >> KqpScripting::EndOfQueryCommit [GOOD] >> KqpScripting::ExecuteYqlScriptPg |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> 
THealthCheckTest::Issues100GroupsListing >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus >> THealthCheckTest::TestNoSchemeShardResponse >> THealthCheckTest::DatabaseDoesNotExist >> THealthCheckTest::Basic >> THealthCheckTest::Issues100Groups100VCardListing >> KqpScripting::ExecuteYqlScriptScanScalar [GOOD] >> KqpScripting::JoinIndexLookup >> THealthCheckTest::SpecificServerless >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 5441, MsgBus: 27300 2025-12-23T18:53:52.307020Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128589434905304:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:52.307123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ea1/r3tmp/tmpeEt1yM/pdisk_1.dat 2025-12-23T18:53:52.515939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:52.516098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:52.559887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:52.567552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:52.577558Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:52.592680Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128589434905263:2081] 1766516032305399 != 1766516032305402 TServer::EnableGrpc on GrpcPort 5441, node 1 2025-12-23T18:53:52.681402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:52.681448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:52.681458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:52.681598Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:52.788047Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27300 TClient is connected to server localhost:27300 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516032652 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:53.108296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:53.314194Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:54.701905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128598024840741:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.701981Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.702193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128598024840751:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.702250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.914967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.998087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128598024840844:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.998141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128598024840849:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.998143Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.998307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128598024840851:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.998367Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.001741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:55.010248Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128598024840852:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:53:55.092406Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128602319808200:2595] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:55.482020Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587128602319808298:2361], status: GENERIC_ERROR, issues:
:3:25: Error: Invalid value "invalid-uuid" for type Uuid 2025-12-23T18:53:55.482377Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MzkwYWQ5MTItNTUzOTFhNmYtNzhlN2Q1Y2ItOTk5MzRjYmI=, ActorId: [1:7587128598024840738:2320], ActorState: ExecuteState, TraceId: 01kd68z3wf0yd7qfb4snjf05na, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 3 column: 25 } message: "Invalid value \"invalid-uuid\" for type Uuid" end_position { row: 3 column: 25 } severity: 1 }, remove tx with tx_id: |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] >> THealthCheckTest::OneIssueListing >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore >> DstCreator::ReplicationModeMismatch [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch >> DstCreator::Basic [GOOD] >> DstCreator::CannotFindColumn >> DstCreator::WithIntermediateDir [GOOD] >> DstCreator::WithAsyncIndex >> DstCreator::WithSyncIndex [GOOD] >> DstCreator::NonExistentSrc [GOOD] >> DstCreator::KeyColumnsSizeMismatch >> TKesusTest::TestAcquireSemaphore [GOOD] >> DstCreator::ExistingDst [GOOD] >> DstCreator::EmptyReplicationConfig >> DstCreator::SameOwner [GOOD] >> DstCreator::SamePartitionCount >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] >> DstCreator::ColumnsSizeMismatch [GOOD] >> DstCreator::ColumnTypeMismatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 62771, MsgBus: 23744 2025-12-23T18:53:46.270927Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128562666384818:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:46.270995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ebe/r3tmp/tmpWoUVBy/pdisk_1.dat 2025-12-23T18:53:46.497599Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:46.499307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:46.499440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:46.506994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:46.596272Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:46.598115Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128562666384788:2081] 1766516026269896 != 1766516026269899 TServer::EnableGrpc on GrpcPort 62771, node 1 2025-12-23T18:53:46.650784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-12-23T18:53:46.650818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:46.650861Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:46.650958Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:46.764562Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23744 TClient is connected to server localhost:23744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516026618 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:53:47.114470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:53:47.132908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:47.269493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:47.279227Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:47.414756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:47.469661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:49.084493Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128575551288547:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.084612Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.084978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128575551288557:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.085040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.399949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.425284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.451066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.479514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.508097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.539532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.593741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.631874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.702764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128575551289428:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.702846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128575551289433:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.702849Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.703087Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128575551289435:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.703167Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.705870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:49.715997Z node 1 ... 6265Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:52.756290Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:52.756298Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:52.756384Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:52.885698Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27501 TClient is connected to server localhost:27501 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516032757 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:53.124539Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:53.132685Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:53.179586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:53.309900Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:53.366082Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:53.649554Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:55.056634Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598399780507:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.056698Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.056874Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598399780516:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.056896Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.092586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.112731Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.135401Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.156069Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.177629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.200289Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.223924Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.261230Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.348873Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598399781382:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.348945Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.349016Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598399781387:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.349062Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598399781389:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.349097Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.352262Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:55.360274Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128598399781391:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:53:55.449250Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128598399781442:3761] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:57.093379Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516037125, txId: 281474976715673] shutting down 2025-12-23T18:53:57.312104Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516037342, txId: 281474976715675] shutting down 2025-12-23T18:53:57.556042Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516037587, txId: 281474976715677] shutting down 2025-12-23T18:53:57.625714Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128585514876794:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:57.625797Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 22430, MsgBus: 22984 2025-12-23T18:53:46.435079Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128560863997032:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:46.436485Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ebd/r3tmp/tmp9UrnkS/pdisk_1.dat 2025-12-23T18:53:46.635662Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:46.642969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:46.643074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:46.701235Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:46.714645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22430, node 1 2025-12-23T18:53:46.804419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:46.804446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:46.804455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-12-23T18:53:46.804589Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:46.882174Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22984 TClient is connected to server localhost:22984 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516026765 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:47.279505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:47.302950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:47.431830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:47.441105Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:47.583219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:47.650017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:49.351651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128573748900743:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.351769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.352099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128573748900753:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.352146Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.689866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.720650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.748032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.777713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.809498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.839746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.875189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.912258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:49.983435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128573748901621:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.983513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.983722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128573748901627:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.983725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128573748901626:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.983766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.987296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:49.997420Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128573748901630:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Tra ... Port 22565, node 2 2025-12-23T18:53:52.758574Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:52.758598Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:52.758605Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:52.758678Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:52.853807Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1330 TClient is connected to server localhost:1330 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516032771 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:53.106290Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:53.114471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:53.160421Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:53.286460Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:53.344545Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:53.609650Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:55.030089Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598885736670:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.030164Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.030439Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598885736679:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.030529Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.088531Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.112658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.135496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.157107Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.177490Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.201538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.224888Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.259122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.319123Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598885737548:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.319180Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.319308Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598885737553:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.319357Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598885737554:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.319408Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.322163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:55.329109Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128598885737557:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:55.411966Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128598885737608:3760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:56.898015Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:57.603755Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128586000832965:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:57.603824Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:53:57.625386Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516037664, txId: 281474976710675] shutting down |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: Trying to start YDB, gRPC: 17188, MsgBus: 15904 2025-12-23T18:53:46.876368Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128561476578589:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:46.876494Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005eb9/r3tmp/tmpCsoAfT/pdisk_1.dat 2025-12-23T18:53:47.138842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:47.138950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:47.171947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:47.172766Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:47.174208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:47.174405Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128561476578557:2081] 1766516026874886 != 1766516026874889 TServer::EnableGrpc on GrpcPort 17188, node 1 2025-12-23T18:53:47.244273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-12-23T18:53:47.244305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:47.244315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:47.244412Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15904 2025-12-23T18:53:47.458606Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516027234 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:47.719688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:47.735464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:47.885936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:47.886866Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:53:48.015516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:48.091772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:49.772299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128574361482331:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.772405Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.772716Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128574361482341:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:49.772790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.172615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.202731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.228285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.255443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.282705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.315892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.352000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.391862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:50.485883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128578656450519:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.485955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128578656450524:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.485970Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.486217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128578656450527:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.486276Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:50.489508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:50.499741Z node 1 ... 23T18:53:53.363726Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:53.363753Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:53.363763Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:53.363853Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:53.408170Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17323 TClient is connected to server localhost:17323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516033373 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:53.678706Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:53.687792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:53.729635Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:53.830033Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:53.903353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:54.208313Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:55.575533Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598817678971:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.575635Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.576188Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598817678981:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.576244Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.576430Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598817678983:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.576472Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.623417Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.653559Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.681841Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.710263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.739163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.768118Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.797818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.842283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:55.909672Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598817679849:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.909779Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.909983Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598817679855:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.910037Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128598817679854:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.910057Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:55.914658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:55.924100Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128598817679858:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:53:55.990124Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128598817679909:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:57.670610Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516037706, txId: 281474976710673] shutting down 2025-12-23T18:53:57.808222Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516037839, txId: 281474976710675] shutting down >> DstCreator::GlobalConsistency [GOOD] >> DstCreator::KeyColumnNameMismatch |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2025-12-23T18:53:18.900420Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:18.900582Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:18.921799Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:18.921914Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:18.960803Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:18.961356Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=13119028132520952318, session=0, seqNo=0) 2025-12-23T18:53:18.962820Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:18.974874Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=13119028132520952318, session=1) 2025-12-23T18:53:18.975234Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=15575776150410992360, session=0, seqNo=0) 2025-12-23T18:53:18.975413Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:53:18.987495Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=15575776150410992360, session=2) 2025-12-23T18:53:18.988318Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:53:18.988473Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:53:18.988572Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:53:18.988773Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=2, semaphore="Lock2" count=1) 2025-12-23T18:53:18.988828Z node 1 :KESUS_TABLET DEBUG: 
tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-12-23T18:53:18.988877Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-12-23T18:53:18.988949Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=333, session=1, semaphore="Lock2" count=1) 2025-12-23T18:53:18.988998Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-12-23T18:53:19.000700Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2025-12-23T18:53:19.000770Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2025-12-23T18:53:19.000838Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=333) 2025-12-23T18:53:19.001258Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:153:2175], cookie=6391193996312311706, name="Lock1") 2025-12-23T18:53:19.002255Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:153:2175], cookie=6391193996312311706) 2025-12-23T18:53:19.002622Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:156:2178], cookie=10297541613084661358, name="Lock2") 2025-12-23T18:53:19.002695Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:156:2178], cookie=10297541613084661358) 2025-12-23T18:53:19.018122Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.018259Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.018892Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.019535Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.037022Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.037164Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:53:19.037211Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-12-23T18:53:19.037257Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-12-23T18:53:19.037629Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:195:2208], cookie=914490965316056959, name="Lock1") 2025-12-23T18:53:19.037710Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:195:2208], cookie=914490965316056959) 2025-12-23T18:53:19.038220Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:203:2215], cookie=6442514947738844944, name="Lock2") 
2025-12-23T18:53:19.038289Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:203:2215], cookie=6442514947738844944) 2025-12-23T18:53:19.506633Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:19.518857Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:19.871821Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:19.883651Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:20.228293Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:20.240214Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:20.592656Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:20.604918Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:20.955720Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:20.968107Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:21.297911Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:21.309738Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:21.639548Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:21.651826Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:22.001866Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:22.013733Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:22.354790Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:22.366728Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:22.748601Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:22.760364Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:23.109300Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:23.121366Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:23.470760Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:23.482602Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:23.842940Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:23.854488Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:24.205288Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:24.217683Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2025-12-23T18:53:24.620744Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:24.632903Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:24.986228Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:24.998727Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:25.351571Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:25.364122Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:25.721309Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:25.735515Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:26.098781Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:26.110883Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:26.492523Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:26.504383Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:26.854311Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:26.866478Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:27.217138Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:27.229139Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:27.579148Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:27.591670Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:27.953500Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:27.965882Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:28.328382Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [720575940379 ... 
heck.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:54.795824Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:55.157512Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:55.169701Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:55.532259Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:55.544512Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:55.907268Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:55.919389Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:56.286858Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:56.300941Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:56.701946Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:56.714043Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:57.078504Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:57.098760Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:57.459331Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:57.471422Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:57.826859Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:57.839693Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:58.205473Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:58.217471Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:58.581991Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-12-23T18:53:58.582102Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-12-23T18:53:58.582165Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-12-23T18:53:58.582271Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-12-23T18:53:58.582341Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-12-23T18:53:58.582382Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-12-23T18:53:58.594875Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-12-23T18:53:58.595614Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:364:2345], cookie=6569785572329651367, name="Lock1") 
2025-12-23T18:53:58.595729Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:364:2345], cookie=6569785572329651367) 2025-12-23T18:53:58.596255Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:367:2348], cookie=13300699327118845522, name="Lock2") 2025-12-23T18:53:58.596399Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:367:2348], cookie=13300699327118845522) 2025-12-23T18:53:58.596927Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:370:2351], cookie=4167547317457270680) 2025-12-23T18:53:58.596995Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:370:2351], cookie=4167547317457270680) 2025-12-23T18:53:58.615168Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:58.615291Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:58.615841Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:58.616526Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:58.665002Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:58.665169Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-12-23T18:53:58.665234Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-12-23T18:53:58.665653Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:409:2381], cookie=270089451120323123) 2025-12-23T18:53:58.665769Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:409:2381], cookie=270089451120323123) 2025-12-23T18:53:58.666457Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:416:2387], cookie=1957741116605519748, name="Lock1") 2025-12-23T18:53:58.666569Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:416:2387], cookie=1957741116605519748) 2025-12-23T18:53:58.667231Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:419:2390], cookie=12234501228911258316, name="Lock2") 2025-12-23T18:53:58.667317Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:419:2390], cookie=12234501228911258316) 2025-12-23T18:53:59.087969Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:59.088053Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:59.103246Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:59.103383Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:59.137646Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] 
TTxInit::Complete 2025-12-23T18:53:59.138099Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=16820164285932639817, session=0, seqNo=0) 2025-12-23T18:53:59.138211Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:59.150329Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=16820164285932639817, session=1) 2025-12-23T18:53:59.150655Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=18030003479267249102, session=0, seqNo=0) 2025-12-23T18:53:59.150799Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:53:59.162918Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=18030003479267249102, session=2) 2025-12-23T18:53:59.163233Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2025-12-23T18:53:59.175409Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=111) 2025-12-23T18:53:59.175991Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:150:2172], cookie=17977601055371713543, name="Sem1", limit=1) 2025-12-23T18:53:59.176153Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-12-23T18:53:59.187975Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:150:2172], cookie=17977601055371713543) 2025-12-23T18:53:59.188325Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=333, session=1, semaphore="Sem1" count=100500) 2025-12-23T18:53:59.200129Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=333) 2025-12-23T18:53:59.200377Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=222, session=1, semaphore="Sem1" count=1) 2025-12-23T18:53:59.200496Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-12-23T18:53:59.200643Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=333, session=2, semaphore="Sem1" count=1) 2025-12-23T18:53:59.212728Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=222) 2025-12-23T18:53:59.212818Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=333) 2025-12-23T18:53:59.213320Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:160:2182], cookie=2797343399107397909, name="Sem1") 2025-12-23T18:53:59.213394Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] 
TTxSemaphoreDescribe::Complete (sender=[5:160:2182], cookie=2797343399107397909) 2025-12-23T18:53:59.213836Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:163:2185], cookie=5306073110042722687, name="Sem1") 2025-12-23T18:53:59.213911Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:163:2185], cookie=5306073110042722687) 2025-12-23T18:53:59.214299Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:166:2188], cookie=9344063507395994046, name="Sem1", force=0) 2025-12-23T18:53:59.226358Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:166:2188], cookie=9344063507395994046) 2025-12-23T18:53:59.226883Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:171:2193], cookie=5154274368561304908, name="Sem1", force=1) 2025-12-23T18:53:59.227006Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-12-23T18:53:59.239310Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:171:2193], cookie=5154274368561304908) |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> THealthCheckTest::StaticGroupIssue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2025-12-23T18:53:56.060166Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128604338444390:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:56.060220Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005732/r3tmp/tmpbTx5jJ/pdisk_1.dat 2025-12-23T18:53:56.338817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:56.356804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:56.356901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:56.434839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:56.443019Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:56.635877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18217 TServer::EnableGrpc on GrpcPort 16018, node 1 2025-12-23T18:53:56.791358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:56.791382Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:56.791466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:56.791559Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:57.068988Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516036495 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:57.266391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:57.279420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766516037594 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037314 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516036495 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 3... (TRUNCATED) 2025-12-23T18:53:57.618602Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:53:57.618639Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:53:57.619262Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:53:58.834440Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516037594, tx_id: 281474976715658 } } } 2025-12-23T18:53:58.835911Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:53:58.837970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:58.839721Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-12-23T18:53:58.839749Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-12-23T18:53:58.868499Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-12-23T18:53:58.869712Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 41 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766516038910 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapsho ... 
eBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037906 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 43 PathOwnerId: 72057594046644480 } TClient::Ls request: /Root/Replicated/index_by_value 2025-12-23T18:53:58.881129Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 43] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 42 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766516038910 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766516038910 ParentPathId: 42 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" 
PathSubType: EPathSubTypeSyncIndexImplTable Version ... (TRUNCATED) TClient::Ls request: /Root/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766516038910 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766516038910 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 
UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037906 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest >> THealthCheckTest::DatabaseDoesNotExist [GOOD] >> THealthCheckTest::BridgeGroupNoIssues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: 2025-12-23T18:53:56.060206Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128606381050294:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:56.060269Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:53:56.107239Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005708/r3tmp/tmpnCG8Tt/pdisk_1.dat 2025-12-23T18:53:56.368139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:56.368284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:56.373429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:56.451712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:56.478136Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:1976 TServer::EnableGrpc on GrpcPort 19491, node 1 2025-12-23T18:53:56.732482Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:56.790912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:56.790951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:56.790965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:56.791042Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:57.069236Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1976 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036551 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:53:57.273524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:57.280686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:53:57.289991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766516037594 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037321 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036551 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 3... 
(TRUNCATED) 2025-12-23T18:53:57.633964Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:53:57.634029Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:53:57.634573Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:53:59.050686Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516037594, tx_id: 281474976710658 } } } 2025-12-23T18:53:59.051148Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:53:59.053497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:59.055255Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-12-23T18:53:59.055285Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-12-23T18:53:59.087593Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-12-23T18:53:59.088852Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 42 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516039127 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 
500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionE ... "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 44 PathOwnerId: 72057594046644480 } TClient::Ls request: /Root/Dir/Replicated/index_by_value 2025-12-23T18:53:59.099466Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 44] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 43 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516039127 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 44 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516039127 ParentPathId: 43 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version ... (TRUNCATED) TClient::Ls request: /Root/Dir/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 44 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516039127 ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... 
(TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 44 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516039127 ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: 
false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2025-12-23T18:53:06.618965Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128388826268253:2161];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:06.619030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004d22/r3tmp/tmp597vfe/pdisk_1.dat 2025-12-23T18:53:06.886807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:06.916527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:06.916649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:06.961523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:07.033398Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7513, node 1 2025-12-23T18:53:07.162840Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:07.254299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:07.254410Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:07.254420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:07.254491Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18768 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-12-23T18:53:07.623749Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515987019 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:07.684920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient is connected to server localhost:18768 2025-12-23T18:53:07.870922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-12-23T18:53:07.877486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) waiting... 2025-12-23T18:53:07.904377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:08.025802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:08.061434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:08.103590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.130422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.166058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.194044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.221186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.249283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:08.281401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:09.630450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128401711171674:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.630454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128401711171663:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.630666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.631095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128401711171679:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.631169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:09.635207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:09.645977Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128401711171678:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-12-23T18:53:09.712061Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128401711171732:3102] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:53:10.406338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:10.437035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__op ... eme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:53:57.723546Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-12-23T18:53:57.723601Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 17ms 2025-12-23T18:53:57.724031Z node 8 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:53:57.724045Z node 8 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { 
Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:53:57.724053Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 18ms 2025-12-23T18:53:57.724316Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:53:57.724333Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-12-23T18:53:57.724432Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 20ms 2025-12-23T18:53:57.724783Z node 8 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 
48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-12-23T18:53:57.858089Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7587128606891813819:2435]: Pool not found 2025-12-23T18:53:57.858276Z node 8 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-12-23T18:53:58.190329Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7587128606891813830:2437]: Pool not found 2025-12-23T18:53:58.190642Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2025-12-23T18:53:58.193111Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7587128611186781232:2455], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-12-23T18:53:58.193130Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128611186781231:2454], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:58.193210Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:58.193370Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128611186781235:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:58.193423Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:58.486774Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7587128611186781229:2453]: Pool not found 2025-12-23T18:53:58.487229Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2025-12-23T18:53:58.695802Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:43652) incoming connection opened 2025-12-23T18:53:58.695899Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:43652) -> (POST /Root, 4 bytes) 2025-12-23T18:53:58.696034Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d8fd:720e:917b:0:c0fd:720e:917b:0] request [CreateStream] url [/Root] database [/Root] requestId: d6ca5c2b-f887a28c-a39aab19-27a08f22 2025-12-23T18:53:58.696647Z node 8 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateStream] requestId [d6ca5c2b-f887a28c-a39aab19-27a08f22] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map 2025-12-23T18:53:58.696792Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:43652) <- (400 MissingParameter, 127 bytes) 2025-12-23T18:53:58.696858Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:43652) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked null 2025-12-23T18:53:58.696892Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:43652) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: d6ca5c2b-f887a28c-a39aab19-27a08f22 Content-Type: application/x-amz-json-1.1 Content-Length: 127 2025-12-23T18:53:58.696997Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:43652) connection closed Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> BasicStatistics::PersistenceWithStorageFailuresAndReboots [GOOD] >> KqpScripting::ScriptExplainCreatedTable [GOOD] >> KqpScripting::ScriptExplain >> KqpYql::EvaluateFor [GOOD] |97.6%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD] >> KqpWorkloadServiceTables::TestLeaseUpdates >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge |97.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DstCreator::KeyColumnsSizeMismatch [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> DstCreator::EmptyReplicationConfig [GOOD] |97.6%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::PersistenceWithStorageFailuresAndReboots [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:52:24.142280Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:24.217152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:24.221626Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:529:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:24.221916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:24.221977Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:24.599501Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:24.715608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:24.715698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:24.749239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:24.842587Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0031eb/r3tmp/tmpd1gMoi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16342, node 1 2025-12-23T18:52:25.430346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:25.430724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:25.430776Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:25.430815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:25.431180Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:25.493035Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5447 2025-12-23T18:52:26.025783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:30.370328Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:30.370698Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme 
cache: ActorUnknown 2025-12-23T18:52:30.382911Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:30.382994Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2025-12-23T18:52:30.388486Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:30.388897Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:30.440481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:30.440573Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:30.440904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:30.440948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:30.469998Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:30.470166Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:52:30.473097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:30.473374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:30.666814Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:30.666981Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:30.668330Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:30.668416Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:30.669398Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:30.670275Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:30.671873Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:30.672010Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:30.672173Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:30.672273Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:30.672509Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:30.672770Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 
2025-12-23T18:52:30.672933Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:30.688712Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:30.689655Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:30.689972Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:30.850120Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:30.891779Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:30.891859Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:30.929775Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:30.931508Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:30.931673Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:30.931721Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:52:30.931760Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:30.931806Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:30.931848Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:30.931887Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:30.932641Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:30.941050Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2352:2408] 2025-12-23T18:52:30.946103Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:52:30.952385Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:30.952496Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2524:2451], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:30.957842Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2511:2448] Owner: [2:2510:2447]. Describe result: PathErrorUnknown 2025-12-23T18:52:30.957913Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2511:2448] Owner: [2:2510:2447]. Creating table 2025-12-23T18:52:30.958001Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2511:2448] Owner: [2:2510:2447]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:52:30.983250Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2593:2485] 2025-12-23T18:52:30.984025Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2593:2485], schemeshard id = 72075186224037897 2025-12-23T18:52:31.024636Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2650:2499], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:31.047490Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshar ... = 16, ReplyToActorId = [3:5857:3259], StatRequests.size() = 1 2025-12-23T18:53:43.467251Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:53:43.467338Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:43.536318Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2025-12-23T18:53:43.536617Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 3 2025-12-23T18:53:43.536869Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:43.536987Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:53:43.537114Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2025-12-23T18:53:43.569372Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 17 ], ReplyToActorId[ [3:5890:3271]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:43.569634Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 17 ] 2025-12-23T18:53:43.569677Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 17, ReplyToActorId = [3:5890:3271], StatRequests.size() = 1 row count: 5 (expected: 7) 2025-12-23T18:53:44.632301Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 18 ], ReplyToActorId[ [3:5923:3281]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:44.632611Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18 ] 2025-12-23T18:53:44.632658Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 18, ReplyToActorId = [3:5923:3281], StatRequests.size() = 1 2025-12-23T18:53:45.620398Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 19 ], ReplyToActorId[ [3:5954:3291]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:45.620736Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 19 ] 2025-12-23T18:53:45.620784Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 19, ReplyToActorId = [3:5954:3291], StatRequests.size() = 1 2025-12-23T18:53:46.733795Z node 2 :STATISTICS DEBUG: 
aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:46.756151Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [3:5993:3302]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:46.756489Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2025-12-23T18:53:46.756544Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [3:5993:3302], StatRequests.size() = 1 2025-12-23T18:53:47.833791Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [3:6029:3312]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:47.834114Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2025-12-23T18:53:47.834166Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [3:6029:3312], StatRequests.size() = 1 2025-12-23T18:53:48.930248Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2025-12-23T18:53:48.930419Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 4 2025-12-23T18:53:48.930572Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2025-12-23T18:53:48.930814Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2025-12-23T18:53:48.931070Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:48.953467Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [3:6067:3326]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:48.953776Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2025-12-23T18:53:48.953825Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [3:6067:3326], StatRequests.size() = 1 row count: 5 (expected: 7) 2025-12-23T18:53:50.045215Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [3:6102:3338]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:50.045463Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2025-12-23T18:53:50.045507Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [3:6102:3338], StatRequests.size() = 1 2025-12-23T18:53:51.020803Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [3:6135:3349]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:51.021112Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2025-12-23T18:53:51.021158Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [3:6135:3349], StatRequests.size() = 1 2025-12-23T18:53:52.189233Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:52.211506Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [3:6166:3359]], StatType[ 0 ], StatRequestsCount[ 1 ] 
2025-12-23T18:53:52.211786Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2025-12-23T18:53:52.211832Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:6166:3359], StatRequests.size() = 1 2025-12-23T18:53:53.273935Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:6201:3369]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:53.274189Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2025-12-23T18:53:53.274229Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:6201:3369], StatRequests.size() = 1 2025-12-23T18:53:54.402958Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2025-12-23T18:53:54.403336Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 5 2025-12-23T18:53:54.403766Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:53:54.403868Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:54.403983Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-12-23T18:53:54.426246Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:6236:3381]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:54.426560Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2025-12-23T18:53:54.426629Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:6236:3381], StatRequests.size() = 1 row count: 5 (expected: 7) 2025-12-23T18:53:55.501651Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:6275:3395]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:55.501914Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2025-12-23T18:53:55.501980Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:6275:3395], StatRequests.size() = 1 2025-12-23T18:53:56.491936Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6308:3407]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:56.492296Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-12-23T18:53:56.492348Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:6308:3407], StatRequests.size() = 1 2025-12-23T18:53:57.585455Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:57.607646Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6345:3418]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:57.607950Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-12-23T18:53:57.607988Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:6345:3418], 
StatRequests.size() = 1 2025-12-23T18:53:58.650255Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6379:3428]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:58.650530Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-12-23T18:53:58.650570Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [3:6379:3428], StatRequests.size() = 1 2025-12-23T18:53:59.631230Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:53:59.631330Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:53:59.631634Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2025-12-23T18:53:59.645766Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:53:59.772072Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2025-12-23T18:53:59.772628Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 6 2025-12-23T18:53:59.772940Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2025-12-23T18:53:59.773089Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2025-12-23T18:53:59.773163Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-12-23T18:53:59.798644Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [3:6415:3438]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:59.799036Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-12-23T18:53:59.799091Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [3:6415:3438], StatRequests.size() = 1 row count: 7 (expected: 7) >> DstCreator::ColumnTypeMismatch [GOOD] |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD] Test command err: Trying to start YDB, gRPC: 19579, MsgBus: 7125 2025-12-23T18:53:50.281068Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128578089942035:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:50.283023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005eb8/r3tmp/tmpM5lX8g/pdisk_1.dat 2025-12-23T18:53:50.490249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:50.495626Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:50.495754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:50.497859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:50.598988Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128578089942003:2081] 1766516030278920 != 1766516030278923 TServer::EnableGrpc on GrpcPort 19579, node 1 2025-12-23T18:53:50.614368Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:50.656600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:50.656634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:50.656652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:50.656772Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:50.773401Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7125 TClient is connected to server localhost:7125 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516030664 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:51.136394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:51.155515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:51.266381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:51.308146Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:51.406269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:51.463372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:53.056828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128590974845762:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.056961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.057254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128590974845772:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.057308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.390948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:53.417511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:53.440425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:53.464983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:53.491514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:53.519026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:53.564509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:53.599065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:53.656966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128590974846641:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.657080Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.657090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128590974846646:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.657304Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128590974846649:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.657351Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.660535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:53.670009Z node 1 :K ... e 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:55.832011Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:55.833656Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:55.847534Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:55.847549Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:55.847555Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:55.847613Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:55.900923Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:64745 TClient is connected to server localhost:64745 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516035872 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:56.227409Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:53:56.233742Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:53:56.246181Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:56.323824Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:56.492026Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:56.557839Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:56.729174Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:58.689986Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128612387762447:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:58.690067Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:58.690314Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128612387762456:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:58.690383Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:58.746012Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:58.774701Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:58.802130Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:58.825869Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:58.853108Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:58.879709Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:58.906758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:58.977747Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:59.044909Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128616682730628:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:59.044966Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:59.044977Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128616682730633:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:59.045108Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128616682730635:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:59.045150Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:59.048126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:59.056469Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128616682730636:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:53:59.118431Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128616682730688:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:54:00.722725Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128599502858729:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:54:00.722804Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> THealthCheckTest::SpecificServerless [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> DstCreator::WithAsyncIndex [GOOD] >> DstCreator::SamePartitionCount [GOOD] >> KqpStreamLookup::ReadTableWithIndexDuringSplit >> DstCreator::CannotFindColumn [GOOD] >> DstCreator::KeyColumnNameMismatch [GOOD] >> KqpStreamLookup::ReadTableDuringSplit >> KqpScripting::ExecuteYqlScriptPg [GOOD] >> KqpScripting::JoinIndexLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] Test command err: 2025-12-23T18:53:56.059977Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128606652835280:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:56.060031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00572f/r3tmp/tmpLHijmh/pdisk_1.dat 2025-12-23T18:53:56.344767Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:56.352655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:56.352778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:56.431442Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:56.432445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:56.585843Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16700 TServer::EnableGrpc on GrpcPort 64537, node 1 2025-12-23T18:53:56.791219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:56.791305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
(empty maybe) 2025-12-23T18:53:56.791321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:56.791386Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:57.068549Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:16700 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036488 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:57.227582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:57.243734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:53:57.254034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:57.369021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037279 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036488 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037279 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036488 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) 2025-12-23T18:53:57.406140Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:53:57.406177Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:53:57.407013Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:53:58.874287Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516037363, tx_id: 281474976710658 } } } 2025-12-23T18:53:58.874782Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:53:58.877768Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, 
request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-12-23T18:53:58.880029Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516037440 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 L ... pp:228: got bad distributable configuration TClient is connected to server localhost:11591 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039610 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:54:00.003043Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:54:00.011004Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:00.044138Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040051 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039610 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040051 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039610 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) 2025-12-23T18:54:00.077681Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:54:00.077709Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:54:00.078220Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:54:00.443143Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:01.946880Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516040079, tx_id: 281474976710658 } } } 2025-12-23T18:54:01.947115Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:54:01.948475Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-12-23T18:54:01.949545Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516040114 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2025-12-23T18:54:01.949787Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1 |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] >> THealthCheckTest::BasicNodeCheckRequest [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2025-12-23T18:53:56.077291Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128603995358374:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:56.078459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005707/r3tmp/tmpYswQ3Y/pdisk_1.dat 2025-12-23T18:53:56.319635Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:56.363954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:56.364523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:56.417870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:56.448814Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:56.452213Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128603995358336:2081] 1766516036074678 != 1766516036074681 2025-12-23T18:53:56.569804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:31823 TServer::EnableGrpc on GrpcPort 30818, node 1 2025-12-23T18:53:56.791104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:56.791128Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:56.791141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:56.791392Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:57.082898Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:31823 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036481 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:57.238440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:57.251812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:57.367377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037286 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036481 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037286 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036481 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) 2025-12-23T18:53:57.400584Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:53:57.400624Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:53:57.401149Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:53:59.221133Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516037363, tx_id: 281474976710658 } } } 2025-12-23T18:53:59.221583Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:53:59.223360Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, 
request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-12-23T18:53:59.225148Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516037433 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapsh ... maybe) 2025-12-23T18:54:00.010684Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:00.010869Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16190 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039862 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:54:00.299100Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:54:00.306613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:00.338952Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040345 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039862 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040345 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039862 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) 2025-12-23T18:54:00.370276Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:54:00.370300Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:54:00.370773Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:54:00.684261Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:02.214311Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516040373, tx_id: 281474976710658 } } } 2025-12-23T18:54:02.214587Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:54:02.216016Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-12-23T18:54:02.217051Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516040408 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 
Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2025-12-23T18:54:02.217255Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD] Test command err: 2025-12-23T18:53:56.100567Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128604370959273:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:56.100754Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005717/r3tmp/tmpR0Bwdm/pdisk_1.dat 2025-12-23T18:53:56.370875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:56.372127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:56.372214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:56.449624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:56.475451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:56.494847Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128604370959233:2081] 1766516036098332 != 1766516036098335 2025-12-23T18:53:56.642843Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5319 TServer::EnableGrpc on GrpcPort 64569, node 1 2025-12-23T18:53:56.791460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:56.791486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty 
maybe) 2025-12-23T18:53:56.791500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:56.791598Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:57.107856Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5319 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036558 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:57.235653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037286 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036558 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depr... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037286 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036558 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depr... (TRUNCATED) 2025-12-23T18:53:57.247442Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:53:57.247537Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:53:57.248029Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:53:58.962613Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-12-23T18:53:58.962674Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: 2025-12-23T18:53:59.497180Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128615893320971:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:59.497295Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005717/r3tmp/tmpRMnQjU/pdisk_1.dat 2025-12-23T18:53:59.509344Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:59.582630Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:59.612697Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:59.612802Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-12-23T18:53:59.614006Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27492 TServer::EnableGrpc on GrpcPort 61039, node 2 2025-12-23T18:53:59.772168Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:59.777970Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:59.778009Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:59.778019Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:59.778141Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27492 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039659 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:59.978440Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:59.985087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:00.067635Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040023 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039659 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040023 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039659 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) 2025-12-23T18:54:00.099733Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:54:00.099758Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:54:00.100261Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:54:00.501835Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:01.899651Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516040072, tx_id: 281474976710658 } } } 2025-12-23T18:54:01.899943Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:54:01.901554Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 
1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-12-23T18:54:01.903742Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516040135 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 
StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2025-12-23T18:54:01.904095Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key columns size mismatch: expected: 1, got: 2 |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2025-12-23T18:53:56.059716Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128603064275618:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:56.059779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056c7/r3tmp/tmpqlgr4b/pdisk_1.dat 2025-12-23T18:53:56.332503Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:56.342732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:56.342877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-12-23T18:53:56.381381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:56.453551Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:56.521018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13582 TServer::EnableGrpc on GrpcPort 4386, node 1 2025-12-23T18:53:56.792514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:56.792611Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:56.792625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:56.792690Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:57.070494Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13582 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036481 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:57.271175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:57.280504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:57.397267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037321 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036481 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037321 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036481 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) 2025-12-23T18:53:57.431346Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:53:57.431384Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:53:57.432206Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:53:59.030911Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516037370, tx_id: 281474976710658 } } } 2025-12-23T18:53:59.031319Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:53:59.033025Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, 
request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-12-23T18:53:59.035002Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516037468 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 M ... ROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12129 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039890 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:54:00.354711Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:54:00.362473Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:00.395417Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040401 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039890 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040401 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039890 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) 2025-12-23T18:54:00.429862Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:54:00.429886Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:54:00.430384Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:54:00.720040Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:02.165579Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516040429, tx_id: 281474976710658 } } } 2025-12-23T18:54:02.165914Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:54:02.167445Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-12-23T18:54:02.168298Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516040464 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2025-12-23T18:54:02.168560Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithAsyncIndex [GOOD] Test command err: 2025-12-23T18:53:56.086369Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128602916881838:2141];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:56.086508Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005712/r3tmp/tmpIl0dx6/pdisk_1.dat 2025-12-23T18:53:56.334975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:56.378238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:56.378348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:56.420082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:56.476049Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:56.476375Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128602916881730:2081] 1766516036077166 != 1766516036077169 2025-12-23T18:53:56.619269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10700 TServer::EnableGrpc on GrpcPort 7191, node 1 2025-12-23T18:53:56.791482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is 
empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:56.791507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:56.791513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:56.791606Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:57.091046Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10700 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036488 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:57.236443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:57.250508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766516037356 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037286 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036488 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 3... (TRUNCATED) 2025-12-23T18:53:57.392883Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:53:57.392917Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:53:57.393493Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:53:58.873282Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516037356, tx_id: 281474976710658 } } } 2025-12-23T18:53:58.873757Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:53:58.875884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:58.876787Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-12-23T18:53:58.876807Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-12-23T18:53:58.902996Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-12-23T18:53:58.903026Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 40] TClient::Ls request: /Root/Dir/Replicated TClient::Ls 
response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 40 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516038945 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "... (TRUNCATED) 2025-12-23T18:53:59.446674Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128619551178448:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:59.446801Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005712/r3tmp/tmp6Ly2kH/pdisk_1.dat 2025-12-23T18:53:59.459418Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:59.539789Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:59.541646Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587128619551178412:2081] 1766516039445672 != 1766516039445675 2025-12-23T18:53:59.584882Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:59.584967Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:59.589676Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:59.698299Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7988 TServer::EnableGrpc on GrpcPort 3381, node 2 2025-12-23T18:53:59.757418Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:59.757442Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:59.757462Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:59.757536Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7988 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039638 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:59.977202Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:59.983950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766516040240 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040023 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039638 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 3... 
(TRUNCATED) 2025-12-23T18:54:00.253119Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:54:00.253152Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:54:00.253729Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:54:00.451225Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:02.421281Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516040240, tx_id: 281474976710658 } } } 2025-12-23T18:54:02.421590Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:54:02.423317Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:02.424115Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-12-23T18:54:02.424137Z node 2 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-12-23T18:54:02.447185Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-12-23T18:54:02.447235Z node 2 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 41] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 41 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516042487 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 
Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::SamePartitionCount [GOOD] Test command err: 2025-12-23T18:53:56.133128Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128603951274616:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:56.133234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:53:56.184849Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005739/r3tmp/tmpbssIlB/pdisk_1.dat 2025-12-23T18:53:56.446594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:56.446705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:56.449515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:56.450869Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:56.552277Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:56.553061Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128603951274584:2081] 1766516036131548 != 1766516036131551 2025-12-23T18:53:56.709121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6562 TServer::EnableGrpc on GrpcPort 7184, node 1 2025-12-23T18:53:56.790815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:56.790869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:56.790961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:56.791038Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:57.142838Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6562 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036607 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:57.262154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:57.274889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:53:57.280368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516037377 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037314 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036607 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId:... (TRUNCATED) 2025-12-23T18:53:57.414788Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:53:57.414851Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:53:57.415538Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:53:59.160394Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516037377, tx_id: 281474976710659 } } } 2025-12-23T18:53:59.160743Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:53:59.162118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:59.162811Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-12-23T18:53:59.162840Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710660 2025-12-23T18:53:59.185159Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710660 TClient::Ls request: /Root/Replicated 2025-12-23T18:53:59.185191Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] TClient::Ls 
response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1766516039225 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "k... (TRUNCATED) 2025-12-23T18:53:59.753038Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128617804194672:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:59.753134Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005739/r3tmp/tmpBv3pyd/pdisk_1.dat 2025-12-23T18:53:59.767837Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:59.835603Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587128617804194646:2081] 1766516039752000 != 1766516039752003 2025-12-23T18:53:59.846673Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:59.880851Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:59.880942Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:59.884958Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:59.999124Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19464 TServer::EnableGrpc on GrpcPort 6955, node 2 2025-12-23T18:54:00.058024Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:00.058044Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:00.058052Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:00.058120Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19464 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516039932 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:54:00.289075Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:54:00.296155Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766516040380 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040338 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516039932 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 3... 
(TRUNCATED) 2025-12-23T18:54:00.347928Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:54:00.347957Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:54:00.348513Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:54:00.787176Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:02.554665Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516040380, tx_id: 281474976715658 } } } 2025-12-23T18:54:02.556082Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:54:02.557792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:02.558554Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-12-23T18:54:02.558576Z node 2 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-12-23T18:54:02.584359Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-12-23T18:54:02.584381Z node 2 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1766516040380 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 
NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... (TRUNCATED) TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1766516042627 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "k... (TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2025-12-23T18:53:56.205403Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128606551627220:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:56.215476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:53:56.233684Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00573a/r3tmp/tmpXxlZx9/pdisk_1.dat 2025-12-23T18:53:56.486564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:56.486662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:56.486881Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:56.511941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:56.563772Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:7061 TServer::EnableGrpc on GrpcPort 14037, node 1 2025-12-23T18:53:56.784694Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:56.796124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:56.796157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:56.796193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:56.796330Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:53:57.223969Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036614 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:57.256039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:57.264372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766516037370 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037307 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036614 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 3... (TRUNCATED) 2025-12-23T18:53:57.386557Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:53:57.386602Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:53:57.387316Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:53:59.327226Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516037370, tx_id: 281474976710658 } } } 2025-12-23T18:53:59.327663Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:53:59.329636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:59.330373Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-12-23T18:53:59.330400Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-12-23T18:53:59.351470Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-12-23T18:53:59.351499Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] TClient::Ls request: /Root/Replicated TClient::Ls 
response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516039393 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "k... (TRUNCATED) 2025-12-23T18:54:00.055817Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128623491410916:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:54:00.055890Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00573a/r3tmp/tmpybvhPY/pdisk_1.dat 2025-12-23T18:54:00.065662Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:00.153011Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587128623491410886:2081] 1766516040054851 != 1766516040054854 2025-12-23T18:54:00.170562Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:00.173864Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:00.173948Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:00.175098Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme s ... s: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13645 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516040233 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:54:00.605434Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:54:00.614068Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:00.647845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040653 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516040233 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040653 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516040233 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... 
(TRUNCATED) 2025-12-23T18:54:00.680519Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:54:00.680543Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:54:00.681076Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:54:01.062386Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:02.750402Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516040681, tx_id: 281474976710658 } } } 2025-12-23T18:54:02.750625Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:54:02.752124Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-12-23T18:54:02.753035Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516040716 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 
5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2025-12-23T18:54:02.753291Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2025-12-23T18:53:56.081349Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128605197368844:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:56.081435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00572e/r3tmp/tmpyKaOjI/pdisk_1.dat 2025-12-23T18:53:56.334105Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:56.356517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:56.356729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:56.359892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:56.430862Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:56.554839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8541 TServer::EnableGrpc on GrpcPort 1753, node 1 2025-12-23T18:53:56.793212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:56.793238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:56.793256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:56.793363Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:57.090371Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:8541 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036544 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:57.256427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:57.262547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:53:57.266593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1766516037356 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Ke... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516037307 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516036544 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 3... (TRUNCATED) 2025-12-23T18:53:57.378015Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:53:57.378064Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:53:57.378701Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:53:58.957707Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516037356, tx_id: 281474976710658 } } } 2025-12-23T18:53:58.958111Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:53:58.959967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:58.960700Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-12-23T18:53:58.960719Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-12-23T18:53:58.986290Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 TClient::Ls request: /Root/Replicated 2025-12-23T18:53:58.986316Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] TClient::Ls 
response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516039029 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "k... (TRUNCATED) 2025-12-23T18:53:59.475454Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128618425005677:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:59.475539Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00572e/r3tmp/tmpc9U73a/pdisk_1.dat 2025-12-23T18:53:59.487378Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:59.547908Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:59.549136Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587128618425005636:2081] 1766516039474455 != 1766516039474458 2025-12-23T18:53:59.583706Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:59.583807Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:59.592396Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14339 TServer::EnableGrpc on Grp ... 31Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14339 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039631 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:53:59.974794Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:59.982477Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:00.015557Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040023 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039631 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516040023 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516039631 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dst" PathId: 3... 
(TRUNCATED) 2025-12-23T18:54:00.050487Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-12-23T18:54:00.050512Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2025-12-23T18:54:00.050964Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-12-23T18:54:00.481470Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:02.793284Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1766516040051, tx_id: 281474976710658 } } } 2025-12-23T18:54:02.793500Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-12-23T18:54:02.795034Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-12-23T18:54:02.795930Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1766516040086 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 
5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2025-12-23T18:54:02.796154Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot find column: name: value |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> StatisticsSaveLoad::Delete >> StatisticsSaveLoad::Simple |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 11518, MsgBus: 22636 2025-12-23T18:53:51.322227Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128582647751523:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:51.322369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005eb6/r3tmp/tmpGtJm19/pdisk_1.dat 2025-12-23T18:53:51.521646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:51.521768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:51.545292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:51.572126Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:51.573435Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128582647751484:2081] 1766516031320664 != 1766516031320667 2025-12-23T18:53:51.573501Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 11518, node 1 2025-12-23T18:53:51.644057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:51.644086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:51.644098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:51.644195Z 
node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:51.773978Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22636 TClient is connected to server localhost:22636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516031616 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:52.100910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:52.111332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:52.205678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:52.330071Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:52.337838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:52.400183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
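The DstCreator trace earlier in this output ends with "Error: status# StatusSchemeError, reason# Cannot find column: name: value": the source table Src has columns {key, value} while the pre-existing destination /Root/Dst has {key, value2}. The following is only a minimal standalone C++ sketch of that kind of destination-compatibility check, built from the column sets shown in the describe results above; it is not the actual dst_creator.cpp logic, and all names in it are illustrative.

// Sketch: check that every source column exists in the destination with the
// same type, reporting the first mismatch in the style seen in the log.
#include <iostream>
#include <map>
#include <optional>
#include <string>
#include <vector>

struct TColumn {
    std::string Name;
    std::string Type;
};

// Returns an error message if a source column is missing from the destination
// (or has a different type); std::nullopt means the schemas are compatible.
std::optional<std::string> CheckDstCompatibility(
        const std::vector<TColumn>& src,
        const std::vector<TColumn>& dst)
{
    std::map<std::string, std::string> dstByName;
    for (const auto& c : dst) {
        dstByName[c.Name] = c.Type;
    }
    for (const auto& c : src) {
        auto it = dstByName.find(c.Name);
        if (it == dstByName.end()) {
            return "Cannot find column: name: " + c.Name;
        }
        if (it->second != c.Type) {
            return "Column type mismatch: name: " + c.Name;
        }
    }
    return std::nullopt;
}

int main() {
    // Column sets taken from the describe output in the log above.
    std::vector<TColumn> src{{"key", "Uint32"}, {"value", "Utf8"}};
    std::vector<TColumn> dst{{"key", "Uint32"}, {"value2", "Utf8"}};

    if (auto err = CheckDstCompatibility(src, dst)) {
        std::cout << "StatusSchemeError, reason# " << *err << "\n";
    } else {
        std::cout << "Dst is compatible\n";
    }
    return 0;
}

Run against the column sets from the log, the sketch prints "StatusSchemeError, reason# Cannot find column: name: value", matching the failure the unittest exercises.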
2025-12-23T18:53:53.931996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128591237687949:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.932152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.932448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128591237687959:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.932494Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.270465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.295644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.320405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.344566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.367106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.395304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.453434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.490413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.557724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128595532656126:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.557792Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.557861Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128595532656131:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.557907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128595532656133:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.557931Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.560992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:54.569419Z node 1 ... _info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:57.479512Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:57.486001Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587128609081313930:2081] 1766516037387546 != 1766516037387549 2025-12-23T18:53:57.498969Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25717, node 2 2025-12-23T18:53:57.555587Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:57.555609Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:57.555616Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:57.555702Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:57.639382Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2840 TClient is connected to server localhost:2840 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516037566 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:57.937611Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:53:57.944840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:57.994452Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:53:58.122983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:58.179472Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:58.409532Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:00.744252Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128621966217680:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.744341Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.744606Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128621966217690:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.744680Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.795102Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.818660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.845742Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.869372Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.894157Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.923707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.991759Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:01.034083Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:01.096472Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128626261185854:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:01.096554Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128626261185859:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:01.096566Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:01.096760Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128626261185862:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:01.096809Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:01.099918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:54:01.109506Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128626261185861:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:54:01.197019Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128626261185914:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:54:02.388810Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128609081313955:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:54:02.388872Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:52:44.253113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:44.253174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:44.253202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:44.253229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:44.253258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:44.253279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:44.253333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:44.253411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:44.254061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:44.254264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:44.311678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 
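The workload-service lines just above show a common pattern in these tests: the default resource pool is created concurrently, the creator schedules a retry after "Transaction ... completed, doublechecking", and a later attempt that hits "path exist" is accepted ("request accepts it"). The snippet below is only a self-contained C++ sketch of that idempotent create-with-retry pattern under those assumptions; the function names and status values are hypothetical and do not come from the ydb codebase.

// Sketch: bounded retry of a create operation where AlreadyExists counts as
// success, mirroring the retry/doublecheck behaviour visible in the log.
#include <iostream>

enum class EStatus { Success, AlreadyExists, Unavailable };

// Hypothetical stand-in for proposing the create operation; the first attempt
// races with another creator, the second finds the path already present.
EStatus ProposeCreateDefaultPool(int attempt) {
    return attempt == 0 ? EStatus::Unavailable : EStatus::AlreadyExists;
}

// Retries transient failures a bounded number of times and treats
// AlreadyExists as success.
bool EnsureDefaultPool(int maxAttempts = 3) {
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        switch (ProposeCreateDefaultPool(attempt)) {
            case EStatus::Success:
            case EStatus::AlreadyExists:   // "path exist, request accepts it"
                return true;
            case EStatus::Unavailable:     // scheduled retry, doublechecking
                std::cout << "Scheduled retry, attempt " << attempt + 1 << "\n";
                break;
        }
    }
    return false;
}

int main() {
    std::cout << (EnsureDefaultPool() ? "pool ready\n" : "gave up\n");
    return 0;
}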
2025-12-23T18:52:44.311726Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:44.315866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:44.316012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:44.316132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:44.318333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:44.318488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:44.319057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:44.319236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:44.320582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:44.320721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:44.321703Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:44.321749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:44.321832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:44.321864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:44.321910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:44.322008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:44.428873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.429668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.429751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.429820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.429869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.429907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.429958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.430029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.430079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.430117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.430158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.430246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.430292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.430339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:44.430391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
onditionalErase DoExecute: at schemeshard: 72075186233409546 2025-12-23T18:54:03.297147Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-12-23T18:54:03.297305Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435098, Sender [0:0:0], Recipient [3:767:2743]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-12-23T18:54:03.297341Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5455: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-12-23T18:54:03.382673Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:1268:3151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-12-23T18:54:03.382766Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-12-23T18:54:03.382894Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409552 outdated step 300 last cleanup 0 2025-12-23T18:54:03.382987Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:54:03.383024Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409552 2025-12-23T18:54:03.383058Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409552 has no attached operations 2025-12-23T18:54:03.383098Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409552 2025-12-23T18:54:03.383232Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:1268:3151]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T18:54:03.383362Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 32 2025-12-23T18:54:03.383741Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269553162, Sender [3:1268:3151], Recipient [3:1606:3462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 32 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 28 Memory: 119488 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 218 TableOwnerId: 72075186233409549 FollowerId: 0 2025-12-23T18:54:03.383796Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5284: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-12-23T18:54:03.383873Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0028 2025-12-23T18:54:03.384014Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:54:03.384061Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-12-23T18:54:03.394425Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:1272:3153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-12-23T18:54:03.394497Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-12-23T18:54:03.394579Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409553 outdated step 300 last cleanup 0 2025-12-23T18:54:03.394647Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:54:03.394701Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409553 2025-12-23T18:54:03.394764Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409553 has no attached operations 2025-12-23T18:54:03.394800Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409553 2025-12-23T18:54:03.395010Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:1272:3153]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T18:54:03.395173Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 32 2025-12-23T18:54:03.395513Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269553162, Sender [3:1272:3153], Recipient [3:1606:3462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 32 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 29 Memory: 119488 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 218 TableOwnerId: 72075186233409549 FollowerId: 0 2025-12-23T18:54:03.395562Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5284: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-12-23T18:54:03.395632Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 
followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0029 2025-12-23T18:54:03.395739Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:54:03.417874Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:1606:3462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:03.417938Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:03.418049Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [3:1606:3462], Recipient [3:1606:3462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:03.418095Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:03.428508Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:1606:3462]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-12-23T18:54:03.428593Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5307: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-12-23T18:54:03.428626Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7071: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-12-23T18:54:03.428698Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-12-23T18:54:03.428763Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-12-23T18:54:03.428886Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435098, Sender [0:0:0], Recipient [3:1606:3462]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-12-23T18:54:03.428928Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5455: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-12-23T18:54:03.429281Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269746180, Sender [3:2838:4654], Recipient [3:1606:3462]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-12-23T18:54:03.429344Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5454: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-12-23T18:54:03.460991Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:2841:4657], Recipient [3:1268:3151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:54:03.461072Z node 3 
:TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:54:03.461121Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409552, clientId# [3:2840:4656], serverId# [3:2841:4657], sessionId# [0:0:0] 2025-12-23T18:54:03.461282Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:2839:4655], Recipient [3:1268:3151]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 32 } 2025-12-23T18:54:03.461934Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:2844:4660], Recipient [3:1272:3153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:54:03.461970Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:54:03.462019Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409553, clientId# [3:2843:4659], serverId# [3:2844:4660], sessionId# [0:0:0] 2025-12-23T18:54:03.462130Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:2842:4658], Recipient [3:1272:3153]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 32 } |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup [GOOD] Test command err: Trying to start YDB, gRPC: 21076, MsgBus: 25629 2025-12-23T18:53:51.384175Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128584934451350:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:51.384337Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005eb5/r3tmp/tmpewEsoa/pdisk_1.dat 2025-12-23T18:53:51.582937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:51.651916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:51.652039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:51.654046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:51.694107Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:51.695475Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128584934451315:2081] 1766516031382916 != 1766516031382919 TServer::EnableGrpc on GrpcPort 21076, node 1 2025-12-23T18:53:51.746260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:51.746286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:51.746295Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:51.746435Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:51.789697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25629 TClient is connected to server localhost:25629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516031707 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:52.166889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:52.186945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:52.317147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:52.408404Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:52.445248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:52.501226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:53.928385Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128593524387785:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.928489Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.928781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128593524387795:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:53.928855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.291653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.315812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.340429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.365511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.387135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.415678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.444019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.485044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:54.547265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128597819355962:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.547324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.547336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128597819355967:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.547436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128597819355969:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.547500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:54.550711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:53:54.561145Z node 1 ... Notification cookie mismatch for subscription [2:7587128609569807602:2081] 1766516037640878 != 1766516037640881 2025-12-23T18:53:57.775266Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:57.775345Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:57.779437Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12404, node 2 2025-12-23T18:53:57.819672Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:57.819695Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:57.819703Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:57.819779Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:57.920716Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30218 TClient is connected to server localhost:30218 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516037825 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:58.217288Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:53:58.225312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:58.268196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:58.400287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:58.459263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:58.664312Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:00.621987Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128622454711354:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.622076Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.622329Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128622454711363:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.622374Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.682028Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.708332Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.736381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.761876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.787946Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.816804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.846097Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.888844Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:00.977507Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128622454712232:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.977605Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.977706Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128622454712237:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.977752Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128622454712239:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.977802Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:00.980906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:54:00.991065Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128622454712241:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:54:01.067791Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128626749679588:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:54:02.660480Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128609569807731:2155];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:54:02.660556Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues |97.7%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} >> THealthCheckTest::Issues100GroupsListing [GOOD] >> THealthCheckTest::Issues100VCardListing >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> THealthCheckTest::TestTabletIsDead >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> StatisticsSaveLoad::ForbidAccess >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2025-12-23T18:53:18.900426Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:18.900576Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:18.923143Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:18.923242Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:18.960920Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:18.961444Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=6235272857994451305, session=0, seqNo=0) 2025-12-23T18:53:18.962755Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:18.974890Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=6235272857994451305, session=1) 2025-12-23T18:53:18.975737Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=111, session=1, semaphore="Lock1" count=1) 2025-12-23T18:53:18.976833Z node 1 :KESUS_TABLET 
DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:53:18.976944Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:53:18.988861Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=111) 2025-12-23T18:53:18.989169Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:136:2161], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:53:19.000706Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:136:2161], cookie=222) 2025-12-23T18:53:19.001160Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:153:2175], cookie=2282500019929195021, name="Lock1") 2025-12-23T18:53:19.002281Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:153:2175], cookie=2282500019929195021) 2025-12-23T18:53:19.412485Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:19.412578Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:19.427642Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:19.427719Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:19.452705Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:19.453393Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:135:2159], cookie=3882592864392530178, session=0, seqNo=0) 2025-12-23T18:53:19.453549Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:19.465339Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:135:2159], cookie=3882592864392530178, session=1) 2025-12-23T18:53:19.465610Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2160], cookie=210961778055474749, session=0, seqNo=0) 2025-12-23T18:53:19.465747Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:53:19.477625Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2160], cookie=210961778055474749, session=2) 2025-12-23T18:53:19.478675Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:135:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-12-23T18:53:19.478856Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-12-23T18:53:19.478951Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-12-23T18:53:19.490771Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:135:2159], cookie=111) 2025-12-23T18:53:19.491162Z node 2 
:KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:135:2159], cookie=112, session=1, semaphore="Lock2" count=1) 2025-12-23T18:53:19.491294Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-12-23T18:53:19.491357Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-12-23T18:53:19.502971Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:135:2159], cookie=112) 2025-12-23T18:53:19.503233Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2160], cookie=222, session=2, semaphore="Lock1" count=1) 2025-12-23T18:53:19.503434Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2160], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-12-23T18:53:19.515030Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2160], cookie=222) 2025-12-23T18:53:19.515090Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2160], cookie=223) 2025-12-23T18:53:19.515384Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2160], cookie=333, session=2, semaphore="Lock1" count=1) 2025-12-23T18:53:19.515634Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2160], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2025-12-23T18:53:19.527143Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2160], cookie=333) 2025-12-23T18:53:19.527202Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2160], cookie=334) 2025-12-23T18:53:19.956234Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:19.968446Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:20.312619Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:20.324416Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:20.675001Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:20.687001Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:21.015728Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:21.027889Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:21.379160Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:21.391131Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:21.734712Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:21.746639Z node 2 
:KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:22.076917Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:22.089237Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:22.429363Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:22.440945Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:22.780103Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:22.791791Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:23.184763Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:23.196929Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:23.546901Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:23.558801Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:23.909032Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:23.921301Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:24.273100Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:24.285421Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:24.636806Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:24.649050Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:25.060242Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:25.072567Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:25.413657Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:25.425786Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:25.775822Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:25.789847Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:26.146483Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:26.158565Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:26.509998Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:26.522216Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:26.905883Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:26.918009Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:27.275003Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: 
[72057594037927937] ... 18:53:59.728396Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:00.136948Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:00.149414Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:00.514337Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:00.526876Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:00.890132Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:00.902408Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:01.265381Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:01.277174Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:01.639697Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:01.652016Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:02.046842Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:02.059015Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:02.420486Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:02.432279Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:02.783914Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:02.795976Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:03.158662Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:03.170804Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:03.545273Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:03.557695Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:03.921233Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:03.933296Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:04.296439Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:04.308821Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:04.662334Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:04.674671Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:05.040028Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:05.052285Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:05.404968Z node 4 
:KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:05.416896Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:05.872923Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:30: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2025-12-23T18:54:05.873031Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-12-23T18:54:05.885351Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:71: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2025-12-23T18:54:05.906710Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:596:2533], cookie=3747682974984095752) 2025-12-23T18:54:05.906851Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:596:2533], cookie=3747682974984095752) 2025-12-23T18:54:05.907277Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:599:2536], cookie=14713060886124134309) 2025-12-23T18:54:05.907327Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:599:2536], cookie=14713060886124134309) 2025-12-23T18:54:05.907650Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:602:2539], cookie=14438883849516696909, name="Lock1") 2025-12-23T18:54:05.907719Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:602:2539], cookie=14438883849516696909) 2025-12-23T18:54:05.908054Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:605:2542], cookie=11983225919339049853, name="Lock1") 2025-12-23T18:54:05.908105Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:605:2542], cookie=11983225919339049853) 2025-12-23T18:54:06.276865Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:54:06.276984Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:54:06.297655Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:54:06.297780Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:54:06.332338Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:54:06.332856Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=12652371987976743847, session=0, seqNo=0) 2025-12-23T18:54:06.333005Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:54:06.345034Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=12652371987976743847, session=1) 2025-12-23T18:54:06.345342Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=1754135007314804292, session=0, seqNo=0) 2025-12-23T18:54:06.345473Z node 5 :KESUS_TABLET DEBUG: 
tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-12-23T18:54:06.357358Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=1754135007314804292, session=2) 2025-12-23T18:54:06.357646Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2161], cookie=11332618358111529149, session=0, seqNo=0) 2025-12-23T18:54:06.357770Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2025-12-23T18:54:06.369588Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2161], cookie=11332618358111529149, session=3) 2025-12-23T18:54:06.369990Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:150:2172], cookie=6691423270222263609, name="Sem1", limit=3) 2025-12-23T18:54:06.370101Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-12-23T18:54:06.381731Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:150:2172], cookie=6691423270222263609) 2025-12-23T18:54:06.381968Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=111, session=1, semaphore="Sem1" count=2) 2025-12-23T18:54:06.382106Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-12-23T18:54:06.382321Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=222, session=2, semaphore="Sem1" count=2) 2025-12-23T18:54:06.382450Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2161], cookie=333, session=3, semaphore="Sem1" count=1) 2025-12-23T18:54:06.394436Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=111) 2025-12-23T18:54:06.394523Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=222) 2025-12-23T18:54:06.394559Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2161], cookie=333) 2025-12-23T18:54:06.395151Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:159:2181], cookie=12116983967171834725, name="Sem1") 2025-12-23T18:54:06.395245Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:159:2181], cookie=12116983967171834725) 2025-12-23T18:54:06.395672Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2184], cookie=1836074338289678357, name="Sem1") 2025-12-23T18:54:06.395746Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2184], cookie=1836074338289678357) 2025-12-23T18:54:06.395991Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2161], cookie=444, name="Sem1") 
2025-12-23T18:54:06.396089Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-12-23T18:54:06.396159Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-12-23T18:54:06.396222Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-12-23T18:54:06.408238Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2161], cookie=444) 2025-12-23T18:54:06.408847Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:167:2189], cookie=16896288825891328397, name="Sem1") 2025-12-23T18:54:06.408942Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:167:2189], cookie=16896288825891328397) 2025-12-23T18:54:06.409376Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:170:2192], cookie=6671312544313865821, name="Sem1") 2025-12-23T18:54:06.409455Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:170:2192], cookie=6671312544313865821) |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> KqpScripting::ScriptExplain [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold [GOOD] >> KqpWorkloadService::TestCpuLoadThresholdRefresh >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus >> THealthCheckTest::StaticGroupIssue [GOOD] >> THealthCheckTest::StorageLimit95 >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes >> THealthCheckTest::BridgeGroupNoIssues [GOOD] >> THealthCheckTest::BridgeTwoGroups >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey >> TCmsTest::ManualRequestApprovalLockingAllNodes >> TCmsTest::RequestRestartServicesOk >> TCmsTest::TestOutdatedState >> TClusterInfoTest::DeviceId [GOOD] >> TClusterInfoTest::FillInfo [GOOD] >> TCmsTenatsTest::CollectInfo >> TCmsTest::Notifications >> TCmsTest::StateRequestUnknownNode >> TCmsTest::RestartNodeInDownState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain [GOOD] Test command err: Trying to start YDB, gRPC: 18167, MsgBus: 19961 2025-12-23T18:53:54.542637Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128595215539498:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:54.544970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005e9e/r3tmp/tmp7y8qFT/pdisk_1.dat 2025-12-23T18:53:54.731838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:54.731939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:54.734082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:54.798312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:54.806589Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:54.807601Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128595215539462:2081] 1766516034540585 != 1766516034540588 TServer::EnableGrpc on GrpcPort 18167, node 1 2025-12-23T18:53:54.830679Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:53:54.830715Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:53:54.856389Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:54.856421Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:54.856441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:54.856558Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19961 2025-12-23T18:53:55.049874Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19961 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516034899 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:55.213131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:55.223092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:55.337159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:55.480525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:55.531746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:55.610958Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:53:57.206524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128608100443220:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:57.206710Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:57.207080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128608100443230:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:57.207126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:57.498335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:57.527249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:57.555216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:57.583631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:57.612095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:57.647606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:57.708702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:57.748852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:57.818442Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128608100444098:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:57.818550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:57.818637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128608100444103:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:57.818794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128608100444105:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:57.818854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:57.822219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateRes ... "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516041787 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:54:02.117177Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:54:02.125208Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:02.183141Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:02.289699Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:02.334310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:02.621960Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:04.240504Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128639330577935:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:04.240584Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:04.240896Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128639330577945:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:04.240972Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:04.297604Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:04.321940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:04.344753Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:04.368219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:04.392141Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:04.454374Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:04.484823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:04.520788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:04.585230Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128639330578810:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:04.585300Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128639330578815:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:04.585309Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:04.585567Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128639330578817:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:04.585632Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:04.588231Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:54:04.598061Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128639330578818:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:54:04.683850Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128639330578872:3760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:54:06.365153Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587128647920513784:2534], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:168: Error: At function: DataQueryBlocks
:1:185: Error: At function: TKiDataQueryBlock
:1:208: Error: At function: KiEffects
:1:219: Error: At function: KiWriteTable!
:1:219: Error: Cannot find table 'db.[/Root/ScriptingTest]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:54:06.365816Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=MjUyMjc2MjktZmFkYjU3OGQtOWRjZmUxNjUtZTM0ZWNiZg==, ActorId: [2:7587128647920513782:2533], ActorState: ExecuteState, TraceId: 01kd68zegd6yyae8h3jjpx51sm, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 168 } message: "At function: DataQueryBlocks" end_position { row: 1 column: 168 } severity: 1 issues { position { row: 1 column: 185 } message: "At function: TKiDataQueryBlock" end_position { row: 1 column: 185 } severity: 1 issues { position { row: 1 column: 208 } message: "At function: KiEffects" end_position { row: 1 column: 208 } severity: 1 issues { position { row: 1 column: 219 } message: "At function: KiWriteTable!" end_position { row: 1 column: 219 } severity: 1 issues { position { row: 1 column: 219 } message: "Cannot find table \'db.[/Root/ScriptingTest]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 219 } issue_code: 2003 severity: 1 } } } } } }, remove tx with tx_id: 2025-12-23T18:54:06.618164Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128626445674230:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:54:06.618259Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TCmsTenatsTest::TestTenantLimit >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey >> TKesusTest::TestSessionStealingDifferentKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2025-12-23T18:53:18.900424Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:53:18.900553Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:53:18.921754Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:53:18.921911Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:53:18.960804Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:53:18.961322Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:136:2161], cookie=4510039068972821898, session=0, seqNo=0) 2025-12-23T18:53:18.962788Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:53:18.974675Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:136:2161], cookie=4510039068972821898, session=1) 2025-12-23T18:53:18.975431Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:136:2161], 
cookie=5984415525107411282 2025-12-23T18:53:18.975802Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:150:2172], cookie=12469345387620603921) 2025-12-23T18:53:18.975884Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:150:2172], cookie=12469345387620603921) 2025-12-23T18:53:19.390945Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:19.403201Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:19.754906Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:19.767379Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:20.113084Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:20.125549Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:20.477588Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:20.489988Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:20.862145Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:20.874355Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:21.225326Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:21.237334Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:21.577752Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:21.589979Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:21.940685Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:21.953140Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:22.302947Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:22.315120Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:22.696145Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:22.707748Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:23.077576Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:23.089352Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:23.448204Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:23.459747Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:23.818656Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:23.830564Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
2025-12-23T18:53:24.190611Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:24.202501Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:24.595948Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:24.608088Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:24.979431Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:24.991721Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:25.352159Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:25.364014Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:25.731325Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:25.743753Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:26.108695Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:26.120626Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:26.502908Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:26.514731Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:26.874944Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:26.886865Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:27.247428Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:27.259630Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:27.619902Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:27.631733Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:27.991331Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:28.003217Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:28.373965Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:28.386006Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:28.756835Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:28.768478Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:29.130382Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:29.142704Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:29.504503Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:29.516847Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:29.878378Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:29.890555Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:30.295423Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:30.307919Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:30.670175Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:30.682248Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:31.055395Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:31.067738Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:31.430914Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:31.443366Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:31.795140Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:31.807066Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:32.178787Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:32.191108Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:32.552535Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:32.564794Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:32.926529Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:32.938969Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:33.300946Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:33.313230Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:33.675111Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:33.687131Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:34.080523Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:34.092880Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:34.454300Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:34.466761Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:34.839831Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:34.852158Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:35.213691Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] 
TTxSelfCheck::Execute 2025-12-23T18:53:35.225879Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck:: ... : tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:55.275641Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:55.638597Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:55.651117Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:55.991764Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:56.003853Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:56.356628Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:56.378560Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:56.743259Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:56.755185Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:57.129204Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:57.141167Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:57.520028Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:57.532979Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:57.887022Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:57.898933Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:58.249708Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:58.261685Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:58.612287Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:58.624263Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:59.016687Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:59.028961Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:59.389245Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:59.401256Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:53:59.751799Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:53:59.767537Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:00.016476Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:00.028918Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:00.383816Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: 
[72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:00.396148Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:00.769997Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:00.782544Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:01.135838Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:01.148140Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:01.512534Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:01.524984Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:01.888503Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:01.900774Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:02.262515Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:02.274271Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:02.635452Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:02.647299Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:02.997398Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:03.009161Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:03.371037Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:03.382956Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:03.743779Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:03.755502Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:04.117564Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:04.129671Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:04.606607Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:04.618597Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:04.990426Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:05.002249Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:05.374543Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:05.386637Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:05.768428Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:05.780527Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
2025-12-23T18:54:06.141667Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:06.153472Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:06.524536Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:06.536568Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:06.907760Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:06.919688Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:07.290760Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:07.302954Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:07.684659Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:07.696741Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:08.068486Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-12-23T18:54:08.080283Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-12-23T18:54:08.422041Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-12-23T18:54:08.422116Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-12-23T18:54:08.434072Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-12-23T18:54:08.444821Z node 2 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:643:2568], cookie=14300906141999155880) 2025-12-23T18:54:08.444918Z node 2 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:643:2568], cookie=14300906141999155880) 2025-12-23T18:54:08.859996Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:54:08.860068Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:54:08.872274Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:54:08.872626Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:54:08.907003Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:54:08.907734Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=12345, session=0, seqNo=0) 2025-12-23T18:54:08.907878Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:54:08.919757Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=12345, session=1) 2025-12-23T18:54:08.920303Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:144:2166], cookie=23456, session=1, seqNo=0) 2025-12-23T18:54:08.932094Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: 
[72057594037927937] TTxSessionAttach::Complete (sender=[3:144:2166], cookie=23456, session=1) 2025-12-23T18:54:09.264498Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-12-23T18:54:09.264630Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-12-23T18:54:09.284137Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-12-23T18:54:09.284282Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-12-23T18:54:09.310012Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-12-23T18:54:09.311066Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:134:2159], cookie=12345, session=0, seqNo=0) 2025-12-23T18:54:09.311249Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-12-23T18:54:09.334390Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:134:2159], cookie=12345, session=1) 2025-12-23T18:54:09.335331Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:141:2164], cookie=23456, session=1, seqNo=0) 2025-12-23T18:54:09.347717Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:141:2164], cookie=23456, session=1) |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TCmsTest::ManagePermissions |97.7%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::RequestShutdownHost >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::StateStorageNodesFromOneRing >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TMaintenanceApiTest::LastRefreshTime >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD] >> THealthCheckTest::BridgeGroupDegradedInBothPiles >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesMultipleNodes >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestSetResetMarkers >> TCmsTest::Notifications [GOOD] >> TCmsTest::PermissionDuration >> TCmsTenatsTest::TestTenantLimit [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy >> TDowntimeTest::AddDowntime [GOOD] >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::ManualRequestApprovalLockingAllNodes [GOOD] >> TCmsTest::ManualRequestApprovalWithPartialAlreadyApproved >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup >> THealthCheckTest::Issues100VCardListing [GOOD] >> THealthCheckTest::Issues100GroupsMerging >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues >> TCmsTest::StateStorageNodesFromOneRing [GOOD] >> TCmsTest::StateStorageAvailabilityMode >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> THealthCheckTest::ShardsLimit999 >> TMaintenanceApiTest::LastRefreshTime [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting >> TCmsTest::RequestRestartServicesMultipleNodes [GOOD] >> TCmsTest::RequestRestartServicesNoUser >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> TCmsTest::RequestRestartServicesReject >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::SamePriorityRequest >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy >> TCmsTest::ManualRequestApprovalWithPartialAlreadyApproved [GOOD] >> TCmsTest::ManualRequestApprovalAlreadyLockedNode >> THiveTest::TestDrain >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> THeavyPerfTest::TTestLoadEverything >> THiveImplTest::BootQueueSpeed >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues >> THiveTest::TestCreateTablet >> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD] >> KqpWorkloadService::TestHandlerActorCleanup >> THiveTest::TestDrainWithMaxTabletsScheduled >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageLimit87 >> THiveTest::TestHiveBalancer >> THiveTest::TestServerlessMigration >> TargetTrackingScaleRecommenderPolicy::ScaleOut [GOOD] >> 
TargetTrackingScaleRecommenderPolicy::ScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::SpikeResistance [GOOD] >> TargetTrackingScaleRecommenderPolicy::NearTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::AtTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::Fluctuations [GOOD] >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen [GOOD] >> TargetTrackingScaleRecommenderPolicy::Idle [GOOD] >> TScaleRecommenderTest::BasicTest >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> THiveTest::TestNoMigrationToSelf >> THiveTest::TestLocalDisconnect >> THealthCheckTest::TestTabletIsDead [GOOD] >> THealthCheckTest::TestStoppedTabletIsNotDead >> KqpStreamLookup::ReadTableDuringSplit [GOOD] >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestRestartServices >> THealthCheckTest::TestNoSchemeShardResponse [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead >> THealthCheckTest::BridgeTwoGroups [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific >> TCmsTest::RequestRestartServicesNoUser [GOOD] >> TCmsTest::RequestRestartServicesDryRun >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> TCmsTest::StateStorageLockedNodes >> TCmsTest::RacyStartCollecting [GOOD] >> TCmsTest::PriorityRange >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreate100Tablets >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestProcessingQueue >> TCmsTest::SamePriorityRequest [GOOD] >> TCmsTest::SamePriorityRequest2 >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestLocalReplacement ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:06.597412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:06.732209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:06.742990Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:06.743364Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:06.743443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:07.051750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:07.051893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:07.131767Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516043527363 != 1766516043527367 2025-12-23T18:54:07.148159Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:07.194989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:07.282390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0050e1/r3tmp/tmplyJzjK/pdisk_1.dat 2025-12-23T18:54:07.553263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:54:07.567605Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:07.670203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:07.966716Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:952:2810], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:07.966911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:963:2815], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:07.967339Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:07.969219Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:968:2820], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:07.969629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:07.976000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:54:08.100623Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:54:08.170249Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1024:2857] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp/unittest >> THiveTest::TestNoMigrationToSelf [GOOD] >> THiveTest::TestReCreateTablet >> TCmsTest::ManualRequestApprovalAlreadyLockedNode [GOOD] >> TCmsTest::Mirror3dcPermissions >> TScaleRecommenderTest::BasicTest [GOOD] >> TScaleRecommenderTest::RollingRestart >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] >> THiveTest::TestServerlessMigration [GOOD] >> THiveTest::TestUpdateChannelValues >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError >> StatisticsSaveLoad::Simple [GOOD] >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestHiveRestart >> StatisticsSaveLoad::Delete [GOOD] >> TCmsTest::PriorityRange [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] >> TMaintenanceApiTest::ForceAvailabilityMode >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestStorageBalancer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:06.305619Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:06.442247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:06.466684Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:06.467215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:06.467307Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:06.891396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:06.891545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:06.977083Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516043527334 != 1766516043527338 2025-12-23T18:54:06.997399Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:07.045623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:07.160637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0050e3/r3tmp/tmpENfMP1/pdisk_1.dat 2025-12-23T18:54:07.472816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:54:07.488186Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:07.606518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:07.966720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:999:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:07.966895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1009:2852], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:07.967032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:07.969000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1015:2857], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:07.969220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:07.975847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:54:08.098532Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1013:2855], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:54:08.169681Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1072:2895] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp/unittest >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesRejectSecond >> TScaleRecommenderTest::RollingRestart [GOOD] >> TScaleRecommenderTest::RollingRestartNoLastRecommendation >> TCmsTest::TestProcessingQueue [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesDryRun [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityRange [GOOD] Test command err: 2025-12-23T18:54:14.301287Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 17:17 2025-12-23T18:54:14.301340Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 18:18 2025-12-23T18:54:14.301357Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 19:19 2025-12-23T18:54:14.301372Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 20:20 2025-12-23T18:54:14.301385Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 21:21 2025-12-23T18:54:14.301400Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 22:22 2025-12-23T18:54:14.301429Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 23:23 2025-12-23T18:54:14.301444Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 24:24 2025-12-23T18:54:14.306525Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 17:17 2025-12-23T18:54:14.306573Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 18:18 2025-12-23T18:54:14.306589Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 19:19 2025-12-23T18:54:14.306603Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 20:20 2025-12-23T18:54:14.306630Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 21:21 2025-12-23T18:54:14.306646Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 22:22 2025-12-23T18:54:14.306660Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 23:23 2025-12-23T18:54:14.306674Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 24:24 2025-12-23T18:54:14.334009Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 17:17 2025-12-23T18:54:14.334076Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 18:18 2025-12-23T18:54:14.334101Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 19:19 2025-12-23T18:54:14.334118Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 20:20 2025-12-23T18:54:14.334131Z node 17 :CMS ERROR: 
cluster_info.cpp:490: Cannot update state for unknown PDisk 21:21 2025-12-23T18:54:14.334146Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 22:22 2025-12-23T18:54:14.334160Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 23:23 2025-12-23T18:54:14.334173Z node 17 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 24:24 |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList >> TCmsTest::StateStorageLockedNodes [GOOD] >> TCmsTest::SamePriorityRequest2 [GOOD] |97.7%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:07.653008Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:07.785149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:07.793914Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:07.794256Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:07.794371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:08.287070Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:08.395750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:08.395888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:08.431094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:08.557203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003f99/r3tmp/tmp14Wl6X/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4888, node 1 2025-12-23T18:54:09.559587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:54:09.560159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:09.560235Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:09.560263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:09.560646Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:09.624282Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:8278 2025-12-23T18:54:10.145605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:54:12.848702Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:12.854083Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:54:12.857419Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:12.881557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:12.881650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:12.919582Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:54:12.921050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:13.093931Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.094478Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.095163Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.095779Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.096013Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.096118Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.096204Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.096368Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.096455Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.261209Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:54:13.295717Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:13.295838Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:13.309485Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:13.517331Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:13.558529Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:54:13.558627Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:54:13.590218Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:54:13.591682Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:54:13.591884Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:54:13.591935Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:54:13.591990Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:54:13.592042Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:54:13.592094Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:54:13.592142Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:54:13.593011Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:54:13.710564Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:54:13.710667Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:54:13.722815Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:54:13.723972Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:54:13.834006Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:54:13.834357Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:54:13.836143Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:54:13.836182Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:54:13.836245Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:54:13.838637Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:54:13.841946Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:13.847859Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:54:13.847979Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:54:13.861336Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:54:13.991661Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:54:14.419853Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:54:14.527538Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:54:14.527614Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Column diff is empty, finishing 2025-12-23T18:54:15.141952Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:15.144464Z node 1 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [1:2798:3385] Owner: [1:2797:3384]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:54:15.144534Z node 1 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [1:2798:3385] Owner: [1:2797:3384]. Column diff is empty, finishing 2025-12-23T18:54:15.144953Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2807:3388], ActorId: [1:2808:3389], Starting query actor #1 [1:2809:3390] 2025-12-23T18:54:15.145019Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2808:3389], ActorId: [1:2809:3390], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:54:15.156029Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2808:3389], ActorId: [1:2809:3390], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=YzQzYzQyZjUtMjE2ZTJmN2UtZGU5YjQwYjctODA2ZDVjOWE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:54:15.671757Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2829:3404]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:54:15.671967Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:54:15.672051Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:2831:3406] 2025-12-23T18:54:15.672117Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:2831:3406] 2025-12-23T18:54:15.672765Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2832:3007] 2025-12-23T18:54:15.673057Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:2831:3406], server id = [2:2832:3007], tablet id = 72075186224037894, status = OK 2025-12-23T18:54:15.673265Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = 
[2:2832:3007], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:54:15.673350Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-12-23T18:54:15.673559Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-12-23T18:54:15.673632Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:2829:3404], StatRequests.size() = 1 2025-12-23T18:54:15.924360Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:54:16.039804Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2808:3389], ActorId: [1:2809:3390], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YzQzYzQyZjUtMjE2ZTJmN2UtZGU5YjQwYjctODA2ZDVjOWE=, TxId: 2025-12-23T18:54:16.039897Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2808:3389], ActorId: [1:2809:3390], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=YzQzYzQyZjUtMjE2ZTJmN2UtZGU5YjQwYjctODA2ZDVjOWE=, TxId: 2025-12-23T18:54:16.040324Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2807:3388], ActorId: [1:2808:3389], Got response [1:2809:3390] SUCCESS 2025-12-23T18:54:16.041151Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2849:3411], ActorId: [1:2850:3412], Starting query actor #1 [1:2851:3413] 2025-12-23T18:54:16.041219Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2850:3412], ActorId: [1:2851:3413], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:54:16.044106Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2850:3412], ActorId: [1:2851:3413], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=ZDg0YzNjNzQtM2MyOGZlOTUtNGNmYTU5MjctOTFjZDdiMmU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-12-23T18:54:16.077493Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2860:3422]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:54:16.077690Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:54:16.077729Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:2860:3422], StatRequests.size() = 1 2025-12-23T18:54:16.232442Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2850:3412], ActorId: [1:2851:3413], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZDg0YzNjNzQtM2MyOGZlOTUtNGNmYTU5MjctOTFjZDdiMmU=, TxId: 01kd68zr4999mmytfn7jq97kqd 2025-12-23T18:54:16.233683Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2850:3412], ActorId: [1:2851:3413], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZDg0YzNjNzQtM2MyOGZlOTUtNGNmYTU5MjctOTFjZDdiMmU=, TxId: 01kd68zr4999mmytfn7jq97kqd 2025-12-23T18:54:16.234030Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2849:3411], ActorId: [1:2850:3412], Got 
response [1:2851:3413] SUCCESS 2025-12-23T18:54:16.236078Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2882:3432], ActorId: [1:2883:3433], Starting query actor #1 [1:2884:3434] 2025-12-23T18:54:16.236145Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2883:3433], ActorId: [1:2884:3434], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:54:16.239148Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2883:3433], ActorId: [1:2884:3434], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=NTYzMTJlNGItMTI0OWVlYzAtYzI2NzcwMDctNmU0MGIzODM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-12-23T18:54:16.260077Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2883:3433], ActorId: [1:2884:3434], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=NTYzMTJlNGItMTI0OWVlYzAtYzI2NzcwMDctNmU0MGIzODM=, TxId: 01kd68zr5g4279ze6v1ptmcc6q 2025-12-23T18:54:16.260192Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2883:3433], ActorId: [1:2884:3434], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=NTYzMTJlNGItMTI0OWVlYzAtYzI2NzcwMDctNmU0MGIzODM=, TxId: 01kd68zr5g4279ze6v1ptmcc6q 2025-12-23T18:54:16.260464Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2882:3432], ActorId: [1:2883:3433], Got response [1:2884:3434] SUCCESS |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:07.673251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:07.782011Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:07.793486Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:07.793837Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:07.793916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:08.274809Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:08.395862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:08.395998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:08.430644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:08.512767Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fae/r3tmp/tmptSt4QF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32266, node 1 2025-12-23T18:54:09.559545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:54:09.560157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:09.560202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:09.560226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:09.560676Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:09.624771Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21644 2025-12-23T18:54:10.144599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:54:12.669232Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:12.675281Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:54:12.679018Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:12.709823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:12.709939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:12.749485Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:54:12.751295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:12.905475Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:12.908389Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:12.908863Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:12.909612Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:12.909919Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:12.910111Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:12.910329Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:12.910573Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:12.910849Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.058956Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:54:13.114393Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:13.114515Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:13.127947Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:13.293421Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:13.379858Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:54:13.379970Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:54:13.409150Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:54:13.411983Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:54:13.412238Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:54:13.412313Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:54:13.412372Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:54:13.412427Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:54:13.412482Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:54:13.412534Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:54:13.413587Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:54:13.536451Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:54:13.536552Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:54:13.548790Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:54:13.551091Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:54:13.666254Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:54:13.666694Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:54:13.670450Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:54:13.670530Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:54:13.670645Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:54:13.675579Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:54:13.681202Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:13.689224Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:54:13.689369Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:54:13.702593Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:54:13.860589Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:54:14.257954Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:54:14.355877Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:54:14.355957Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Column diff is empty, finishing 2025-12-23T18:54:15.022097Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:15.025187Z node 1 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [1:2793:3383] Owner: [1:2792:3382]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:54:15.025272Z node 1 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [1:2793:3383] Owner: [1:2792:3382]. Column diff is empty, finishing 2025-12-23T18:54:15.027716Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2802:3386], ActorId: [1:2803:3387], Starting query actor #1 [1:2804:3388] 2025-12-23T18:54:15.027829Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2803:3387], ActorId: [1:2804:3388], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:54:15.065361Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2803:3387], ActorId: [1:2804:3388], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=ZTZkOTlhMWQtYjk3NDQxNTMtNzk5ZTZkMzUtOTYwOTFhOGU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:54:15.639820Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2824:3402]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:54:15.640014Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:54:15.641327Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:2826:3404] 2025-12-23T18:54:15.641454Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:2826:3404] 2025-12-23T18:54:15.641986Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2827:3004] 2025-12-23T18:54:15.642220Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:2826:3404], server id = [2:2827:3004], tablet id = 72075186224037894, status = OK 2025-12-23T18:54:15.642366Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = 
[2:2827:3004], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:54:15.644654Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-12-23T18:54:15.644950Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-12-23T18:54:15.645903Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:2824:3402], StatRequests.size() = 1 2025-12-23T18:54:15.924390Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:54:16.039817Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2803:3387], ActorId: [1:2804:3388], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZTZkOTlhMWQtYjk3NDQxNTMtNzk5ZTZkMzUtOTYwOTFhOGU=, TxId: 2025-12-23T18:54:16.039907Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2803:3387], ActorId: [1:2804:3388], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZTZkOTlhMWQtYjk3NDQxNTMtNzk5ZTZkMzUtOTYwOTFhOGU=, TxId: 2025-12-23T18:54:16.040306Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2802:3386], ActorId: [1:2803:3387], Got response [1:2804:3388] SUCCESS 2025-12-23T18:54:16.041280Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2844:3409], ActorId: [1:2845:3410], Starting query actor #1 [1:2846:3411] 2025-12-23T18:54:16.041377Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2845:3410], ActorId: [1:2846:3411], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:54:16.044634Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2845:3410], ActorId: [1:2846:3411], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=OGRkNmIzM2UtZmIyZTQ4MTctNzBiNzIzMzAtMmFkZjE0YzY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-12-23T18:54:16.074142Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2855:3420]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:54:16.074410Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:54:16.074469Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:2855:3420], StatRequests.size() = 1 2025-12-23T18:54:16.249845Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2845:3410], ActorId: [1:2846:3411], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=OGRkNmIzM2UtZmIyZTQ4MTctNzBiNzIzMzAtMmFkZjE0YzY=, TxId: 2025-12-23T18:54:16.249938Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2845:3410], ActorId: [1:2846:3411], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=OGRkNmIzM2UtZmIyZTQ4MTctNzBiNzIzMzAtMmFkZjE0YzY=, TxId: 2025-12-23T18:54:16.250390Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2844:3409], ActorId: [1:2845:3410], Got response [1:2846:3411] SUCCESS 2025-12-23T18:54:16.251530Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2877:3424], ActorId: [1:2878:3425], Starting 
query actor #1 [1:2879:3426] 2025-12-23T18:54:16.251601Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2878:3425], ActorId: [1:2879:3426], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:54:16.254787Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2878:3425], ActorId: [1:2879:3426], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=ZDgxNTA2MTUtOTAxZWVlMS0xOTMzZmIzZS0zNGZjNzYyZQ==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-12-23T18:54:16.285658Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2888:3435]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:54:16.285924Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:54:16.285983Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:2888:3435], StatRequests.size() = 1 2025-12-23T18:54:16.421474Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2878:3425], ActorId: [1:2879:3426], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZDgxNTA2MTUtOTAxZWVlMS0xOTMzZmIzZS0zNGZjNzYyZQ==, TxId: 01kd68zrak0q8tnjjf28b39ma8 2025-12-23T18:54:16.421617Z node 1 :STATISTICS WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:2878:3425], ActorId: [1:2879:3426], Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=ZDgxNTA2MTUtOTAxZWVlMS0xOTMzZmIzZS0zNGZjNzYyZQ==, TxId: 01kd68zrak0q8tnjjf28b39ma8 2025-12-23T18:54:16.421900Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2877:3424], ActorId: [1:2878:3425], Got response [1:2879:3426] BAD_REQUEST ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD] Test command err: 2025-12-23T18:54:09.671325Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2025-12-23T18:54:09.778911Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2025-12-23T18:54:09.793076Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2025-12-23T18:54:09.910040Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2025-12-23T18:54:16.658999Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 26:26 2025-12-23T18:54:16.659061Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 27:27 2025-12-23T18:54:16.659085Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 28:28 2025-12-23T18:54:16.659108Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 29:29 2025-12-23T18:54:16.659130Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 30:30 2025-12-23T18:54:16.659150Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 31:31 2025-12-23T18:54:16.659172Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 32:32 2025-12-23T18:54:16.659194Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 25:25 |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> THealthCheckTest::BridgeGroupDegradedInBothPiles [GOOD] >> THealthCheckTest::BridgeGroupDegradedInOnePile >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> StatisticsSaveLoad::ForbidAccess [GOOD] >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> THiveTest::TestCheckSubHiveForwarding >> TCmsTenatsTest::RequestRestartServices [GOOD] >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> TCmsTest::RequestReplaceDevicePDisk >> THiveImplTest::BootQueueSpeed [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution >> TScaleRecommenderTest::RollingRestartNoLastRecommendation [GOOD] >> TStorageBalanceTest::TestScenario1 >> THealthCheckTest::TestStoppedTabletIsNotDead [GOOD] >> THealthCheckTest::TestTabletsInUnresolvaleDatabase 
|97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::TestKeepAvailableMode >> TMaintenanceApiTest::ForceAvailabilityMode [GOOD] >> TCmsTest::WalleTasks >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestReassignUseRelativeSpace |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] >> THealthCheckTest::ServerlessBadTablets >> TCmsTenatsTest::TestTenantRatioLimit >> TCmsTest::CheckUnreplicatedDiskPreventsRestart >> TCmsTest::TestKeepAvailableModeScheduled >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ForceAvailabilityMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:08.577245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:08.663070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:08.667149Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:08.667423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:08.667483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:08.994395Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:09.094604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:09.094729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:09.153732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:09.244659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003f10/r3tmp/tmpmfLqiB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29591, node 1 2025-12-23T18:54:09.922415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:54:09.923626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:09.923689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:09.923723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:09.923954Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:09.988576Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:61847 2025-12-23T18:54:10.523426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:54:13.098039Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:13.103606Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:54:13.106445Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:13.132497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:13.132616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:13.171545Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:54:13.173746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:13.411166Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:54:13.423586Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.424371Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.425123Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.425766Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.426005Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.426138Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.426406Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.426554Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.426712Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:54:13.545772Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:13.545927Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:13.559461Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:13.724328Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:13.771569Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:54:13.771727Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:54:13.802168Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:54:13.803796Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:54:13.804030Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:54:13.804087Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:54:13.804174Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:54:13.804235Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:54:13.804289Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:54:13.804342Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:54:13.804754Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:54:13.850508Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:54:13.850620Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:54:13.865284Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:54:13.875652Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:54:13.876921Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:54:13.882967Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:54:13.920549Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:54:13.920626Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:54:13.920729Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:54:13.943294Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:54:13.953132Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:13.973607Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:54:13.973795Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:54:13.987143Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:54:14.117058Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:54:14.484493Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:54:14.597150Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:54:14.597261Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Column diff is empty, finishing 2025-12-23T18:54:15.250215Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:15.520233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2759:3371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:15.520403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:15.520801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2777:3376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:15.520889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:15.628893Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:16.165037Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3066:3425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:16.165334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:16.165726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3070:3428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:16.165808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:16.166913Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3073:3431]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:54:16.167100Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:54:16.167171Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3075:3433] 2025-12-23T18:54:16.167230Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3075:3433] 2025-12-23T18:54:16.167758Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3076:3187] 2025-12-23T18:54:16.168141Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3075:3433], server id = [2:3076:3187], tablet id = 72075186224037894, status = OK 2025-12-23T18:54:16.168260Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3076:3187], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:54:16.168325Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-12-23T18:54:16.168564Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-12-23T18:54:16.168613Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3073:3431], StatRequests.size() = 1 2025-12-23T18:54:16.183177Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:54:16.183596Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3080:3437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:16.183786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:16.184229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3084:3441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:16.184382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:16.184471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3087:3444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:16.191224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:54:16.348714Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:54:16.348814Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:54:16.402082Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3075:3433], schemeshard count = 1 2025-12-23T18:54:16.789980Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3089:3446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-12-23T18:54:16.962543Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3196:3511] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:54:16.978253Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3219:3527]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:54:16.978535Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:54:16.978579Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3219:3527], StatRequests.size() = 1 2025-12-23T18:54:17.243724Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:3298:3557], for# user@builtin, access# DescribeSchema 2025-12-23T18:54:17.243793Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:3298:3557], for# user@builtin, access# DescribeSchema 2025-12-23T18:54:17.255833Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:3288:3553], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:54:17.258346Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=ZWZiOGRiZGEtYjA2ZDY3YmItZGRmMTVjOC02YzJmZDFkZQ==, ActorId: [1:3279:3545], ActorState: ExecuteState, TraceId: 01kd68zs3f4k2770m4j88p4926, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 17 } message: "At function: KiReadTable!" end_position { row: 2 column: 17 } severity: 1 issues { position { row: 2 column: 17 } message: "Cannot find table \'db.[/Root/Database/.metadata/_statistics]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 17 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] >> THealthCheckTest::BridgeNoBscResponse |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> THiveTest::TestCheckSubHiveForwarding [GOOD] >> THiveTest::TestCheckSubHiveDrain >> TMaintenanceApiTest::SingleCompositeActionGroup >> TCmsTenatsTest::TestClusterLimit |97.7%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestManyFollowersOnOneNode >> TCmsTest::Mirror3dcPermissions [GOOD] >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues [GOOD] >> THealthCheckTest::OnlyDiskIssueOnInitialPDisks >> TCmsTest::RequestReplaceDevices >> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsLimit995 >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> THealthCheckTest::StorageLimit87 [GOOD] >> THealthCheckTest::StorageLimit80 >> TCmsTest::RequestReplaceDevicePDisk [GOOD] >> TCmsTest::RequestReplaceDevicePDiskByPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] Test command err: 2025-12-23T18:54:09.736856Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::ActionWithZeroDuration >> TCmsTenatsTest::TestNoneTenantPolicy >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestNotEnoughResources >> THiveTest::TestCheckSubHiveDrain [GOOD] >> THiveTest::TestCheckSubHiveMigration >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestForceRestartMode >> TMaintenanceApiTest::ActionReason [GOOD] >> TMaintenanceApiTest::CreateTime >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveImplTest::BootQueueConfigurePriorities [GOOD] >> THiveTest::TestBlockCreateTablet >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::BridgeModeStateStorage |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesWrongHost [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> TCmsTest::TestKeepAvailableModeScheduledDisconnects >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTenatsTest::TestClusterRatioLimit >> TCmsTest::ManualRequestApproval >> TCmsTest::WalleRebootDownNode >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDownAfterDrain >> THiveTest::TestCheckSubHiveMigration [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets >> TCmsTest::RequestReplaceDevicePDiskByPath [GOOD] >> TCmsTest::RequestReplaceDeviceTwiceWithNoVDisks >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::AllVDisksEvictionInRack >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::RequestReplacePDiskDoesntBreakGroup >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 >> TCmsTest::StateStorageTwoRings >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost >> THealthCheckTest::ServerlessBadTablets [GOOD] >> 
THealthCheckTest::ServerlessWhenTroublesWithSharedNodes >> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> TMaintenanceApiTest::CreateTime [GOOD] >> TMaintenanceApiTest::DisableCMS >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] >> THiveTest::TestNotEnoughResources [GOOD] >> THiveTest::TestRestartTablets >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] >> TCmsTest::TestLoadLog >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::StateStorageTwoBrokenRings >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TMaintenanceApiTest::TestDrainAction >> TCmsTest::WalleRebootDownNode [GOOD] >> TCmsTest::WalleCleanupTest >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy >> TCmsTest::ManualRequestApproval [GOOD] >> TCmsTest::ManageRequestsWrong >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestDrainAndReconnect >> THiveTest::TestStorageBalancer [GOOD] >> THiveTest::TestRestartsWithFollower >> TCmsTest::TestLoadLog [GOOD] >> TCmsTest::TestLogOperationsRollback >> TCmsTest::RequestReplaceDeviceTwiceWithNoVDisks [GOOD] >> TCmsTest::RequestReplaceBrokenDevices >> TCmsTest::RequestReplacePDiskDoesntBreakGroup [GOOD] >> TCmsTest::RequestReplacePDiskConsecutiveWithDone >> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] Test command err: 2025-12-23T18:52:55.945450Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128342244745140:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:55.945678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00393d/r3tmp/tmpoVZA4H/pdisk_1.dat 2025-12-23T18:52:56.190407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:56.190543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:56.229873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:56.245379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:56.274078Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:56.275970Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128342244745114:2081] 1766515975944072 != 1766515975944075 TServer::EnableGrpc on GrpcPort 29290, node 1 2025-12-23T18:52:56.320489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-12-23T18:52:56.320519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:56.320532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:56.320672Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:56.472804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:32278 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766515976330 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:56.577706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:52:56.954821Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:58.296309Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:52:58.296436Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128355129647858:2319], Start check tables existence, number paths: 2 2025-12-23T18:52:58.300710Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128355129647858:2319], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:52:58.300787Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128355129647858:2319], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:52:58.300813Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128355129647858:2319], Successfully finished 2025-12-23T18:52:58.300887Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-12-23T18:52:58.301975Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:52:58.302012Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:52:58.302297Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-12-23T18:52:58.304860Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=NDRiZDlmMWItMmJiNWEyZGUtNTgyOTBlZmMtMTJiY2I4YjE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NDRiZDlmMWItMmJiNWEyZGUtNTgyOTBlZmMtMTJiY2I4YjE= (tmp dir name: 1485ea62-42db-a46c-b2c4-339af5c891a5) 2025-12-23T18:52:58.304984Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=NDRiZDlmMWItMmJiNWEyZGUtNTgyOTBlZmMtMTJiY2I4YjE=, ActorId: [1:7587128355129647884:2323], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:52:58.314196Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128355129647886:2498], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:52:58.317561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:52:58.318629Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128355129647886:2498], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-12-23T18:52:58.318787Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128355129647886:2498], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe 
successfully connected 2025-12-23T18:52:58.325787Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128355129647886:2498], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-12-23T18:52:58.397048Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128355129647886:2498], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-12-23T18:52:58.400749Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128355129647937:2530] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:52:58.400879Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128355129647886:2498], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-12-23T18:52:58.401224Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-12-23T18:52:58.401252Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id Root 2025-12-23T18:52:58.401320Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128355129647944:2324], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-12-23T18:52:58.402336Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128355129647944:2324], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-12-23T18:52:58.402452Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-12-23T18:52:58.402486Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-12-23T18:52:58.403450Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7587128355129647953:2325], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2025-12-23T18:52:58.404493Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7587128355129647953:2325], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-12-23T18:52:58.412449Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-12-23T18:52:58.412466Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2025-12-23T18:52:58.412514Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128355129647965:2327], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-12-23T18:52:58.412603Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-12-23T18:52:58.412671Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: 
ydb://session/3?node_id=1&id=NDRiZDlmMWItMmJiNWEyZGUtNTgyOTBlZmMtMTJiY2I4YjE=, ActorId: [1:7587128355129647884:2323], ActorState: ReadyState, TraceId: 01kd68xc5bcxf1hyknen5kpzxa, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: ALTER RESOURCE POOL sample_pool_id SET ( CONCURRENT_QUERY_LIMIT=42 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-12-23T18:52:58.414068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128355129647965:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { < ... /session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69002ra7rxaw2187fmqcyw, Sent query response back to proxy, proxyRequestId: 28, proxyId: [10:7587128629078362491:2264] 2025-12-23T18:54:24.356343Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:244: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7587128723567644432:2874], ActorId: [10:7587128723567644433:2875], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, TxId: 2025-12-23T18:54:24.356478Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:201: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7587128723567644432:2874], ActorId: [10:7587128723567644433:2875], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery with SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, TxId: , text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2025-12-23T18:54:24.356885Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:443: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ReadyState, TraceId: 01kd69003450kra7zwqsavke7d, received request, proxyRequestId: 29 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [10:7587128723567644469:2532] database: /Root databaseId: /Root pool id: 2025-12-23T18:54:24.357447Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:934: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: 
[10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, acquire mvcc snapshot 2025-12-23T18:54:24.358022Z node 10 :KQP_SESSION TRACE: kqp_session_actor.cpp:972: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, read snapshot result: UNAVAILABLE, step: 1766516064404, tx id: 18446744073709551615 2025-12-23T18:54:24.358075Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1659: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, ExecutePhyTx, tx: 0x00007BCF093E5C18 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-12-23T18:54:24.358122Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1816: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, Sending to Executer TraceId: 0 8 2025-12-23T18:54:24.358230Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1880: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, Created new KQP executer: [10:7587128723567644476:2525] isRollback: 0 2025-12-23T18:54:24.362015Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2090: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-12-23T18:54:24.362101Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:1659: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, ExecutePhyTx, tx: 0x00007BCF095B5658 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-12-23T18:54:24.363032Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2090: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-12-23T18:54:24.363142Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2383: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, txInfo Status: Committed Kind: ReadOnly TotalDuration: 5.797 ServerDuration: 5.696 QueriesCount: 2 2025-12-23T18:54:24.363238Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2543: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-12-23T18:54:24.363291Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, Cleanup 
start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:54:24.363316Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, EndCleanup, isFinal: 0 2025-12-23T18:54:24.363363Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ExecuteState, TraceId: 01kd69003450kra7zwqsavke7d, Sent query response back to proxy, proxyRequestId: 29, proxyId: [10:7587128629078362491:2264] 2025-12-23T18:54:24.363624Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:244: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7587128723567644432:2874], ActorId: [10:7587128723567644433:2875], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, DataQuery #2 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, TxId: 2025-12-23T18:54:24.363705Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:371: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7587128723567644432:2874], ActorId: [10:7587128723567644433:2875], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, TxId: 2025-12-23T18:54:24.363737Z node 10 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:171: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7587128723567644432:2874], ActorId: [10:7587128723567644433:2875], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Delete session: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ= 2025-12-23T18:54:24.363770Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:311: [TQueryRetryActor] [TRefreshPoolStateQuery] OwnerId: [10:7587128723567644431:2873], ActorId: [10:7587128723567644432:2874], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , Got response [10:7587128723567644433:2875] SUCCESS 2025-12-23T18:54:24.363898Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:54:24.363945Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:54:24.363974Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:54:24.364001Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:54:24.364082Z node 10 
:KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=10&id=YzM4MTU5ZGEtMTBhYjljOWQtOTQ5ODA4YTQtM2UyYThmZTQ=, ActorId: [10:7587128723567644435:2525], ActorState: unknown state, Session actor destroyed 2025-12-23T18:54:24.371556Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=10&id=ZGIzNWRhOWItODJjNmRmNDAtOGEzNzJlNTctNmMzZDc0MzQ=, ActorId: [10:7587128646258232304:2325], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:54:24.371611Z node 10 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=10&id=ZGIzNWRhOWItODJjNmRmNDAtOGEzNzJlNTctNmMzZDc0MzQ=, ActorId: [10:7587128646258232304:2325], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:54:24.371659Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=10&id=ZGIzNWRhOWItODJjNmRmNDAtOGEzNzJlNTctNmMzZDc0MzQ=, ActorId: [10:7587128646258232304:2325], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:54:24.371688Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=10&id=ZGIzNWRhOWItODJjNmRmNDAtOGEzNzJlNTctNmMzZDc0MzQ=, ActorId: [10:7587128646258232304:2325], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:54:24.371768Z node 10 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=10&id=ZGIzNWRhOWItODJjNmRmNDAtOGEzNzJlNTctNmMzZDc0MzQ=, ActorId: [10:7587128646258232304:2325], ActorState: unknown state, Session actor destroyed |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestLockTabletExecutionTimeout >> THealthCheckTest::BridgeGroupDegradedInOnePile [GOOD] >> THealthCheckTest::BridgeGroupDeadInOnePile >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeDisconnects >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> THiveTest::DrainWithHiveRestart [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet >> TCmsTest::BridgeModeStateStorage [GOOD] >> TCmsTest::BridgeModeSysTablets >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> THealthCheckTest::TestNodeDisconnected >> HttpRequest::ProbeServerless [GOOD] >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::NoBscResponse >> THiveTest::TestDrainAndReconnect [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] >> THiveTest::TestAsyncReassign >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue >> THealthCheckTest::StorageLimit80 [GOOD] >> THealthCheckTest::StorageLimit50 >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> TMaintenanceApiTest::TestDrainAction [GOOD] >> TMaintenanceApiTest::TestCordonAction >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleTasksWithNodeLimit >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled >> TCmsTest::ManageRequestsWrong [GOOD] >> TCmsTest::ManageRequestsDry >> TCmsTest::CollectInfo >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> THealthCheckTest::OnlyDiskIssueOnInitialPDisks [GOOD] 
>> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:16.287455Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:16.361149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:16.365519Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:16.365836Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:16.365925Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:16.715678Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:16.812778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:16.812910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:16.871591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:16.920440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003245/r3tmp/tmpeFJxQh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12760, node 1 2025-12-23T18:52:17.618383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:17.619535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:17.619586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:17.619627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:17.619857Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:17.686204Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:11743 2025-12-23T18:52:18.224809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:21.367541Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:21.373251Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:21.376208Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:21.404253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:21.404373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:21.442477Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:21.444307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:21.677670Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.678151Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.678790Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.679617Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.679971Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.680142Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.680349Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.680501Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.680640Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.761979Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:52:21.806650Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:21.806768Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:21.819958Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:21.981851Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:22.021486Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:22.021578Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:22.046476Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:22.047796Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:22.047977Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:22.048028Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column 
count# 0 2025-12-23T18:52:22.048075Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:22.048112Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:22.048154Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:22.048189Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:22.048515Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:22.088990Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:22.089071Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:22.100059Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:52:22.108010Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:52:22.109038Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:52:22.113932Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:52:22.144181Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:52:22.144229Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:52:22.144284Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:52:22.162297Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:22.171001Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:22.177624Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:22.177737Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:22.187504Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:22.312923Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:22.677445Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:22.742832Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:22.742923Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. ... ry with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2025-12-23T18:52:49.432857Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:6019:2473], ActorId: [2:6029:5112], Start read next stream part 2025-12-23T18:52:49.728043Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6056:5126]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:52:49.728355Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:52:49.728643Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-12-23T18:52:49.728716Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:6059:5129] 2025-12-23T18:52:49.728777Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:6059:5129] 2025-12-23T18:52:49.729267Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:6060:5130] 2025-12-23T18:52:49.729418Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6059:5129], server id = [2:6060:5130], tablet id = 72075186224037894, status = OK 2025-12-23T18:52:49.729485Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:6060:5130], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:52:49.729549Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:52:49.729721Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:52:49.729793Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6056:5126], StatRequests.size() = 1 2025-12-23T18:52:49.730070Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:53:43.759806Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:6019:2473], ActorId: [2:6029:5112], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:53:43.760117Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:6019:2473], ActorId: [2:6029:5112], Start read next stream part 2025-12-23T18:53:43.761352Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6019:2473], ActorId: [2:6316:5302], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:53:43.761916Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd68x3crdpp67hc564sj7aw4", SessionId: ydb://session/3?node_id=2&id=YTI4NTA1MmQtZGFiMzg3YzYtODBlMTEzZjMtYzQzNThlMDc=, Slow query, duration: 54.323086s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:53:43.766398Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:6019:2473], ActorId: [2:6316:5302], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table` 2025-12-23T18:53:43.766561Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:6019:2473], ActorId: [2:6316:5302], Start read next stream part 2025-12-23T18:53:43.767219Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:6019:2473], ActorId: [2:6029:5112], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:53:43.767304Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6019:2473], ActorId: [2:6029:5112], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDg0ZTk5NTgtYjgxYTEwZTktZmNjMTQyMTctYmE0ZWU5N2Q=, TxId: 2025-12-23T18:53:43.768236Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2025-12-23T18:53:43.900693Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6336:5318]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:53:43.901138Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:53:43.901196Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:6336:5318], StatRequests.size() = 1 2025-12-23T18:54:25.819677Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:6019:2473], ActorId: [2:6316:5302], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:54:25.819805Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:6019:2473], ActorId: [2:6316:5302], Start read next stream part 2025-12-23T18:54:25.820387Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd68yrep4g2fz9510s562dqk", SessionId: ydb://session/3?node_id=2&id=YzlhMGVjZDUtYTQwNGFhZjUtYjVmZDcyZDgtN2YxNmFkMzI=, Slow query, duration: 42.046176s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(256,8)) FROM 
`/Root/Database/Table`", parameters: 0b 2025-12-23T18:54:25.823105Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:6558:5458], ActorId: [2:6559:5459], Starting query actor #1 [2:6560:5460] 2025-12-23T18:54:25.823171Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6559:5459], ActorId: [2:6560:5460], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2025-12-23T18:54:25.825499Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:6019:2473], ActorId: [2:6316:5302], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:54:25.825542Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6019:2473], ActorId: [2:6316:5302], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjAzNjI4YzAtNjBhOWExMjEtM2ZiMWE3ZTItMzAyZmI4MDU=, TxId: 2025-12-23T18:54:25.825623Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33540, txId: 18446744073709551615] shutting down 2025-12-23T18:54:25.826798Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:6559:5459], ActorId: [2:6560:5460], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NDhiY2JmNGQtMTNkYTU5ZmItNDVlODI3NzgtODAyMTI3ZTU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:54:25.903842Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:6576:5474]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:54:25.904113Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:54:25.904396Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-12-23T18:54:25.904442Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:54:25.904560Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:54:25.904626Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:6576:5474], StatRequests.size() = 1 2025-12-23T18:54:25.904873Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:54:26.077598Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6559:5459], ActorId: [2:6560:5460], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDhiY2JmNGQtMTNkYTU5ZmItNDVlODI3NzgtODAyMTI3ZTU=, TxId: 2025-12-23T18:54:26.077707Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6559:5459], ActorId: [2:6560:5460], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDhiY2JmNGQtMTNkYTU5ZmItNDVlODI3NzgtODAyMTI3ZTU=, TxId: 2025-12-23T18:54:26.078121Z node 2 :STATISTICS DEBUG: query_actor.h:311: 
[TQueryRetryActor] OwnerId: [2:6558:5458], ActorId: [2:6559:5459], Got response [2:6560:5460] SUCCESS 2025-12-23T18:54:26.078435Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:54:26.105640Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 32] 2025-12-23T18:54:26.105735Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=L$anlI, ActorId=[1:4460:3710] 2025-12-23T18:54:26.107415Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:6595:4270]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:54:26.107733Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:54:26.107805Z node 1 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:54:26.108150Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:54:26.108230Z node 1 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-12-23T18:54:26.108300Z node 1 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-12-23T18:54:26.125104Z node 1 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table[Value]=100' |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> THealthCheckTest::ShardsLimit995 [GOOD] >> THealthCheckTest::ShardsLimit905 >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TCmsTest::VDisksEviction >> TMaintenanceApiTest::DisableCMS [GOOD] >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes >> THiveTest::TestAsyncReassign [GOOD] >> THiveTest::TestAlterFollower >> TCmsTest::TestForceRestartModeDisconnects [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceBrokenDevices [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteOwnerTablets >> THiveTest::TestLockTabletExecutionTimeout [GOOD] >> THiveTest::TestLockTabletExecutionReconnect |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::DisableCMS [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeDisconnects [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> 
THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestHiveBalancerWithPreferredDC3 >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] >> THiveTest::TestDeleteOwnerTablets [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode >> TMaintenanceApiTest::TestCordonAction [GOOD] >> THealthCheckTest::TestTabletsInUnresolvaleDatabase [GOOD] >> THealthCheckTest::UnknowPDiskState >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD] >> TCmsTest::WalleDisableCMS >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::DynamicConfig >> TCmsTest::BridgeModeSysTablets [GOOD] >> TCmsTest::CheckSysTabletsOnNodesWithPDisks >> TCmsTest::ManageRequestsDry [GOOD] >> TCmsTest::ManageRequests ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:52:42.126109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:42.126189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:42.126227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:42.126254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:42.126301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:42.126325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:42.126395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:42.126475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:42.127181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:42.128156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:42.198113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:42.198166Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2025-12-23T18:52:42.208408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:42.208548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:42.209851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:42.222083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:42.223475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:42.226384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:42.230296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:42.239897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:42.240408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:42.249882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:42.249939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:42.250136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:42.250173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:42.250309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:42.250431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:42.378937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.379913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.380620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
cipient [3:720:2705]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-12-23T18:54:27.558561Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-12-23T18:54:27.558655Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409546 outdated step 5000038 last cleanup 0 2025-12-23T18:54:27.558712Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:54:27.558759Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409546 2025-12-23T18:54:27.558791Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-12-23T18:54:27.558817Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409546 2025-12-23T18:54:27.558963Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:720:2705]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T18:54:27.559178Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 38 2025-12-23T18:54:27.559458Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269553162, Sender [3:720:2705], Recipient [3:128:2152]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 38 Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 146 Memory: 124368 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 261 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 444 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 133 TableOwnerId: 72057594046678944 FollowerId 2025-12-23T18:54:27.559493Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5284: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-12-23T18:54:27.559531Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0146 2025-12-23T18:54:27.559609Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:54:27.559640Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-12-23T18:54:27.560893Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435080, Sender [3:1467:3408], Recipient [3:720:2705]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvBuildTableStatsResult 2025-12-23T18:54:27.591995Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:54:27.592075Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:54:27.592103Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-12-23T18:54:27.592177Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2025-12-23T18:54:27.592216Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-12-23T18:54:27.592322Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2025-12-23T18:54:27.592372Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-12-23T18:54:27.592397Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2025-12-23T18:54:27.592454Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2025-12-23T18:54:27.592508Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2025-12-23T18:54:27.592588Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:54:27.603007Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:54:27.603070Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:54:27.603094Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:54:27.872500Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:128:2152]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:27.872580Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:27.872657Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [3:128:2152], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:27.872691Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:28.225206Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:28.225273Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:28.225344Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [3:128:2152], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:28.225389Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:28.577584Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:28.577660Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:28.577731Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [3:128:2152], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:28.577756Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:28.930456Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:28.930524Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:28.930595Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [3:128:2152], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:28.930619Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:29.294554Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:29.294659Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:29.294773Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [3:128:2152], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:29.294807Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:29.347393Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:720:2705]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T18:54:29.637720Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:29.637786Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:54:29.637854Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [3:128:2152], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:54:29.637892Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::TestCordonAction [GOOD] Test command err: 2025-12-23T18:54:26.509250Z node 20 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action DRAIN_NODE is not supported 2025-12-23T18:54:26.509364Z node 20 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action DRAIN_NODE is not supported 2025-12-23T18:54:28.628022Z node 28 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action CORDON_NODE is not supported 2025-12-23T18:54:28.628155Z node 28 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action CORDON_NODE is not supported |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks >> THealthCheckTest::TestNodeDisconnected [GOOD] >> THealthCheckTest::TestStateStorageOk >> TCmsTest::StateRequest >> TCmsTest::VDisksEviction [GOOD] >> THiveTest::TestAlterFollower [GOOD] >> THiveTest::TestBootProgress >> TCmsTest::AllVDisksEvictionInRack [GOOD] >> TCmsTest::BridgeModeCollectInfo >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled >> THealthCheckTest::BridgeNoBscResponse [GOOD] >> THealthCheckTest::CLusterNotBootstrapped ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2025-12-23T18:54:29.457540Z node 26 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-12-23T18:54:29.457654Z node 26 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-12-23T18:54:29.457798Z node 26 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2025-12-23T18:54:29.459288Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: 
UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: 
"vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2025-12-23T18:54:29.459737Z node 26 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 
} Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2025-12-23T18:54:29.459952Z node 26 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-12-23T18:54:29.460004Z node 26 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2025-12-23T18:54:29.460045Z node 26 :CMS DEBUG: cms.cpp:415: Result: DISALLOW_TEMP (reason: VDisks eviction from host 26 has not yet been completed) 2025-12-23T18:54:29.460160Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-12-23T18:54:29.460310Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-12-23T18:54:29.460347Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 26, marker# MARKER_DISK_FAULTY 2025-12-23T18:54:29.460510Z node 26 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated 
in 120.083512s 2025-12-23T18:54:29.460543Z node 26 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2025-12-23T18:54:29.460600Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2025-12-23T18:54:29.460636Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2025-12-23T18:54:29.460657Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326: ... torage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { 
Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120622560 } 2025-12-23T18:54:29.839001Z node 26 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-12-23T18:54:29.839065Z node 26 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2025-12-23T18:54:29.839111Z node 26 :CMS DEBUG: cms.cpp:415: Result: DISALLOW_TEMP (reason: VDisks eviction from host 26 has not yet been completed) 2025-12-23T18:54:29.839240Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-12-23T18:54:29.839403Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-12-23T18:54:29.839442Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 26, marker# MARKER_DISK_FAULTY 2025-12-23T18:54:29.839644Z node 26 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 0.100000s 2025-12-23T18:54:29.839685Z node 26 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2025-12-23T18:54:29.839752Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2025-12-23T18:54:29.839789Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2025-12-23T18:54:29.839822Z node 26 :CMS DEBUG: sentinel.cpp:786: 
[Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2025-12-23T18:54:29.839848Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-12-23T18:54:29.839888Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-12-23T18:54:29.839919Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-12-23T18:54:29.839955Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-12-23T18:54:29.839977Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 33, wbId# [33:8388350642965737326:1634689637] 2025-12-23T18:54:29.840181Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 120524072 ChangeTime: 120524072 Path: "/26/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2025-12-23T18:54:29.840651Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 120524072 ChangeTime: 120524072 Path: "/30/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2025-12-23T18:54:29.840778Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 120524072 ChangeTime: 120524072 Path: "/31/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2025-12-23T18:54:29.840854Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 120524072 ChangeTime: 120524072 Path: "/32/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2025-12-23T18:54:29.840915Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 33, response# PDiskStateInfo { PDiskId: 33 CreateTime: 120524072 ChangeTime: 120524072 Path: "/33/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2025-12-23T18:54:29.840968Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 120524072 ChangeTime: 120524072 Path: "/27/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2025-12-23T18:54:29.841010Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 120524072 ChangeTime: 120524072 Path: "/28/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2025-12-23T18:54:29.841048Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle 
TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 120524072 ChangeTime: 120524072 Path: "/29/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2025-12-23T18:54:29.841081Z node 26 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:54:29.853263Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:139: TTxStorePermissions complete 2025-12-23T18:54:29.853525Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 26 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 } 2025-12-23T18:54:29.854022Z node 26 :CMS INFO: cms.cpp:1439: User user removes request user-r-3 2025-12-23T18:54:29.854063Z node 26 :CMS DEBUG: cms.cpp:1462: Resulting status: OK 2025-12-23T18:54:29.854112Z node 26 :CMS DEBUG: cms_tx_remove_request.cpp:21: TTxRemoveRequest Execute 2025-12-23T18:54:29.854151Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 26 2025-12-23T18:54:29.854267Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove 2025-12-23T18:54:29.866691Z node 26 :CMS DEBUG: cms_tx_remove_request.cpp:45: TTxRemoveRequest Complete 2025-12-23T18:54:29.866917Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] >> TCmsTest::ActionIssue [GOOD] >> TCmsTest::ActionIssuePartialPermissions >> RetryPolicy::RetryWithBatching [GOOD] >> THiveTest::TestBootProgress [GOOD] >> THiveTest::TestBridgeCreateTablet >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled >> THiveTest::TestHiveBalancerWithPreferredDC3 [GOOD] >> THiveTest::TestHiveBalancerWithFollowers |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> TCmsTest::CheckSysTabletsOnNodesWithPDisks [GOOD] >> TCmsTest::StateRequest [GOOD] >> TCmsTest::StateRequestNode |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::CheckSysTabletsOnNodesWithPDisks [GOOD] >> TCmsTest::ManageRequests [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific ------- 
[TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-12-23T18:48:41.328819Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.328866Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.328893Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-12-23T18:48:41.329421Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-12-23T18:48:41.329463Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.329488Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.332517Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006998s 2025-12-23T18:48:41.333080Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-12-23T18:48:41.333147Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.333171Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.333224Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009060s 2025-12-23T18:48:41.333630Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-12-23T18:48:41.333652Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.333671Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T18:48:41.333718Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006012s 2025-12-23T18:48:41.364561Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1766515721364516 2025-12-23T18:48:41.711993Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587127251789819309:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:48:41.712040Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:41.815052Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:48:41.838045Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587127249965569251:2152];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00583e/r3tmp/tmp7uMWzB/pdisk_1.dat 2025-12-23T18:48:41.850322Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:48:41.853151Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:48:41.853755Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct 
read cache: : Created 2025-12-23T18:48:42.138867Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:42.154995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:48:42.293154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:42.293508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:42.295854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:42.295950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:42.332054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:42.346071Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:48:42.347983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:42.437007Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:42.446857Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:48:42.448207Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 10112, node 1 2025-12-23T18:48:42.700296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/00583e/r3tmp/yandexvvEdt8.tmp 2025-12-23T18:48:42.700336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/00583e/r3tmp/yandexvvEdt8.tmp 2025-12-23T18:48:42.701839Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/00583e/r3tmp/yandexvvEdt8.tmp 2025-12-23T18:48:42.701948Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:42.742102Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:42.851115Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:48:42.936610Z INFO: TTestServer started on Port 14589 GrpcPort 10112 TClient is connected to server localhost:14589 PQClient connected to localhost:10112 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515722510 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:48:43.269745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:48:43.351571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976720658, at schemeshard: 72057594046644480 waiting... waiting... 2025-12-23T18:48:45.846276Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127267145438681:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.846697Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127267145438659:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.846875Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.846996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127268969689827:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.847064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.847517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127268969689839:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.847594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587127268969689842:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.848447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.853041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:45.867102Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587127267145438717:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:45.867172Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Reso ... e-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: acknoledged message 1 2025-12-23T18:54:30.083756Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: acknoledged message 2 2025-12-23T18:54:30.083794Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: acknoledged message 3 2025-12-23T18:54:30.083820Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: acknoledged message 4 2025-12-23T18:54:30.083852Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: acknoledged message 5 2025-12-23T18:54:30.083884Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: acknoledged message 6 2025-12-23T18:54:30.083913Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: acknoledged message 7 2025-12-23T18:54:30.083944Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: acknoledged message 8 2025-12-23T18:54:30.083983Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: acknoledged message 9 2025-12-23T18:54:30.084015Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: acknoledged message 10 2025-12-23T18:54:30.084353Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: close. 
Timeout = 0 ms 2025-12-23T18:54:30.084402Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session will now close 2025-12-23T18:54:30.084468Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: aborting 2025-12-23T18:54:30.085431Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: gracefully shut down, all writes complete 2025-12-23T18:54:30.085427Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session is aborting and will not restart 2025-12-23T18:54:30.085390Z node 17 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0 grpc read done: success: 0 data: 2025-12-23T18:54:30.085417Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0 grpc read failed 2025-12-23T18:54:30.085532Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0] Write session: destroy 2025-12-23T18:54:30.085755Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 7 sessionId: test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0 2025-12-23T18:54:30.085785Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message-group-id|7418c05e-754c3bd8-51e34a6b-1c4771be_0 is DEAD 2025-12-23T18:54:30.086175Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T18:54:30.086326Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [17:7587128752504541205:2603] destroyed 2025-12-23T18:54:30.086358Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2025-12-23T18:54:30.086385Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:54:30.086400Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.086416Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:54:30.086434Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.086446Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:54:30.118413Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:54:30.118440Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.118455Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:54:30.118474Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.118486Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:54:30.218851Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:54:30.218889Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.218914Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:54:30.218941Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.218958Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:54:30.319211Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:54:30.319254Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.319275Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:54:30.319300Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.319317Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:54:30.419523Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:54:30.419563Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.419583Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2025-12-23T18:54:30.419608Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.419624Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:54:30.519864Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:54:30.519915Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.519935Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:54:30.519961Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.519979Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:54:30.620262Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:54:30.620314Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.620340Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:54:30.620389Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.620413Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:54:30.720567Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:54:30.720604Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.720624Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:54:30.720656Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.720672Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:54:30.820978Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:54:30.821030Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.821056Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:54:30.821088Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.821110Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T18:54:30.921273Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:54:30.921310Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.921329Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:54:30.921354Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:54:30.921391Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |97.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ManageRequests [GOOD] Test command err: 2025-12-23T18:54:24.215866Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected 2025-12-23T18:54:24.216334Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 3, reason# Disconnected 2025-12-23T18:54:24.216481Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 2, reason# Disconnected 2025-12-23T18:54:24.579294Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected 2025-12-23T18:54:24.580085Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 2, reason# Disconnected 2025-12-23T18:54:24.580134Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 3, reason# Disconnected |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TSchemeShardExtSubDomainTest::Create >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::SysTabletsNode >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionStealLock >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> THealthCheckTest::StorageLimit50 [GOOD] >> THealthCheckTest::StorageNoQuota >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ShardsLimit800 |97.7%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssuePartialPermissions [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> THiveTest::TestBridgeCreateTablet [GOOD] >> THiveTest::TestBridgeDisconnect >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics >> THealthCheckTest::BridgeGroupDeadInOnePile [GOOD] >> THealthCheckTest::BridgeGroupDeadInBothPiles >> TCmsTest::WalleDisableCMS [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TCmsTest::BridgeModeCollectInfo [GOOD] >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled >> TCmsTest::BridgeModeGroups >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestFollowers >> TCmsTest::StateRequestNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleDisableCMS [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> THealthCheckTest::UnknowPDiskState [GOOD] >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools >> TOlap::StoreStats >> TOlapNaming::CreateColumnTableExtraSymbolsOk >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TOlap::StoreStatsQuota >> TSchemeShardExtSubDomainTest::Drop-ExternalHive >> TOlapNaming::CreateColumnStoreFailed >> TOlap::CreateDropStandaloneTable >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> THiveTest::TestServerlessComputeResourcesMode >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestDeleteTabletWithFollowers >> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks [GOOD] >> TCmsTest::SysTabletsNode [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues >> THealthCheckTest::ShardsLimit905 [GOOD] >> THealthCheckTest::ShardsNoLimit >> TOlapNaming::CreateColumnTableOk >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTest::WalleTasksDifferentPriorities >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowersReconfiguration |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive >> THiveTest::TestBridgeDisconnect [GOOD] >> THiveTest::TestBridgeDisconnectWithReboots >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive >> THealthCheckTest::CLusterNotBootstrapped [GOOD] >> 
THealthCheckTest::BridgeTimeDifference >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] >> THiveTest::TestResetServerlessComputeResourcesMode >> TOlapNaming::CreateColumnStoreFailed [GOOD] >> TOlapNaming::AlterColumnTableOk >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestCreateTabletBeforeLocal >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher >> TBoardSubscriberTest::DropByDisconnect >> TBoardSubscriber2DCTest::SimpleSubscriber >> TBoardSubscriberTest::SimpleSubscriber >> TBoardSubscriberTest::ManySubscribersManyPublisher >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive >> TBoardSubscriberTest::NotAvailableByShutdown >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> THiveTest::TestCreateTabletReboots >> test_canonical_records.py::test_create_drop_and_alter_table [GOOD] >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::AlterStore >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestFollowerPromotion >> THealthCheckTest::NoBscResponse [GOOD] >> THealthCheckTest::LayoutIncorrect >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn >> THiveTest::TestServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] >> THiveTest::TestReassignNonexistentTablet >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestLockTabletExecution |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} 
ydb/core/base/ut_board_subscriber/unittest >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> THealthCheckTest::HealthCheckConfigUpdate >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |97.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TCmsTest::BridgeModeGroups [GOOD] >> TCmsTest::BridgeModeNodeLimit >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent |97.7%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |97.8%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |97.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies >> THiveTest::TestSkipBadNode [GOOD] >> THiveTest::TestStopTenant >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamOperationTimeout >> THiveTest::TestReassignNonexistentTablet [GOOD] >> THiveTest::TestLockedTabletsMustNotRestart >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false [FAIL] >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardColumnTableTTL::AlterColumnTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:39.073641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:39.073723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:39.073759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:39.073790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:39.073823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:39.073863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:39.073928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:39.073997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:39.074661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:39.074883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:39.146473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:39.146521Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:39.151057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:39.151305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:39.151406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:39.153956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:39.154135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:39.154782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:39.154992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:39.156585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:39.156745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:39.157832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:39.157883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:39.157989Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:39.158047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:39.158088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:39.158201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:39.261006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.261721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.261826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.261875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.261918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.261952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.261992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.262076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.262137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.262188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.262227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.262273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.262350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.262409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:39.262472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... thId: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:40.109195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:54:40.109378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:40.109422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:54:40.109458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:54:40.109486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:54:40.110863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:40.110922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:54:40.110962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:54:40.112307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:40.112350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:40.112387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:40.112439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:54:40.112554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:54:40.113704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:54:40.113833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:54:40.114100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:40.114211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:40.114248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:40.114456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:54:40.114496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:40.114664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:54:40.114725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:54:40.116139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:40.116177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:40.116363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:40.116398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:54:40.116589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:40.116625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:54:40.116702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:54:40.116733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:40.116786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:54:40.116813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:40.116850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:54:40.116881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:40.116912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:54:40.116937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:54:40.116983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:54:40.117019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:54:40.117050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:54:40.117529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:54:40.117650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:54:40.117694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:54:40.117722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:54:40.117750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:54:40.117834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:54:40.120122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:54:40.120703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:54:40.121862Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:671:2661] Bootstrap 2025-12-23T18:54:40.122819Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:671:2661] Become StateWork (SchemeCache [1:676:2666]) 2025-12-23T18:54:40.125149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } 
KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:54:40.125418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:40.125521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2025-12-23T18:54:40.125994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2025-12-23T18:54:40.126889Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:671:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:54:40.129351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:40.129545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-12-23T18:54:40.129838Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn >> TOlap::AlterTtl [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:54:36.466720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: 
Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:36.466830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:36.466884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:36.466909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:36.466945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:36.466966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:36.467002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:36.467077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:36.467755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:36.469580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:36.571178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:36.571241Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:36.582048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:36.582256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:36.582438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:36.592728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:36.592887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:36.593844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:36.599369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:36.612444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:36.613765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:36.632246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:36.632327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:36.632463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:36.632497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:36.632537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:36.634080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:36.806558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.807695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.807830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.807905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.807989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.808070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.808165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.808257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.808328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.808392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.808475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.808569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.808668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:54:36.808741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.808833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... hard::TEvProposeTransactionResult> complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:54:40.967216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:54:40.967269Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:149: TAlterColumnTable TPropose operationId# 106:0 HandleReply ProgressState at tablet: 72057594046678944 2025-12-23T18:54:40.967345Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2025-12-23T18:54:40.967489Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 5000042 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:54:40.969033Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2025-12-23T18:54:40.969157Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000043 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 106 at step: 5000043 2025-12-23T18:54:40.969765Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:40.969879Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 12884904049 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:40.969937Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:110: TAlterColumnTable TPropose operationId# 106:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000043 2025-12-23T18:54:40.970670Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change 
state for txid 106:0 128 -> 129 2025-12-23T18:54:40.970958Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:54:40.971044Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:54:40.971518Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=106;fline=column_engine_logs.cpp:113;event=double_schema_version;v=1; FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000043 2025-12-23T18:54:40.987807Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:40.987862Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:54:40.988093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:54:40.988256Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:40.988313Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:214:2214], at schemeshard: 72057594046678944, txId: 106, path id: 38 2025-12-23T18:54:40.988367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:214:2214], at schemeshard: 72057594046678944, txId: 106, path id: 39 2025-12-23T18:54:40.988744Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:54:40.988802Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:200: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:54:40.988874Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: alter_table.cpp:223: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-12-23T18:54:40.991867Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-12-23T18:54:40.992011Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-12-23T18:54:40.992058Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-12-23T18:54:40.992104Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 
2025-12-23T18:54:40.992147Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:54:40.993785Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-12-23T18:54:40.993873Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-12-23T18:54:40.993900Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-12-23T18:54:40.993929Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 14 2025-12-23T18:54:40.993960Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:54:40.994040Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-12-23T18:54:40.997454Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-12-23T18:54:40.998579Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-12-23T18:54:40.998838Z node 3 :TX_TIERING ERROR: log.cpp:841: fline=manager.cpp:170;error=cannot_read_secrets;reason=Can't read access key: No such secret: SId:secret; 2025-12-23T18:54:40.998990Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-12-23T18:54:41.011731Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6585: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-12-23T18:54:41.011822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-12-23T18:54:41.011940Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 FAKE_COORDINATOR: Erasing txId 106 2025-12-23T18:54:41.014071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:54:41.014224Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-12-23T18:54:41.014278Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2025-12-23T18:54:41.014432Z node 3 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2025-12-23T18:54:41.014497Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:54:41.014535Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2025-12-23T18:54:41.014566Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:54:41.014599Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-12-23T18:54:41.014681Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:750:2726] message: TxId: 106 2025-12-23T18:54:41.014778Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-12-23T18:54:41.014833Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2025-12-23T18:54:41.014894Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 106:0 2025-12-23T18:54:41.015014Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:54:41.017759Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-12-23T18:54:41.017840Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:956:2923] TestWaitNotification: OK eventTxId 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse [GOOD] >> THiveTest::TestStopTenant [GOOD] >> THiveTest::TestTabletAvailability >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> THiveTest::TestFollowerPromotionFollowerDies [GOOD] >> THiveTest::TestFollowersCrossDC_Easy >> THiveTest::TestLockedTabletsMustNotRestart [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [FAIL] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] >> 
THiveTest::TestTabletAvailability [GOOD] >> THiveTest::TestSetDomain |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> THealthCheckTest::StorageNoQuota [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:41.506205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:41.506289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:41.506325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:41.506363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:41.506414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:41.506456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:41.506525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:41.506618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:41.507611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:41.507927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:41.593792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:41.593854Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:41.599574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:41.599790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:41.599918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:41.602872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:41.603090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:41.603826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:41.604065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:41.606069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:41.606278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:41.607427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:41.607485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:41.607632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:41.607682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:41.607724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:41.607872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:41.775031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.776036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.776176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.776250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.776317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.776381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.776463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.776581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.776672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.776752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.776817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.776901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.777026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.777120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.777194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
peration type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:54:42.337363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:42.337533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:54:42.337731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.337786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:54:42.337828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:54:42.337868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:54:42.339594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.339648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:54:42.339687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:54:42.341335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.341384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.341433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:42.341498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:54:42.341611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:54:42.343116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:54:42.343259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 1 at step: 5000037 2025-12-23T18:54:42.343563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:42.343667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:42.343712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:42.343984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:54:42.344054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:42.344217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:54:42.344287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:54:42.346038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:42.346081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:42.346302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:42.346346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:54:42.346612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.346654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:54:42.346764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:54:42.346798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:42.346866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:54:42.346910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:42.346947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:54:42.346990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:42.347026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:54:42.347064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:54:42.347150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:54:42.347193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:54:42.347225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:54:42.347752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:54:42.347874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:54:42.347916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:54:42.347957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:54:42.348011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:54:42.348103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:54:42.350492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:54:42.350904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:54:42.352059Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:671:2661] Bootstrap 2025-12-23T18:54:42.352864Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:671:2661] Become StateWork (SchemeCache [1:676:2666]) 2025-12-23T18:54:42.354918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:54:42.355235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: 
TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.355535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2025-12-23T18:54:42.356407Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:671:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:54:42.358772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:42.359003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-12-23T18:54:42.359401Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLockedTabletsMustNotRestart [GOOD] Test command err: 2025-12-23T18:54:15.238086Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:15.261837Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:15.262133Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:15.262918Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:15.263207Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:15.264164Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:51:2076] ControllerId# 72057594037932033 2025-12-23T18:54:15.264202Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:15.264287Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:15.264414Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:15.275081Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:15.275156Z node 2 :BS_PROXY NOTICE: 
dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:15.277003Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:60:2080] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.277096Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:61:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.277173Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:62:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.277243Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:63:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.277314Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:64:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.277382Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:65:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.277475Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:66:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.277495Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:15.277570Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:51:2076] 2025-12-23T18:54:15.277594Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:51:2076] 2025-12-23T18:54:15.277635Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:15.277668Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:15.277868Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:15.279863Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:15.279955Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:15.280162Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:15.280325Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-12-23T18:54:15.281041Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-12-23T18:54:15.281077Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:15.281572Z node 1 :BS_NODE 
DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:75:2077] ControllerId# 72057594037932033 2025-12-23T18:54:15.281594Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:15.281640Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:15.281706Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:15.290038Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:15.290079Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:15.291096Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:83:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.291241Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:84:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.291394Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:85:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.291484Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:86:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.291580Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:87:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.291655Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:88:2087] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.291732Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:89:2088] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.291749Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:15.291785Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:75:2077] 2025-12-23T18:54:15.291815Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:75:2077] 2025-12-23T18:54:15.291840Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:15.291862Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:15.292336Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:15.292437Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:15.292773Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:51:2076] 2025-12-23T18:54:15.292809Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.292936Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:15.293045Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward 
TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:15.301724Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2025-12-23T18:54:15.301796Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.301970Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.302187Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.302220Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-12-23T18:54:15.304072Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:15.304724Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 1 2025-12-23T18:54:15.304760Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-12-23T18:54:15.304858Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:15.305131Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2025-12-23T18:54:15.305200Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.305255Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:100:2093] 2025-12-23T18:54:15.305280Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:100:2093] 2025-12-23T18:54:15.305336Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [2:55:2064] 2025-12-23T18:54:15.305359Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [2:55:2064] 2025-12-23T18:54:15.305677Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:54:15.305726Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:54:15.305758Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvRe ... 
23T18:54:41.947055Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [27:475:2265] 2025-12-23T18:54:41.947131Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [27:475:2265] 2025-12-23T18:54:41.947204Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [27:475:2265] 2025-12-23T18:54:41.947315Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [27:475:2265] 2025-12-23T18:54:41.947430Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [27:475:2265] 2025-12-23T18:54:41.947480Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [27:475:2265] 2025-12-23T18:54:41.947527Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [27:475:2265] 2025-12-23T18:54:41.947608Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [27:475:2265] 2025-12-23T18:54:41.947660Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [27:475:2265] 2025-12-23T18:54:41.947724Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [27:462:2252] EventType# 268697623 2025-12-23T18:54:41.947887Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxUnlockTabletExecution} queued, type NKikimr::NHive::TTxUnlockTabletExecution 2025-12-23T18:54:41.947951Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxUnlockTabletExecution} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:54:41.948019Z node 27 :HIVE NOTICE: tx__unlock_tablet.cpp:43: HIVE#72057594037927937 THive::TTxUnlockTabletExecution::Execute TabletId: 72075186224037889 2025-12-23T18:54:41.948258Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxUnlockTabletExecution} hope 1 -> done Change{15, redo 110b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-12-23T18:54:41.948331Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{26, NKikimr::NHive::TTxUnlockTabletExecution} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:54:41.959657Z node 27 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [848c0e06882585e9] bootstrap ActorId# [27:478:2268] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:10:0:0:105:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-12-23T18:54:41.959860Z node 27 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [848c0e06882585e9] Id# [72057594037927937:2:10:0:0:105:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:54:41.959960Z node 27 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [848c0e06882585e9] restore Id# [72057594037927937:2:10:0:0:105:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:54:41.960075Z node 27 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [848c0e06882585e9] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:10:0:0:105:1] Marker# BPG33 2025-12-23T18:54:41.960164Z node 27 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [848c0e06882585e9] Sending missing VPut part# 0 to# 0 blob Id# 
[72057594037927937:2:10:0:0:105:1] Marker# BPG32 2025-12-23T18:54:41.960378Z node 27 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [27:58:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:10:0:0:105:1] FDS# 105 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:54:41.961764Z node 27 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [848c0e06882585e9] received {EvVPutResult Status# OK ID# [72057594037927937:2:10:0:0:105:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 25 } Cost# 80826 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 26 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-12-23T18:54:41.961928Z node 27 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [848c0e06882585e9] Result# TEvPutResult {Id# [72057594037927937:2:10:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-12-23T18:54:41.962048Z node 27 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [848c0e06882585e9] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:10:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:54:41.962308Z node 27 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:10:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-12-23T18:54:41.962454Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} commited cookie 1 for step 10 2025-12-23T18:54:41.962648Z node 27 :HIVE NOTICE: tx__unlock_tablet.cpp:91: HIVE#72057594037927937 THive::TTxUnlockTabletExecution::Complete TabletId: 72075186224037889 SideEffects: {Notifications: 0x1004020F [27:462:2252] NKikimrHive.TEvLockTabletExecutionLost TabletID: 72075186224037889 Reason: LOCK_LOST_REASON_UNLOCKED,0x1004020E [27:462:2252] NKikimrHive.TEvUnlockTabletExecutionResult TabletID: 72075186224037889 Status: OK StatusMessage: ""} 2025-12-23T18:54:41.962975Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [27:479:2269] 2025-12-23T18:54:41.963021Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [27:479:2269] 2025-12-23T18:54:41.963112Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [27:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:41.963178Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 27 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [27:331:2201] 2025-12-23T18:54:41.963241Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [27:479:2269] 2025-12-23T18:54:41.963309Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [27:479:2269] 2025-12-23T18:54:41.963366Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [27:479:2269] 2025-12-23T18:54:41.963432Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [27:479:2269] 2025-12-23T18:54:41.963554Z node 27 :PIPE_SERVER DEBUG: 
tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [27:479:2269] 2025-12-23T18:54:41.963696Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [27:479:2269] 2025-12-23T18:54:41.963750Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [27:479:2269] 2025-12-23T18:54:41.963792Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [27:479:2269] 2025-12-23T18:54:41.963854Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [27:479:2269] 2025-12-23T18:54:41.963901Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [27:479:2269] 2025-12-23T18:54:41.963964Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [27:462:2252] EventType# 268959750 2025-12-23T18:54:41.964151Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{27, NKikimr::NHive::TTxSyncTablets} queued, type NKikimr::NHive::TTxSyncTablets 2025-12-23T18:54:41.964219Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{27, NKikimr::NHive::TTxSyncTablets} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:54:41.964319Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{27, NKikimr::NHive::TTxSyncTablets} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:54:41.964395Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{27, NKikimr::NHive::TTxSyncTablets} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:54:41.964542Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{28, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-12-23T18:54:41.964611Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{28, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:54:41.964700Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{28, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:54:41.964760Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{28, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:54:41.965083Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [27:482:2272] 2025-12-23T18:54:41.965130Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [27:482:2272] 2025-12-23T18:54:41.965241Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [27:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:41.965321Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 27 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [27:331:2201] 2025-12-23T18:54:41.965395Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [27:482:2272] 2025-12-23T18:54:41.965462Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown 
[27:482:2272] 2025-12-23T18:54:41.965517Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [27:482:2272] 2025-12-23T18:54:41.965588Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [27:482:2272] 2025-12-23T18:54:41.965728Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [27:482:2272] 2025-12-23T18:54:41.965872Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [27:482:2272] 2025-12-23T18:54:41.965935Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [27:482:2272] 2025-12-23T18:54:41.966000Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [27:482:2272] 2025-12-23T18:54:41.966083Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [27:482:2272] 2025-12-23T18:54:41.966138Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [27:482:2272] 2025-12-23T18:54:41.966214Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [27:481:2271] EventType# 268697616 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> TOlapNaming::CreateColumnTableOk [GOOD] >> TOlapNaming::CreateColumnTableFailed >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> THiveTest::TestHiveBalancerWithLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:03.502456Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:03.502656Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:03.585448Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:03.586360Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:03.590730Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:505:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:03.591052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:03.591178Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:03.592690Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:500:2171], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:03.593020Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:03.593132Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:03.921279Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:04.031507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:04.031664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:04.032437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:04.032548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:04.077319Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:54:04.078326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:04.078660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:04.174586Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:04.187565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002be9/r3tmp/tmp4sbEyi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23631, node 1 TClient is connected to server localhost:20793 2025-12-23T18:54:04.997132Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:04.997205Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:04.997243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:04.997754Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:10.208849Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:10.209638Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:10.209952Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:756:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:10.218199Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:10.219008Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:761:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:10.219342Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:10.219564Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:10.220628Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:10.220681Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:10.466752Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:10.552521Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:10.552631Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:10.553234Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:10.553306Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:10.596657Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:54:10.597455Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:10.597718Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:10.651158Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:10.688373Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002be9/r3tmp/tmp1vyXkY/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22475, node 3 TClient is connected to server localhost:8991 2025-12-23T18:54:13.725501Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:13.726971Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-12-23T18:54:13.729893Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:54:13.731097Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:13.731142Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:13.731174Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:13.731837Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:13.732970Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2025-12-23T18:54:13.733204Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:54:13.748716Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:13.748834Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:13.786649Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-12-23T18:54:13.787311Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:13.903373Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2025-12-23T18:54:13.903918Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "RED-b954-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } issue_log { id: "RED-3c4a-1231c6b1" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-b954-1231c6b1-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "RED-5995-1231c6b1" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-3c4a-1231c6b1" type: "DATABASE" level: 1 } location { id: 3 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2025-12-23T18:54:18.069295Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existen ... 
d to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:25.353933Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:25.364474Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:25.366186Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:25.366811Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:770:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:25.367021Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:25.367223Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:25.368201Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:765:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:25.368339Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:25.368483Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:25.616255Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:25.725145Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:25.725275Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:25.725672Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:25.725736Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:25.769498Z node 7 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-12-23T18:54:25.770085Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:25.770405Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:25.837500Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:25.849851Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002be9/r3tmp/tmp6CFKtK/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14382, node 7 TClient is connected to server localhost:6370 2025-12-23T18:54:29.046542Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:29.050701Z node 7 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2025-12-23T18:54:29.052644Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:29.052692Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:29.052731Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:29.053458Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:29.053957Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:54:29.057333Z node 7 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-12-23T18:54:29.057561Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:54:29.069604Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:29.069723Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:29.109285Z node 7 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-12-23T18:54:29.109840Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:29.185351Z node 7 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-12-23T18:54:29.186003Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: GOOD location { id: 7 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2025-12-23T18:54:33.523009Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:33.527366Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:33.529259Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:33.529481Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:33.529626Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:33.814626Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:33.896229Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:33.896401Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:33.931236Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:33.997505Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002be9/r3tmp/tmpyh0tat/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18379, node 10 TClient is connected to server localhost:28106 2025-12-23T18:54:34.808891Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:34.808961Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:34.809005Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:34.809586Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2025-12-23T18:54:39.414545Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:39.421176Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:39.438466Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:266:2226], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:39.438807Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:39.438988Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:39.813784Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:39.912774Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:39.912911Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:39.947150Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:40.003865Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002be9/r3tmp/tmpkH2sFq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15424, node 12 TClient is connected to server localhost:25137 2025-12-23T18:54:40.988594Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:40.988663Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:40.988706Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:40.989184Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:41.581953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:41.582046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:41.582083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:41.582118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default 
configuration 2025-12-23T18:54:41.582153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:41.582185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:41.582248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:41.582313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:41.583241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:41.583524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:41.671501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:41.671565Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:41.677172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:41.677401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:41.677520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:41.680684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:41.680894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:41.681612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:41.681854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:41.683837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:41.684028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:41.685173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:41.685233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:41.685377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:41.685426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:41.685488Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:41.685624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:41.812701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.813624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.813742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.813828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.813896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.813954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.814010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.814107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.814175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.814254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.814321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.814407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.814514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.814592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.814650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:54:42.572847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 38 2025-12-23T18:54:42.573116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.573162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:54:42.574481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:54:42.574585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:54:42.574636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:54:42.574672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:54:42.574704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:54:42.575759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:54:42.575827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:54:42.575847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:54:42.575877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:54:42.575903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:54:42.575966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-12-23T18:54:42.577025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1282 } } CommitVersion { Step: 5000038 TxId: 101 } 2025-12-23T18:54:42.577056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:54:42.577152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1282 } } CommitVersion { Step: 5000038 TxId: 101 } 2025-12-23T18:54:42.577290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1282 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2025-12-23T18:54:42.578859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:54:42.578936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:54:42.579096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:54:42.579170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:54:42.579363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:54:42.579458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:42.579509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.579550Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:54:42.579596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2025-12-23T18:54:42.581160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:54:42.581478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:54:42.582629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.582777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.583142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.583196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:54:42.583311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:54:42.583348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:54:42.583392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:54:42.583436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:54:42.583490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-12-23T18:54:42.583557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2715] message: TxId: 101 2025-12-23T18:54:42.583616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:54:42.583659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:54:42.583696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:54:42.583872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:54:42.585892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:54:42.585944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:738:2716] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-12-23T18:54:42.589714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { Name: "modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:54:42.589956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.590261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2025-12-23T18:54:42.592897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:42.593167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true >> TOlapNaming::CreateColumnTableExtraSymbolsOk [GOOD] >> TOlapNaming::CreateColumnStoreOk >> TStorageBalanceTest::TestScenario1 [GOOD] >> TStorageBalanceTest::TestScenario2 >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst >> TSchemeShardTTLUtility::ValidateTiers [GOOD] >> TSchemeShardTTLTests::TtlTiersValidation |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:54:34.586209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:34.586315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:34.586369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:34.586419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:34.586456Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:34.586481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:34.586532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:34.586630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:34.587455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:34.589650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:34.672512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:34.672557Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:34.682910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:34.683083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:34.684950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:34.696490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:34.697808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:34.701591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:34.704737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:34.713882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:34.714657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:34.721496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:34.721567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:34.721710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:34.721770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:34.721871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:34.721988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:34.873870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.874493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.874598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.874665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.874705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.874776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.874863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.874913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.874991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.875050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.875086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.875151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.875198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.875259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.875314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ], version: 18446744073709551615 2025-12-23T18:54:42.860895Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-12-23T18:54:42.860946Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-12-23T18:54:42.862026Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046678944 2025-12-23T18:54:42.862144Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:54:42.862191Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:54:42.862218Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:54:42.862242Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:54:42.862652Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.862703Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:54:42.862860Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:54:42.862912Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:54:42.862960Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:54:42.863004Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:54:42.863061Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-12-23T18:54:42.863122Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:54:42.863181Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:54:42.863223Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:54:42.863384Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-12-23T18:54:42.864115Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:54:42.864899Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: 
ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 2025-12-23T18:54:42.865047Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-12-23T18:54:42.865349Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-12-23T18:54:42.865602Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2025-12-23T18:54:42.865808Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:42.865989Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-12-23T18:54:42.866970Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 2025-12-23T18:54:42.867179Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:54:42.867329Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-12-23T18:54:42.868094Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 2025-12-23T18:54:42.868439Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:54:42.868613Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:54:42.868988Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:54:42.869515Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:54:42.869586Z node 
8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:54:42.869708Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:54:42.869989Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:54:42.870057Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:54:42.870126Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:54:42.873696Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:54:42.873754Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-12-23T18:54:42.873826Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:54:42.873847Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:54:42.873893Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:54:42.873913Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-12-23T18:54:42.873954Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:54:42.873999Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-12-23T18:54:42.874206Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:54:42.874284Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:54:42.874627Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:54:42.874693Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:54:42.875239Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:54:42.875353Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:54:42.875408Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:585:2526] TestWaitNotification: OK eventTxId 103 2025-12-23T18:54:42.875976Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:54:42.876225Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 287us result status StatusPathDoesNotExist 2025-12-23T18:54:42.876412Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> THiveTest::TestSetDomain [GOOD] >> THiveTest::TestSetDomainAlready >> THealthCheckTest::ShardsLimit800 [GOOD] >> TCmsTest::BridgeModeNodeLimit [GOOD] >> THealthCheckTest::BridgeGroupDeadInBothPiles [GOOD] >> TOlapNaming::CreateColumnTableFailed [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::ValidateTiers [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TOlapNaming::AlterColumnTableOk [GOOD] >> TOlapNaming::CreateColumnStoreOk [GOOD] >> TSchemeShardTTLTestsWithReboots::AlterTable >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType >> THealthCheckTest::ShardsNoLimit [GOOD] >> THiveTest::TestSetDomainAlready [GOOD] >> THiveTest::TestSetDomainError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::BridgeModeNodeLimit [GOOD] Test command err: 2025-12-23T18:54:24.809961Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-12-23T18:54:24.810054Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-12-23T18:54:24.810177Z node 9 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2025-12-23T18:54:24.811465Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP 
Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 10 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 11 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 12 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 13 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 14 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 15 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" 
Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 16 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2025-12-23T18:54:24.811908Z node 9 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 10 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 11 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 12 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 13 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 
120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 14 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 15 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 16 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2025-12-23T18:54:24.812093Z node 9 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-12-23T18:54:24.812143Z node 9 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 2025-12-23T18:54:24.812183Z node 9 :CMS DEBUG: cms.cpp:415: Result: DISALLOW_TEMP (reason: VDisks eviction from host 9 has not yet been completed) 2025-12-23T18:54:24.812317Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-12-23T18:54:24.812477Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-12-23T18:54:24.812517Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 9, marker# MARKER_DISK_FAULTY 2025-12-23T18:54:24.812711Z node 9 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.083512s 2025-12-23T18:54:24.812752Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2025-12-23T18:54:24.812841Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2025-12-23T18:54:24.812879Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-12-23T18:54:24.812901Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-12-23T18:54:24.812931Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# 
[12:8388350642965737326:1634689637] 2025-12-23T18:54:24.812954Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-12-23T18:54:24.812970Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-12-23T18:54:24.812985Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, ... 29.880689Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:54:29.880865Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-12-23T18:54:29.880912Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 9:9, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-12-23T18:54:29.880937Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2025-12-23T18:54:29.881123Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2025-12-23T18:54:29.881254Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2025-12-23T18:54:29.881329Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2025-12-23T18:54:29.881360Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 9:9 2025-12-23T18:54:29.881385Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 2025-12-23T18:54:29.893552Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2025-12-23T18:54:29.893613Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2025-12-23T18:54:29.909195Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-12-23T18:54:29.909305Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-12-23T18:54:29.909374Z node 9 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:03:00Z 2025-12-23T18:54:29.910215Z node 9 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-12-23T18:54:29.910329Z node 9 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } 2025-12-23T18:54:29.910420Z node 9 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 9, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-12-23T18:54:29.910473Z node 9 :CMS DEBUG: cms.cpp:759: Ring: 0; State: Ok 2025-12-23T18:54:29.910498Z node 9 :CMS DEBUG: cms.cpp:759: Ring: 1; State: Ok 2025-12-23T18:54:29.910517Z node 9 :CMS DEBUG: cms.cpp:759: Ring: 2; State: Ok 2025-12-23T18:54:29.910545Z node 9 :CMS DEBUG: cms.cpp:404: Result: ALLOW 2025-12-23T18:54:29.910689Z node 9 :CMS DEBUG: cms.cpp:1064: Accepting permission: id# user-p-1, requestId# user-r-1, 
owner# user 2025-12-23T18:54:29.910771Z node 9 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (9) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-12-23T18:54:29.910887Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-12-23T18:54:29.911087Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.210512Z, action# Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 2025-12-23T18:54:29.911207Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-12-23T18:54:29.923532Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:139: TTxStorePermissions complete 2025-12-23T18:54:29.923836Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 } Deadline: 780210512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 9 InterconnectPort: 12001 } } } } 2025-12-23T18:54:29.923898Z node 9 :CMS DEBUG: cms.cpp:1092: Schedule cleanup at 1970-01-01T00:33:00.210512Z 2025-12-23T18:54:29.960026Z node 9 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (9) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-12-23T18:54:29.960316Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-12-23T18:54:29.960371Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-12-23T18:54:29.960421Z node 9 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:03:00Z 2025-12-23T18:54:29.961003Z node 9 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-12-23T18:54:29.961088Z node 9 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } 2025-12-23T18:54:29.961129Z node 9 :CMS DEBUG: node_checkers.cpp:99: [Nodes Counter] Checking Node: 10, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-12-23T18:54:29.961165Z node 9 :CMS DEBUG: cms.cpp:404: Result: ALLOW 2025-12-23T18:54:29.961272Z node 9 :CMS DEBUG: cms.cpp:1064: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-12-23T18:54:29.961311Z node 9 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12002 (10) (permission user-p-2 until 1970-01-01T00:13:00Z) 2025-12-23T18:54:29.961370Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-12-23T18:54:29.961482Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.312024Z, action# 
Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 2025-12-23T18:54:29.961550Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-12-23T18:54:29.973625Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:139: TTxStorePermissions complete 2025-12-23T18:54:29.973862Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 } Deadline: 780312024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 10 InterconnectPort: 12002 } } } } 2025-12-23T18:54:29.974403Z node 9 :CMS INFO: cms.cpp:1361: User user is done with permissions user-p-1 2025-12-23T18:54:29.974471Z node 9 :CMS DEBUG: cms.cpp:1384: Resulting status: OK 2025-12-23T18:54:29.974534Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2025-12-23T18:54:29.974617Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 9 2025-12-23T18:54:29.974709Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2025-12-23T18:54:29.974783Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-12-23T18:54:29.986943Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2025-12-23T18:54:29.987114Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-12-23T18:54:29.987578Z node 9 :CMS INFO: cms.cpp:1361: User user is done with permissions user-p-2 2025-12-23T18:54:29.987630Z node 9 :CMS DEBUG: cms.cpp:1384: Resulting status: OK 2025-12-23T18:54:29.987698Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2025-12-23T18:54:29.987782Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 10 2025-12-23T18:54:29.987861Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2025-12-23T18:54:29.987911Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-12-23T18:54:30.000128Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2025-12-23T18:54:30.000283Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-12-23T18:54:40.916565Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 49 2025-12-23T18:54:40.917628Z node 49 :CMS ERROR: sentinel.cpp:808: 
[Sentinel] [StateUpdater] There is no pdisk info: nodeId# 59 2025-12-23T18:54:40.917870Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 52 2025-12-23T18:54:40.918065Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 53 2025-12-23T18:54:40.918106Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 54 2025-12-23T18:54:40.918160Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 55 2025-12-23T18:54:40.918232Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 56 2025-12-23T18:54:40.918282Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 57 2025-12-23T18:54:40.918315Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 58 2025-12-23T18:54:40.918348Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 50 2025-12-23T18:54:40.918378Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 51 2025-12-23T18:54:40.918414Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 60 2025-12-23T18:54:40.918466Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 61 2025-12-23T18:54:40.918498Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 62 2025-12-23T18:54:40.918543Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 63 2025-12-23T18:54:40.918585Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 64 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> THealthCheckTest::LayoutIncorrect [GOOD] >> THealthCheckTest::LayoutCorrect >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTable >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:02.343285Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:02.343964Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:02.344616Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:678:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:02.428056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:02.429208Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:02.432996Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:683:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:02.433249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:02.433404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:02.434547Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:02.434668Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:02.779719Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:02.889437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:02.889611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:02.890528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:02.890627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:02.936187Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:54:02.937128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:02.937491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:03.031492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:03.046076Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c00/r3tmp/tmpY2Q14l/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18893, node 1 TClient is connected to server localhost:17175 2025-12-23T18:54:03.890467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:03.890559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:03.890601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:03.891054Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:10.339459Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:10.340150Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:10.341613Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:10.347764Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:10.348788Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:10.350544Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:10.351235Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:10.351426Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:10.352950Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:10.353003Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:10.608480Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:10.688695Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:10.688803Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:10.689188Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:10.689243Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:10.758160Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:54:10.758699Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:10.759235Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:10.862565Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:10.896888Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c00/r3tmp/tmpR4STSA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61123, node 3 TClient is connected to server localhost:21940 2025-12-23T18:54:11.653467Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:11.653532Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:11.653564Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:11.654139Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:17.953629Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:17.954429Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:17.956214Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:17.964202Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:17.965171Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:17.966930Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:17.967321Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:17.967435Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:17.968619Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:17.968684Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:18.222406Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:18.321817Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: ... d_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:23.163391Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:23.163514Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:23.210916Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:23.322883Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c00/r3tmp/tmpIDmKns/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28577, node 7 TClient is connected to server localhost:22939 2025-12-23T18:54:24.039722Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:24.039772Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:24.039804Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:24.040044Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2025-12-23T18:54:27.683827Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:27.689908Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:27.691984Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:27.692289Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:27.692382Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:27.946700Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:27.946855Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:27.973749Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:34:2081] 1766516064943251 != 1766516064943255 2025-12-23T18:54:27.980326Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:28.023454Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:28.104867Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c00/r3tmp/tmprv5I5w/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15825, node 9 TClient is connected to server localhost:27369 2025-12-23T18:54:28.645680Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:28.645740Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:28.645774Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:28.646221Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2025-12-23T18:54:32.272456Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:32.276406Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:32.278324Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:32.278554Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:32.278675Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:32.545757Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:32.612817Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:32.612962Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:32.647122Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:32.718533Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c00/r3tmp/tmpmf1FP1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10580, node 10 TClient is connected to server localhost:17146 2025-12-23T18:54:33.618187Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:33.618261Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:33.618310Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:33.618885Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2025-12-23T18:54:41.424206Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:41.424399Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:41.436094Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:41.437455Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:41.438020Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:41.438143Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:41.440378Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:679:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:41.440591Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:41.440790Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:41.788396Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:41.898156Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:41.898317Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:41.898720Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:41.898800Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:41.969047Z node 12 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 13 Cookie 13 2025-12-23T18:54:41.969836Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:41.970436Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:42.066666Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:42.080239Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c00/r3tmp/tmplVMULq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4577, node 12 TClient is connected to server localhost:12124 2025-12-23T18:54:43.053044Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:43.053114Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:43.053170Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:43.053574Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnTableFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:36.947191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:36.947286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:36.947329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:36.947369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:36.947412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:36.947441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:36.947521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:36.947627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:36.948517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:36.948815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:37.031422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:37.031481Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:37.037063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: 
TxInitSchema.Complete 2025-12-23T18:54:37.037256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:37.037350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:37.039913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:37.040046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:37.040621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:37.040758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:37.042242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:37.042403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:37.043392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:37.043458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:37.043612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:37.043666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:37.043736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:37.043872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:37.154720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.155438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.155550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.155615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.155667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.155716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.155776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.155819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.155862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.155917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.155980Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.156044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.156109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.156185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:37.156267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:54:44.064330Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:44.064463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:44.064513Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:44.064776Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:54:44.064835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:44.065017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:54:44.065101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:44.066962Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:44.067022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:44.067233Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:44.067282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:54:44.067654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:44.067709Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:54:44.067819Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:54:44.067858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:44.067901Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:54:44.067942Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:44.067983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:54:44.068032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:44.068077Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:54:44.068116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:54:44.068191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:54:44.068236Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:54:44.068276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:54:44.068869Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:54:44.068979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:54:44.069028Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:54:44.069070Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:54:44.069116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:54:44.069204Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:54:44.072340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:54:44.072853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:54:44.073749Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:675:2665] Bootstrap 2025-12-23T18:54:44.074999Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:675:2665] Become StateWork (SchemeCache [2:680:2670]) 2025-12-23T18:54:44.077950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { 
Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:54:44.078201Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:44.078443Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-12-23T18:54:44.079181Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:675:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:54:44.081189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:44.081363Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-12-23T18:54:44.081869Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:54:44.082049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:54:44.082082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:54:44.082339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:54:44.082406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:54:44.082434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:690:2680] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-12-23T18:54:44.084498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "~!@#$%^&*()+=asdfa" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:54:44.084670Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:44.084804Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', at schemeshard: 72057594046678944 2025-12-23T18:54:44.086127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: 
StatusSchemeError Reason: "Invalid name for column \'~!@#$%^&*()+=asdfa\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:44.086263Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:54:44.086448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:54:44.086472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:54:44.086682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:54:44.086753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:54:44.086775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:697:2687] TestWaitNotification: OK eventTxId 102 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> THiveTest::TestHiveBalancerWithLimit [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::BridgeGroupDeadInBothPiles [GOOD] Test command err: 2025-12-23T18:53:57.695082Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128607109121233:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:57.698797Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf6/r3tmp/tmp8tC2Pq/pdisk_1.dat 2025-12-23T18:53:57.978898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:57.979059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:58.026319Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:58.027593Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:58.029663Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128607109121195:2081] 1766516037689721 != 1766516037689724 2025-12-23T18:53:58.039714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12599, node 1 2025-12-23T18:53:58.238780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:58.238851Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:58.238858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:58.238935Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:58.285769Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:64695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516038091 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:58.639155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:53:58.701511Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:00.679018Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128623643595494:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:54:00.679062Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf6/r3tmp/tmpdrqF9T/pdisk_1.dat 2025-12-23T18:54:00.696990Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:54:00.781355Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:00.788370Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587128623643595468:2081] 1766516040677545 != 1766516040677548 2025-12-23T18:54:00.814437Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:00.814510Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:00.820184Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13662, node 2 2025-12-23T18:54:00.857688Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:00.857736Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:00.857743Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:00.857843Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:00.900487Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516040863 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:54:01.044136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... ... waiting for SysViewsRoster update finished 2025-12-23T18:54:08.880573Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:08.881647Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:08.884309Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:08.893875Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:08.895482Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:08.897405Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:08.898146Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:08.898346Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:08.900021Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:08.900077Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:09.252012Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:09.348358Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:09.348495Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:09.349012Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:09.349092Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:09.419783Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:54:09.420400Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:09.420941Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:09.460291Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:09.517728Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnkn ... Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:686:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:32.119009Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:32.119138Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:32.401576Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:32.484696Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:32.484854Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:32.485277Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:32.485333Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:32.553282Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-12-23T18:54:32.553768Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:32.554162Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:32.644603Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:32.682050Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf6/r3tmp/tmpa71TY4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22186, node 9 TClient is connected to server localhost:24405 2025-12-23T18:54:33.538351Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:33.538407Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:33.538445Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:33.538941Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: MAINTENANCE_REQUIRED issue_log { id: "ORANGE-af30-1231c6b1" status: ORANGE message: "Storage has no redundancy" location { database { name: "/Root" } } reason: "ORANGE-cf29-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "ORANGE-a3e2-1231c6b1-2147483648" status: ORANGE message: "Group dead in some piles" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-1a83-1231c6b1-2147483649" type: "STORAGE_GROUP" level: 4 } issue_log { id: "ORANGE-cf29-1231c6b1-f7549920" status: ORANGE message: "Pool has no redundancy" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "ORANGE-a3e2-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-1a83-1231c6b1-2147483649" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483649" pile { name: "1" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-9-2147483649-3-55-0-55" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-9f89-1231c6b1-9-2147483649-3-55-0-55" status: RED message: "VDisks are not available" location { storage { node { id: 9 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483649-3-55-0-55" id: "2147483649-3-56-0-56" id: "2147483649-3-57-0-57" } pile { name: "1" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "ORANGE-5cc9-1231c6b1" status: ORANGE message: "Database has storage issues" location { database { name: "/Root" } } reason: "ORANGE-af30-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 pile { name: "pile0" } } ... waiting for SysViewsRoster update finished 2025-12-23T18:54:41.217043Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:41.219279Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:41.236424Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:41.237297Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:41.237722Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:41.238017Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:41.240129Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:679:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:41.240586Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:41.240863Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:41.577224Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:41.688182Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:41.688345Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:41.689155Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:41.689241Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:41.733893Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-12-23T18:54:41.734358Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:41.734731Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:41.792669Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:41.805302Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf6/r3tmp/tmpAUVoc1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14734, node 11 TClient is connected to server localhost:13635 2025-12-23T18:54:42.805858Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:42.805940Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:42.805986Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:42.806714Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-2f2e-1231c6b1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-40f1-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "RED-6100-1231c6b1-2147483648" status: RED message: "Group dead in all piles" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-1a83-1231c6b1-2147483649" reason: "RED-1a83-1231c6b1-2147483650" type: "STORAGE_GROUP" level: 4 } issue_log { id: "RED-40f1-1231c6b1-f7549920" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-6100-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-1a83-1231c6b1-2147483650" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483650" pile { name: "2" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-11-2147483650-3-58-0-58" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-1a83-1231c6b1-2147483649" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483649" pile { name: "1" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-11-2147483649-3-55-0-55" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-9f89-1231c6b1-11-2147483650-3-58-0-58" status: RED message: "VDisks are not available" location { storage { node { id: 11 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483650-3-58-0-58" id: "2147483650-3-59-0-59" id: "2147483650-3-60-0-60" } pile { name: "2" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "RED-9f89-1231c6b1-11-2147483649-3-55-0-55" status: RED message: "VDisks are not available" location { storage { node { id: 11 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483649-3-55-0-55" id: "2147483649-3-56-0-56" id: "2147483649-3-57-0-57" } pile { name: "1" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "RED-5cc9-1231c6b1" status: RED message: "Database has storage issues" location { database { name: "/Root" } } reason: "RED-2f2e-1231c6b1" type: "DATABASE" level: 1 } location { id: 11 host: "::1" port: 12001 pile { name: "pile0" } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnTableOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] 
Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:54:36.466692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:36.466820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:36.466863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:36.466890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:36.466952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:36.466975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:36.467014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:36.467070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:36.467702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:36.469541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:36.563427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:36.563474Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:36.574109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:36.574269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:36.576329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:36.589154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:36.589360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:36.593639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:36.599299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:36.612251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2025-12-23T18:54:36.613768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:36.632253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:36.632314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:36.632437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:36.632474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:36.632538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:36.634047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:36.778507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.780431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.780566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.780637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.780708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.780767Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.780827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.780893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.780958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... -23T18:54:43.885804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.885892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.890727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.890929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.891004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.891073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.891131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.891201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.891260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.891341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.892122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.892206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.892334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.892418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.892482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.892555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.892615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.892662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.897726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.897852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.897922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.898008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.898143Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.898251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.898317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.898405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.900900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.901017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.901073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.901121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.901200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.901303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.901410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.901475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.902082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.902198Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.902241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.902292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.902332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.902543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.902611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.902668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.902777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.902819Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:54:43.902939Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:54:43.902979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:54:43.903012Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:54:43.903037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:54:43.903063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:54:43.903123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:3125:4344] message: TxId: 102 2025-12-23T18:54:43.903164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:54:43.903206Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:54:43.903248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:54:43.904189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2025-12-23T18:54:43.907815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- 
TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:54:43.907860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:4038:5201] TestWaitNotification: OK eventTxId 102 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> TTabletResolver::NodeProblem >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly >> THiveTest::TestSetDomainError [GOOD] >> THiveTest::TestTabletsStartingCounter >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::NoStoragePools ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnStoreOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:54:36.466722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:36.466858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:36.466905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:36.466948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:36.466993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:36.467022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:36.467075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:36.467161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:36.468121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:36.469551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:36.563655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:36.563719Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:36.574402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:36.574579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:36.576347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:36.590385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:36.590653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:36.593672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:36.599293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:36.612514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:36.613777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:36.632277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:36.632345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:36.632474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:36.632521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:36.632576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:36.634067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:36.785855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.786590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.786679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.786722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.786799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.786837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.786917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.786975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.787013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.787055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.787116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.787201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.787276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.787336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.787398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Count reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:54:44.487524Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000038 2025-12-23T18:54:44.489269Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:44.489322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:44.489523Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:54:44.489669Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:44.489718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-12-23T18:54:44.489768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2025-12-23T18:54:44.489838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:44.489890Z node 2 :FLAT_TX_SCHEMESHARD INFO: create_store.cpp:246: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:54:44.489956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: create_store.cpp:269: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-12-23T18:54:44.491243Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:54:44.491354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:54:44.491396Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:54:44.491441Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:54:44.491485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:54:44.492395Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:54:44.492473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:54:44.492525Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:54:44.492557Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:54:44.492589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:54:44.492662Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-12-23T18:54:44.495251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-12-23T18:54:44.496874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:54:44.497577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:54:44.509620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6585: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2025-12-23T18:54:44.509682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:54:44.509779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:54:44.511466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:44.511610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:44.511661Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:54:44.511778Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:54:44.511814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:54:44.511847Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:54:44.511881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:54:44.511913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-12-23T18:54:44.511989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:744:2721] message: TxId: 101 2025-12-23T18:54:44.512033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:54:44.512065Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:54:44.512092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:54:44.512229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:54:44.513676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:54:44.513727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:745:2722] TestWaitNotification: OK eventTxId 101 2025-12-23T18:54:44.514176Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:54:44.514385Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 253us result status StatusSuccess 2025-12-23T18:54:44.515041Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:44.162099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:44.162171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:44.162206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:44.162236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:44.162265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:44.162296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:44.162355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:44.162420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:44.163097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2025-12-23T18:54:44.163298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:44.220144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:44.220195Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:44.225036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:44.225191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:44.225307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:44.227690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:44.227829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:44.228332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:44.228484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:44.229822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:44.229975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:44.230775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:44.230815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:44.230924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:44.230957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:44.230987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:44.231064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:44.346067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.346948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347502Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:44.347788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:54:45.152340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:54:45.152435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:54:45.152465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:54:45.152512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:54:45.152552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:54:45.152635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-12-23T18:54:45.154324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1539 } } CommitVersion { Step: 5000038 TxId: 101 } 2025-12-23T18:54:45.154373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:54:45.154528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1539 } } CommitVersion { Step: 5000038 TxId: 101 } 2025-12-23T18:54:45.154721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1539 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2025-12-23T18:54:45.156672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:54:45.156743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation 
FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:54:45.156897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:54:45.156960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:54:45.157077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:54:45.157151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:45.157199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.157244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:54:45.157292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2025-12-23T18:54:45.158998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:54:45.159313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:54:45.160452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.160563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.160943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.161010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:54:45.161137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:54:45.161174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:54:45.161222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:54:45.161267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:54:45.161316Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-12-23T18:54:45.161401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2715] message: TxId: 101 2025-12-23T18:54:45.161465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:54:45.161507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:54:45.161544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:54:45.161728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:54:45.163659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:54:45.163715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:738:2716] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-12-23T18:54:45.167852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 Delete { } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:54:45.168150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.168511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, at schemeshard: 72057594046678944 2025-12-23T18:54:45.171160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Tier 0: only the last tier in TTL settings can have Delete action" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:45.171412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-12-23T18:54:45.175425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 EvictToExternalStorage { Storage: "/Root/abc" } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-12-23T18:54:45.175715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.176140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, at schemeshard: 72057594046678944 2025-12-23T18:54:45.178491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Only DELETE via TTL is allowed for row-oriented tables" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:45.179041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:00.773241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:00.882406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:00.887438Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:00.887772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:00.887838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:01.243350Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:01.332423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:01.332574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:01.390687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:01.483666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bed/r3tmp/tmpaj1Lsa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31079, node 1 TClient is connected to server localhost:2117 2025-12-23T18:54:02.265355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:02.265411Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:02.265452Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:02.265811Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2025-12-23T18:54:05.487948Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:05.493208Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:05.495256Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:523:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:05.495571Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:05.495718Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:05.709862Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:05.786203Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:05.786333Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:05.832411Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:05.934380Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bed/r3tmp/tmp7Fo6Db/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22821, node 3 TClient is connected to server localhost:65186 2025-12-23T18:54:06.750170Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:06.750238Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:06.750277Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:06.750950Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2025-12-23T18:54:10.865867Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:10.870638Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:10.872596Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:267:2227], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:10.872846Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:10.872966Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:11.183702Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:11.250253Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:11.250429Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:11.297825Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:11.355852Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bed/r3tmp/tmpryEp9V/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8738, node 6 TClient is connected to server localhost:6950 2025-12-23T18:54:12.180549Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:12.180613Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:12.180654Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:12.180849Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2025-12-23T18:54:19.392667Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:19.393499Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:19.396093Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:684:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:19.403599Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:19.404805Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:19.406522Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:298:2224], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:19.406941Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:19.407133Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:19.407844Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:19.407919Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:19.724000Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:19.826571Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:19.826723Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:19.827574Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:19.827658Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:19.872370Z node 8 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-12-23T18:54:19.873272Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:19.873650Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:19.944996Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme s ... 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:27.019208Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:27.099072Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:27.099186Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:27.099747Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:27.099815Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:27.144035Z node 10 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2025-12-23T18:54:27.144689Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:27.144971Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:27.201637Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:27.270771Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bed/r3tmp/tmpXAO0sd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16239, node 10 TClient is connected to server localhost:9113 2025-12-23T18:54:27.922984Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:27.923025Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:27.923045Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:27.923310Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2025-12-23T18:54:33.958636Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:33.958782Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:33.968274Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:33.969115Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:33.969475Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:33.969550Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:33.971027Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:679:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:33.971161Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:33.971278Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:34.213016Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:34.298525Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:34.298627Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:34.299036Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:34.299103Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:34.368458Z node 12 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 13 Cookie 13 2025-12-23T18:54:34.368910Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:34.369251Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:34.461001Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:34.473982Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bed/r3tmp/tmpsS7cQ2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16476, node 12 TClient is connected to server localhost:20295 2025-12-23T18:54:35.261270Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:35.261331Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:35.261362Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:35.261747Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:42.167544Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:42.167792Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:42.183186Z node 15 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:42.185178Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:505:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:42.186145Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:42.186448Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:42.187783Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [15:500:2171], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:42.188693Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:42.188828Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:42.456516Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:42.559512Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:42.559650Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:42.560505Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:42.560592Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:42.604623Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-12-23T18:54:42.605589Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:42.605891Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:42.675833Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:42.689190Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bed/r3tmp/tmpA8a4es/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13652, node 14 TClient is connected to server localhost:29532 2025-12-23T18:54:43.408696Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:43.408752Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:43.408779Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:43.409188Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:02.595200Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:02.596148Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:02.596875Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:678:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:02.702003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:02.703051Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:02.708807Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:683:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:02.709180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:02.709405Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:02.710540Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:02.710655Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:03.115573Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:03.228699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:03.228825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:03.229568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:03.229645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:03.273586Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:54:03.274406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:03.274724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:03.350496Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:03.375277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf2/r3tmp/tmpzkrWjg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7268, node 1 TClient is connected to server localhost:28670 2025-12-23T18:54:04.160561Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:04.160629Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:04.160659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:04.161040Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:09.680237Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:09.680903Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:09.682267Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:09.688229Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:09.689156Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:09.690232Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:09.690656Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:09.690793Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:09.691744Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:09.691780Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:09.910160Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:09.983977Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:09.984107Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:09.984547Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:09.984615Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:10.051902Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:54:10.052235Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:10.052499Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:10.128053Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:10.175883Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf2/r3tmp/tmp3nE843/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27667, node 3 TClient is connected to server localhost:13040 2025-12-23T18:54:10.898801Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:10.898878Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:10.898916Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:10.899335Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:16.684427Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:16.685239Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:16.687125Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:16.695376Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:16.696368Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:16.698215Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:16.698596Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:16.698709Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:16.700135Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:16.700210Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:16.980991Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:17.079324Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: U ... 8:54:24.531033Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:24.585493Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:24.598155Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf2/r3tmp/tmpP7Hq9M/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19825, node 7 TClient is connected to server localhost:13761 2025-12-23T18:54:25.375751Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:25.375812Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:25.375849Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:25.376175Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD location { id: 7 host: "::1" port: 12001 } ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:31.067118Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:31.067830Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:31.080477Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:31.082253Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:31.082797Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:691:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:31.083179Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:31.083263Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:31.083977Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:686:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:31.084329Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:31.084449Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:31.351158Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:31.456997Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:31.457155Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:31.457504Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:31.457561Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:31.526662Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-12-23T18:54:31.527156Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:31.527623Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:31.594870Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:31.620385Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf2/r3tmp/tmphG408C/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11263, node 9 TClient is connected to server localhost:6309 2025-12-23T18:54:32.446610Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:32.446663Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:32.446694Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:32.447212Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD location { id: 9 host: "::1" port: 12001 } ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:37.054055Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:37.060898Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:37.077214Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:442:2402], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:37.077743Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:37.077902Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:37.406213Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:37.483351Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:37.483477Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:37.542409Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:37.580337Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf2/r3tmp/tmpzZz5i8/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17304, node 11 TClient is connected to server localhost:1522 2025-12-23T18:54:38.511016Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:38.511090Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:38.511127Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:38.511722Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2025-12-23T18:54:42.907708Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:42.913025Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:42.915101Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:42.915385Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:42.915466Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:43.243518Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:43.329333Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:43.329505Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:43.376210Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:43.471688Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf2/r3tmp/tmpDgNc6Q/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16099, node 13 TClient is connected to server localhost:18109 2025-12-23T18:54:44.350372Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:44.350454Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:44.350504Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:44.351079Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> TTabletResolver::NodeProblem [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType >> THiveTest::TestTabletsStartingCounter [GOOD] >> THiveTest::TestTabletsStartingCounterExternalBoot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:45.132018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2025-12-23T18:54:45.132097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:45.132128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:45.132159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:45.132191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:45.132233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:45.132478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:45.132530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:45.133291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:45.133520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:45.201473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:45.201531Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:45.206439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:45.206637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:45.206757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:45.209119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:45.209276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:45.209910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:45.210097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:45.211850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:45.212005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:45.212814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:45.212858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:45.212972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:45.213014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:45.213049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:45.213137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.375186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.376159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.376286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.376382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.376448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.376505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:54:45.376566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.376668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.376736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.376810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.376876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.376948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.377057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.377147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.377222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... xId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:45.992378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:54:45.992580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.992638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:54:45.992679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:54:45.992729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:54:45.994530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.994595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:54:45.994638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:54:45.996451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.996516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.996585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:45.996656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:54:45.996777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:54:45.998279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:54:45.998433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:54:45.998778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:45.998899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:45.998944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:45.999230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:54:45.999290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:45.999476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:54:45.999558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:54:46.001470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:46.001520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:46.001728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:46.001776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:54:46.002052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:46.002098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:54:46.002343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:54:46.002382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:46.002439Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:54:46.002474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:46.002519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:54:46.002562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:46.002598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:54:46.002631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:54:46.002707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:54:46.002772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:54:46.002812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:54:46.003453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:54:46.003567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:54:46.003616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:54:46.003663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:54:46.003711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:54:46.003822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:54:46.006823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:54:46.007368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:54:46.008742Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:671:2661] Bootstrap 2025-12-23T18:54:46.010006Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:671:2661] Become StateWork (SchemeCache [1:676:2666]) 2025-12-23T18:54:46.012964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:54:46.013319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:46.013431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2025-12-23T18:54:46.013892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-12-23T18:54:46.015053Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:671:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:54:46.017956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:46.018159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-12-23T18:54:46.018545Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2025-12-23T18:54:46.139533Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:46.139720Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [1:215:2139] followers: 0 2025-12-23T18:54:46.139814Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:215:2139] 2025-12-23T18:54:46.140045Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:46.140181Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [1:221:2143] followers: 0 2025-12-23T18:54:46.140219Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 
winner: [1:221:2143] 2025-12-23T18:54:46.141352Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:215:2139] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:46.141414Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:215:2139] 2025-12-23T18:54:46.141589Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:221:2143] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:46.141624Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:221:2143] 2025-12-23T18:54:46.141728Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 4 2025-12-23T18:54:46.141820Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 123 leader: [1:215:2139] by nodeId 2025-12-23T18:54:46.141859Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:215:2139] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:46.141889Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:54:46.142076Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [2:231:2096] followers: 0 2025-12-23T18:54:46.142117Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:231:2096] 2025-12-23T18:54:46.142526Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 234 leader: [1:221:2143] by nodeId 2025-12-23T18:54:46.142579Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:221:2143] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:46.142615Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:54:46.142899Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [2:237:2098] followers: 0 2025-12-23T18:54:46.142960Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:237:2098] 2025-12-23T18:54:46.144359Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 4 2025-12-23T18:54:46.144409Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:231:2096] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:46.144452Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 
1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:231:2096] 2025-12-23T18:54:46.144636Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:237:2098] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:46.144671Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:237:2098] 2025-12-23T18:54:46.144829Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 6 2025-12-23T18:54:46.144863Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 123 leader: [2:231:2096] by nodeId 2025-12-23T18:54:46.144911Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:231:2096] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:46.144938Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:54:46.145092Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [3:249:2096] followers: 0 2025-12-23T18:54:46.145143Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:249:2096] 2025-12-23T18:54:46.145476Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:237:2098] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:46.145518Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:237:2098] 2025-12-23T18:54:46.145676Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 7 2025-12-23T18:54:46.145713Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [3:249:2096] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:46.145741Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:249:2096] 2025-12-23T18:54:46.145940Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 234 leader: [2:237:2098] by nodeId 2025-12-23T18:54:46.145994Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:237:2098] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:46.146022Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:54:46.146202Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [3:255:2098] followers: 0 
2025-12-23T18:54:46.146245Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:255:2098] |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> KqpScripting::StreamOperationTimeout [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:54:34.586190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:34.586294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:34.586337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:34.586416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:34.586446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:34.586470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:34.586538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:34.586641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:34.587547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:34.589668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:34.677544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:34.677594Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:34.687437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: 
TxInitSchema.Complete 2025-12-23T18:54:34.687653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:34.687856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:34.697095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:34.697799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:34.701561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:34.704714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:34.713505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:34.714616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:34.721463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:34.721582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:34.721814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:34.721884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:34.722050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:34.722210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:34.851150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.854030Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.854100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.854149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.854194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.854238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
r__attr_1" Value: "value" } UserAttributesVersion: 2 2025-12-23T18:54:46.543502Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186234409546 2025-12-23T18:54:46.543721Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:601: Cannot publish paths for unknown operation id#0 2025-12-23T18:54:46.543912Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:46.543952Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:54:46.544137Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:46.544176Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:213:2213], at schemeshard: 72057594046678944, txId: 104, path id: 38 FAKE_COORDINATOR: Erasing txId 104 2025-12-23T18:54:46.545036Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:54:46.545155Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:54:46.545193Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:54:46.545250Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2025-12-23T18:54:46.545310Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2025-12-23T18:54:46.545392Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:54:46.547962Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6221: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 38 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 2 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-12-23T18:54:46.548086Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:54:46.548227Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:805:2772], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 2, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:54:46.548407Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-12-23T18:54:46.548470Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-12-23T18:54:46.548622Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-12-23T18:54:46.548653Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:897:2840], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-12-23T18:54:46.549459Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-12-23T18:54:46.549551Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:54:46.549725Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186234409546, cookie: 0 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:54:46.550012Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:54:46.550069Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:54:46.550586Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:54:46.550699Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:54:46.550769Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [8:1039:2980] TestWaitNotification: OK eventTxId 104 2025-12-23T18:54:46.551440Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:54:46.551664Z node 8 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 265us result status StatusSuccess 2025-12-23T18:54:46.552142Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:46.552923Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2025-12-23T18:54:46.553114Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 212us result status StatusSuccess 2025-12-23T18:54:46.553593Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186234409546 PathType: EPathTypeDir CreateFinished: false CreateTxId: 281474976720657 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardTTLTests::CheckCounters >> THiveTest::TestTabletsStartingCounterExternalBoot [GOOD] >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::DisableCMS >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD] >> THiveTest::TestDeleteTabletError >> TSchemeShardTTLTests::BuildIndexShouldSucceed >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestTabletsStartingCounterExternalBoot [GOOD] Test command err: 2025-12-23T18:54:15.149793Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:15.178518Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:15.178899Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:15.179750Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new 
MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:15.180041Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:15.181103Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:51:2076] ControllerId# 72057594037932033 2025-12-23T18:54:15.181148Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:15.181274Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:15.181409Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:15.193809Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:15.193890Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:15.196405Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:60:2080] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.196584Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:61:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.196728Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:62:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.196863Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:63:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.197003Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:64:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.197138Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:65:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.197323Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:66:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.197355Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:15.197463Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:51:2076] 2025-12-23T18:54:15.197501Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:51:2076] 2025-12-23T18:54:15.197575Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:15.197630Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:15.197972Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:15.200944Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:15.201090Z 
node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:15.201424Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:15.201673Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-12-23T18:54:15.202849Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-12-23T18:54:15.202904Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:15.203808Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:75:2077] ControllerId# 72057594037932033 2025-12-23T18:54:15.203850Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:15.203924Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:15.204034Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:15.216608Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:15.216653Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:15.218279Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:83:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.218423Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:84:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.218547Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:85:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.218664Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:86:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.218824Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:87:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.218965Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:88:2087] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.219089Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:89:2088] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.219113Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:15.219165Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:75:2077] 2025-12-23T18:54:15.219209Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:75:2077] 2025-12-23T18:54:15.219248Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:15.219283Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:15.219994Z node 1 :BS_NODE DEBUG: 
{NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:15.220141Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:15.220557Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:51:2076] 2025-12-23T18:54:15.220608Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.220798Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:15.220947Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:15.231219Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2025-12-23T18:54:15.231329Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.231542Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.231814Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.231856Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-12-23T18:54:15.233946Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:15.234797Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 1 2025-12-23T18:54:15.234862Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-12-23T18:54:15.234972Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:15.235227Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2025-12-23T18:54:15.235268Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.235321Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:100:2093] 2025-12-23T18:54:15.235379Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:100:2093] 2025-12-23T18:54:15.235430Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [2:55:2064] 2025-12-23T18:54:15.235452Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [2:55:2064] 2025-12-23T18:54:15.235797Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: 
{EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:54:15.235862Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:54:15.235891Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvRe ... age_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:47.764364Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-12-23T18:54:47.764454Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-12-23T18:54:47.764496Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-12-23T18:54:47.764554Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:47.764650Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:47.764712Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:47.765113Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [25:323:2302] 2025-12-23T18:54:47.765176Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [25:323:2302] 2025-12-23T18:54:47.765366Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:47.765518Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:47.765680Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-12-23T18:54:47.765767Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-12-23T18:54:47.765808Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-12-23T18:54:47.765876Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:47.765967Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:47.766014Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 
2025-12-23T18:54:47.766104Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [0:0:0] followers: 0 2025-12-23T18:54:47.766231Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [25:323:2302] 2025-12-23T18:54:47.766295Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [25:323:2302] 2025-12-23T18:54:47.766676Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [25:326:2304] 2025-12-23T18:54:47.766756Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [25:326:2304] 2025-12-23T18:54:47.766892Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [25:274:2265] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:47.766987Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 25 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [25:274:2265] 2025-12-23T18:54:47.767073Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [25:326:2304] 2025-12-23T18:54:47.767143Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [25:326:2304] 2025-12-23T18:54:47.767214Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [25:326:2304] 2025-12-23T18:54:47.767297Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [25:326:2304] 2025-12-23T18:54:47.767459Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [25:326:2304] 2025-12-23T18:54:47.767641Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [25:326:2304] 2025-12-23T18:54:47.767728Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [25:326:2304] 2025-12-23T18:54:47.767792Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [25:326:2304] 2025-12-23T18:54:47.767871Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [25:326:2304] 2025-12-23T18:54:47.767932Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [25:326:2304] 2025-12-23T18:54:47.768026Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [25:325:2303] EventType# 268697624 2025-12-23T18:54:47.768257Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-12-23T18:54:47.768355Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:54:47.768644Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{6, redo 144b alter 0b annex 0, ~{ 1, 16 } -{ }, 0 gb} 2025-12-23T18:54:47.768743Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, 
NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:54:47.780141Z node 25 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [185eac4b9c06d110] bootstrap ActorId# [25:329:2307] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:126:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-12-23T18:54:47.780340Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [185eac4b9c06d110] Id# [72057594037927937:2:5:0:0:126:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:54:47.780445Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [185eac4b9c06d110] restore Id# [72057594037927937:2:5:0:0:126:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:54:47.780547Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [185eac4b9c06d110] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:126:1] Marker# BPG33 2025-12-23T18:54:47.780631Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [185eac4b9c06d110] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:126:1] Marker# BPG32 2025-12-23T18:54:47.780862Z node 25 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [25:36:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:126:1] FDS# 126 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:54:47.782417Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [185eac4b9c06d110] received {EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:126:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 80992 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-12-23T18:54:47.782590Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [185eac4b9c06d110] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-12-23T18:54:47.782704Z node 25 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [185eac4b9c06d110] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:54:47.783020Z node 25 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-12-23T18:54:47.783208Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} commited cookie 1 for step 5 2025-12-23T18:54:47.783638Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [25:331:2309] 2025-12-23T18:54:47.783713Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [25:331:2309] 2025-12-23T18:54:47.783800Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [25:331:2309] 2025-12-23T18:54:47.783915Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [25:274:2265] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:47.784002Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: 
SelectForward node 25 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [25:274:2265] 2025-12-23T18:54:47.784091Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [25:331:2309] 2025-12-23T18:54:47.784166Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [25:331:2309] 2025-12-23T18:54:47.784259Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [25:331:2309] 2025-12-23T18:54:47.784414Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [25:331:2309] 2025-12-23T18:54:47.784641Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [25:331:2309] 2025-12-23T18:54:47.784722Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [25:331:2309] 2025-12-23T18:54:47.784785Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [25:331:2309] 2025-12-23T18:54:47.784868Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [25:331:2309] 2025-12-23T18:54:47.784936Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [25:331:2309] 2025-12-23T18:54:47.785029Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [25:330:2308] EventType# 268830214 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamOperationTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 20819, MsgBus: 8701 2025-12-23T18:53:36.984673Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128518392854553:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.984807Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:53:37.013804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ed7/r3tmp/tmp1t5GrB/pdisk_1.dat 2025-12-23T18:53:37.311729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.311914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.314412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.375335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.421191Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
TServer::EnableGrpc on GrpcPort 20819, node 1 2025-12-23T18:53:37.614849Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:37.622019Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.622038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:37.622051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.622129Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8701 2025-12-23T18:53:37.991242Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:8701 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516017511 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.350884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.360902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:53:38.386736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.546176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:38.715691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.790434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:40.379157Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535572725497:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.379261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.379559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535572725507:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.379641Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.682510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.710599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.739822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.768186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.794524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.824495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.850093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.906239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.965425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535572726374:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.965563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.965669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535572726379:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.965825Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535572726381:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.965889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.969276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opI ... e_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:41.014048Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8118, node 2 2025-12-23T18:54:41.054568Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:41.054601Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:41.054610Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:41.054773Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:41.131762Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20158 TClient is connected to server localhost:20158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516081057 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:54:41.495876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:54:41.513232Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:54:41.572897Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:41.754916Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:41.827576Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:41.904225Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:44.340872Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128810882571613:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.340963Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.341218Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128810882571622:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.341266Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.396152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.425558Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.455727Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.486888Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.517832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.551832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.588348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.633015Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.716188Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128810882572493:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.716270Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.716281Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128810882572498:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.716508Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128810882572501:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.716557Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.719988Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:54:44.731072Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128810882572500:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:54:44.834541Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128810882572553:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:54:45.887470Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128793702700598:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:54:45.887558Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:54:46.581882Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 1ms, session id ydb://session/3?node_id=2&id=ZjNjZmNmN2UtNzRjMjdiZC0yZTM5OTA5Ny00ZmQxZjY4MQ== } |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:47.418127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:47.418203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:47.418240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:47.418275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:47.418312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:47.418350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:47.418420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:47.418480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:47.419340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:47.419595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:47.504472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:47.504526Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:47.509910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:47.510129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:47.510257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:47.513126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:47.513315Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:47.513969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:47.514155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:47.515910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:47.516075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:47.517028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:47.517083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:47.517216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:47.517257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:47.517296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:47.517404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:47.679417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.680323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.680451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.680533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.680607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.680666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.680730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.680831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.680906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.680980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.681045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.681119Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.681240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.681324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:47.681411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:48.261155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:54:48.261316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:54:48.263291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:54:48.263440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:54:48.263797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:48.263956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:48.264024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:48.264339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:54:48.264438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:54:48.264612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:54:48.264687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:54:48.266911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:48.266969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:48.267204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:48.267276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:54:48.267517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.267567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:54:48.267692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:54:48.267727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:48.267797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:54:48.267847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:48.267894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:54:48.267935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:54:48.267971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:54:48.268008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:54:48.268080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:54:48.268121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:54:48.268158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:54:48.269016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:54:48.269130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:54:48.269176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:54:48.269221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:54:48.269263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:54:48.269351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & 
remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:54:48.272727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:54:48.273268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1766516088.274656 1558776 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-12-23T18:54:48.275155Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:677:2667] Bootstrap 2025-12-23T18:54:48.276215Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:677:2667] Become StateWork (SchemeCache [1:682:2672]) 2025-12-23T18:54:48.279389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:54:48.279784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.280221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-12-23T18:54:48.281378Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:677:2667] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:54:48.284008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:48.284236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-12-23T18:54:48.284853Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 W0000 00:00:1766516088.285353 1558776 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-12-23T18:54:48.288754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } 
KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:54:48.289026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.289227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2025-12-23T18:54:48.291321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:48.291466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] >> TabletState::NormalLifecycle ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:41.417460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:41.417525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:41.417550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:41.417573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:41.417599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:41.417622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:41.417667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:41.417713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 
10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:41.418333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:41.418522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:41.487305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:41.487354Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:41.493391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:41.493630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:41.493772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:41.497093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:41.497295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:41.498026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:41.498287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:41.500716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:41.500937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:41.502096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:41.502158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:41.502315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:41.502364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:41.502405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:41.502546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:41.654632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.655669Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.655798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.655866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.655928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.655986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.656040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.656143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.656218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.656292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.656361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.656452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.656581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.656652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:41.656711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
72057594046678944 2025-12-23T18:54:48.091619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.091698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.091759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.091829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.091922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.094904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.095046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.095085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:54:48.095212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:54:48.095245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:54:48.095280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:54:48.095311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:54:48.095342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-12-23T18:54:48.095409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3119:4339] message: TxId: 103 2025-12-23T18:54:48.095461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:54:48.095515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:54:48.095543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:54:48.096471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2025-12-23T18:54:48.098427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:54:48.098471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:4345:5504] TestWaitNotification: OK eventTxId 103 2025-12-23T18:54:48.098969Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:54:48.099200Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 243us result status StatusSuccess 2025-12-23T18:54:48.099706Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 
72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2025-12-23T18:54:48.102774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:54:48.103044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: alter_table.cpp:283: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.106162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2025-12-23T18:54:48.108553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:48.108750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update 
error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:54:48.109079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:54:48.109117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:54:48.109515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:54:48.109601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:54:48.109642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4693:5851] TestWaitNotification: OK eventTxId 104 |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] >> TabletState::NormalLifecycle [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] Test command err: Trying to start YDB, gRPC: 63529, MsgBus: 7231 2025-12-23T18:53:36.998770Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128518206196594:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:36.999768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:53:37.060262Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ec3/r3tmp/tmpWINfpn/pdisk_1.dat 2025-12-23T18:53:37.451774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:37.451886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:37.455551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:37.489558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:37.533025Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63529, node 1 2025-12-23T18:53:37.619087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:37.619102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2025-12-23T18:53:37.619164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:37.619223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:53:37.769301Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7231 2025-12-23T18:53:37.995656Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516017581 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:38.285598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:38.291287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:53:38.308389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.456238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:38.620044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:53:38.693610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:53:40.160817Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535386067545:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.160924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.161220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535386067555:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.161272Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.455048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.482470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.510080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.534349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.559213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.590688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.624197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.691005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:53:40.763018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535386068422:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.763091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.763208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535386068428:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.763233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128535386068427:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.763277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:53:40.767506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opI ... (2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:40.912406Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:40.917353Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7230, node 2 2025-12-23T18:54:40.960148Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:40.960164Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:40.960172Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:40.960238Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:40.969850Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:7559 TClient is connected to server localhost:7559 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516080959 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:54:41.484358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:54:41.503745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:54:41.558734Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:41.739947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:41.799133Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:41.816961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:54:44.451781Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128812323835053:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.451883Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.452129Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128812323835062:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.452175Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.522380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.550277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.576514Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.605020Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.630830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.659700Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.691358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.760590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:44.837378Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128812323835932:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.837508Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.837540Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128812323835937:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.837710Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128812323835939:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.837763Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:44.841015Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:54:44.851582Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128812323835940:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:54:44.927579Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128812323835992:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:54:45.791728Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128795143964024:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:54:45.791796Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:54:47.765944Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766516087791, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:48.100811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:48.100880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:48.100906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:48.100952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:48.100981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:48.101003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:48.101046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:48.101089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:48.101731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:48.101973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:48.161766Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:48.161807Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:48.166505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:48.166679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:48.166810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:48.169499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:48.169694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:48.170421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:48.170648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:48.172566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:48.172776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:48.174001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:48.174062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:48.174230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:48.174293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:48.174342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:48.174473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.286819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.287866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:54:48.287995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.288072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.288138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.288196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.288259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.288376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.288475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.288566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.288661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.288746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.288858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.288936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.289001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
CHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:49.308106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:49.308135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2025-12-23T18:54:49.308163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2025-12-23T18:54:49.309534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-12-23T18:54:49.309585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-12-23T18:54:49.309635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2025-12-23T18:54:49.309651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:54:49.309672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2025-12-23T18:54:49.309687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:54:49.309704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-12-23T18:54:49.309742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:138:2160] message: TxId: 281474976710760 2025-12-23T18:54:49.309794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:54:49.309824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2025-12-23T18:54:49.309849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710760:0 2025-12-23T18:54:49.309909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-12-23T18:54:49.311200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-12-23T18:54:49.311242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710760 2025-12-23T18:54:49.311277Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2025-12-23T18:54:49.311352Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:796:2766], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2025-12-23T18:54:49.312438Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2025-12-23T18:54:49.312528Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:796:2766], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:54:49.312562Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-12-23T18:54:49.313634Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2025-12-23T18:54:49.313716Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:796:2766], 
AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:54:49.313823Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-12-23T18:54:49.313908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:54:49.313935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:887:2846] TestWaitNotification: OK eventTxId 102 2025-12-23T18:54:49.314290Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:54:49.314787Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 386us result status StatusSuccess 2025-12-23T18:54:49.315140Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TPipeCacheTest::TestAutoConnect |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> THiveTest::TestDeleteTabletError [GOOD] >> THiveTest::TestDeleteTabletWithRestartAndRetry >> TTabletPipeTest::TestPipeConnectLoopUnknownTabletWithoutRetries |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:48.430093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:48.430147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:48.430173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:48.430204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:48.430238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:48.430266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-12-23T18:54:48.430321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:48.430383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:48.431237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:48.431477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:48.493041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:48.493096Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:48.497750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:48.497930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:48.498019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:48.500580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:48.500716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:48.501257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:48.501433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:48.503102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:48.503311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:48.504149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:48.504193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:48.504303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:48.504351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:48.504407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:48.504510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.625206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.628905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
4Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:49.679105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:49.679137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2025-12-23T18:54:49.679176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2025-12-23T18:54:49.680405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-12-23T18:54:49.680441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-12-23T18:54:49.680503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2025-12-23T18:54:49.680526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:54:49.680546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2025-12-23T18:54:49.680579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:54:49.680614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-12-23T18:54:49.680656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:138:2160] message: TxId: 281474976710760 2025-12-23T18:54:49.680681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-12-23T18:54:49.680701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2025-12-23T18:54:49.680717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710760:0 2025-12-23T18:54:49.680765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-12-23T18:54:49.681914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7195: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-12-23T18:54:49.681965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7197: Message: TxId: 281474976710760 2025-12-23T18:54:49.682014Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2025-12-23T18:54:49.682132Z node 1 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:787:2757], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2025-12-23T18:54:49.683250Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2025-12-23T18:54:49.683343Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:787:2757], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:54:49.683379Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-12-23T18:54:49.684374Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2025-12-23T18:54:49.684452Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: 
[1:787:2757], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-12-23T18:54:49.684486Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-12-23T18:54:49.684569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:54:49.684595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:878:2837] TestWaitNotification: OK eventTxId 102 2025-12-23T18:54:49.684939Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:54:49.685132Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 190us result status StatusSuccess 2025-12-23T18:54:49.685486Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 
1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::NormalLifecycle [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> THealthCheckTest::LayoutCorrect [GOOD] >> TResourceBroker::TestResubmitTask |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie >> TTabletPipeTest::TestPipeConnectLoopUnknownTabletWithoutRetries [GOOD] >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries |97.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |97.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables [GOOD] Test command err: Trying to start YDB, gRPC: 11191, MsgBus: 20297 ... 
waiting for SysViewsRoster update finished 2025-12-23T18:47:58.342782Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:47:58.459865Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:47:58.468746Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:47:58.469025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:47:58.469083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:47:58.757155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:47:58.757310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:47:58.847247Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766515675363010 != 1766515675363014 2025-12-23T18:47:58.860734Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:47:58.915976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:47:59.091517Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040fa/r3tmp/tmpgIfHIp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11191, node 1 2025-12-23T18:47:59.523707Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:47:59.523769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:47:59.523811Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:47:59.524198Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:47:59.535825Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:20297 TClient is connected to server localhost:20297 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:48:00.054692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:48:00.128997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:918:2783], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.129161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2788], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.129250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.130478Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2793], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.130857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:48:00.135938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:48:00.245227Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:932:2791], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:48:00.288668Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:989:2829] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:48:00.646134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:00.992190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:48:03.651171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 11665, MsgBus: 24747 ... waiting for SysViewsRoster update finished 2025-12-23T18:48:11.131912Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:48:11.141714Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:48:11.142076Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:48:11.142156Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:48:11.399385Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:48:11.399504Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:48:11.422315Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766515687705989 != 1766515687705992 2025-12-23T18:48:11.430086Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:48:11.477263Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:48:11.593669Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0040fa/r3tmp/tmpb5pUiw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11665, node 2 2025-12-23T18:48:11.993440Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:48:11.993501Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:48:11.993533Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:48:11.994104Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:48:12.005696Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24747 TClient is connected to server localhost:24747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { ... 
S/ydb/library/grpc/server/grpc_server.cpp:278:18 #9 0x00001b27cb26 in Shutdown /-S/ydb/core/testlib/test_client.h:443:33 #10 0x00001b27cb26 in ShutdownGRpc /-S/ydb/core/testlib/test_client.h:395:30 #11 0x00001b27cb26 in NKikimr::NKqp::TKikimrRunner::~TKikimrRunner() /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:178:17 #12 0x00001b528cce in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:70:5 #13 0x00001b528cce in reset /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:286:7 #14 0x00001b528cce in ~unique_ptr /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:255:71 #15 0x00001b528cce in ~TTableDataModificationTester /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:15:7 #16 0x00001b528cce in ~TIsolationSetting /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:391:11 #17 0x00001b528cce in void NKikimr::NKqp::NTestSuiteKqpSinkTx::TIsolationSettingTest(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:510:9 #18 0x00001b4efe07 in operator() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #19 0x00001b4efe07 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #20 0x00001b4efe07 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #21 0x00001b4efe07 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #22 0x00001b4efe07 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #23 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #24 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #25 0x00001bf315f9 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #26 0x00001beff667 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #27 0x00001b4ef15c in NKikimr::NKqp::NTestSuiteKqpSinkTx::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #28 0x00001bf00e1f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #29 0x00001bf2b45c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #30 0x7fca5e55cd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) Indirect leak of 16 byte(s) in 2 object(s) allocated from: #0 0x00001b748f1d in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021b85219 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021b85219 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021b85219 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021b85219 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021b85219 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:354:25 #6 0x000021b85219 in __push_back_slow_path /-S/contrib/libs/cxxsupp/libcxx/include/vector:1541:47 #7 0x000021b85219 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/vector:1557:13 #8 0x000021b85219 in grpc_core::Server::RegisterCompletionQueue(grpc_completion_queue*) 
/-S/contrib/libs/grpc/src/core/lib/surface/server.cc:794:8 #9 0x000021199a67 in grpc::ServerBuilder::BuildAndStart() /-S/contrib/libs/grpc/src/cpp/server/server_builder.cc:396:5 #10 0x000021189519 in NYdbGrpc::TGRpcServer::Start() /-S/ydb/library/grpc/server/grpc_server.cpp:249:23 #11 0x00003f83c025 in NKikimr::Tests::TServer::EnableGRpc(NYdbGrpc::TServerOptions const&, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:813:21 #12 0x00003f8403f7 in NKikimr::Tests::TServer::EnableGRpc(unsigned short, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:817:9 #13 0x00004e0c39ae in NKikimr::NKqp::TKikimrRunner::TKikimrRunner(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:239:17 #14 0x00001b3ca739 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #15 0x00001b3ca739 in NKikimr::NKqp::TTableDataModificationTester::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:38:18 #16 0x00001b528c86 in void NKikimr::NKqp::NTestSuiteKqpSinkTx::TIsolationSettingTest(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:509:20 #17 0x00001b4efe07 in operator() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #18 0x00001b4efe07 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #19 0x00001b4efe07 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #20 0x00001b4efe07 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #21 0x00001b4efe07 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #22 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #23 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #24 0x00001bf315f9 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #25 0x00001beff667 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #26 0x00001b4ef15c in NKikimr::NKqp::NTestSuiteKqpSinkTx::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #27 0x00001bf00e1f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #28 0x00001bf2b45c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #29 0x7fca5e55cd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) Indirect leak of 16 byte(s) in 2 object(s) allocated from: #0 0x00001b748f1d in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021b819c9 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021b819c9 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021b819c9 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021b819c9 in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021b819c9 in __split_buffer /-S/contrib/libs/cxxsupp/libcxx/include/__split_buffer:354:25 #6 0x000021b819c9 in __push_back_slow_path 
/-S/contrib/libs/cxxsupp/libcxx/include/vector:1541:47 #7 0x000021b819c9 in push_back /-S/contrib/libs/cxxsupp/libcxx/include/vector:1569:13 #8 0x000021b819c9 in grpc_core::Server::Start() /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:702:17 #9 0x000021b90aee in grpc_server_start /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1715:37 #10 0x000021bb5297 in grpc::Server::Start(grpc::ServerCompletionQueue**, unsigned long) /-S/contrib/libs/grpc/src/cpp/server/server_cc.cc:1214:3 #11 0x000021199f64 in grpc::ServerBuilder::BuildAndStart() /-S/contrib/libs/grpc/src/cpp/server/server_builder.cc:445:11 #12 0x000021189519 in NYdbGrpc::TGRpcServer::Start() /-S/ydb/library/grpc/server/grpc_server.cpp:249:23 #13 0x00003f83c025 in NKikimr::Tests::TServer::EnableGRpc(NYdbGrpc::TServerOptions const&, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:813:21 #14 0x00003f8403f7 in NKikimr::Tests::TServer::EnableGRpc(unsigned short, unsigned int, std::__y1::optional>> const&) /-S/ydb/core/testlib/test_client.cpp:817:9 #15 0x00004e0c39ae in NKikimr::NKqp::TKikimrRunner::TKikimrRunner(NKikimr::NKqp::TKikimrSettings const&) /-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:239:17 #16 0x00001b3ca739 in make_unique /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #17 0x00001b3ca739 in NKikimr::NKqp::TTableDataModificationTester::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:38:18 #18 0x00001b528c86 in void NKikimr::NKqp::NTestSuiteKqpSinkTx::TIsolationSettingTest(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:509:20 #19 0x00001b4efe07 in operator() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #20 0x00001b4efe07 in __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #21 0x00001b4efe07 in __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #22 0x00001b4efe07 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #23 0x00001b4efe07 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #24 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #25 0x00001bf315f9 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #26 0x00001bf315f9 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:526:20 #27 0x00001beff667 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:373:18 #28 0x00001b4ef15c in NKikimr::NKqp::NTestSuiteKqpSinkTx::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:15:1 #29 0x00001bf00e1f in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:494:19 #30 0x00001bf2b45c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:875:44 #31 0x7fca5e55cd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 4f7b0c955c3d81d7cac1501a2498b69d1d82bfe7) SUMMARY: AddressSanitizer: 857212 byte(s) leaked in 10514 allocation(s). 
>> TResourceBroker::TestUpdateCookie [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> THiveTest::TestDeleteTabletWithRestartAndRetry [GOOD] >> THiveTest::TestCreateTabletChangeToExternal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutCorrect [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:03.075197Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:03.076156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:03.076898Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:678:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:03.197568Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:03.199043Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:03.203983Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:683:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:03.204367Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:03.204613Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:03.206175Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:03.206303Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:03.541318Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:03.638444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:03.638583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:03.639340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:03.639428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:03.684051Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:54:03.684904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:03.685226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:03.763144Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:03.811560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c01/r3tmp/tmpJyN11o/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14990, node 1 TClient is connected to server localhost:10455 2025-12-23T18:54:04.601529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:04.601604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:04.601633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:04.602080Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:10.248476Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:10.249189Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:10.250636Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:10.256995Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:10.257957Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:10.259141Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:10.259565Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:10.259686Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:10.260645Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:10.260684Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:10.496014Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:10.579743Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:10.579855Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:10.580221Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:10.580275Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:10.648013Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:54:10.648439Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:10.648762Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:10.725308Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:10.739202Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c01/r3tmp/tmpZGPPQw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12057, node 3 TClient is connected to server localhost:23360 2025-12-23T18:54:11.531744Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:11.531804Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:11.531844Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:11.532267Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:17.081509Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:17.082360Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:17.084214Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:17.092296Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:17.093334Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:17.095137Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:17.095540Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:17.095650Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:17.096878Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:17.096949Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:17.358601Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:17.445362Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: ... RROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:24.872676Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:674:2344], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:24.872862Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:24.872940Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:25.152150Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:25.236115Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:25.236256Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:25.236601Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:25.236665Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:25.294385Z node 7 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-12-23T18:54:25.295294Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:25.295608Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:25.350545Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:25.374859Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c01/r3tmp/tmp2MYdBf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9051, node 7 TClient is connected to server localhost:64993 2025-12-23T18:54:26.248802Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:26.248874Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:26.248920Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:26.249392Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:29.857737Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:29.861713Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:29.871214Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:445:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:29.871554Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:29.871616Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:30.122355Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:30.193175Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:30.193290Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:30.239452Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:30.315826Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c01/r3tmp/tmp2jGUsu/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22396, node 9 TClient is connected to server localhost:4559 2025-12-23T18:54:31.116449Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:31.116501Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:31.116534Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:31.117039Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:31.174901Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2025-12-23T18:54:42.361610Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:42.368847Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:42.371622Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:442:2402], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:42.372051Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:42.372174Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:42.752407Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:42.833626Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:42.833822Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:42.893727Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:42.973610Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c01/r3tmp/tmpfeNjNT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11001, node 11 TClient is connected to server localhost:5735 2025-12-23T18:54:43.905527Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:43.905610Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:43.905661Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:43.906174Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2025-12-23T18:54:48.753921Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:48.757429Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:48.758961Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:447:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:48.759125Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:48.759205Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:49.044176Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:49.120374Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:49.120480Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:49.154581Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:49.222599Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c01/r3tmp/tmpD6MJWa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 63370, node 13 TClient is connected to server localhost:28545 2025-12-23T18:54:49.887567Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:49.887610Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:49.887626Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:49.887892Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TPipeCacheTest::TestAutoConnect [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] >> THealthCheckTest::NoStoragePools [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries [GOOD] >> TCmsTest::DisableCMS [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> THiveTest::TestDownCounterDeleteNode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> 
TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries [GOOD] Test command err: ... waiting for connect1 ... waiting for connect2 ... waiting for connect3 ... waiting for connect4 ... waiting for connect5 ... waiting for connect6 ... waiting for connect7 ... waiting for connect8 ... waiting for connect9 ... waiting for connect10 ... waiting for boot1 ... waiting for connect1 ... waiting for client destroyed notification ... waiting for connect2 ... waiting for connect3 ... waiting for connect4 ... waiting for connect5 ... waiting for connect6 ... waiting for connect7 ... waiting for connect8 ... waiting for connect9 ... waiting for connect10 ... waiting for connect11 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::DisableCMS [GOOD] Test command err: 2025-12-23T18:54:31.562771Z node 10 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 PileRatio: 50 StateStorageSelfHealConfig { Enable: false NodeGoodStateLimit: 10 NodePrettyGoodStateLimit: 7 NodeBadStateLimit: 10 WaitForConfigStep: 60000000 RelaxTime: 600000000 PileupReplicas: false OverrideReplicasInRingCount: 0 OverrideRingsCount: 0 ReplicasSpecificVolume: 200 } InitialDeploymentGracePeriod: 600000000 } Enable: true } } 2025-12-23T18:54:31.563155Z node 10 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-12-23T18:54:31.589244Z node 10 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-12-23T18:54:31.589378Z node 10 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2025-12-23T18:54:31.590765Z node 10 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120110512 
} Timestamp: 120110512 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" 
Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2025-12-23T18:54:31.591314Z node 10 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120110512 } Timestamp: 
120110512 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2025-12-23T18:54:31.591467Z node 10 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.083512s 2025-12-23T18:54:31.591519Z node 10 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2025-12-23T18:54:31.591619Z node 10 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-12-23T18:54:31.591668Z node 10 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-12-23T18:54:31.591687Z node 10 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-12-23T18:54:31.591704Z node 10 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbI ... 
ChangeTime: 0 Path: "/10/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2025-12-23T18:54:41.618051Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2025-12-23T18:54:41.618325Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2025-12-23T18:54:41.618418Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2025-12-23T18:54:41.618534Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2025-12-23T18:54:41.618596Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2025-12-23T18:54:41.618662Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2025-12-23T18:54:41.618705Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/17/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2025-12-23T18:54:41.618781Z node 10 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:54:41.619168Z node 10 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# FAULTY, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 3 StateLimit# 1, dry run# 0 2025-12-23T18:54:41.619283Z node 10 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-12-23T18:54:41.619568Z node 10 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2025-12-23T18:54:41.619877Z node 10 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2025-12-23T18:54:41.619947Z node 10 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 2025-12-23T18:54:41.633573Z node 10 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2025-12-23T18:54:41.659136Z node 
10 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2025-12-23T18:54:41.659258Z node 10 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2025-12-23T18:54:41.659377Z node 10 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:04:00Z 2025-12-23T18:54:41.660442Z node 10 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-12-23T18:54:41.660592Z node 10 :CMS DEBUG: cms.cpp:396: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } 2025-12-23T18:54:41.660654Z node 10 :CMS DEBUG: cms.cpp:415: Result: ERROR (reason: Evict vdisks is disabled in Sentinel (self heal)) 2025-12-23T18:54:41.660817Z node 10 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2025-12-23T18:54:41.661041Z node 10 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-12-23T18:54:41.673381Z node 10 :CMS DEBUG: cms_tx_store_permissions.cpp:139: TTxStorePermissions complete 2025-12-23T18:54:41.673774Z node 10 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR Reason: "Evict vdisks is disabled in Sentinel (self heal)" } RequestId: "user-r-1" } 2025-12-23T18:54:41.674645Z node 10 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2025-12-23T18:54:41.687652Z node 10 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2025-12-23T18:54:41.687966Z node 10 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 1 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 PileRatio: 50 StateStorageSelfHealConfig { Enable: false NodeGoodStateLimit: 10 NodePrettyGoodStateLimit: 7 NodeBadStateLimit: 10 WaitForConfigStep: 60000000 RelaxTime: 600000000 PileupReplicas: false OverrideReplicasInRingCount: 0 OverrideRingsCount: 0 ReplicasSpecificVolume: 200 } InitialDeploymentGracePeriod: 600000000 } Enable: true 2025-12-23T18:54:46.653238Z node 10 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2025-12-23T18:54:46.653298Z node 10 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2025-12-23T18:54:46.653454Z node 10 :CMS DEBUG: cms.cpp:1176: Running CleanupWalleTasks 2025-12-23T18:54:46.653757Z 
node 10 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-12-23T18:54:46.653811Z node 10 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-12-23T18:54:46.653833Z node 10 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-12-23T18:54:46.653849Z node 10 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-12-23T18:54:46.653864Z node 10 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-12-23T18:54:46.653880Z node 10 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-12-23T18:54:46.653921Z node 10 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-12-23T18:54:46.653941Z node 10 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-12-23T18:54:46.654141Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2025-12-23T18:54:46.654778Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2025-12-23T18:54:46.654870Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2025-12-23T18:54:46.655002Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2025-12-23T18:54:46.655116Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2025-12-23T18:54:46.655168Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2025-12-23T18:54:46.655233Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 
ChangeTime: 0 Path: "/16/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2025-12-23T18:54:46.655300Z node 10 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/17/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2025-12-23T18:54:46.655344Z node 10 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2025-12-23T18:54:46.655532Z node 10 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-12-23T18:54:46.655591Z node 10 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-12-23T18:54:46.655777Z node 10 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2025-12-23T18:54:46.656063Z node 10 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2025-12-23T18:54:46.656101Z node 10 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TResourceBroker::TestRealUsage >> TPipeCacheTest::TestIdleRefresh >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoStoragePools [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:04.953218Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:04.953488Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:05.095052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:05.095951Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:05.099951Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:505:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:05.100221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:05.100346Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:05.101901Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:500:2171], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:05.102188Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:05.102284Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:05.482006Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:05.595895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:05.596002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:05.596591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:05.596674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:05.640649Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:54:05.641281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:05.641580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:05.762151Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:05.775163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002beb/r3tmp/tmpGUrORY/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22460, node 1 TClient is connected to server localhost:19938 2025-12-23T18:54:06.560917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:06.560977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:06.561007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:06.561379Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:12.027718Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:12.028430Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:12.030316Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:12.037628Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:12.039113Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:12.040876Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:12.041516Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:12.041698Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:12.043216Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:12.043273Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:12.293470Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:12.390380Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:12.390519Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:12.391042Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:12.391124Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:12.460851Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:54:12.461358Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:12.461814Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:12.543507Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:12.567832Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002beb/r3tmp/tmpiEvYuB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29874, node 3 TClient is connected to server localhost:25548 2025-12-23T18:54:13.383812Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:13.383876Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:13.383914Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:13.384313Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:19.332417Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:19.333431Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:19.335534Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:19.344644Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:19.345838Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:19.347868Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:19.348173Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:19.348274Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:19.349325Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:19.349403Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:19.613710Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:19.698303Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057 ... a/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:33.657697Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:33.658511Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:686:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:33.658937Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:33.659068Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:33.933344Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:34.022335Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:34.022481Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:34.022924Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:34.022986Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:34.093427Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-12-23T18:54:34.093852Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:34.094228Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:34.175760Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:34.238324Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002beb/r3tmp/tmpIMDPGF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4513, node 9 TClient is connected to server localhost:16338 2025-12-23T18:54:35.036010Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:35.036070Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:35.036105Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:35.036583Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "RED-a838-9-9-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-42" path: "/home/runner/.ya/build/build_root/8ptz/002beb/r3tmp/tmpIMDPGF/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a838-9-9-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-43" path: "/home/runner/.ya/build/build_root/8ptz/002beb/r3tmp/tmpIMDPGF/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a838-9-9-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-44" path: "/home/runner/.ya/build/build_root/8ptz/002beb/r3tmp/tmpIMDPGF/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } location { id: 9 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2025-12-23T18:54:43.212879Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:43.214168Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:43.223591Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:43.224046Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:43.224316Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:43.224482Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:43.225540Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:679:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:43.225755Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:43.225904Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:43.579577Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:43.693858Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:43.694027Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:43.694970Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:43.695065Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:43.740541Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-12-23T18:54:43.741235Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:43.741777Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:43.814233Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:43.827692Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002beb/r3tmp/tmp27yQzm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21041, node 11 TClient is connected to server localhost:23742 2025-12-23T18:54:44.728369Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:44.728439Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:44.728480Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:44.729063Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:49.672731Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:49.677541Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:49.689004Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:447:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:49.689315Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:49.689464Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:50.040974Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:50.129506Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:50.129671Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:50.164391Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:50.223141Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002beb/r3tmp/tmp4EAQOJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10078, node 13 TClient is connected to server localhost:63866 2025-12-23T18:54:51.075124Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:51.075200Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:51.075244Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:51.075831Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: { LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } 2025-12-23T18:54:52.518682Z node 3 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437185] NodeDisconnected NodeId# 2 |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> BootstrapperTest::LoneBootstrapper >> TResourceBroker::TestRealUsage [GOOD] >> TResourceBroker::TestRandomQueue >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace >> THiveTest::TestBridgeDisconnectWithReboots [GOOD] >> THiveTest::TestBridgeDemotion >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode |97.8%| [TA] $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [FAIL] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:54:34.586242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:34.586354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:34.586441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:34.586483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:34.586522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:34.586553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:34.586630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:34.586726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:34.587717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:34.589688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:34.675927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:34.676001Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:34.686757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:34.686978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:34.687185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:34.697797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:34.697961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:34.701583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured 
yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:34.704761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:34.713922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:34.714620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:34.721499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:34.721559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:34.721688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:34.721734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:34.721838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:34.721940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:34.852606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.853961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.854030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.854086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:34.854151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... NFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 2025-12-23T18:54:42.074381Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:42.074584Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 Forgetting tablet 72075186233409546 2025-12-23T18:54:42.075022Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-12-23T18:54:42.075179Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:54:42.075353Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 2025-12-23T18:54:42.075490Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:54:42.075635Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:54:42.075976Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:54:42.076051Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:54:42.076190Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:54:42.076652Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:54:42.076716Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2025-12-23T18:54:42.076802Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:54:42.077313Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:54:42.080931Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2025-12-23T18:54:42.081001Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-12-23T18:54:42.081104Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:54:42.081131Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:54:42.081187Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2025-12-23T18:54:42.081215Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-12-23T18:54:42.081274Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:54:42.081322Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-12-23T18:54:42.081538Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:54:42.081630Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:54:42.081950Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send 
EvNotifyTxCompletion 2025-12-23T18:54:42.082016Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:54:42.082513Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:54:42.082618Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:54:42.082667Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [6:1031:2973] TestWaitNotification: OK eventTxId 103 2025-12-23T18:54:42.083332Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:54:42.083544Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 255us result status StatusPathDoesNotExist 2025-12-23T18:54:42.083732Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:54:42.084300Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:54:42.084485Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 233us result status StatusSuccess 2025-12-23T18:54:42.084980Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 equal assertion failed at ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp:712, auto NSchemeShardUT_Private::NLs::PathsInsideDomain(ui64)::(anonymous class)::operator()(const NKikimrScheme::TEvDescribeSchemeResult &) const: curCount == count paths inside domain count mismatch, domain with id 1 has count 36 but expected 0 TBackTrace::Capture()+28 (0x174B745C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x179A66BC) ??+0 (0x39D7C0E8) NSchemeShardUT_Private::TestDescribeResult(NKikimrScheme::TEvDescribeSchemeResult const&, TVector, std::__y1::allocator>>)+201 (0x39B3A4A9) void NTestSuiteTSchemeShardExtSubDomainTest::DropWithDeadTenantHive(NUnitTest::TTestContext&)+27118 (0x170BE5FE) std::__y1::__function::__func, void ()>::operator()()+280 (0x16F6BF78) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x179D5B1A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x179AD398) NTestSuiteTSchemeShardExtSubDomainTest::TCurrentTest::Execute()+1300 (0x16F6B064) NUnitTest::TTestFactory::Execute()+2176 (0x179AEB50) NUnitTest::RunMain(int, char**)+5805 (0x179CF97D) ??+0 (0x7F27C2E00D90) __libc_start_main+128 (0x7F27C2E00E40) _start+41 (0x14950029) |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |97.8%| [TA] {RESULT} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TOlap::StoreStats [GOOD] >> TOlap::Decimal |97.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TResourceBroker::TestRandomQueue [GOOD] >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers >> TPipeCacheTest::TestTabletNode [GOOD] >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2025-12-23T18:54:53.713218Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-4 (4 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.713461Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-10 (10 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.713520Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-11 (11 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.713590Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-13 (13 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.713881Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-26 (26 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.713952Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-28 (28 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714018Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-29 (29 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714103Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-31 (31 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714211Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-35 (35 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714262Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-36 (36 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714352Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-38 (38 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714419Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-40 (40 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714464Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-41 (41 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714517Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-42 (42 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714568Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-43 (43 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714636Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-45 (45 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714756Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 
'task-47 (47 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714910Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-52 (52 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.714998Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-54 (54 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.715057Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-55 (55 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.715278Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-65 (65 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.715318Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-66 (66 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.715362Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-67 (67 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.715427Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-69 (69 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.715542Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-72 (72 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.715638Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-75 (75 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.715707Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-77 (77 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.715801Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-80 (80 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.715871Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-82 (82 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.715960Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-84 (84 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.716179Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-93 (93 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.716225Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-94 (94 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.716368Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-99 (99 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.716506Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-102 (102 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.716596Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-104 (104 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.716697Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-107 (107 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.716937Z node 2 :RESOURCE_BROKER ERROR: 
resource_broker.cpp:675: Assigning waiting task 'task-117 (117 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.717034Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-120 (120 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.717165Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-123 (123 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.717228Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-125 (125 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.717405Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-132 (132 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.717469Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-134 (134 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.717904Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-154 (154 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.717962Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-155 (155 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.718268Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-168 (168 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.718365Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-171 (171 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.718413Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-172 (172 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.718536Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-175 (175 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.718699Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-181 (181 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.719039Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-192 (192 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.719148Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-195 (195 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.719294Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-200 (200 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.719355Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-201 (201 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.719464Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-206 (206 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.719759Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-219 (219 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.719811Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-220 (220 by [2:104:2137])' of unknown type 'wrong' to default 
queue 2025-12-23T18:54:53.719860Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-221 (221 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.719904Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-222 (222 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.720060Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-228 (228 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.720108Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-229 (229 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.720192Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-231 (231 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.720267Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-232 (232 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.720374Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-236 (236 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.720571Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-244 (244 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.720788Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-252 (252 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.720903Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-254 (254 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.720975Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-256 (256 ... 
KER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-761 (761 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.754810Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-762 (762 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.754845Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-789 (789 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.754878Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-794 (794 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.754904Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-797 (797 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.754948Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-802 (802 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.754975Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-816 (816 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755019Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-820 (820 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755039Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-837 (837 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755066Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-842 (842 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755150Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-925 (925 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755169Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-932 (932 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755252Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-978 (978 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755341Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-985 (985 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755371Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-992 (992 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755388Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-995 (995 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755425Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-999 (999 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755455Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-1000 (1000 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755488Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-10 (10 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755544Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-31 (31 by [2:104:2137])' of unknown type 'wrong' to default queue 
2025-12-23T18:54:53.755585Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-40 (40 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755612Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-43 (43 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755692Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-72 (72 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755722Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-75 (75 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755760Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-93 (93 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755786Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-117 (117 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755829Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-134 (134 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755856Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-154 (154 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755887Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-155 (155 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755916Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-175 (175 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.755962Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-200 (200 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756008Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-219 (219 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756064Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-231 (231 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756131Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-252 (252 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756161Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-256 (256 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756191Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-280 (280 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756227Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-282 (282 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756295Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-344 (344 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756314Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-345 (345 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756403Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-360 (360 by [2:104:2137])' of 
unknown type 'wrong' to default queue 2025-12-23T18:54:53.756451Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-362 (362 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756532Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-422 (422 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756643Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-490 (490 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756683Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-503 (503 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756763Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-513 (513 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756868Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-558 (558 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.756952Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-577 (577 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757000Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-599 (599 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757049Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-643 (643 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757099Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-661 (661 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757228Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-684 (684 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757278Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-687 (687 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757318Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-701 (701 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757368Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-722 (722 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757403Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-723 (723 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757478Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-737 (737 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757558Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-786 (786 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757621Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-795 (795 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757691Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-814 (814 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757731Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning 
in-fly task 'task-835 (835 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757795Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-836 (836 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757841Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-847 (847 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.757945Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-887 (887 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.758027Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-909 (909 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.758093Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-922 (922 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.758142Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-931 (931 by [2:104:2137])' of unknown type 'wrong' to default queue 2025-12-23T18:54:53.758217Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-934 (934 by [2:104:2137])' of unknown type 'wrong' to default queue |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:21.245542Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:21.325350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:21.329696Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:21.330035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:21.330133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:21.704522Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:21.799602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:21.799733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:21.859096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:21.962295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003207/r3tmp/tmp4Kj06t/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62482, node 1 2025-12-23T18:52:22.619501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:22.620269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:22.620303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:22.620327Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:22.620476Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:22.683198Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1086 2025-12-23T18:52:23.193997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:25.740033Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:25.744643Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:25.747116Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:25.769372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:25.769455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:25.806388Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:25.807884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:26.036763Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.037372Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.038151Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.038778Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.039129Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.039291Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.039454Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.039570Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.039722Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:26.155397Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:52:26.167094Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:26.167218Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:26.180440Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:26.340857Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:26.379524Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:26.379590Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:26.404583Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:26.406040Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:26.406257Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:26.406329Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:52:26.406390Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:26.406440Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:26.406494Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:26.406545Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:26.406964Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:26.445840Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:26.445912Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:26.455561Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:52:26.463379Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:52:26.464385Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:52:26.468642Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:52:26.492975Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:52:26.493034Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:52:26.493091Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:52:26.518525Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:26.529236Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:26.537822Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:26.537942Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:26.548692Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:26.648829Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:26.967788Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:27.081748Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:27.081843Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:26 ... or_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:6057:4709], schemeshard id = 72075186224037897 ... waiting for TEvPeriodicTableStats2 2025-12-23T18:53:55.635474Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:53:57.990544Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:53:57.990838Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 3 2025-12-23T18:53:57.990973Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2025-12-23T18:54:01.446268Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:54:03.673661Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:54:03.673904Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 4 2025-12-23T18:54:03.673975Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2025-12-23T18:54:07.128553Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:54:07.128644Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:54:07.128683Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:54:07.128718Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:54:07.292837Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:54:09.669005Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:54:09.669164Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2025-12-23T18:54:09.669232Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-12-23T18:54:09.690632Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:54:09.690714Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:12.926967Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:54:14.977872Z 
node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:54:14.978079Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2025-12-23T18:54:14.978147Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-12-23T18:54:18.102766Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:54:20.179672Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:54:20.179887Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2025-12-23T18:54:20.180003Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-12-23T18:54:23.355386Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:54:25.431861Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:54:25.432044Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 8 2025-12-23T18:54:25.432144Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 8 2025-12-23T18:54:25.475675Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2025-12-23T18:54:25.475740Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:54:25.475914Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2025-12-23T18:54:25.488854Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:54:28.670767Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:54:30.768777Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:54:30.768923Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 9 2025-12-23T18:54:30.768985Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 9 2025-12-23T18:54:30.779634Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2025-12-23T18:54:30.779690Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:54:30.779894Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2025-12-23T18:54:30.792582Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:54:33.926171Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 
2025-12-23T18:54:35.888428Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:54:35.888588Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 10 2025-12-23T18:54:35.888650Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 10 2025-12-23T18:54:35.899218Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2025-12-23T18:54:35.899302Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:54:35.899493Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2025-12-23T18:54:35.912064Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:54:39.147353Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:54:39.147426Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:54:39.147458Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:54:39.147489Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:54:39.323507Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:54:41.518222Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:54:41.518416Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 11 2025-12-23T18:54:41.518547Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2025-12-23T18:54:41.540735Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2025-12-23T18:54:41.540806Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:54:41.541044Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2025-12-23T18:54:41.554226Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_COLUMNSHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 ... waiting for TEvPeriodicTableStats2 (done) ... 
waiting for stats update from SchemeShard 2025-12-23T18:54:44.902589Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:54:46.927399Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:54:46.927653Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2025-12-23T18:54:46.927800Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2025-12-23T18:54:46.938652Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2025-12-23T18:54:46.938877Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:54:46.939240Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2025-12-23T18:54:46.952320Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... waiting for stats update from SchemeShard (done) ... waiting for TEvPropagateStatistics 2025-12-23T18:54:50.106484Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:54:52.283887Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:54:52.284203Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 13 ... waiting for TEvPropagateStatistics (done) 2025-12-23T18:54:52.284638Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 13 2025-12-23T18:54:52.284751Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7865:5481]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:54:52.285156Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:54:52.285224Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:7865:5481], StatRequests.size() = 1 |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> THiveTest::TestDownCounterDeleteNode [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> TOlap::Decimal [GOOD] >> TOlap::MoveTableStats |97.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Uint8VectorKnnPushdown >> KqpKnn::Int8VectorKnnPushdown >> KqpKnn::VectorSearchKnnPushdown-Nullable |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestDownCounterDeleteNode [GOOD] Test command err: 2025-12-23T18:54:14.777220Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:14.804588Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:14.808161Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:14.814700Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:14.818333Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-12-23T18:54:14.820888Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-12-23T18:54:14.820941Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:14.821879Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:78:2078] ControllerId# 72057594037932033 2025-12-23T18:54:14.821918Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:14.822285Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:14.822432Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:14.834150Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy 
received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:14.834199Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:14.836329Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:86:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836478Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:87:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836619Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:88:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836748Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:89:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836889Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:90:2087] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.837011Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:91:2088] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.837133Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:92:2089] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.837155Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:14.837441Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:78:2078] 2025-12-23T18:54:14.837489Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:78:2078] 2025-12-23T18:54:14.837544Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:14.837597Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:14.840710Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:14.840786Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:14.842931Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:14.843025Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:14.843242Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:14.843376Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:14.843932Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# 
[2:103:2077] ControllerId# 72057594037932033 2025-12-23T18:54:14.843952Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:14.843988Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:14.844061Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:14.846305Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:14.848289Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:14.848384Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:14.848578Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:14.848721Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:14.849235Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:114:2077] ControllerId# 72057594037932033 2025-12-23T18:54:14.849255Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:14.849286Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:14.849358Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:14.855620Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:14.855659Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:14.856718Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:121:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.856818Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:122:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.856889Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:123:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.856959Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:124:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.857027Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:125:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.857121Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:126:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.857215Z node 3 
:BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:127:2087] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.857230Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:14.857265Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:114:2077] 2025-12-23T18:54:14.857292Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:114:2077] 2025-12-23T18:54:14.857324Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:14.857355Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:14.857638Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:14.857743Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:14.866159Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:78:2078] 2025-12-23T18:54:14.866270Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:14.879364Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:14.879435Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:14.881436Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:133:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.881612Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:134:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.881755Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:135:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.881900Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:136:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.882039Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:137:2085] targetNo ... 
ID: 72057594037936131 Cookie: 2} 2025-12-23T18:54:54.218897Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:54.218945Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:54.218981Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:54.219049Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936131 leader: [0:0:0] followers: 0 2025-12-23T18:54:54.219112Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936131] forward result error, check reconnect [30:199:2162] 2025-12-23T18:54:54.219141Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936131] schedule retry [30:199:2162] 2025-12-23T18:54:54.250783Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936129] client retry [31:53:2064] 2025-12-23T18:54:54.250841Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [31:53:2064] 2025-12-23T18:54:54.250953Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:54.250991Z node 31 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 31 selfDC 2 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:54:54.251127Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:54.251474Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-12-23T18:54:54.251533Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-12-23T18:54:54.251563Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-12-23T18:54:54.251733Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:54.251802Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:54.251837Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:54.251904Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936129 leader: [0:0:0] followers: 0 2025-12-23T18:54:54.251953Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936129] forward result error, 
check reconnect [31:53:2064] 2025-12-23T18:54:54.251981Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936129] schedule retry [31:53:2064] 2025-12-23T18:54:54.262333Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936129] client retry [30:77:2064] 2025-12-23T18:54:54.262391Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [30:77:2064] 2025-12-23T18:54:54.262470Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:54.262507Z node 30 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 30 selfDC 1 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:54:54.262616Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:54.262705Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-12-23T18:54:54.262769Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-12-23T18:54:54.262800Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-12-23T18:54:54.262836Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:54.262892Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:54.262922Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:54.262973Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936129 leader: [0:0:0] followers: 0 2025-12-23T18:54:54.263018Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936129] forward result error, check reconnect [30:77:2064] 2025-12-23T18:54:54.263044Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936129] schedule retry [30:77:2064] 2025-12-23T18:54:54.337669Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936131] client retry [30:199:2162] 2025-12-23T18:54:54.337750Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936131] lookup [30:199:2162] 2025-12-23T18:54:54.337871Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936131 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936131 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:54.337926Z node 30 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 30 selfDC 1 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:54:54.338101Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit 
ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:54.338229Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-12-23T18:54:54.338291Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-12-23T18:54:54.338334Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-12-23T18:54:54.338391Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:54.338463Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:54.338512Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:54.338594Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936131 leader: [0:0:0] followers: 0 2025-12-23T18:54:54.338663Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936131] forward result error, check reconnect [30:199:2162] 2025-12-23T18:54:54.338704Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936131] schedule retry [30:199:2162] 2025-12-23T18:54:54.404589Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} queued, type NKikimr::NHive::TTxDeleteNode 2025-12-23T18:54:54.404687Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:54:54.404922Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} hope 1 -> done Change{10, redo 64b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-12-23T18:54:54.405011Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:54:54.405729Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [30:538:2318] 2025-12-23T18:54:54.405785Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [30:538:2318] 2025-12-23T18:54:54.405888Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [30:452:2266] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:54.405973Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 30 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [30:452:2266] 2025-12-23T18:54:54.406046Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [30:538:2318] 2025-12-23T18:54:54.406107Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [30:538:2318] 2025-12-23T18:54:54.406160Z node 30 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [30:538:2318] 2025-12-23T18:54:54.406240Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [30:538:2318] 2025-12-23T18:54:54.406392Z node 30 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [30:538:2318] 2025-12-23T18:54:54.406621Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [30:538:2318] 2025-12-23T18:54:54.406684Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [30:538:2318] 2025-12-23T18:54:54.406732Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [30:538:2318] 2025-12-23T18:54:54.406823Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [30:538:2318] 2025-12-23T18:54:54.406912Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [30:538:2318] 2025-12-23T18:54:54.407006Z node 30 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [30:537:2317] EventType# 268830214 >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:04.834879Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:04.835023Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:04.921857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:04.922727Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:04.928266Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:505:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:04.928541Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:04.928678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:04.930227Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:500:2171], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:04.930478Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:04.930572Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:05.254008Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:05.350071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:05.350179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:05.350924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:05.351041Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:05.395171Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:54:05.395879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:05.396157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:05.475538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:05.510441Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002812/r3tmp/tmpAoZj8O/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32602, node 1 TClient is connected to server localhost:18469 2025-12-23T18:54:06.298687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:06.298770Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:06.298806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:06.299220Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-a8c7-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-f700-1231c6b1-80c02825" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-f700-1231c6b1-80c02825" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-5d3e-1231c6b1-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "YELLOW-5d3e-1231c6b1-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } issue_log { id: "RED-9f89-1231c6b1-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-5cc9-1231c6b1" status: YELLOW message: "Database has storage issues" location { database { name: "/Root" } } reason: "YELLOW-a8c7-1231c6b1" type: "DATABASE" level: 1 } location { id: 1 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2025-12-23T18:54:12.542313Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:12.543077Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:12.544554Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:12.551017Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:12.552016Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:12.553182Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:12.553629Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:12.553755Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:12.554754Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:12.554795Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:12.787854Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:12.875325Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:12.875456Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:12.875857Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:12.875923Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:12.944189Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:54:12.944657Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:12.945041Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:13.037132Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:13.049658Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002812/r3tmp/tmpckgXer/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13548, node 3 TClient is connected to server localhost:18483 2025-12-23T18:54:13.822366Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:13.822428Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:13.822466Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:13.822888Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:19.772655Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:19.773306Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:19.774536Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:19.780071Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:19.780783Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:19.782041Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manage ... EvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-12-23T18:54:32.592756Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:32.593039Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:32.659842Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:32.696577Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002812/r3tmp/tmpG5Hjji/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5682, node 9 TClient is connected to server localhost:5754 2025-12-23T18:54:33.414287Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:33.414330Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:33.414351Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:33.414699Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2025-12-23T18:54:40.406412Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:40.408150Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:40.418246Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:40.418640Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:40.418879Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:40.419045Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:40.421187Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:679:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:40.421623Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:40.421950Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:40.748169Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:40.859985Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:40.860127Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:40.860940Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:40.861040Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:40.906676Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-12-23T18:54:40.907377Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:40.907850Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:40.996109Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:41.020945Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002812/r3tmp/tmpxsRDCU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22826, node 11 TClient is connected to server localhost:19720 2025-12-23T18:54:41.770415Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:41.770460Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:41.770481Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:41.770782Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2025-12-23T18:54:48.533176Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:48.533681Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:757:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T18:54:48.534775Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:48.543564Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:48.545017Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:48.546156Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:762:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:48.546444Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:48.546756Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:48.547939Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:48.548000Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:48.881330Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:48.981211Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:48.981348Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:48.982106Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:48.982210Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:49.026940Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 14 Cookie 14 2025-12-23T18:54:49.027859Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:49.028247Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:49.098669Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:49.113196Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002812/r3tmp/tmp9374QS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15103, node 13 TClient is connected to server localhost:1412 2025-12-23T18:54:53.114534Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:53.122071Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:53.122143Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:53.122178Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:53.124024Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:53.139168Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:53.139312Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:53.172840Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-12-23T18:54:53.173429Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD location { id: 13 host: "::1" port: 12001 } |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_table [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> THiveTest::TestBridgeDemotion [GOOD] >> THiveTest::TestBridgeBalance |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestHiveBalancerDifferentResources >> TOlap::MoveTableStats [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::MoveTableStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:54:36.466707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:36.466835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:36.466887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:36.466919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:36.466958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:36.466995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:36.467062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:36.467143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:36.468062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:36.469613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:36.563443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:36.563483Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:36.574109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:36.574267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:36.576346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:36.589080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:36.589404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:36.593650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:36.599289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:36.612588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:36.613778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:36.632248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:36.632306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:36.632435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:36.632471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-12-23T18:54:36.632543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:36.634059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:36.781971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.784738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.784850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.784888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.784939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.784983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.785039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:54:36.785093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.785146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.785193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.785238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.785295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.785383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.785443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.785510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 28: Part operation is done id#203:0 progress is 1/1 2025-12-23T18:54:57.436553Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2025-12-23T18:54:57.436596Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#203:0 progress is 1/1 2025-12-23T18:54:57.436630Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2025-12-23T18:54:57.436665Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 203, ready parts: 1/1, is published: true 2025-12-23T18:54:57.436723Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:871:2829] message: TxId: 203 2025-12-23T18:54:57.436784Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2025-12-23T18:54:57.436826Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 203:0 2025-12-23T18:54:57.436857Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 203:0 2025-12-23T18:54:57.436983Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2025-12-23T18:54:57.437021Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:54:57.437347Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:54:57.437407Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2025-12-23T18:54:57.437482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:54:57.439489Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 203: got EvNotifyTxCompletionResult 2025-12-23T18:54:57.439544Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 203: satisfy waiter [3:1051:2998] 2025-12-23T18:54:57.439644Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 203 2025-12-23T18:54:57.440481Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-12-23T18:54:57.440683Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 227us result status StatusPathDoesNotExist 2025-12-23T18:54:57.440874Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:54:57.441544Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 41 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-12-23T18:54:57.441763Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 41 took 227us result status StatusSuccess 2025-12-23T18:54:57.442196Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MovedColumnTable" PathDescription { Self { Name: "MovedColumnTable" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 203 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ColumnTableVersion: 2 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1225216 RowCount: 100000 IndexSize: 0 LastAccessTime: 223 LastUpdateTime: 223 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 
MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:54:57.475012Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 1225216 rowCount 100000 cpuUsage 0 2025-12-23T18:54:57.475245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:279: PersistSingleStats for pathId [OwnerId: 72057594046678944, LocalPathId: 39], tabletId 72075186233409546, followerId 0: unknown pathId 2025-12-23T18:54:57.485751Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:54:57.768709Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MovedColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-12-23T18:54:57.769037Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MovedColumnTable" took 365us result status StatusSuccess 2025-12-23T18:54:57.769535Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MovedColumnTable" PathDescription { Self { Name: "MovedColumnTable" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 203 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ColumnTableVersion: 2 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1225216 RowCount: 100000 IndexSize: 0 LastAccessTime: 223 LastUpdateTime: 223 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... 
sleeping for 2 seconds 2025-12-23T18:54:54.186066Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:54.186154Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:54.186244Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:54.187318Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-12-23T18:54:54.187375Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2025-12-23T18:54:54.187641Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-12-23T18:54:54.187675Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 13151740404452589043 2025-12-23T18:54:54.188199Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-12-23T18:54:54.188233Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 8427358873417017059 2025-12-23T18:54:54.189047Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-12-23T18:54:54.189205Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-12-23T18:54:54.189313Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: UNKNOWN 2025-12-23T18:54:54.189348Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.153108s 2025-12-23T18:54:54.189524Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-12-23T18:54:54.189555Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2025-12-23T18:54:54.189934Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-12-23T18:54:54.190077Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-12-23T18:54:54.190124Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.170556s 2025-12-23T18:54:54.427880Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:54.428565Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:286:2098] 2025-12-23T18:54:54.429028Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-12-23T18:54:54.429090Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-12-23T18:54:54.461058Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:54.461772Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:286:2098] 2025-12-23T18:54:54.462169Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-12-23T18:54:54.462198Z node 3 :BOOTSTRAPPER INFO: 
bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 4 (idx 2) in gen 2 ... disconnecting other nodes ... sleeping for 2 seconds (tablet expected to survive) 2025-12-23T18:54:55.355362Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 5 2025-12-23T18:54:55.355442Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 3 2025-12-23T18:54:55.355765Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2025-12-23T18:54:55.355810Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:55.356075Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2025-12-23T18:54:55.356108Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:55.357892Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:286:2098] 2025-12-23T18:54:55.358133Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:286:2098] 2025-12-23T18:54:55.360070Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-12-23T18:54:55.360118Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-12-23T18:54:55.360323Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-12-23T18:54:55.360351Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2025-12-23T18:54:56.143819Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 3 2025-12-23T18:54:56.143903Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 5 2025-12-23T18:54:56.143955Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2025-12-23T18:54:56.143991Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:56.144055Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2025-12-23T18:54:56.144084Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:56.144857Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:286:2098] 2025-12-23T18:54:56.145292Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:286:2098] ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2025-12-23T18:54:56.145843Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2025-12-23T18:54:56.145888Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 10053858333920509680 2025-12-23T18:54:56.146070Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2025-12-23T18:54:56.146099Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 11851482555838222794 2025-12-23T18:54:56.146694Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-12-23T18:54:56.146837Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-12-23T18:54:56.146913Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-12-23T18:54:56.146939Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 4 (owner) 2025-12-23T18:54:56.147015Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-12-23T18:54:56.147071Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 4 (owner) ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2025-12-23T18:54:56.916770Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:661: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2025-12-23T18:54:56.916874Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:56.916993Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:661: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2025-12-23T18:54:56.917030Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:56.917950Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:286:2098] 2025-12-23T18:54:56.918401Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:286:2098] ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2025-12-23T18:54:56.922183Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2025-12-23T18:54:56.922529Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 8470239763125230813 2025-12-23T18:54:56.922910Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2025-12-23T18:54:56.922949Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 6622044195218853944 ... disconnecting nodes 2 <-> 1 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 ... disconnecting nodes 2 <-> 3 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 2025-12-23T18:54:56.924427Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:421: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2025-12-23T18:54:56.924929Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2025-12-23T18:54:56.925162Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:421: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2025-12-23T18:54:56.925201Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2025-12-23T18:54:56.925422Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-12-23T18:54:56.925677Z node 3 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2025-12-23T18:54:56.926663Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-12-23T18:54:56.926817Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.127990s 2025-12-23T18:54:56.933238Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:733: tablet: 9437184, type: Dummy, tablet dead 2025-12-23T18:54:56.933696Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:56.940530Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:439:2098] 2025-12-23T18:54:56.957359Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-12-23T18:54:56.957415Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-12-23T18:54:57.033406Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:54:57.034268Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:439:2098] 2025-12-23T18:54:57.034865Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2025-12-23T18:54:57.034906Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect |97.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts >> test_canonical_records.py::test_kill_tablet_using_developer_ui |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown+Nullable |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpKnn::FloatVectorKnnPushdown |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] 
{BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> THiveTest::TestBridgeBalance [GOOD] >> THiveTest::TestBridgeFollowers |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, 
release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> THealthCheckTest::TestStateStorageOk [GOOD] >> THealthCheckTest::TestStateStorageBlue |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> THiveTest::TestBridgeFollowers [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:56.105518Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:56.259496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:56.282797Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:56.283328Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:56.283446Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:56.631425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:56.631572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:56.702712Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516093889440 != 1766516093889444 2025-12-23T18:54:56.719438Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:56.764805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:56.864179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fce/r3tmp/tmpZpU40I/pdisk_1.dat 2025-12-23T18:54:57.187042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:54:57.203156Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:57.317637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:54:57.365954Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:891:2767]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:54:57.367053Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:891:2767]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:54:57.368021Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2767] 2025-12-23T18:54:57.368282Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:54:57.379944Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:893:2769]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:54:57.406813Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: 
StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:891:2767]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:54:57.407160Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:893:2769]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:54:57.407413Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:893:2769] 2025-12-23T18:54:57.407581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:54:57.413388Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:893:2769]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:54:57.413970Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:54:57.414108Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:54:57.416167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:54:57.416235Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:54:57.416308Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:54:57.417237Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:54:57.417384Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:54:57.417437Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:922:2767] in generation 1 2025-12-23T18:54:57.417686Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:54:57.417776Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:54:57.418677Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:54:57.418718Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:54:57.418771Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:54:57.418997Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:54:57.419076Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:54:57.419120Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:923:2769] in generation 1 2025-12-23T18:54:57.429819Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:54:57.453173Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:54:57.454626Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:54:57.454770Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:926:2788] 2025-12-23T18:54:57.454804Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:54:57.454839Z 
node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:54:57.454881Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:54:57.455176Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:891:2767], Recipient [1:891:2767]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:54:57.456031Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:54:57.456886Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:54:57.456920Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:54:57.456970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:54:57.457021Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:927:2789] 2025-12-23T18:54:57.457045Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:54:57.457065Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:54:57.457086Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:54:57.457366Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:893:2769], Recipient [1:893:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:54:57.457389Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:54:57.457473Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:54:57.457542Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:54:57.457683Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:54:57.457731Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:54:57.457785Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:54:57.457821Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:54:57.457846Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:54:57.457870Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:54:57.457902Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:54:57.457951Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:54:57.457991Z node 1 :TX_DATASHARD DEBUG: 
datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:54:57.458084Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:896:2770], Recipient [1:891:2767]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:54:57.458107Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:54:57.458160Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2763], serverId# [1:896:2770], sessionId# [0:0:0] 2025-12-23T18:54:57.458208Z node 1 :TX_DATASHARD DEBUG: datashard__prog ... TASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-12-23T18:54:59.039558Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=1 2025-12-23T18:54:59.039597Z node 1 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2025-12-23T18:54:59.039636Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:54:59.039650Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-12-23T18:54:59.039681Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-12-23T18:54:59.039718Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:54:59.039762Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:54:59.039784Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-12-23T18:54:59.039805Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:54:59.039824Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:54:59.039850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:54:59.039883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:54:59.039903Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2025-12-23T18:54:59.050703Z node 1 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-12-23T18:54:59.050821Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:54:59.050887Z node 1 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-12-23T18:54:59.050990Z node 1 
:TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:54:59.051553Z node 1 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [1:1164:2945] TxId: 281474976715662. Ctx: { TraceId: 01kd6911x90ks4jn3f4jgc2jaa, Database: , SessionId: ydb://session/3?node_id=1&id=OGZmNWZiNzItNzdkNGE0MmQtMmQxMmYyMmYtZWMzMzNmMTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:54:59.051609Z node 1 :KQP_EXECUTER TRACE: kqp_executer_impl.h:1400: ActorId: [1:1164:2945] TxId: 281474976715662. Ctx: { TraceId: 01kd6911x90ks4jn3f4jgc2jaa, Database: , SessionId: ydb://session/3?node_id=1&id=OGZmNWZiNzItNzdkNGE0MmQtMmQxMmYyMmYtZWMzMzNmMTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState ... waiting for SysViewsRoster update finished 2025-12-23T18:55:01.947722Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:01.954426Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:01.954666Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:01.954725Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:02.132539Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:02.132680Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:02.151462Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766516099427622 != 1766516099427625 2025-12-23T18:55:02.159592Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:02.204583Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:02.274015Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fce/r3tmp/tmpD7mnbB/pdisk_1.dat 2025-12-23T18:55:02.584207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:02.597337Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:55:02.700126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:02.927189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.132205Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1025:2866], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:03.132287Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1036:2871], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:03.132344Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:03.132892Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1039:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:03.133022Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:03.136058Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:03.275351Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1040:2875], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:55:03.311283Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1097:2913] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for readsets 2025-12-23T18:55:04.217435Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1284: SelfId: [2:1183:2952], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [2:1159:2952]TEvDeliveryProblem was received from tablet: 72075186224037888 2025-12-23T18:55:04.228029Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [2:1173:2952], SessionActorId: [2:1159:2952], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:1159:2952]. 2025-12-23T18:55:04.228391Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=2&id=ZWU4NzMyOTAtZDM3Y2E0NTgtODJmNDlhMzItYTU2NDk5ZTk=, ActorId: [2:1159:2952], ActorState: ExecuteState, TraceId: 01kd69169c99tdtm9cmwgvnyrw, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:1174:2952] from: [2:1173:2952] 2025-12-23T18:55:04.228475Z node 2 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976715664, task: 1, CA Id [2:1209:2990]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-12-23T18:55:04.229285Z node 2 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [2:1174:2952] TxId: 281474976715663. Ctx: { TraceId: 01kd69169c99tdtm9cmwgvnyrw, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZWU4NzMyOTAtZDM3Y2E0NTgtODJmNDlhMzItYTU2NDk5ZTk=, PoolId: default, IsStreamingQuery: 0}. UNDETERMINED: {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 } 2025-12-23T18:55:04.229669Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=ZWU4NzMyOTAtZDM3Y2E0NTgtODJmNDlhMzItYTU2NDk5ZTk=, ActorId: [2:1159:2952], ActorState: ExecuteState, TraceId: 01kd69169c99tdtm9cmwgvnyrw, Create QueryResponse for error on request, msg: , status: UNDETERMINED, issues: { message: "State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888." issue_code: 2026 severity: 1 } { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:45.579808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:45.579911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:45.579954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:45.579991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:45.580037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:45.580074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:45.580146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:45.580216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:45.581306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:45.581646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:45.674504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:45.674559Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:45.680269Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:45.680477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:45.680595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:45.683734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:45.683944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:45.684440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:45.684591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:45.686125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:45.686278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:45.687174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:45.687216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:45.687338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:45.687371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:45.687400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:45.687479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.810899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.811863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.811999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.812044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.812086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.812139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.812200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.812317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.812388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.812500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:54:45.812564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.812646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.812765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.812848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.812909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
102 at step: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000039 2025-12-23T18:55:04.761395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:04.761466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:55:04.761506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:373: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000039, at schemeshard: 72057594046678944 2025-12-23T18:55:04.761689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2025-12-23T18:55:04.761783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:55:04.765996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:04.766072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:55:04.766278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:04.766323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2025-12-23T18:55:04.766725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:04.766795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:55:04.767817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:55:04.767912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:55:04.767948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:55:04.767975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2025-12-23T18:55:04.768004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount 
reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:55:04.768063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:55:04.769618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1136 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:55:04.769649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:55:04.769750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1136 } } CommitVersion { Step: 5000039 TxId: 102 } 2025-12-23T18:55:04.769847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1136 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2025-12-23T18:55:04.770428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:55:04.770460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:55:04.770563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:55:04.770602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:55:04.770661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 710 RawX2: 4294969992 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-12-23T18:55:04.770733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, 
txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:04.770793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:04.770817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:55:04.770841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:55:04.772500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:55:04.773380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:04.773467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:04.773638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:04.773670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:55:04.773739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:55:04.773767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:04.773800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:55:04.773828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:04.773853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:55:04.773894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:737:2715] message: TxId: 102 2025-12-23T18:55:04.773924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:04.773947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:55:04.773968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:55:04.774068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:55:04.775285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:55:04.775321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1012:2965] TestWaitNotification: OK eventTxId 102 2025-12-23T18:55:04.776481Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__conditional_erase.cpp:349: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-12-23T18:55:04.776552Z node 1 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__conditional_erase.cpp:393: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2025-12-23T18:55:04.778042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:453: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-12-23T18:55:04.779521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:55:04.779585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:108: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T00:06:00.048500Z, at schemeshard: 72057594046678944 2025-12-23T18:55:04.779644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestBridgeFollowers [GOOD] Test command err: 2025-12-23T18:54:22.641888Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:22.658339Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:22.658544Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:22.659078Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:22.659297Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# 
[0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-12-23T18:54:22.659997Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-12-23T18:54:22.660035Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:22.660600Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2025-12-23T18:54:22.660631Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:22.660710Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:22.660794Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:22.669684Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:22.669719Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:22.671206Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:22.671301Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:22.671393Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:22.671465Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:22.671549Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:22.671619Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:22.671683Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:22.671711Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:22.671767Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2025-12-23T18:54:22.671796Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2025-12-23T18:54:22.671834Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:22.671862Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:22.672262Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:22.672458Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:22.680560Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-12-23T18:54:22.680631Z node 1 :BS_NODE 
DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:22.682006Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:22.682201Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:22.682232Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-12-23T18:54:22.682389Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:22.682719Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:54:22.682785Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:54:22.682805Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:54:22.682854Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:22.683178Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-12-23T18:54:22.683243Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2025-12-23T18:54:22.683267Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2025-12-23T18:54:22.683387Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2025-12-23T18:54:22.683407Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:52:2092] 2025-12-23T18:54:22.683437Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:22.683523Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:52:2092] 2025-12-23T18:54:22.683562Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:22.683691Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:22.683795Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-12-23T18:54:22.683831Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2025-12-23T18:54:22.683872Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2025-12-23T18:54:22.689296Z node 1 :BS_NODE DEBUG: 
{NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2025-12-23T18:54:22.689968Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:22.690121Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-12-23T18:54:22.690149Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-12-23T18:54:22.690243Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:22.690313Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2025-12-23T18:54:22.690396Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2025-12-23T18:54:22.690425Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2025-12-23T18:54:22.693636Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-12-23T18:54:22.693700Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-12-23T18:54:22.693766Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-12-23T18:54:22.694024Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:22.694182Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:22.694293Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-12-23T18:54:22.701064Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-12-23T18:54:22.701106Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-12-23T18:54:22.701133Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-12-23T18:54:22.701176Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 
ClusterStateGuid: 0} 2025-12-23T18:54:22.701298Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-12-23T18:54:22.70 ... tatus: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [46:641:2249] CurrentLeaderTablet: [46:672:2270] CurrentGeneration: 1 CurrentStep: 0} 2025-12-23T18:55:03.772669Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [46:641:2249] CurrentLeaderTablet: [46:672:2270] CurrentGeneration: 1 CurrentStep: 0} 2025-12-23T18:55:03.772735Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [46:641:2249] followers: 2 2025-12-23T18:55:03.772769Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2025-12-23T18:55:03.772813Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [49:897:2133] 2025-12-23T18:55:03.772860Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [49:897:2133] 2025-12-23T18:55:03.773081Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [49:899:2134] 2025-12-23T18:55:03.773108Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [49:899:2134] 2025-12-23T18:55:03.773165Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [46:641:2249] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2025-12-23T18:55:03.773197Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2025-12-23T18:55:03.773289Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:55:03.773543Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-12-23T18:55:03.773592Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-12-23T18:55:03.773634Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-12-23T18:55:03.773823Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [46:641:2249] CurrentLeaderTablet: [46:672:2270] CurrentGeneration: 1 CurrentStep: 0} 2025-12-23T18:55:03.773894Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [46:641:2249] CurrentLeaderTablet: [46:672:2270] CurrentGeneration: 1 CurrentStep: 0} 2025-12-23T18:55:03.773961Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [46:641:2249] 
followers: 2 2025-12-23T18:55:03.773994Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2025-12-23T18:55:03.774062Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [49:899:2134] 2025-12-23T18:55:03.774091Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [49:899:2134] 2025-12-23T18:55:03.774259Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [49:901:2135] 2025-12-23T18:55:03.774286Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [49:901:2135] 2025-12-23T18:55:03.774341Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [46:641:2249] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2025-12-23T18:55:03.774372Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2025-12-23T18:55:03.774464Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:55:03.774686Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-12-23T18:55:03.774734Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-12-23T18:55:03.774797Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-12-23T18:55:03.774984Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [46:641:2249] CurrentLeaderTablet: [46:672:2270] CurrentGeneration: 1 CurrentStep: 0} 2025-12-23T18:55:03.775102Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [46:641:2249] CurrentLeaderTablet: [46:672:2270] CurrentGeneration: 1 CurrentStep: 0} 2025-12-23T18:55:03.775214Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [46:641:2249] followers: 2 2025-12-23T18:55:03.775262Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2025-12-23T18:55:03.775367Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [49:901:2135] 2025-12-23T18:55:03.775407Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [49:901:2135] 2025-12-23T18:55:03.775654Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [49:903:2136] 2025-12-23T18:55:03.775683Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [49:903:2136] 2025-12-23T18:55:03.775746Z node 49 :TABLET_RESOLVER DEBUG: 
tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [46:641:2249] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2025-12-23T18:55:03.775780Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2025-12-23T18:55:03.775872Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:55:03.776085Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-12-23T18:55:03.776130Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-12-23T18:55:03.776173Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-12-23T18:55:03.776353Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [46:641:2249] CurrentLeaderTablet: [46:672:2270] CurrentGeneration: 1 CurrentStep: 0} 2025-12-23T18:55:03.776420Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [46:641:2249] CurrentLeaderTablet: [46:672:2270] CurrentGeneration: 1 CurrentStep: 0} 2025-12-23T18:55:03.776484Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [46:641:2249] followers: 2 2025-12-23T18:55:03.776517Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2025-12-23T18:55:03.776586Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [49:903:2136] 2025-12-23T18:55:03.776615Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [49:903:2136] 2025-12-23T18:55:03.776781Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [49:905:2137] 2025-12-23T18:55:03.776809Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [49:905:2137] 2025-12-23T18:55:03.776854Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [46:641:2249] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2025-12-23T18:55:03.776887Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2025-12-23T18:55:03.776977Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:55:03.777199Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-12-23T18:55:03.777248Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-12-23T18:55:03.777288Z node 46 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-12-23T18:55:03.777476Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [46:641:2249] CurrentLeaderTablet: [46:672:2270] CurrentGeneration: 1 CurrentStep: 0} 2025-12-23T18:55:03.777558Z node 49 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [46:641:2249] CurrentLeaderTablet: [46:672:2270] CurrentGeneration: 1 CurrentStep: 0} 2025-12-23T18:55:03.777626Z node 49 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [46:641:2249] followers: 2 2025-12-23T18:55:03.777657Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC 2 leaderDC 1 3:2:3 local 0 localDc 1 other 2 disallowed 3 2025-12-23T18:55:03.777702Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [49:905:2137] 2025-12-23T18:55:03.777727Z node 49 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [49:905:2137] |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestFollowerCompatability1 |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets >> KqpKnn::Int8VectorKnnPushdown [GOOD] >> KqpKnn::Uint8VectorKnnPushdown [GOOD] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Uint8VectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 3088, MsgBus: 26713 ... waiting for SysViewsRoster update finished 2025-12-23T18:54:58.445925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:58.534090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:58.549267Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:58.549778Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:58.549895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:58.909655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:58.909766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:58.997190Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516096090884 != 1766516096090888 2025-12-23T18:54:59.016831Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:59.063493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:59.232861Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c79/r3tmp/tmpvDMHsc/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3088, node 1 2025-12-23T18:54:59.949555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:59.949642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:59.949687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:59.950025Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:59.961531Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:26713 TClient is connected to server localhost:26713 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:55:00.331605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:55:00.365784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:00.716427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:01.071386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:01.338260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:02.324117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:02.324436Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:02.325322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1977:3529], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:02.325490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:02.474732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:02.677392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:02.909199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.188097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.420928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.684022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.935879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:04.214232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:04.556580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2789:4170], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.556720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.557138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.557232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.557292Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.563291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:04.722250Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2798:4179], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:55:04.778510Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2858:4220] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:06.481737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Int8VectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 18258, MsgBus: 6564 ... waiting for SysViewsRoster update finished 2025-12-23T18:54:58.596221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:58.728261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:58.742646Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:58.743100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:58.743205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:59.059919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:59.060063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:59.125299Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516096090915 != 1766516096090919 2025-12-23T18:54:59.134355Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:59.177495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:59.340399Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bde/r3tmp/tmpPXeSGE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18258, node 1 2025-12-23T18:54:59.949761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:59.949840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:59.949888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:59.950245Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:59.961879Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6564 TClient is connected to server localhost:6564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:55:00.326534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:55:00.428342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:00.724390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:01.091381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:01.364816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:02.324165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1907:3515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:02.324892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:02.325748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1981:3534], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:02.325915Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:02.475048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:02.689436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:02.965407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.206467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.433127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.728298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.977650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:04.277624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:04.657152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.657277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.657636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2798:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.657719Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2800:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.657774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.663391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:04.825057Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2803:4184], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:55:04.884214Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2863:4225] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:06.621733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> THiveTest::TestFollowerCompatability1 [GOOD] >> THiveTest::TestFollowerCompatability2 >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe >> TIcNodeCache::GetNodesInfoTest >> TTopicApiDescribes::GetLocalDescribe |98.0%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TExternalDataSourceTest::SchemeErrors |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> KqpKnn::FloatVectorKnnPushdown [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TBSV::ShouldLimitBlockStoreVolumeDropRate >> TBSV::CleanupDroppedVolumesOnRestart >> TExternalDataSourceTest::CreateExternalDataSource |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |98.0%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/schemeshard/ut_bsvolume/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> THiveTest::TestFollowerCompatability2 [GOOD] >> THiveTest::TestFollowerCompatability3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:45.934223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:45.934306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:45.934363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:45.934397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:45.934429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:45.934463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:45.934528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:45.934592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:45.935517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:45.935784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:46.018221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:46.018263Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:46.023305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:46.023619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:46.023776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:46.027447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:46.027656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 
0 2025-12-23T18:54:46.028573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:46.028816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:46.030631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:46.030825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:46.031735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:46.031787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:46.031892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:46.031932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:46.031967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:46.032078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:46.153903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.154716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.154828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.154890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.154931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.154967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.155010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.155102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.155158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.155223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.155263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.155351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.155435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.155532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.155584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... d: 72057594046678944 2025-12-23T18:55:10.070983Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.071415Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.071506Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.071644Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.071700Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.071821Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.071899Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.072026Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.072678Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.072763Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.072819Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.072897Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.072981Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.073064Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.073160Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.073211Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.073250Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.073343Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:10.073385Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:55:10.073534Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:55:10.073591Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:55:10.073649Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:55:10.073685Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:55:10.073722Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-12-23T18:55:10.073801Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:3104:4324] message: TxId: 101 2025-12-23T18:55:10.073851Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:55:10.073916Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:55:10.073947Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:55:10.075008Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2025-12-23T18:55:10.077288Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:55:10.077336Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:3105:4325] TestWaitNotification: OK eventTxId 101 2025-12-23T18:55:10.077776Z node 4 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:55:10.078014Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 274us result status StatusSuccess 2025-12-23T18:55:10.078483Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 
72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::FloatVectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 26296, MsgBus: 21080 ... waiting for SysViewsRoster update finished 2025-12-23T18:55:02.797251Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:02.911742Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:02.923550Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:02.923937Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:02.924019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:03.187527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:03.187641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:03.242961Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516100426670 != 1766516100426674 2025-12-23T18:55:03.252541Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:03.295578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:03.459478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002a6f/r3tmp/tmp98X1Pq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26296, node 1 2025-12-23T18:55:03.807516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:03.807617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:03.807666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:03.808029Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:03.819888Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21080 TClient is connected to server localhost:21080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:55:04.135257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:55:04.167901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:04.447314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:04.799875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:05.058519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:05.940804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1901:3510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:05.941233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:05.942101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3529], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:05.942278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:05.974503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:06.187636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:06.428675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:06.700854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:06.930796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:07.168015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:07.411736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:07.669300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:07.971633Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4170], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:07.971730Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:07.971966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:07.972008Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:07.972042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4177], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:07.975737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:08.136017Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2796:4179], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:55:08.204162Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2856:4220] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:09.222814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerHighUsage >> TBSV::ShardsNotLeftInShardsToDelete >> KqpKnn::VectorSearchKnnPushdown-Nullable [GOOD] >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalDataSourceTest::SchemeErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:55:12.123346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:12.123453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:12.123511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:12.123552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:12.123590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:12.123623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:12.123691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:12.123784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-12-23T18:55:12.124662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:12.126547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:12.213739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:55:12.213794Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:12.226705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:12.226960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:12.227165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:12.240818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:12.242125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:12.245367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:12.249074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:12.259300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:12.260518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:12.271888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:12.271977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:12.272173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:12.272226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:12.272342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:12.273309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.280795Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:243:2058] recipient: [1:15:2062] 2025-12-23T18:55:12.430200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:55:12.431629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.432821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:55:12.432909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:55:12.434267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:55:12.434362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:12.437501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:12.438926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:55:12.439221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.439356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:55:12.439400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:55:12.439437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:55:12.441897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.441972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:55:12.442032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:55:12.444085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.444145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.444203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:55:12.444273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:55:12.448874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:55:12.451143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:55:12.452503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:55:12.453657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:12.453811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:55:12.453862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:55:12.454305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:55:12.454373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:55:12.454574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:55:12.455446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:55:12.458015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:12.458063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
[1:15:2062] 2025-12-23T18:55:12.748448Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:55:12.748713Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 252us result status StatusPathDoesNotExist 2025-12-23T18:55:12.748875Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-12-23T18:55:12.750058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:404:2379] sender: [1:473:2058] recipient: [1:107:2140] Leader for TabletID 72057594046678944 is [1:404:2379] sender: [1:476:2058] recipient: [1:475:2433] Leader for TabletID 72057594046678944 is [1:477:2434] sender: [1:478:2058] recipient: [1:475:2433] 2025-12-23T18:55:12.791213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:12.791289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:12.791325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:12.791363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:12.791393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:12.791417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:12.791478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:12.791535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-12-23T18:55:12.792162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:12.792374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:12.804037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:12.805156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:12.805393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:12.805537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:55:12.805566Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:12.805659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:12.806213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1418: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:12.806288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1492: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.806336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1518: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.806643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1820: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.806714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2025-12-23T18:55:12.806941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.807060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.807169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.807262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.807339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.807501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.807763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.807876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.808223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.808307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit 
for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.808449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.808529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.808577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.808650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.808810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4093: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.808879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4109: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.808975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4394: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.809257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4761: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.809320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4827: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.809370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4886: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.809471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4975: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.809511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5002: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.809543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5029: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.814189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:12.815760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:12.815824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:12.816274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:12.816324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:12.816355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:12.818075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:477:2434] sender: [1:539:2058] recipient: [1:15:2062] 2025-12-23T18:55:12.850289Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:55:12.850513Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 224us result status StatusPathDoesNotExist 2025-12-23T18:55:12.850675Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:55:12.078459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:12.078558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:12.078607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:12.078773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:12.078826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:12.078879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:12.078952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:12.079062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:55:12.079998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:12.081067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:12.180681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:55:12.180747Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:12.196672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:12.196865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:12.197065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:12.211580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:12.211898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:12.215238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:12.217668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:12.230443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:12.230641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:12.235895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:12.235961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:12.236160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:12.236208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:12.236312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:12.237083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.383167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:55:12.385454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.385645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.385725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.385784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.385837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.385911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.385997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.386073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.386144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.386227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.386297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.386393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.386467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.386551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Id: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:13.051570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:55:13.051679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:55:13.051724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:55:13.051770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:55:13.051832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:55:13.051874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-12-23T18:55:13.051916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:55:13.051959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:55:13.051991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:55:13.052053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:55:13.052094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-12-23T18:55:13.052126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-12-23T18:55:13.052156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-12-23T18:55:13.053107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:55:13.053232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:55:13.053277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:55:13.053313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-12-23T18:55:13.053365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:55:13.054656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 
2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:55:13.054767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:55:13.054801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:55:13.054830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:55:13.054868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-12-23T18:55:13.054940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:55:13.056955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:55:13.058157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:55:13.058394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:55:13.058437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:55:13.058815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:55:13.058911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:55:13.058953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:707:2696] TestWaitNotification: OK eventTxId 101 2025-12-23T18:55:13.059395Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:55:13.059579Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 197us result status StatusSuccess 2025-12-23T18:55:13.059970Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-12-23T18:55:13.063910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:55:13.064184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-12-23T18:55:13.064286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_data_source.cpp:202: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2025-12-23T18:55:13.064419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, at schemeshard: 72057594046678944 2025-12-23T18:55:13.068953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-12-23T18:55:13.069185Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:55:13.069496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:55:13.069534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:55:13.069937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:55:13.070030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:55:13.070065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:715:2704] TestWaitNotification: OK eventTxId 102 |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:114:2144] 2025-12-23T18:55:12.078440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:12.078523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:12.078563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:12.078756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:12.078887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:12.078927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:12.078968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:12.079026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:55:12.079701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:12.081036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:12.204021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:55:12.204100Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:12.204921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:12.217192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:12.217374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:12.217548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:12.229305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:12.229556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:12.230269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:12.230990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:12.234851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:12.235018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:12.236129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:12.236182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:12.236292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:12.236334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:12.236380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:12.237095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.416002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true 
FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.417961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.418102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModif ... 
11Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2025-12-23T18:55:13.101511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2025-12-23T18:55:13.101709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2025-12-23T18:55:13.104349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:55:13.104607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-12-23T18:55:13.108235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:55:13.108570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-12-23T18:55:13.108659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2025-12-23T18:55:13.108829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-12-23T18:55:13.111387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:55:13.111641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-12-23T18:55:13.115123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:55:13.115486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-12-23T18:55:13.115584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2025-12-23T18:55:13.115741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-12-23T18:55:13.118212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:55:13.118473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, 
subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-12-23T18:55:13.121518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:55:13.121721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2025-12-23T18:55:13.121804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2025-12-23T18:55:13.121900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-12-23T18:55:13.124472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:55:13.124709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown-Nullable [GOOD] Test command err: Trying to start YDB, gRPC: 23870, MsgBus: 27040 ... waiting for SysViewsRoster update finished 2025-12-23T18:54:58.411537Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:58.528951Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:58.549318Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:58.549749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:58.549858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:58.909672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:58.909789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:58.990377Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516096090941 != 1766516096090945 2025-12-23T18:54:59.013241Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:59.061479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:59.244286Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002b7c/r3tmp/tmpGCFvEP/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23870, node 1 2025-12-23T18:54:59.949395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:59.949552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:59.949603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:59.950159Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:59.965035Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:27040 TClient is connected to server localhost:27040 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:55:00.341957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:55:00.429061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:00.714109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:01.063861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:01.324041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:02.324120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1908:3515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:02.324534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:02.325665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1982:3534], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:02.325771Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:02.474068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:02.678976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:02.951403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.181852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.413167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.683322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:03.935896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:04.209024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:04.527739Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.527906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.528495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2798:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.528604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:04.528693Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2801:4182], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't hav ... status for [0:18] at 72075186224037927 is Executed 2025-12-23T18:55:12.374189Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:18] at 72075186224037927 executing on unit CompletedOperations 2025-12-23T18:55:12.374205Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:18] at 72075186224037927 has finished 2025-12-23T18:55:12.374223Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037927 2025-12-23T18:55:12.374277Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037927 2025-12-23T18:55:12.374414Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3820:4933], Recipient [1:3195:4485]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 10000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 1 2025-12-23T18:55:12.374507Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3820:4933], Recipient [1:3201:4489]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 10000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 1 2025-12-23T18:55:12.374594Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037928, FollowerId 0 2025-12-23T18:55:12.374626Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CheckRead 2025-12-23T18:55:12.374666Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2025-12-23T18:55:12.374681Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CheckRead 2025-12-23T18:55:12.374697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit BuildAndWaitDependencies 2025-12-23T18:55:12.374713Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit BuildAndWaitDependencies 2025-12-23T18:55:12.374759Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037928 2025-12-23T18:55:12.374784Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2025-12-23T18:55:12.374798Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit BuildAndWaitDependencies 2025-12-23T18:55:12.374812Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit ExecuteRead 2025-12-23T18:55:12.374825Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit ExecuteRead 2025-12-23T18:55:12.374921Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037928 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 10000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2025-12-23T18:55:12.375010Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037928 promoting UnprotectedReadEdge to v10000/18446744073709551615 2025-12-23T18:55:12.375031Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037928 Complete read# {[1:3820:4933], 1} after executionsCount# 1 2025-12-23T18:55:12.375052Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037928 read iterator# {[1:3820:4933], 1} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:55:12.375135Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037928 read iterator# {[1:3820:4933], 1} finished in read 2025-12-23T18:55:12.375171Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2025-12-23T18:55:12.375189Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit ExecuteRead 2025-12-23T18:55:12.375206Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit CompletedOperations 2025-12-23T18:55:12.375221Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CompletedOperations 2025-12-23T18:55:12.375247Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2025-12-23T18:55:12.375262Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CompletedOperations 2025-12-23T18:55:12.375276Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037928 has finished 2025-12-23T18:55:12.375293Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037928 2025-12-23T18:55:12.375332Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037928 2025-12-23T18:55:12.375373Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037929, FollowerId 0 2025-12-23T18:55:12.375409Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CheckRead 2025-12-23T18:55:12.375449Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2025-12-23T18:55:12.375467Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CheckRead 2025-12-23T18:55:12.375481Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] 
at 72075186224037929 to execution unit BuildAndWaitDependencies 2025-12-23T18:55:12.375494Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit BuildAndWaitDependencies 2025-12-23T18:55:12.375515Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037929 2025-12-23T18:55:12.375535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2025-12-23T18:55:12.375551Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit BuildAndWaitDependencies 2025-12-23T18:55:12.375565Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit ExecuteRead 2025-12-23T18:55:12.375578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit ExecuteRead 2025-12-23T18:55:12.375645Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037929 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 10000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2025-12-23T18:55:12.375724Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037929 promoting UnprotectedReadEdge to v10000/18446744073709551615 2025-12-23T18:55:12.375744Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037929 Complete read# {[1:3820:4933], 2} after executionsCount# 1 2025-12-23T18:55:12.375766Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037929 read iterator# {[1:3820:4933], 2} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:55:12.375842Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037929 read iterator# {[1:3820:4933], 2} finished in read 2025-12-23T18:55:12.375876Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2025-12-23T18:55:12.375900Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit ExecuteRead 2025-12-23T18:55:12.375922Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit CompletedOperations 2025-12-23T18:55:12.375938Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CompletedOperations 2025-12-23T18:55:12.375962Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2025-12-23T18:55:12.375976Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CompletedOperations 2025-12-23T18:55:12.375991Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037929 has finished 2025-12-23T18:55:12.376007Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 
72075186224037929 2025-12-23T18:55:12.376043Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037929 2025-12-23T18:55:12.376992Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3820:4933], Recipient [1:3193:4483]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:55:12.377028Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037927 ReadCancel: { ReadId: 0 } 2025-12-23T18:55:12.377126Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3820:4933], Recipient [1:3195:4485]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-12-23T18:55:12.377148Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037928 ReadCancel: { ReadId: 1 } 2025-12-23T18:55:12.377450Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3820:4933], Recipient [1:3201:4489]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2025-12-23T18:55:12.377482Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037929 ReadCancel: { ReadId: 2 } 2025-12-23T18:55:12.395548Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:2073:3611]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:55:13.191545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:13.191612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:13.191638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:13.191661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:13.191689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:13.191707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:13.191743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:13.191802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:55:13.192408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:13.192626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:13.252788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:55:13.252842Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:13.257703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:13.257912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:13.258060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:13.261148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:13.261322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:13.262074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:13.262348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:13.264698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:13.264886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:13.266231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:13.266295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:13.266451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:13.266499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:13.266541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:13.266677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:13.273576Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:245:2058] recipient: [1:15:2062] 2025-12-23T18:55:13.379888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:55:13.380051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:13.380182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-12-23T18:55:13.380212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-12-23T18:55:13.380389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:55:13.380435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:13.382021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:13.382158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-12-23T18:55:13.382285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:13.382339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:55:13.382370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:55:13.382393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:55:13.384170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:13.384249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:55:13.384292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:55:13.385867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:13.385911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:13.385971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:55:13.386039Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:55:13.395391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:55:13.397265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:55:13.397482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-12-23T18:55:13.398644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:13.398800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:55:13.398872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:55:13.399181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:55:13.399243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:55:13.399467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:55:13.399565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:55:13.401707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:13.401752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
d: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:55:13.488341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:55:13.488388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:13.488443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:55:13.488474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:13.488532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:55:13.488593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:55:13.488651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:55:13.488715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:13.488750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:55:13.488804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:55:13.488920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-12-23T18:55:13.488962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:55:13.488996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-12-23T18:55:13.489031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-12-23T18:55:13.490321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:55:13.490373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-12-23T18:55:13.490987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2025-12-23T18:55:13.491037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-12-23T18:55:13.491633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2025-12-23T18:55:13.491723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:55:13.491766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 
72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:55:13.491915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:13.491947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:13.492078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-12-23T18:55:13.492236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:13.492284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-12-23T18:55:13.492324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:55:13.492755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:55:13.492847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:55:13.492899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:55:13.492953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-12-23T18:55:13.492998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-12-23T18:55:13.493418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:55:13.493462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-12-23T18:55:13.493525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:55:13.493904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:55:13.494001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:55:13.494036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:55:13.494063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-12-23T18:55:13.494092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-12-23T18:55:13.494183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:55:13.494539Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 2025-12-23T18:55:13.495108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:13.495519Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 2025-12-23T18:55:13.495830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-12-23T18:55:13.497143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:55:13.500101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:55:13.500264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:55:13.500458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2025-12-23T18:55:13.500708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:55:13.501051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:55:13.501097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:55:13.501515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:55:13.501611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:55:13.501649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:398:2377] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-12-23T18:55:13.502084Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-12-23T18:55:13.502212Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:132:2058] recipient: [1:114:2144] 2025-12-23T18:55:12.078449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:12.078600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:12.078654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:12.078769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:12.078829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:12.078881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:12.078954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:12.079046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:55:12.080020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:12.081067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:12.192969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:55:12.193037Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:12.193812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:12.208843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:12.209138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:12.209313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:12.221459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:12.221729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:12.222489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:12.223175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:12.226873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:12.227975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:12.235857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:12.235945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:12.236102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:12.236156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:12.236303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:12.237095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.423065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } 
Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.424178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.424332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.424445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.424540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.424627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.424723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.424802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.424876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.424967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.425045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.425161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.425243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.425329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.425525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:217:2217] at schemeshard: 72057594046678944 Send TEvModif ... 
MESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:55:14.058237Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-12-23T18:55:14.058271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:55:14.058706Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:55:14.058793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:55:14.058821Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:55:14.058848Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-12-23T18:55:14.058886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:55:14.058946Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:55:14.060839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:55:14.061469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:55:14.061622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:55:14.061657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:55:14.061928Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:55:14.061992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:55:14.062017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:711:2701] TestWaitNotification: OK eventTxId 101 2025-12-23T18:55:14.062348Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:55:14.062494Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 172us result status StatusSuccess 2025-12-23T18:55:14.062786Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-12-23T18:55:14.066655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:55:14.066968Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2025-12-23T18:55:14.067049Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2025-12-23T18:55:14.067200Z 
node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:55:14.069242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-12-23T18:55:14.069501Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:55:14.069813Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:55:14.069849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:55:14.070134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:55:14.070229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:55:14.070268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:719:2709] TestWaitNotification: OK eventTxId 102 2025-12-23T18:55:14.070620Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:55:14.070795Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 204us result status StatusSuccess 2025-12-23T18:55:14.071062Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:55:12.123329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:12.123428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:12.123485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:12.123530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:12.123573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:12.123601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:12.123672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:12.123794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:55:12.124662Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:12.126559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:12.224380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:55:12.224440Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:12.233847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:12.234015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:12.234205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:12.247477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:12.247724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:12.248431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:12.248970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:12.259442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:12.260525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:12.271856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:12.271960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:12.272178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:12.272225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:12.272380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:12.273288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:12.417888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.420178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.420323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.420419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.420496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.420551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.420625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.420669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.420734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.420787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.420859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.420941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.421035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.421138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:12.421186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000064, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:14.034653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000064 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:55:14.034725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_bsv.cpp:40: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000064, at schemeshard: 72057594046678944 2025-12-23T18:55:14.034847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 2 2025-12-23T18:55:14.034957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2025-12-23T18:55:14.034985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-12-23T18:55:14.035014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2025-12-23T18:55:14.035039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-12-23T18:55:14.035078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:55:14.035118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 1 2025-12-23T18:55:14.035143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-12-23T18:55:14.035178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-12-23T18:55:14.035206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2025-12-23T18:55:14.035231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 129:0 2025-12-23T18:55:14.035337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 2 2025-12-23T18:55:14.035371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-12-23T18:55:14.035400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 57 2025-12-23T18:55:14.035435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 49], 18446744073709551615 2025-12-23T18:55:14.037160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2025-12-23T18:55:14.037199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe 
to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-12-23T18:55:14.037587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 2025-12-23T18:55:14.037622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-12-23T18:55:14.038061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2025-12-23T18:55:14.038137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:55:14.038159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2025-12-23T18:55:14.038254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:14.038279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:14.038432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 49] 2025-12-23T18:55:14.038530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:14.038555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-12-23T18:55:14.038593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 129, path id: 49 FAKE_COORDINATOR: Erasing txId 129 2025-12-23T18:55:14.039086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 49 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:55:14.039148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 49 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:55:14.039172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-12-23T18:55:14.039206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 49], version: 18446744073709551615 2025-12-23T18:55:14.039242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 1 2025-12-23T18:55:14.039497Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:55:14.039532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 49], at schemeshard: 72057594046678944 2025-12-23T18:55:14.039603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:55:14.039844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 57 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:55:14.039891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 57 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:55:14.039910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-12-23T18:55:14.039928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 57 2025-12-23T18:55:14.039953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:55:14.040013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-12-23T18:55:14.040292Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 0 2025-12-23T18:55:14.040653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-12-23T18:55:14.040828Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 0 2025-12-23T18:55:14.041046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6377: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-12-23T18:55:14.042323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-12-23T18:55:14.043369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:55:14.043450Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-12-23T18:55:14.044541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2025-12-23T18:55:14.044619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2025-12-23T18:55:14.045166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2025-12-23T18:55:14.045199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2025-12-23T18:55:14.045719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-12-23T18:55:14.045822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-12-23T18:55:14.045862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:2081:3949] TestWaitNotification: OK eventTxId 129 |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> THiveTest::TestFollowerCompatability3 [GOOD] >> THiveTest::TestGetStorageInfo |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestHiveBalancerHighUsageAndColumnShards |98.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |98.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |98.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |98.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestExternalBoot |98.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} |98.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTTLTests::CheckCounters [GOOD] >> KqpKnn::VectorSearchKnnPushdown+Nullable [GOOD] >> THiveTest::TestExternalBoot [GOOD] >> THiveTest::TestExternalBootWhenLocked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:129:2058] recipient: [1:112:2143] 2025-12-23T18:54:48.219009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:48.219079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:48.219107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:48.219131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:48.219166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:48.219195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:48.219265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:48.219325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:48.219925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:48.220132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:48.281255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:48.281295Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:48.290000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:48.290657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:48.290828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:48.299828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:48.300426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2025-12-23T18:54:48.301063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:48.301351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:48.304625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:48.304863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:48.305979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:48.306022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:48.306095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:48.306125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:48.306164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:48.306285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.416095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.416784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.416865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.416915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.416965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.417018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.417101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.417164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.417212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.417276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.417315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.417362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.417411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.417506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.417589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... tifyTxCompletionResult 2025-12-23T18:55:15.052400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1761:3662] 2025-12-23T18:55:15.053588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-12-23T18:55:15.178786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2025-12-23T18:55:15.179579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2025-12-23T18:55:15.179750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2025-12-23T18:55:15.179830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId 
map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:55:15.179940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2025-12-23T18:55:15.180701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2025-12-23T18:55:15.180770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:55:15.180843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-12-23T18:55:15.273049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7071: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-12-23T18:55:15.273188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:55:15.273319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-12-23T18:55:15.273470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:215: Run conditional erase, tabletId: 72075186233409549, request: TableId: 40 Expiration { ColumnId: 2 WallClockTimestamp: 1766529379094704 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2025-12-23T18:55:15.273588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:215: Run conditional erase, tabletId: 72075186233409548, request: TableId: 40 Expiration { ColumnId: 2 WallClockTimestamp: 1766529379094704 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2025-12-23T18:55:15.274204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7099: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-12-23T18:55:15.274685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7099: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-12-23T18:55:15.275221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:349: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-12-23T18:55:15.275283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:399: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-12-23T18:55:15.275713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:349: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-12-23T18:55:15.275756Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__conditional_erase.cpp:399: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-12-23T18:55:15.279711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:453: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-12-23T18:55:15.279882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:55:15.279939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:108: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2025-12-23T23:36:19.094704Z, at schemeshard: 72057594046678944 2025-12-23T18:55:15.280036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:453: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-12-23T18:55:15.280087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-12-23T18:55:15.280163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-12-23T18:55:15.280198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:108: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2025-12-23T23:36:19.094704Z, at schemeshard: 72057594046678944 2025-12-23T18:55:15.280236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:192: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-12-23T18:55:15.302252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:55:15.355708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-12-23T18:55:15.355863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-12-23T18:55:15.355924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2025-12-23T18:55:15.355976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:55:15.356068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2025-12-23T18:55:15.356207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2025-12-23T18:55:15.356244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 
72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:55:15.356300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-12-23T18:55:15.384201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:55:15.448006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-12-23T18:55:15.448171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-12-23T18:55:15.448232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2025-12-23T18:55:15.448279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:55:15.448393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2025-12-23T18:55:15.448532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2025-12-23T18:55:15.448573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2025-12-23T18:55:15.448633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> 
TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode >> AnalyzeColumnshard::AnalyzeSameOperationId |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable >> AnalyzeColumnshard::AnalyzeRebootSa >> AnalyzeColumnshard::AnalyzeDeadline >> AnalyzeColumnshard::AnalyzeStatus >> TraverseColumnShard::TraverseColumnTableRebootColumnshard >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate >> TraverseDatashard::TraverseTwoTables >> AnalyzeColumnshard::Analyze >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes >> TraverseColumnShard::TraverseServerlessColumnTable |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown+Nullable [GOOD] Test command err: Trying to start YDB, gRPC: 61109, MsgBus: 26396 ... waiting for SysViewsRoster update finished 2025-12-23T18:55:02.617105Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:02.709729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:02.718547Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:02.718971Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:02.719065Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:02.981136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:02.981288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:03.059665Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516100200448 != 1766516100200452 2025-12-23T18:55:03.076779Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:03.123509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:03.289566Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002a77/r3tmp/tmp6mIORq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61109, node 1 2025-12-23T18:55:03.642358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:03.642429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:03.642471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:03.642731Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:03.654157Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:26396 TClient is connected to server localhost:26396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:55:04.036341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:55:04.122178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:04.390277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:04.738490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:05.014476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:55:05.768786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1908:3515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:05.769020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:05.769779Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1982:3534], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:05.769853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:05.793316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:06.001137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:06.280129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:06.523985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:06.762629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:07.014235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:07.275696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:07.544798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:07.866300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:07.866425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:07.866832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2797:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:07.866966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:07.867073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't hav ... 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:18] at 72075186224037927 on unit CompletedOperations 2025-12-23T18:55:15.603250Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:18] at 72075186224037927 is Executed 2025-12-23T18:55:15.603263Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:18] at 72075186224037927 executing on unit CompletedOperations 2025-12-23T18:55:15.603278Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:18] at 72075186224037927 has finished 2025-12-23T18:55:15.603296Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037927 2025-12-23T18:55:15.603355Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037927 2025-12-23T18:55:15.603577Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3818:4932], Recipient [1:3193:4484]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 10000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 1 2025-12-23T18:55:15.603666Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037928, FollowerId 0 2025-12-23T18:55:15.603696Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CheckRead 2025-12-23T18:55:15.603736Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2025-12-23T18:55:15.603752Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CheckRead 2025-12-23T18:55:15.603768Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit BuildAndWaitDependencies 2025-12-23T18:55:15.603784Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit BuildAndWaitDependencies 2025-12-23T18:55:15.603808Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037928 2025-12-23T18:55:15.603828Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2025-12-23T18:55:15.603843Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit BuildAndWaitDependencies 2025-12-23T18:55:15.603857Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit ExecuteRead 2025-12-23T18:55:15.603873Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit ExecuteRead 2025-12-23T18:55:15.603936Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037928 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 10000 TxId: 
18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2025-12-23T18:55:15.604020Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037928 promoting UnprotectedReadEdge to v10000/18446744073709551615 2025-12-23T18:55:15.604040Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037928 Complete read# {[1:3818:4932], 1} after executionsCount# 1 2025-12-23T18:55:15.604061Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037928 read iterator# {[1:3818:4932], 1} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:55:15.604146Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037928 read iterator# {[1:3818:4932], 1} finished in read 2025-12-23T18:55:15.604182Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2025-12-23T18:55:15.604198Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit ExecuteRead 2025-12-23T18:55:15.604215Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit CompletedOperations 2025-12-23T18:55:15.604232Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CompletedOperations 2025-12-23T18:55:15.604255Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2025-12-23T18:55:15.604272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CompletedOperations 2025-12-23T18:55:15.604287Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037928 has finished 2025-12-23T18:55:15.604302Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037928 2025-12-23T18:55:15.604341Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037928 2025-12-23T18:55:15.604444Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3818:4932], Recipient [1:3198:4488]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 10000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 1 2025-12-23T18:55:15.604806Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037929, FollowerId 0 2025-12-23T18:55:15.604839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CheckRead 2025-12-23T18:55:15.604879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2025-12-23T18:55:15.604896Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CheckRead 2025-12-23T18:55:15.604915Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit BuildAndWaitDependencies 2025-12-23T18:55:15.604931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit BuildAndWaitDependencies 2025-12-23T18:55:15.604954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037929 2025-12-23T18:55:15.604974Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2025-12-23T18:55:15.604989Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit BuildAndWaitDependencies 2025-12-23T18:55:15.605002Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit ExecuteRead 2025-12-23T18:55:15.605016Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit ExecuteRead 2025-12-23T18:55:15.605083Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037929 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 10000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2025-12-23T18:55:15.605161Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037929 promoting UnprotectedReadEdge to v10000/18446744073709551615 2025-12-23T18:55:15.605184Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037929 Complete read# {[1:3818:4932], 2} after executionsCount# 1 2025-12-23T18:55:15.605208Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037929 read iterator# {[1:3818:4932], 2} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:55:15.605308Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037929 read iterator# {[1:3818:4932], 2} finished in read 2025-12-23T18:55:15.605342Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2025-12-23T18:55:15.605360Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit ExecuteRead 2025-12-23T18:55:15.605377Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit CompletedOperations 2025-12-23T18:55:15.605392Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CompletedOperations 2025-12-23T18:55:15.605416Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2025-12-23T18:55:15.605430Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CompletedOperations 2025-12-23T18:55:15.605446Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037929 has finished 2025-12-23T18:55:15.605462Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037929 2025-12-23T18:55:15.605497Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037929 2025-12-23T18:55:15.605758Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3818:4932], Recipient [1:3191:4482]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:55:15.605791Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037927 ReadCancel: { ReadId: 0 } 2025-12-23T18:55:15.605860Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3818:4932], Recipient [1:3193:4484]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-12-23T18:55:15.605879Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037928 ReadCancel: { ReadId: 1 } 2025-12-23T18:55:15.606363Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3818:4932], Recipient [1:3198:4488]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2025-12-23T18:55:15.606409Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037929 ReadCancel: { ReadId: 2 } |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD] >> THiveTest::TestExternalBootCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:45.956926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:45.957006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:45.957038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:45.957074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:45.957115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:45.957150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:45.957206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:45.957267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:45.958037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:45.958292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:46.043688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:46.043740Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:46.048989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:46.049146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:46.049246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:46.051844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:46.052040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:46.052609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:46.052804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:46.054492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:46.054670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:46.055780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:46.055835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:46.055974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:46.056018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:46.056057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:46.056189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:46.209369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.212874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:46.213966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
25-12-23T18:55:16.815939Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:16.816024Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:55:16.816092Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:55:16.816143Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:55:16.818138Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:16.818210Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:55:16.818277Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:55:16.820337Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:16.820407Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:16.820500Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:55:16.820579Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:55:16.820779Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:55:16.822334Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:55:16.822510Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:55:16.822944Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:16.823146Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 115964119152 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:55:16.823222Z node 27 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:55:16.823552Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:55:16.823624Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:55:16.823893Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:55:16.823990Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:55:16.825898Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:16.825984Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:16.826332Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:16.826415Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [27:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:55:16.826964Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:16.827052Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:55:16.827252Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:55:16.827301Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:55:16.827362Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:55:16.827415Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:55:16.827471Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:55:16.827533Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:55:16.827600Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:55:16.827650Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:55:16.827761Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:55:16.827834Z 
node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:55:16.827903Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:55:16.828775Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:55:16.828922Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:55:16.828986Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:55:16.829043Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:55:16.829108Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:55:16.829234Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:55:16.832436Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:55:16.833069Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:55:16.834230Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [27:671:2661] Bootstrap 2025-12-23T18:55:16.836161Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [27:671:2661] Become StateWork (SchemeCache [27:676:2666]) 2025-12-23T18:55:16.839459Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:55:16.839978Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:16.840131Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-12-23T18:55:16.840776Z node 27 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-12-23T18:55:16.842827Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [27:671:2661] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:55:16.844793Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:55:16.845170Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-12-23T18:55:16.846469Z node 27 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TraverseColumnShard::TraverseColumnTable |98.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution >> AnalyzeColumnshard::AnalyzeRebootColumnShard |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> THiveTest::TestExternalBootCounters [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId >> AnalyzeColumnshard::AnalyzeServerless >> TraverseDatashard::TraverseOneTableServerless |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootCounters [GOOD] Test command err: 2025-12-23T18:54:14.889052Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:14.906632Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# 
{PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:14.906896Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:14.907469Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:14.907709Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-12-23T18:54:14.909030Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-12-23T18:54:14.909076Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:14.909698Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:78:2078] ControllerId# 72057594037932033 2025-12-23T18:54:14.909723Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:14.909804Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:14.909880Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:14.918511Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:14.918550Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:14.920081Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:86:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.920188Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:87:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.920275Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:88:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.920363Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:89:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.920452Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:90:2087] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.920543Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:91:2088] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.920614Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:92:2089] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.920628Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 
2025-12-23T18:54:14.920690Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:78:2078] 2025-12-23T18:54:14.920730Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:78:2078] 2025-12-23T18:54:14.920771Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:14.920808Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:14.921369Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:14.921435Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:14.923389Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:14.923473Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:14.923698Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:14.923825Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:14.924379Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:103:2077] ControllerId# 72057594037932033 2025-12-23T18:54:14.924399Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:14.924434Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:14.924490Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:14.925705Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:14.927554Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:14.927646Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:14.927841Z node 3 :BS_NODE WARN: 
{NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:14.927953Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:14.928460Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:114:2077] ControllerId# 72057594037932033 2025-12-23T18:54:14.928480Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:14.928512Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:14.928566Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:14.935040Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:14.935077Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:14.936195Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:121:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.936301Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:122:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.936373Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:123:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.936441Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:124:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.936510Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:125:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.936584Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:126:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.936684Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:127:2087] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.936699Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:14.936736Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:114:2077] 2025-12-23T18:54:14.936760Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:114:2077] 2025-12-23T18:54:14.936784Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:14.936807Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:14.937087Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:14.937209Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:14.945280Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:78:2078] 2025-12-23T18:54:14.945335Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false 
PendingEvents.size# 0 2025-12-23T18:54:14.951799Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:14.951860Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:14.953439Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:133:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.953567Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:134:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.953694Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:135:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.953819Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:136:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.953938Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:137:2085] targetNo ... Id: 72075186224037889 2025-12-23T18:55:18.944845Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:13} Tx{26, NKikimr::NHive::TTxUnlockTabletExecution} hope 1 -> done Change{18, redo 110b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-12-23T18:55:18.944912Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:13} Tx{26, NKikimr::NHive::TTxUnlockTabletExecution} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:55:18.956446Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [cd7cbca5baaa92c4] bootstrap ActorId# [68:509:2341] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:13:0:0:105:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-12-23T18:55:18.956580Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [cd7cbca5baaa92c4] Id# [72057594037927937:2:13:0:0:105:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:55:18.956667Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [cd7cbca5baaa92c4] restore Id# [72057594037927937:2:13:0:0:105:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:55:18.956755Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [cd7cbca5baaa92c4] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:13:0:0:105:1] Marker# BPG33 2025-12-23T18:55:18.956839Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [cd7cbca5baaa92c4] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:13:0:0:105:1] Marker# BPG32 2025-12-23T18:55:18.957019Z node 68 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [68:58:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:13:0:0:105:1] FDS# 105 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:55:18.958343Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [cd7cbca5baaa92c4] received {EvVPutResult Status# OK ID# [72057594037927937:2:13:0:0:105:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 28 } Cost# 80826 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 29 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-12-23T18:55:18.958487Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [cd7cbca5baaa92c4] Result# TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK 
StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-12-23T18:55:18.958570Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [cd7cbca5baaa92c4] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:55:18.958820Z node 68 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-12-23T18:55:18.959018Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} commited cookie 1 for step 13 2025-12-23T18:55:18.959222Z node 68 :HIVE NOTICE: tx__unlock_tablet.cpp:91: HIVE#72057594037927937 THive::TTxUnlockTabletExecution::Complete TabletId: 72075186224037889 SideEffects: {Notifications: 0x1004020F [68:483:2316] NKikimrHive.TEvLockTabletExecutionLost TabletID: 72075186224037889 Reason: LOCK_LOST_REASON_UNLOCKED,0x1004020E [68:483:2316] NKikimrHive.TEvUnlockTabletExecutionResult TabletID: 72075186224037889 Status: OK StatusMessage: ""} 2025-12-23T18:55:18.959884Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [68:510:2342] 2025-12-23T18:55:18.959945Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [68:510:2342] 2025-12-23T18:55:18.960042Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [68:510:2342] 2025-12-23T18:55:18.960121Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [68:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:55:18.960191Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 68 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [68:331:2201] 2025-12-23T18:55:18.960272Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [68:510:2342] 2025-12-23T18:55:18.960324Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [68:510:2342] 2025-12-23T18:55:18.960407Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [68:510:2342] 2025-12-23T18:55:18.960545Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [68:510:2342] 2025-12-23T18:55:18.960755Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [68:510:2342] 2025-12-23T18:55:18.960814Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [68:510:2342] 2025-12-23T18:55:18.960858Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [68:510:2342] 2025-12-23T18:55:18.960920Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [68:510:2342] 2025-12-23T18:55:18.960998Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [68:510:2342] 2025-12-23T18:55:18.961144Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: 
[72057594037927937] HandleSend Sender# [68:483:2316] EventType# 268697612 2025-12-23T18:55:18.961302Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{27, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2025-12-23T18:55:18.961401Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{27, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:55:18.961845Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{27, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{19, redo 664b alter 0b annex 0, ~{ 16, 1, 4 } -{ }, 0 gb} 2025-12-23T18:55:18.961958Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{27, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:55:18.973468Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [667a4d0d3c364da9] bootstrap ActorId# [68:513:2345] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:14:0:0:317:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-12-23T18:55:18.973653Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [667a4d0d3c364da9] Id# [72057594037927937:2:14:0:0:317:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:55:18.973739Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [667a4d0d3c364da9] restore Id# [72057594037927937:2:14:0:0:317:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:55:18.973823Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [667a4d0d3c364da9] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:14:0:0:317:1] Marker# BPG33 2025-12-23T18:55:18.973907Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [667a4d0d3c364da9] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:14:0:0:317:1] Marker# BPG32 2025-12-23T18:55:18.974133Z node 68 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [68:58:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:14:0:0:317:1] FDS# 317 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:55:18.975501Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [667a4d0d3c364da9] received {EvVPutResult Status# OK ID# [72057594037927937:2:14:0:0:317:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 29 } Cost# 82496 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 30 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-12-23T18:55:18.975667Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [667a4d0d3c364da9] Result# TEvPutResult {Id# [72057594037927937:2:14:0:0:317:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-12-23T18:55:18.975775Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [667a4d0d3c364da9] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:14:0:0:317:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:55:18.976101Z node 68 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:14:0:0:317:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-12-23T18:55:18.976279Z node 68 :TABLET_EXECUTOR DEBUG: 
Leader{72057594037927937:2:15} commited cookie 1 for step 14 2025-12-23T18:55:18.977005Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [68:515:2347] 2025-12-23T18:55:18.977078Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [68:515:2347] 2025-12-23T18:55:18.977162Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [68:515:2347] 2025-12-23T18:55:18.977267Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [68:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:55:18.977362Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 68 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [68:331:2201] 2025-12-23T18:55:18.977476Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [68:515:2347] 2025-12-23T18:55:18.977549Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [68:515:2347] 2025-12-23T18:55:18.977624Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [68:515:2347] 2025-12-23T18:55:18.977755Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [68:515:2347] 2025-12-23T18:55:18.977978Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [68:515:2347] 2025-12-23T18:55:18.978049Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [68:515:2347] 2025-12-23T18:55:18.978106Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [68:515:2347] 2025-12-23T18:55:18.978190Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [68:515:2347] 2025-12-23T18:55:18.978245Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [68:515:2347] 2025-12-23T18:55:18.978348Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [68:514:2346] EventType# 268830214 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerHighUsageAndColumnShards [GOOD] >> THiveTest::TestHiveBalancerOneTabletHighUsage |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] >> AnalyzeColumnshard::AnalyzeTwoColumnTables >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] >> AnalyzeDatashard::AnalyzeTwoTables >> TTopicApiDescribes::GetPartitionDescribe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: 
[1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:48.543404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:48.543462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:48.543489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:48.543515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:48.543540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:48.543561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:48.543615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:48.543660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:48.544275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:48.544479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:48.629265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:48.629320Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:48.634970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:48.635191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:48.635306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:48.638374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:48.638552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:48.639257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:48.639457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:48.641202Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:48.641367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:48.642081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:48.642126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:48.642238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:48.642269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:48.642297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:48.642372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:48.796008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.796964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.797115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.797191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.797255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables 
} } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.797315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.797401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.797508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.797578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.797660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.797718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.797809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.797933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.798026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:48.798102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:55:22.318361Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:55:22.318399Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:55:22.318438Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:55:22.318478Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:55:22.318560Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-12-23T18:55:22.320096Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6724: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1140 } } CommitVersion { Step: 5000038 TxId: 101 } 2025-12-23T18:55:22.320146Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:55:22.320289Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 
ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1140 } } CommitVersion { Step: 5000038 TxId: 101 } 2025-12-23T18:55:22.320417Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1140 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2025-12-23T18:55:22.321889Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 712 RawX2: 120259086986 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:55:22.321935Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-12-23T18:55:22.322055Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 712 RawX2: 120259086986 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:55:22.322119Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:55:22.322217Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 712 RawX2: 120259086986 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-12-23T18:55:22.322284Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:22.322326Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:22.322368Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:55:22.322414Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2025-12-23T18:55:22.323593Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:55:22.325079Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:55:22.325483Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 
72057594046678944 2025-12-23T18:55:22.326526Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:22.326809Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:22.326858Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2025-12-23T18:55:22.326974Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:55:22.327016Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:55:22.327059Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2025-12-23T18:55:22.327097Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:55:22.327139Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-12-23T18:55:22.327214Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:740:2718] message: TxId: 101 2025-12-23T18:55:22.327273Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-12-23T18:55:22.327324Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:55:22.327359Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:55:22.327488Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:55:22.329081Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:55:22.329126Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:741:2719] TestWaitNotification: OK eventTxId 101 2025-12-23T18:55:22.329588Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:55:22.329789Z node 28 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 231us result status StatusSuccess 2025-12-23T18:55:22.330346Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2025-12-23T18:55:11.396704Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128926761955671:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:55:11.398646Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:55:11.452897Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:55:11.455802Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128926622853251:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:55:11.455876Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:55:11.467981Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004c51/r3tmp/tmpquWBFt/pdisk_1.dat 2025-12-23T18:55:11.632876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:55:11.636257Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:55:11.774813Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:11.774937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:11.775917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:11.775992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:11.790885Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:11.791040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:11.818550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:11.836562Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:11.837344Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128926761955611:2082] 1766516111386105 != 1766516111386108 2025-12-23T18:55:11.845990Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:55:11.914247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 25355, node 1 2025-12-23T18:55:12.119300Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/004c51/r3tmp/yandexAs2N9l.tmp 2025-12-23T18:55:12.119343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/004c51/r3tmp/yandexAs2N9l.tmp 2025-12-23T18:55:12.120417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/004c51/r3tmp/yandexAs2N9l.tmp 2025-12-23T18:55:12.120529Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:12.326559Z INFO: TTestServer started on Port 31655 GrpcPort 25355 
2025-12-23T18:55:12.417815Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:55:12.463840Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:31655 PQClient connected to localhost:25355 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 1766516111934 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:55:12.629641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:55:12.669515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:55:14.372215Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128939507755532:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.372236Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128939507755521:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.372353Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.372756Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128939507755538:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.372821Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.377033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:14.400553Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128939507755537:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-12-23T18:55:14.479558Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128939507755568:2183] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:14.719357Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587128939646859000:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:55:14.719357Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587128939507755582:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:55:14.719905Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=YzFmMTY2ZTktNmUzNGUwODctY2I2YzRiZTItNjg3MzIyMzA=, ActorId: [2:7587128939507755513:2303], ActorState: ExecuteState, TraceId: 01kd691gy22wg0t4wbadjaccqg, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:55:14.723221Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:55:14.725385Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb:/ ... 
it] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7587128969572527142:2367] 2025-12-23T18:55:21.383039Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][14][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 1 [1:7587128969711631309:2452] 2025-12-23T18:55:21.384563Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037898][Partition][11][StateInit] bootstrapping 11 [1:7587128969711631311:2452] 2025-12-23T18:55:21.387134Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037895][Partition][9][StateInit] bootstrapping 9 [2:7587128969572527164:2368] 2025-12-23T18:55:21.387885Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][7][StateInit] bootstrapping 7 [2:7587128969572527168:2371] 2025-12-23T18:55:21.387136Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][11][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 1 [1:7587128969711631311:2452] 2025-12-23T18:55:21.387567Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037896][Partition][5][StateInit] bootstrapping 5 [1:7587128969711631320:2453] 2025-12-23T18:55:21.389278Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][9][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [2:7587128969572527164:2368] 2025-12-23T18:55:21.389515Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037896][Partition][5][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [1:7587128969711631320:2453] 2025-12-23T18:55:21.389820Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:55:21.389820Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][7][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 1 [2:7587128969572527168:2371] 2025-12-23T18:55:21.391082Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037895][Partition][2][StateInit] bootstrapping 2 [2:7587128969572527166:2368] 2025-12-23T18:55:21.391081Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037896][Partition][10][StateInit] bootstrapping 10 [1:7587128969711631322:2453] 2025-12-23T18:55:21.391325Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][13][StateInit] bootstrapping 13 [2:7587128969572527171:2371] 2025-12-23T18:55:21.393258Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037896][Partition][10][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [1:7587128969711631322:2453] 2025-12-23T18:55:21.393509Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7587128969572527166:2368] 2025-12-23T18:55:21.393524Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][13][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7587128969572527171:2371] 2025-12-23T18:55:21.393652Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037893][Partition][1][StateInit] bootstrapping 1 [1:7587128969711631342:2451] 2025-12-23T18:55:21.395728Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037893][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 1 [1:7587128969711631342:2451] 
2025-12-23T18:55:21.395859Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:55:21.397490Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037893][Partition][6][StateInit] bootstrapping 6 [1:7587128969711631343:2451] 2025-12-23T18:55:21.397965Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:55:21.397984Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:55:21.399675Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037893][Partition][6][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 1 [1:7587128969711631343:2451] 2025-12-23T18:55:21.400122Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:55:21.400470Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:55:21.403061Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:55:21.406205Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig ===Query complete Create topic result: 1 2025-12-23T18:55:21.422095Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7587128969711631431:3984]: Request location 2025-12-23T18:55:21.422393Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128969711631440:3987] connected; active server actors: 1 2025-12-23T18:55:21.422720Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 1 2025-12-23T18:55:21.422762Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 1 2025-12-23T18:55:21.422775Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 1 2025-12-23T18:55:21.422788Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 1 2025-12-23T18:55:21.422797Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 1 2025-12-23T18:55:21.422803Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 1 2025-12-23T18:55:21.422811Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was 
added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 1 2025-12-23T18:55:21.422819Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 1 2025-12-23T18:55:21.422825Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 1 2025-12-23T18:55:21.422831Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 1 2025-12-23T18:55:21.422836Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 1 2025-12-23T18:55:21.423043Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7587128969711631431:3984]: Got location 2025-12-23T18:55:21.422842Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 1 2025-12-23T18:55:21.422848Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 1 2025-12-23T18:55:21.422854Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 1 2025-12-23T18:55:21.422859Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 1 2025-12-23T18:55:21.423278Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128969711631440:3987] disconnected. 
2025-12-23T18:55:21.423305Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128969711631440:3987] disconnected; active server actors: 1 2025-12-23T18:55:21.423319Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128969711631440:3987] disconnected no session 2025-12-23T18:55:21.423696Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7587128969711631441:3988]: Request location 2025-12-23T18:55:21.423895Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128969711631443:3990] connected; active server actors: 1 2025-12-23T18:55:21.424106Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 1 2025-12-23T18:55:21.424121Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 1 2025-12-23T18:55:21.424129Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 1 2025-12-23T18:55:21.424236Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7587128969711631441:3988]: Got location 2025-12-23T18:55:21.424418Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128969711631443:3990] disconnected. 
2025-12-23T18:55:21.424437Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128969711631443:3990] disconnected; active server actors: 1 2025-12-23T18:55:21.424446Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128969711631443:3990] disconnected no session 2025-12-23T18:55:21.424599Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7587128969711631444:3991]: Request location 2025-12-23T18:55:21.424830Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128969711631446:3993] connected; active server actors: 1 >> TTopicApiDescribes::DescribeConsumer [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] Test command err: 2025-12-23T18:55:11.398528Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128928622068957:2171];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:55:11.399146Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:55:11.453608Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:55:11.471227Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128927465620854:2098];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:55:11.476323Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004c56/r3tmp/tmpp7XxTq/pdisk_1.dat 2025-12-23T18:55:11.497268Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:55:11.702837Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:55:11.713165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:55:11.776146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:11.776247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:11.776855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:11.776910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:11.790798Z node 1 :HIVE WARN: 
hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:11.791043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:11.837103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:11.841054Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128928622068795:2082] 1766516111383860 != 1766516111383863 2025-12-23T18:55:11.850012Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16365, node 1 2025-12-23T18:55:11.941629Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:55:12.006847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:55:12.119307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/004c56/r3tmp/yandexRyOwi3.tmp 2025-12-23T18:55:12.119367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/004c56/r3tmp/yandexRyOwi3.tmp 2025-12-23T18:55:12.120406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/004c56/r3tmp/yandexRyOwi3.tmp 2025-12-23T18:55:12.120531Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:12.326326Z INFO: TTestServer started on Port 22321 GrpcPort 16365 2025-12-23T18:55:12.400986Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:55:12.490593Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22321 PQClient connected to localhost:16365 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516111934 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:55:12.582021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:55:12.613535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:55:14.490416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128941506972148:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.490419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128941506972139:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.490533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.490969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587128941506972156:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.491036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.493263Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128940350523108:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.493264Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128940350523097:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.493337Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.493547Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128940350523113:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.493604Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.493908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:14.511703Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587128941506972155:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-12-23T18:55:14.546758Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128940350523115:2176] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:55:14.547658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-12-23T18:55:14.547976Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128940350523112:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-12-23T18:55:14.611474Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587128941506972257:3073] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:14.629189Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128940350523141:2182] txid# 281474976720658, issues: { message: " ... nanos: 556000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 13 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1766516121 nanos: 574000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1766516121 nanos: 578000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 14 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1766516121 nanos: 538000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1766516121 nanos: 552000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } } } } 2025-12-23T18:55:22.557055Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2025-12-23T18:55:22.557146Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:484: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" include_location: true 2025-12-23T18:55:22.557754Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7587128975866712799:2523]: Request location 2025-12-23T18:55:22.558099Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128975866712801:2524] connected; active server actors: 1 2025-12-23T18:55:22.558327Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 2 2025-12-23T18:55:22.558346Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2025-12-23T18:55:22.558383Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 2 2025-12-23T18:55:22.558398Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2025-12-23T18:55:22.558410Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 2 2025-12-23T18:55:22.558418Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, 
NodeId 1, Generation 2 2025-12-23T18:55:22.558428Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 2 2025-12-23T18:55:22.558440Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 2 2025-12-23T18:55:22.558451Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 2 2025-12-23T18:55:22.558460Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 2 2025-12-23T18:55:22.558470Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 2 2025-12-23T18:55:22.558480Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 2 2025-12-23T18:55:22.558490Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 2 2025-12-23T18:55:22.558499Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 2 2025-12-23T18:55:22.558509Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 2 2025-12-23T18:55:22.558666Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7587128975866712799:2523]: Got location 2025-12-23T18:55:22.558905Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128975866712801:2524] disconnected. 
2025-12-23T18:55:22.558926Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128975866712801:2524] disconnected; active server actors: 1 2025-12-23T18:55:22.558937Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128975866712801:2524] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1766516121391 tx_id: 281474976715672 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-12-23T18:55:22.561644Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2025-12-23T18:55:22.561716Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:484: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1766516121391 tx_id: 281474976715672 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } 
} } } 2025-12-23T18:55:22.564424Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2025-12-23T18:55:22.564515Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:484: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2025-12-23T18:55:11.396538Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128927118013089:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:55:11.396659Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:55:11.461893Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128927107605288:2087];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:55:11.463127Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:55:11.467056Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:55:11.477980Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:11.478498Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004c57/r3tmp/tmpIzRtE6/pdisk_1.dat 2025-12-23T18:55:11.625809Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:55:11.626865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:55:11.761921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:11.762037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:11.770228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:11.770312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:11.782191Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:11.782426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-12-23T18:55:11.806387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:11.824567Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:11.830478Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128927118013039:2082] 1766516111383868 != 1766516111383871 2025-12-23T18:55:11.854517Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:55:11.864615Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 17599, node 1 2025-12-23T18:55:12.119212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/004c57/r3tmp/yandexNQx83m.tmp 2025-12-23T18:55:12.119251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/004c57/r3tmp/yandexNQx83m.tmp 2025-12-23T18:55:12.120436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/004c57/r3tmp/yandexNQx83m.tmp 2025-12-23T18:55:12.120540Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:12.326657Z INFO: TTestServer started on Port 13356 GrpcPort 17599 2025-12-23T18:55:12.413338Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:55:12.474269Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13356 PQClient connected to localhost:17599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516111934 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:55:12.564117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:55:12.598235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:55:14.273986Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128939992507554:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.274047Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128939992507550:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.274374Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.278689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:14.297106Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128939992507564:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-12-23T18:55:14.395286Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128939992507601:2183] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:14.730892Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587128939992507614:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:55:14.731391Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=YWY2NTliMy03ZmU5NDc5NS05ZWJjZWU0OC04OThlNzAxYQ==, ActorId: [2:7587128939992507548:2303], ActorState: ExecuteState, TraceId: 01kd691gv0db2ka8pvsekp2ygq, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:55:14.734080Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:55:14.740044Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587128940002916452:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:55:14.741772Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YWIzNGEwNGMtNGZlOWNlMmItYWEyNWFmNzMtZjYzMmFkMzA=, ActorId: [1:7587128940002916403:2328], ActorState: ExecuteState, TraceId: 01kd691gyeab9ynz10j3d7enh1, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: ... iteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:55:21.534812Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037893][Partition][6][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 2 [1:7587128970067689241:2473] 2025-12-23T18:55:21.535031Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037896][Partition][5][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 2 [1:7587128970067689266:2475] 2025-12-23T18:55:21.535183Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037896][Partition][10][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 2 [1:7587128970067689270:2475] 2025-12-23T18:55:21.535193Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][7][StateInit] bootstrapping 7 [2:7587128970057279572:2382] 2025-12-23T18:55:21.535358Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037895][Partition][2][StateInit] bootstrapping 2 [2:7587128970057279545:2381] 2025-12-23T18:55:21.537754Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][13][StateInit] bootstrapping 13 [2:7587128970057279573:2382] 2025-12-23T18:55:21.537925Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:8:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:55:21.538005Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:12:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:55:21.538020Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037899, NodeId 1, Generation 2 2025-12-23T18:55:21.538052Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 2 2025-12-23T18:55:21.538068Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037896, NodeId 1, Generation 2 2025-12-23T18:55:21.538202Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][8][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [1:7587128970067689303:2476] 2025-12-23T18:55:21.538311Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][12][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [1:7587128970067689305:2476] 2025-12-23T18:55:21.541585Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-12-23T18:55:21.541660Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:55:21.541873Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037892][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 2 [2:7587128970057279506:2380] 2025-12-23T18:55:21.541995Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037892][Partition][3][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 2 [2:7587128970057279504:2380] 2025-12-23T18:55:21.543729Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:11:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:55:21.543802Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:14:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:55:21.544056Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][11][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 2 [2:7587128970057279515:2383] 2025-12-23T18:55:21.544208Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][14][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [2:7587128970057279514:2383] 2025-12-23T18:55:21.545556Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:7:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:55:21.545655Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:13:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:55:21.545901Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][7][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7587128970057279572:2382] 2025-12-23T18:55:21.546030Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][13][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7587128970057279573:2382] 2025-12-23T18:55:21.546494Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 1, Generation 2 2025-12-23T18:55:21.546522Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 2, Generation 2 2025-12-23T18:55:21.546536Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 2, Generation 2 2025-12-23T18:55:21.546550Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2025-12-23T18:55:21.547812Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:9:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:55:21.547889Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-12-23T18:55:21.548120Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][9][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [2:7587128970057279544:2381] 2025-12-23T18:55:21.548222Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [2:7587128970057279545:2381] 2025-12-23T18:55:21.548717Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 2, Generation 2 2025-12-23T18:55:22.417269Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2025-12-23T18:55:22.417371Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1220: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2025-12-23T18:55:22.417412Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1230: TDescribePartitionActor[1:7587128974362656767:2493]: Bootstrap 2025-12-23T18:55:22.417768Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7587128974362656767:2493]: Request location 2025-12-23T18:55:22.418153Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128974362656770:2494] connected; active server actors: 1 2025-12-23T18:55:22.418501Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2025-12-23T18:55:22.418621Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7587128974362656767:2493]: Got location 2025-12-23T18:55:22.418935Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128974362656770:2494] disconnected. 
2025-12-23T18:55:22.418968Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128974362656770:2494] disconnected; active server actors: 1 2025-12-23T18:55:22.418983Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128974362656770:2494] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-12-23T18:55:22.420612Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2025-12-23T18:55:22.420690Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1220: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2025-12-23T18:55:22.420723Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1230: TDescribePartitionActor[1:7587128974362656771:2495]: Bootstrap 2025-12-23T18:55:22.421117Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7587128974362656771:2495]: Request location 2025-12-23T18:55:22.421546Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128974362656774:2497] connected; active server actors: 1 2025-12-23T18:55:22.421941Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2025-12-23T18:55:22.422066Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7587128974362656771:2495]: Got location 2025-12-23T18:55:22.422292Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128974362656774:2497] disconnected. 
2025-12-23T18:55:22.422314Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128974362656774:2497] disconnected; active server actors: 1 2025-12-23T18:55:22.422326Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128974362656774:2497] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1766516121 nanos: 524000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_location { node_id: 2 generation: 2 } } } } } 2025-12-23T18:55:22.424205Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2025-12-23T18:55:22.424270Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1220: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-12-23T18:55:22.424306Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1230: TDescribePartitionActor[1:7587128974362656776:2498]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } |98.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] Test command err: 2025-12-23T18:55:11.422879Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128928564718602:2082];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:55:11.426484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:55:11.481288Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128925780270100:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:55:11.481352Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:55:11.486070Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004c55/r3tmp/tmpbHg8fZ/pdisk_1.dat 2025-12-23T18:55:11.507564Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:55:11.750188Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:55:11.750447Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:55:11.799310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:11.799462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:11.801396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:11.801505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:11.807063Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:11.807223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:11.828499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:11.899774Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23175, node 1 2025-12-23T18:55:11.932910Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:55:12.038059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:55:12.119178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/004c55/r3tmp/yandexP1sQWA.tmp 2025-12-23T18:55:12.119203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/004c55/r3tmp/yandexP1sQWA.tmp 2025-12-23T18:55:12.120427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/004c55/r3tmp/yandexP1sQWA.tmp 2025-12-23T18:55:12.120534Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:12.326377Z INFO: TTestServer started on Port 6602 GrpcPort 23175 2025-12-23T18:55:12.440108Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6602 PQClient connected to localhost:23175 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T18:55:12.500436Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516111969 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:55:12.551322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:55:12.593413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:55:14.287187Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128938665172378:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.287187Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128938665172387:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.287335Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.287752Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128938665172393:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.287881Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.293716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:14.312603Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128938665172392:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-12-23T18:55:14.384773Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128938665172421:2182] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:14.757675Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587128938665172435:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:55:14.758127Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=ZGQ5YzJlMWUtN2QzZGJjMjUtN2E4ZGU2YzEtYjE3MDA4MDk=, ActorId: [2:7587128938665172376:2303], ActorState: ExecuteState, TraceId: 01kd691gvc9n0x2nafajtgmqcz, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:55:14.760101Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:55:14.761508Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587128941449621946:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:55:14.763768Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YzNhYmEyZGItZDQzZTVhYWEtYTRlMjkyOTktNzgwNzkxNzA=, ActorId: [1:7587128941449621894:2326], ActorState: ExecuteState, TraceId: 01kd691h536bxeks7ck92v3dqm, ReplyQueryCompileError, statu ... ats { min_last_write_time { seconds: 1766516121 nanos: 835000000 } max_write_time_lag { } bytes_written { } } } } } Describe topic with location 2025-12-23T18:55:22.851363Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2025-12-23T18:55:22.851486Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2025-12-23T18:55:22.852149Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7587128975809362361:2531]: Request location 2025-12-23T18:55:22.852547Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128975809362363:2532] connected; active server actors: 1 2025-12-23T18:55:22.852787Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 2 2025-12-23T18:55:22.852808Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2025-12-23T18:55:22.852818Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 2 2025-12-23T18:55:22.852827Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2025-12-23T18:55:22.852835Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 2 2025-12-23T18:55:22.852845Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 2 2025-12-23T18:55:22.852852Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 2 2025-12-23T18:55:22.852861Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 2 2025-12-23T18:55:22.852869Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 2 2025-12-23T18:55:22.852877Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The 
partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 2 2025-12-23T18:55:22.852885Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 2 2025-12-23T18:55:22.852892Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 2 2025-12-23T18:55:22.852899Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 2 2025-12-23T18:55:22.852905Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 2 2025-12-23T18:55:22.852912Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 2 2025-12-23T18:55:22.853277Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7587128975809362361:2531]: Got location 2025-12-23T18:55:22.853551Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128975809362363:2532] disconnected. 2025-12-23T18:55:22.853577Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128975809362363:2532] disconnected; active server actors: 1 2025-12-23T18:55:22.853589Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7587128975809362363:2532] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1766516121769 tx_id: 281474976710671 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 
generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } } } } Describe topic with no stats or location 2025-12-23T18:55:22.856778Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2025-12-23T18:55:22.856879Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1766516121769 tx_id: 281474976710671 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } } } } Describe bad topic 2025-12-23T18:55:22.860346Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2025-12-23T18:55:22.860439Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path /Root/PQ//bad-topic Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> AnalyzeColumnshard::AnalyzeShard ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] Test command err: 2025-12-23T18:55:11.397469Z node 1 :METADATA_PROVIDER 
WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128925146993508:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:55:11.398381Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:55:11.452591Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:55:11.459138Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128928564035197:2158];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:55:11.459329Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004c50/r3tmp/tmpQuu7hW/pdisk_1.dat 2025-12-23T18:55:11.483699Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:55:11.627530Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:55:11.672330Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:55:11.768819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:11.768964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:11.776266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:11.776355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:11.793686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:11.851069Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:11.855706Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:11.856667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:11.889801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:55:11.904666Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 11444, node 1 2025-12-23T18:55:12.119260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
/home/runner/.ya/build/build_root/8ptz/004c50/r3tmp/yandexn3xvcM.tmp 2025-12-23T18:55:12.119286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/004c50/r3tmp/yandexn3xvcM.tmp 2025-12-23T18:55:12.120393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/004c50/r3tmp/yandexn3xvcM.tmp 2025-12-23T18:55:12.120501Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:12.326374Z INFO: TTestServer started on Port 25661 GrpcPort 11444 2025-12-23T18:55:12.398598Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:55:12.464100Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25661 PQClient connected to localhost:11444 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516111941 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:55:12.559119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:55:12.593281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:55:14.217822Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128941448937391:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.217985Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.218254Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128941448937402:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.218304Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587128941448937405:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.218332Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:14.227506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:14.245444Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587128941448937407:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-12-23T18:55:14.326802Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587128941448937434:2181] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:14.734539Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587128938031896862:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:55:14.736368Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=Zjg1YTk1MGMtNmRmNzJiNzAtODFkZjI4MmYtYmJiOTAzM2M=, ActorId: [1:7587128938031896826:2329], ActorState: ExecuteState, TraceId: 01kd691h1b4q33t7qxahgc7f4d, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:55:14.737783Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:55:14.739999Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7587128941448937448:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:55:14.740276Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=2&id=OTBkNTc0N2ItNzYxYzE0NTUtYTBhOWNhOTgtNjczNTBlODQ=, ActorId: [2:7587128941448937389:2303], ActorState: ExecuteState, TraceId: 01kd691gs76jtc6mv3n2kr1ytv, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:55:14.740534Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:55:14.762678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:14.804593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:14.886291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7587128942326864614:3386] 2025-12-23T18:55:16.392121Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587128925146993508:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:55:16.392191Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:55:16.458704Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587128928564035197:2158];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:55:16.458822Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok |98.1%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeEmptyTable |98.1%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.1%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:54:43.731273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:43.731341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:43.731376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:43.731403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:43.731438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:43.731465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing 
config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:43.731507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:43.731556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:43.732172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:43.732370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:43.791694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:43.791741Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:43.796354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:43.796523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:43.796622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:43.799513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:43.799712Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:43.800415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:43.800711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:43.802427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:43.802624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:43.803500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:43.803546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:43.803649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:43.803682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:43.803715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:43.803807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:43.916610Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.917596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.917733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.917804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.917860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.917921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.917977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.918084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.918153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.918231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.918311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.918401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.918520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.918610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:43.918682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
-23T18:55:26.945784Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:26.945888Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-12-23T18:55:26.945984Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-12-23T18:55:26.946048Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2025-12-23T18:55:26.948031Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:26.948119Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:55:26.948199Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2025-12-23T18:55:26.949784Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:26.949848Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:26.949962Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:55:26.950067Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-12-23T18:55:26.950328Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:55:26.951863Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-12-23T18:55:26.952107Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2025-12-23T18:55:26.952587Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:26.952799Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 158913792111 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:55:26.952912Z node 37 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:55:26.953262Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2025-12-23T18:55:26.953336Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-12-23T18:55:26.953637Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:55:26.953741Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-12-23T18:55:26.955312Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:26.955391Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:26.955693Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:26.955764Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [37:208:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-12-23T18:55:26.956191Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-12-23T18:55:26.956261Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-12-23T18:55:26.956480Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:55:26.956541Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:55:26.956614Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2025-12-23T18:55:26.956675Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:55:26.956744Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-12-23T18:55:26.956812Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-12-23T18:55:26.956879Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2025-12-23T18:55:26.956936Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1:0 2025-12-23T18:55:26.957029Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:55:26.957087Z 
node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-12-23T18:55:26.957146Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-12-23T18:55:26.957765Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:55:26.957906Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2025-12-23T18:55:26.957973Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-12-23T18:55:26.958041Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-12-23T18:55:26.958108Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-12-23T18:55:26.958217Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-12-23T18:55:26.960451Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-12-23T18:55:26.961240Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-12-23T18:55:26.962654Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [37:673:2662] Bootstrap 2025-12-23T18:55:26.964949Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [37:673:2662] Become StateWork (SchemeCache [37:678:2667]) 2025-12-23T18:55:26.967831Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:55:26.968369Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-12-23T18:55:26.968536Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-12-23T18:55:26.969241Z node 37 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-12-23T18:55:26.970445Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [37:673:2662] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-12-23T18:55:26.972351Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:55:26.972714Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-12-23T18:55:26.973664Z node 37 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> THiveTest::TestHiveBalancerOneTabletHighUsage [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/8ptz/005644/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk14/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_sid_is_unexpected/audit_log.c1g21xv9.txt 2025-12-23T18:55:08.133781Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:55:08.133734Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-12-23T18:55:08.083380Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-12-23T18:55:08.369042Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:55:08.369008Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-12-23T18:55:08.241369Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-12-23T18:55:08.564823Z: 
{"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:55:08.564799Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-12-23T18:55:08.476167Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-12-23T18:55:08.779023Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:55:08.778998Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-12-23T18:55:08.674291Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-12-23T18:55:08.936271Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:55:08.936240Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-12-23T18:55:08.886539Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-12-23T18:55:09.090601Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-12-23T18:55:09.090575Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-12-23T18:55:09.044404Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes >> TraverseDatashard::TraverseTwoTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:52:42.447244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:52:42.447345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:42.447384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:52:42.447417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:52:42.447478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:52:42.447504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:52:42.447555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:52:42.447653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:52:42.448476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:52:42.448747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:52:42.533159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:52:42.533231Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:42.538760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:52:42.539017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:52:42.539169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:52:42.542292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:52:42.542481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:52:42.543185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:52:42.543440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:52:42.545256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:42.545428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:52:42.546662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:52:42.546734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:52:42.546919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:52:42.546969Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:52:42.547028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:52:42.547163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:52:42.690302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.691300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.691439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.691528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.691606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.691667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.691746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.691815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.691904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.691962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.692033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.692142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.692229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.692319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:52:42.692392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... mpleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 101 Memory: 124368 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 131 TableOwnerId: 72057594046678944 FollowerId: 0 2025-12-23T18:55:28.872082Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5284: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-12-23T18:55:28.872126Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0101 2025-12-23T18:55:28.872233Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:55:28.872273Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-12-23T18:55:28.914027Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:55:28.914120Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:55:28.914152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-12-23T18:55:28.914239Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2025-12-23T18:55:28.914299Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-12-23T18:55:28.914441Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2025-12-23T18:55:28.914514Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2025-12-23T18:55:28.914554Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2025-12-23T18:55:28.914692Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2025-12-23T18:55:28.914786Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:55:28.925355Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:55:28.925431Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:55:28.925466Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:55:28.956704Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:1119:3082]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T18:55:28.957070Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 39 2025-12-23T18:55:28.957527Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 269553162, Sender [3:1119:3082], Recipient [3:128:2152]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 39 Generation: 2 Round: 6 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 40 Memory: 124368 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 302 TableOwnerId: 72057594046678944 FollowerId: 0 2025-12-23T18:55:28.957579Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5284: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-12-23T18:55:28.957638Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.004 2025-12-23T18:55:28.957788Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 
0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-12-23T18:55:28.957880Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-12-23T18:55:28.999605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:92: Operation queue wakeup 2025-12-23T18:55:28.999738Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:65: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 39], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-12-23T18:55:28.999805Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 39], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2025-12-23T18:55:28.999925Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 3 seconds 2025-12-23T18:55:28.999963Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-12-23T18:55:29.000149Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:55:29.000210Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:55:29.000263Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-12-23T18:55:29.000345Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2025-12-23T18:55:29.000386Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-12-23T18:55:29.000526Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 13940 row count 100 2025-12-23T18:55:29.000601Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=CopyTable, is column=0, is olap=0, RowCount 100, DataSize 13940, with borrowed parts 2025-12-23T18:55:29.000639Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2025-12-23T18:55:29.000761Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:582: Want to split tablet 72075186233409547 by size: split by size (shardCount: 1, maxShardCount: 2, shardSize: 13940, maxShardSize: 1) 2025-12-23T18:55:29.000800Z node 3 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:669: Postpone split tablet 72075186233409547 because it has borrow parts, enqueue compact them first 2025-12-23T18:55:29.000844Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-12-23T18:55:29.000950Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-12-23T18:55:29.011600Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-12-23T18:55:29.011691Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5445: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-12-23T18:55:29.011733Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-12-23T18:55:29.220665Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:55:29.220757Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-12-23T18:55:29.220884Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, received event# 271124999, Sender [3:128:2152], Recipient [3:128:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-12-23T18:55:29.220924Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5254: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |98.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:19.451535Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:19.552416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:19.559126Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:19.559509Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:19.559643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:19.980074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.072851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.072988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:20.107579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:20.177949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004954/r3tmp/tmpNvZrW0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12904, node 1 2025-12-23T18:55:21.200194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.201105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.201173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.201207Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.201719Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.264405Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22744 2025-12-23T18:55:21.810396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:24.933660Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:24.940897Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:24.945107Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:24.975561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.975685Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.015007Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:25.016368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.161828Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.162311Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.162841Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.163427Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.163633Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.163738Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.163909Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.164049Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.164162Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.329527Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:25.351336Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:25.351437Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.364126Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.499427Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.543803Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:25.543893Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:25.574200Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:25.575467Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:25.575629Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:25.575677Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:25.575714Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:25.575757Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:25.575797Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:25.575834Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:25.576591Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:25.694449Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.694555Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.709959Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:55:25.711421Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:55:25.829458Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:55:25.829812Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:25.831890Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:55:25.831952Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:55:25.832021Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:25.834795Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.838131Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:25.844218Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:25.844314Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:25.856542Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:26.005095Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:26.417404Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:26.516246Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:26.516365Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2 ... workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:27.366175Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:27.770469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3104:3439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:27.770628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:27.882760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3108:3442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:27.882900Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:27.883742Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3111:3445]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:27.883917Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:27.884003Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3113:3447] 2025-12-23T18:55:27.885189Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3113:3447] 2025-12-23T18:55:27.885740Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3114:3202] 2025-12-23T18:55:27.885934Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3113:3447], server id = [2:3114:3202], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:27.886094Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3114:3202], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:27.887165Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-12-23T18:55:27.887403Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-12-23T18:55:27.887517Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3111:3445], StatRequests.size() = 1 2025-12-23T18:55:27.933170Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:55:27.933668Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3118:3451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:27.933831Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:27.934517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3122:3455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:27.934654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3124:3457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:27.934772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:27.943318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:28.069021Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:55:28.069087Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:55:28.133060Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3113:3447], schemeshard count = 1 2025-12-23T18:55:28.401450Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3127:3460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-12-23T18:55:28.560766Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3230:3523] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:28.575637Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3253:3539]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:28.575782Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:28.575811Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3253:3539], StatRequests.size() = 1 2025-12-23T18:55:28.947269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:29.341281Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3599:3604]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:29.341443Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:29.341485Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:3599:3604], StatRequests.size() = 1 2025-12-23T18:55:29.363780Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3608:3613]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:29.363925Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T18:55:29.363952Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [1:3608:3613], StatRequests.size() = 1 2025-12-23T18:55:29.464426Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3658:3465]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T18:55:29.466907Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:29.466957Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:29.467178Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:29.467209Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:55:29.467247Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:29.478988Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 
2025-12-23T18:55:29.490597Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-12-23T18:55:29.491074Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-12-23T18:55:29.491323Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2025-12-23T18:55:29.491415Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2025-12-23T18:55:29.491777Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3686:3481]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T18:55:29.494801Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:29.494858Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:29.495339Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:29.495397Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:55:29.495452Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:29.495683Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-12-23T18:55:29.498635Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2025-12-23T18:55:29.499153Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 2025-12-23T18:55:29.499367Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2025-12-23T18:55:29.499729Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} >> TraverseDatashard::TraverseOneTableServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:22.803599Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:22.900044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:22.905499Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:22.905785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:22.905889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:23.206677Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:23.284455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:23.284580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:23.342082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:23.378052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00493f/r3tmp/tmpXCXSwr/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22040, node 1 2025-12-23T18:55:24.038241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:24.039352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:24.039403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:24.039434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:24.039667Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:24.101354Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:65364 2025-12-23T18:55:24.623018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:26.989705Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:26.994308Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:26.996605Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:27.019919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:27.020004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:27.057103Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:27.058710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:27.286721Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.287422Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.288206Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.288933Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.289224Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.289384Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.289535Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.289680Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.289847Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.406776Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:55:27.418455Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:27.418554Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:27.431565Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:27.601552Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:27.641288Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:27.641393Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:27.669267Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:27.670632Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:27.670845Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:27.670913Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column 
count# 0 2025-12-23T18:55:27.670963Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:27.671006Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:27.671056Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:27.671103Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:27.671446Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:27.710768Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:27.710860Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:27.720460Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:27.727414Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:27.728438Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:27.732481Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:55:27.755974Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:27.756020Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:55:27.756080Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:55:27.772966Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:27.780440Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:27.786391Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:27.786477Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:27.795408Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:27.891310Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:28.189241Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:28.303717Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:28.303784Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Column diff is empty, finishing 2025-12-23T18:55:28.930716Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:55:28.958077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:29.611678Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:29.656481Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8273: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-12-23T18:55:29.656553Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8289: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-12-23T18:55:29.656623Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:3096:3135], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-12-23T18:55:29.661923Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3106:3140] 2025-12-23T18:55:29.662687Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3106:3140], schemeshard id = 72075186224037899 2025-12-23T18:55:31.034685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3447:3576], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.034829Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.035204Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3465:3581], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.035263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.050179Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:31.429865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3749:3626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.429976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.485057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3753:3629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.485183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.486226Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3756:3632]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:31.486429Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:31.486576Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-12-23T18:55:31.486620Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3759:3635] 2025-12-23T18:55:31.486683Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3759:3635] 2025-12-23T18:55:31.487255Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3760:3557] 2025-12-23T18:55:31.487450Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3759:3635], server id = [2:3760:3557], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:31.487646Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3760:3557], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:31.487728Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-12-23T18:55:31.487952Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-12-23T18:55:31.488018Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3756:3632], StatRequests.size() = 1 2025-12-23T18:55:31.503419Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:55:31.503861Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3764:3639], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.503948Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.504482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3768:3643], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.504558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.504679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3771:3646], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:31.510308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:31.631327Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:55:31.631400Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:55:31.652677Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3759:3635], schemeshard count = 1 2025-12-23T18:55:31.902043Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3773:3648], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-12-23T18:55:32.151869Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3893:3722] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:32.164174Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3916:3738]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:32.164354Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:32.164381Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3916:3738], StatRequests.size() = 1 2025-12-23T18:55:32.275303Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3958:3603]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T18:55:32.277886Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:32.277953Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:32.278393Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:32.278438Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-12-23T18:55:32.278477Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:32.300896Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-12-23T18:55:32.305884Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-12-23T18:55:32.306182Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-12-23T18:55:32.306268Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2025-12-23T18:55:32.306345Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TBackupTests::BackupUuidColumn[Raw] >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] Test command err: 2025-12-23T18:54:15.124752Z node 1 
:BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:15.141734Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:15.141955Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:15.142535Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:15.142788Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-12-23T18:54:15.143500Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-12-23T18:54:15.143540Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:15.144130Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:78:2078] ControllerId# 72057594037932033 2025-12-23T18:54:15.144153Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:15.144231Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:15.144303Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:15.153312Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:15.153358Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:15.154920Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:86:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.155018Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:87:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.155103Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:88:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.155187Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:89:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.155305Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:90:2087] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.155397Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:91:2088] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.155469Z node 1 :BS_PROXY DEBUG: 
group_sessions.cpp:83: Group# 0 Actor# [1:77:2077] Create Queue# [1:92:2089] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.155488Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:15.155537Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:78:2078] 2025-12-23T18:54:15.155575Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:78:2078] 2025-12-23T18:54:15.155614Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:15.155651Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:15.156260Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:15.156341Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:15.158850Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:15.158970Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:15.159180Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:15.159309Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:15.159884Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:103:2077] ControllerId# 72057594037932033 2025-12-23T18:54:15.159906Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:15.159943Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:15.159999Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:15.161247Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:15.163205Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:15.163298Z node 3 :BS_NODE DEBUG: 
{NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:15.163506Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:15.163623Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:15.164158Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:114:2077] ControllerId# 72057594037932033 2025-12-23T18:54:15.164184Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:15.164218Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:15.164275Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:15.171165Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:15.171222Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:15.172679Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:121:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.172785Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:122:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.172859Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:123:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.172933Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:124:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.173003Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:125:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.173078Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:126:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.173182Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:113:2076] Create Queue# [3:127:2087] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.173197Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:15.173276Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:114:2077] 2025-12-23T18:54:15.173300Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:114:2077] 2025-12-23T18:54:15.173324Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:15.173349Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:15.173637Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:15.173758Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:15.182546Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: 
TClient[72057594037932033] queue send [1:78:2078] 2025-12-23T18:54:15.182598Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.188940Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:15.188986Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:15.190215Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:133:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.190340Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:134:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.190424Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:135:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.190522Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:136:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.190598Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:102:2076] Create Queue# [2:137:2085] targetNo ... BUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037892 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1976:2267] 2025-12-23T18:55:34.137164Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037892] forward result remote node 67 [62:2105:2493] 2025-12-23T18:55:34.137238Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037892] remote node connected [62:2105:2493] 2025-12-23T18:55:34.137264Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037892]::SendEvent [62:2105:2493] 2025-12-23T18:55:34.137475Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037892] Accept Connect Originator# [62:2105:2493] 2025-12-23T18:55:34.137748Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037892] connected with status OK role: Leader [62:2105:2493] 2025-12-23T18:55:34.137785Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037892] send queued [62:2105:2493] 2025-12-23T18:55:34.138330Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037893] ::Bootstrap [62:2109:2495] 2025-12-23T18:55:34.138355Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037893] lookup [62:2109:2495] 2025-12-23T18:55:34.138390Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037893 entry.State: StNormal leader: [67:1309:2100] followers: 0 ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:55:34.138415Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1309:2100] 2025-12-23T18:55:34.138483Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037893] forward result remote node 67 [62:2109:2495] 2025-12-23T18:55:34.138544Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037893] remote node connected [62:2109:2495] 
2025-12-23T18:55:34.138568Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037893]::SendEvent [62:2109:2495] 2025-12-23T18:55:34.138699Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037893] Accept Connect Originator# [62:2109:2495] 2025-12-23T18:55:34.138927Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037893] connected with status OK role: Leader [62:2109:2495] 2025-12-23T18:55:34.138954Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037893] send queued [62:2109:2495] 2025-12-23T18:55:34.139505Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037894] ::Bootstrap [62:2112:2497] 2025-12-23T18:55:34.139537Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037894] lookup [62:2112:2497] 2025-12-23T18:55:34.139571Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal leader: [66:1313:2143] (known problem) followers: 0 ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:55:34.139594Z node 62 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 0 disallowed 0 2025-12-23T18:55:34.139716Z node 62 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:55:34.139794Z node 62 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2025-12-23T18:55:34.139826Z node 62 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2025-12-23T18:55:34.139851Z node 62 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2025-12-23T18:55:34.139886Z node 62 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [67:1977:2268] CurrentLeaderTablet: [67:1984:2271] CurrentGeneration: 3 CurrentStep: 0} 2025-12-23T18:55:34.139963Z node 62 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [67:1977:2268] CurrentLeaderTablet: [67:1984:2271] CurrentGeneration: 3 CurrentStep: 0} 2025-12-23T18:55:34.140001Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037894 leader: [67:1977:2268] followers: 0 2025-12-23T18:55:34.140034Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1977:2268] 2025-12-23T18:55:34.140089Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037894] forward result remote node 67 [62:2112:2497] 2025-12-23T18:55:34.140160Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037894] remote node connected [62:2112:2497] 2025-12-23T18:55:34.140184Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037894]::SendEvent [62:2112:2497] 
2025-12-23T18:55:34.140387Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037894] Accept Connect Originator# [62:2112:2497] 2025-12-23T18:55:34.140691Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037894] connected with status OK role: Leader [62:2112:2497] 2025-12-23T18:55:34.140717Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037894] send queued [62:2112:2497] 2025-12-23T18:55:34.141428Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037895] ::Bootstrap [62:2116:2499] 2025-12-23T18:55:34.141456Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037895] lookup [62:2116:2499] 2025-12-23T18:55:34.141492Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal leader: [67:1824:2195] followers: 0 ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:55:34.141519Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1824:2195] 2025-12-23T18:55:34.141585Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037895] forward result remote node 67 [62:2116:2499] 2025-12-23T18:55:34.141645Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037895] remote node connected [62:2116:2499] 2025-12-23T18:55:34.141680Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037895]::SendEvent [62:2116:2499] 2025-12-23T18:55:34.141815Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037895] Accept Connect Originator# [62:2116:2499] 2025-12-23T18:55:34.142084Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037895] connected with status OK role: Leader [62:2116:2499] 2025-12-23T18:55:34.142119Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037895] send queued [62:2116:2499] 2025-12-23T18:55:34.142893Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037896] ::Bootstrap [62:2119:2501] 2025-12-23T18:55:34.142938Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037896] lookup [62:2119:2501] 2025-12-23T18:55:34.142988Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal leader: [67:1827:2197] followers: 0 ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:55:34.143026Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1827:2197] 2025-12-23T18:55:34.143106Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037896] forward result remote node 67 [62:2119:2501] 2025-12-23T18:55:34.143197Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037896] remote node connected [62:2119:2501] 2025-12-23T18:55:34.143233Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037896]::SendEvent [62:2119:2501] 2025-12-23T18:55:34.143384Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037896] Accept Connect Originator# [62:2119:2501] 2025-12-23T18:55:34.143617Z node 62 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037896] connected with status OK role: Leader [62:2119:2501] 2025-12-23T18:55:34.143657Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037896] send queued [62:2119:2501] 2025-12-23T18:55:34.144372Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [62:2121:2502] 2025-12-23T18:55:34.144432Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [62:2121:2502] 2025-12-23T18:55:34.144530Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [62:616:2179] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:55:34.144599Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [62:616:2179] 2025-12-23T18:55:34.144693Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [62:2121:2502] 2025-12-23T18:55:34.144767Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [62:2121:2502] 2025-12-23T18:55:34.144823Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [62:2121:2502] 2025-12-23T18:55:34.144893Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [62:2121:2502] 2025-12-23T18:55:34.145012Z node 62 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [62:2121:2502] 2025-12-23T18:55:34.145221Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [62:2121:2502] 2025-12-23T18:55:34.145272Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [62:2121:2502] 2025-12-23T18:55:34.145319Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [62:2121:2502] 2025-12-23T18:55:34.145379Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [62:2121:2502] 2025-12-23T18:55:34.145426Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [62:2121:2502] 2025-12-23T18:55:34.145484Z node 62 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [62:587:2174] EventType# 268697616 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> AnalyzeColumnshard::AnalyzeShard [GOOD] >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch >> TBackupTests::ShouldSucceedOnLargeData[Raw] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 
2025-12-23T18:55:35.811173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:35.811245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:35.811276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:35.811309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:35.811364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:35.811383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:35.811430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:35.811508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:55:35.812174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:35.813250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:35.888920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:55:35.888964Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:35.898141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:35.898281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:35.899035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:35.908600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:35.908835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:35.910326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:35.912387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:35.920586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:35.921448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: 
[RootShredManager] Stop 2025-12-23T18:55:35.926568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:35.926629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:35.926774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:35.926818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:35.926864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:35.927684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.052002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.053754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.053852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.053902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.053949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.054023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.054070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.054123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.054178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.054230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.054273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.054327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.054378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.054437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.054492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... rd: 72057594046678944 2025-12-23T18:55:36.816436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2025-12-23T18:55:36.816513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2025-12-23T18:55:36.816613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:55:36.834247Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:791: [Export] [s3] Bootstrap: self# [1:822:2790], attempt# 0 2025-12-23T18:55:36.862424Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:445: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:822:2790], sender# [1:821:2789] FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2025-12-23T18:55:36.867650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:36.867694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:55:36.867875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:36.867906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:55:36.868189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.868230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:55:36.868669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:55:36.868749Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:55:36.868776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:55:36.868801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:55:36.868827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:55:36.868907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:55:36.871333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:62303 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 03CCD7A0-3B22-454A-8502-EB0151C227DA amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2025-12-23T18:55:36.890625Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:405: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:822:2790], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:62303 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 873164C6-AC3B-4F48-A641-18A0AB7FA938 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-12-23T18:55:36.896801Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:310: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:822:2790], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-12-23T18:55:36.896876Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:821:2789] 2025-12-23T18:55:36.896953Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:463: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:822:2790], sender# [1:821:2789], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:62303 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 421B47EA-6F00-4CDB-A3DA-518EF4DA409C amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 
x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2025-12-23T18:55:36.899220Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:505: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:822:2790], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2025-12-23T18:55:36.899268Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:708: [Export] [s3] Finish: self# [1:822:2790], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-12-23T18:55:36.903246Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:821:2789], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-12-23T18:55:36.905627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 718 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-12-23T18:55:36.905684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:55:36.905855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 718 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-12-23T18:55:36.905976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 718 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-12-23T18:55:36.906055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:36.906093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.906131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:55:36.906178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:55:36.906343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:36.908136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 
72057594046678944 2025-12-23T18:55:36.908350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.908381Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:55:36.908443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:55:36.908465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:36.908498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:55:36.908523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:36.908556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:55:36.908598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:745:2722] message: TxId: 102 2025-12-23T18:55:36.908626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:36.908650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:55:36.908670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:55:36.908779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:55:36.910033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:55:36.910069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:808:2777] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:55:35.811173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:35.811242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:35.811280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:35.811312Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:35.811404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:35.811428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:35.811475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:35.811528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:55:35.812186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:35.813210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:35.888980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:55:35.889022Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:35.898153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:35.898287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:35.899024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:35.908874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:35.909026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:35.910273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:35.912411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:35.920561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:35.921440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:35.926471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:35.926518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:35.926664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:35.926701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:35.926790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:35.927678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.055891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.056783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.056902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.056988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.057059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.057185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.057301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.057398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.057475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.057530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.057567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.057616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.057660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.057702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.057764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... _01.csv HTTP/1.1 HEADERS: Host: localhost:30028 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A02CEB98-AFAF-4D6C-BD5A-A270BDBDE6DA amz-sdk-request: attempt=1 content-length: 11 content-md5: jsMhyzH+cyrvZpBm0dQVGQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv / / 11 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:30028 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F28303BA-94AF-41A8-8AFA-D98F6FDC5C0D amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2025-12-23T18:55:36.890588Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:505: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:894:2849], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2025-12-23T18:55:36.890643Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:708: [Export] [s3] Finish: self# [1:894:2849], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-12-23T18:55:36.890722Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:405: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:887:2844], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } 2025-12-23T18:55:36.894981Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:893:2847], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:30028 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F35C7C3B-153D-470E-BCCD-33A83C8F02E7 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-12-23T18:55:36.901224Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:310: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:887:2844], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-12-23T18:55:36.901295Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:886:2842] 2025-12-23T18:55:36.901350Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:463: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:887:2844], sender# [1:886:2842], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-12-23T18:55:36.903251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: 
Source { RawX1: 736 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:55:36.903315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:30028 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9F15FECC-CE9E-43F7-9336-917162D6CBCE amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-12-23T18:55:36.903458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 736 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:55:36.903528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 736 RawX2: 4294970011 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:55:36.903577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:36.903692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:36.903967Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:505: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:887:2844], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-12-23T18:55:36.903991Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:708: [Export] [s3] Finish: self# [1:887:2844], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-12-23T18:55:36.904101Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:886:2842], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-12-23T18:55:36.909443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.921494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 727 RawX2: 4294970005 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 
OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:55:36.921548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:55:36.921668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 727 RawX2: 4294970005 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:55:36.921735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 727 RawX2: 4294970005 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:55:36.921781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:36.921824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.921863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:55:36.921911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:55:36.921940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:55:36.922072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:36.923620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.924053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.924094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:55:36.924195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:55:36.924225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:36.924253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 
2025-12-23T18:55:36.924294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:36.924334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:55:36.924388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:780:2746] message: TxId: 102 2025-12-23T18:55:36.924430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:36.924466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:55:36.924491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:55:36.924603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:55:36.926024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:55:36.926063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:865:2823] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:55:35.811169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:35.811243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:35.811283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:35.811315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:35.811396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:35.811425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:35.811477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:35.811525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:55:35.812164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:35.813221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:35.889402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:55:35.889442Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:35.898531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:35.898677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:35.899041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:35.908926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:35.909115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:35.910323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:35.912384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:35.920683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:35.921467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:35.926489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:35.926556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:35.926685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:35.926751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:35.926797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:35.927672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.064146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.064876Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.064946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.064991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.065024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.065094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.065154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.065204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.065250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.065302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.065340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.065390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.065431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.065507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.065571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
/1.1 HEADERS: Host: localhost:24089 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E0E40A4B-2E0C-4183-AF65-DA327BD1B4E8 amz-sdk-request: attempt=1 content-length: 20 content-md5: 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:24089 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 50A62989-841C-4E57-A02D-7257400E8768 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2025-12-23T18:55:36.890600Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:405: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:885:2842], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } 2025-12-23T18:55:36.895026Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:505: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:893:2848], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2025-12-23T18:55:36.895104Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:708: [Export] [s3] Finish: self# [1:893:2848], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-12-23T18:55:36.895424Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:892:2846], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:24089 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5879882B-94E3-491B-A423-64FAC691C882 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-12-23T18:55:36.900095Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:310: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:885:2842], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-12-23T18:55:36.900202Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:884:2841] 2025-12-23T18:55:36.900308Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:463: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:885:2842], sender# [1:884:2841], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:24089 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0D0EC9C2-DB25-4D37-800C-5684C9D61D2C amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-12-23T18:55:36.902904Z 
node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:505: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:885:2842], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-12-23T18:55:36.902957Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:708: [Export] [s3] Finish: self# [1:885:2842], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-12-23T18:55:36.903078Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:884:2841], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-12-23T18:55:36.909916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 732 RawX2: 4294970008 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:55:36.909970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:55:36.910086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 732 RawX2: 4294970008 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:55:36.910161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 732 RawX2: 4294970008 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:55:36.910253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:36.910359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:36.910640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 733 RawX2: 4294970009 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:55:36.910664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-12-23T18:55:36.910778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 733 RawX2: 4294970009 } Origin: 72075186233409547 State: 2 
TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:55:36.910826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 733 RawX2: 4294970009 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-12-23T18:55:36.910868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:36.910920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.910957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:55:36.910995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-12-23T18:55:36.911019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:55:36.911087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:36.912640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.912724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.913007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.913040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:55:36.913137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:55:36.913175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:36.913204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:55:36.913226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:36.913253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:55:36.913294Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:782:2748] message: TxId: 102 2025-12-23T18:55:36.913325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:36.913368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:55:36.913393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:55:36.913491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:55:36.914822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:55:36.914859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:867:2825] TestWaitNotification: OK eventTxId 102 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:28.132085Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:28.205157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:28.209160Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:28.209438Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:28.209514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:28.515276Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:28.596258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:28.596409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:28.653445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:28.751644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004930/r3tmp/tmpm1LqDm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12942, node 1 2025-12-23T18:55:29.342092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:29.342850Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:29.342892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:29.342916Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:29.343077Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:29.404085Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10416 2025-12-23T18:55:29.922798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:32.151211Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:32.156213Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:32.158908Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:32.181265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:32.181348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:32.218461Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:32.220107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:32.499603Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:32.500530Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:32.501726Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:32.502681Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:32.503181Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:32.503435Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:32.503675Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:32.503879Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:32.504175Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:32.634302Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:32.646718Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:32.646948Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:32.659738Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:32.810172Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:32.849218Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:32.849319Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:32.871979Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:32.873173Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:32.873346Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:32.873392Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:32.873454Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:32.873502Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:32.873541Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:32.873578Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:32.873892Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:32.917329Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:32.917432Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:32.927781Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:32.935065Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:32.935905Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:32.940350Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:32.970433Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:32.970515Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:55:32.970613Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:32.995968Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:33.004970Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:33.014287Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:33.014439Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:33.027143Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:33.133496Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:33.449119Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:33.554031Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:33.554148Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2 ... :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:55:34.420296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2759:3371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:34.420436Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:34.420808Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2777:3376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:34.420867Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:34.435770Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-12-23T18:55:34.517861Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:34.518120Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:34.518335Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:34.518421Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:34.518528Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:34.518659Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:34.518789Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:34.518905Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:34.518981Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:34.519080Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:34.519169Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:34.519258Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:34.519328Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:34.545432Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:34.545527Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:34.545654Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:34.545697Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:34.545849Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:34.545886Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:55:34.545982Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:34.546029Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:34.546081Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:34.546112Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:34.546150Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:34.546177Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:34.546333Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:34.546366Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:34.546478Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:34.546514Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:34.546562Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:55:34.546599Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:55:34.546639Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:55:34.546669Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:55:34.546860Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:55:34.546926Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-12-23T18:55:34.547005Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-12-23T18:55:34.547053Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-12-23T18:55:34.603885Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2833:3027];ev=NActors::IEventHandle;tablet_id=72075186224037899;tx_id=281474976715659;this=136807162978880;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=2970;max=18446744073709551615;plan=0;src=[2:1942:2455];cookie=121:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:55:34.660188Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:55:34.660304Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2025-12-23T18:55:34.660355Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000014s |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> THealthCheckTest::TestStateStorageBlue [GOOD] >> THealthCheckTest::TestStateStorageYellow >> TBackupTests::BackupUuidColumn[Zstd] >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:29.460531Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:29.535418Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:29.541939Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:29.542202Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:29.542268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:29.869388Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:29.945294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:29.945427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:30.002018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:30.098937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00491d/r3tmp/tmpjPoswZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3707, node 1 2025-12-23T18:55:30.715335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:30.716152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:30.716197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:30.716228Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:30.716418Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:30.777232Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19156 2025-12-23T18:55:31.272103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:33.533170Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:33.538196Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:33.540775Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:33.564761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:33.564852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:33.601689Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:33.603737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:33.828699Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.829296Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.829921Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.830415Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.830593Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.830698Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.830827Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.830977Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.831094Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.938788Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:55:33.950311Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:33.950436Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:33.962981Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:34.106947Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:34.141681Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:34.141776Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:34.162847Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:34.164076Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:34.164248Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:34.164294Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column 
count# 0 2025-12-23T18:55:34.164333Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:34.164376Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:34.164430Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:34.164468Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:34.164780Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:34.202420Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:34.202508Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:34.212058Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:34.218679Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:34.219656Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:34.223887Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:55:34.250332Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:34.250389Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:55:34.250458Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:55:34.273690Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:34.286059Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:34.293542Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:34.293642Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:34.302635Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:34.400978Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:34.731925Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:34.832508Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:34.832581Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. C ... ror: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:37.586297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:38.002609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3742:3624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:38.002846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:38.003517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3746:3627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:38.003596Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:38.004565Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3749:3630]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:38.004748Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:38.004890Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-12-23T18:55:38.004933Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3752:3633] 2025-12-23T18:55:38.004991Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3752:3633] 2025-12-23T18:55:38.005500Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3753:3552] 2025-12-23T18:55:38.005741Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3752:3633], server id = [2:3753:3552], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:38.005974Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3753:3552], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:38.006069Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-12-23T18:55:38.006288Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-12-23T18:55:38.006353Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3749:3630], StatRequests.size() = 1 2025-12-23T18:55:38.021385Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:55:38.021733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3757:3637], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:38.021837Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:38.022302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3761:3641], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:38.022424Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:38.022481Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3764:3644], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:38.027954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:38.125878Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:55:38.125944Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:55:38.147195Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3752:3633], schemeshard count = 1 2025-12-23T18:55:38.417403Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3766:3646], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-12-23T18:55:38.708284Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3884:3717] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:38.720926Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3907:3733]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:38.721098Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:38.721128Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3907:3733], StatRequests.size() = 1 2025-12-23T18:55:38.841624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:39.229529Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4236:3797]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:39.229704Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:39.229739Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:4236:3797], StatRequests.size() = 1 2025-12-23T18:55:39.253871Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4245:3806]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:39.254023Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T18:55:39.254051Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [1:4245:3806], StatRequests.size() = 1 2025-12-23T18:55:39.373315Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4289:3800]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T18:55:39.376067Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:39.376121Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:39.376574Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:39.376622Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-12-23T18:55:39.376662Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:39.383752Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 
2025-12-23T18:55:39.388552Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-12-23T18:55:39.388799Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-12-23T18:55:39.389013Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2025-12-23T18:55:39.389086Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2025-12-23T18:55:39.389315Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4317:3816]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T18:55:39.391651Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:39.391696Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:39.392105Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:39.392144Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-12-23T18:55:39.392190Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 33] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:39.392362Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 33] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-12-23T18:55:39.394158Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2025-12-23T18:55:39.394381Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 2025-12-23T18:55:39.394674Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2025-12-23T18:55:39.394817Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:55:40.013083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:40.013146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:40.013176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:40.013199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:40.013275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:40.013297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:40.013331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:40.013380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:55:40.013937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:40.014132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:40.066909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:55:40.066952Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:40.070540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:40.070653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:40.070770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:40.072603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:40.072723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:40.073182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:40.073340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:40.074611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:40.074769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:40.075577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:40.075616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:40.075714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:40.075745Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:40.075780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:40.075906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:40.183853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.184469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.184538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.184576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.184643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.184701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.184740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.184771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.184811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.184841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.184895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.184933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.184978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.185037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:40.185087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 7594046678944 2025-12-23T18:55:40.779422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2025-12-23T18:55:40.779493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2025-12-23T18:55:40.779595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:55:40.787617Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:791: [Export] [s3] Bootstrap: self# [1:824:2793], attempt# 0 2025-12-23T18:55:40.801171Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:445: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:824:2793], sender# [1:823:2791] FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2025-12-23T18:55:40.803507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:40.803553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:55:40.803768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:40.803798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:55:40.804044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:40.804096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:55:40.804701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:55:40.804800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:55:40.804839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:55:40.804869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:55:40.804927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:15237 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 53EBE999-2567-4F35-829B-04E874E1A9A9 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD 2025-12-23T18:55:40.804998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true S3_MOCK::HttpServeWrite: /metadata.json / / 107 2025-12-23T18:55:40.806015Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:405: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:824:2793], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } 2025-12-23T18:55:40.808372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:15237 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DBA4D318-75A9-4245-9D1C-34BFCD4DCC93 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-12-23T18:55:40.809266Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:310: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:824:2793], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-12-23T18:55:40.809320Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:823:2791] 2025-12-23T18:55:40.809548Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:463: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:824:2793], sender# [1:823:2791], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:15237 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9170A29D-7DD6-4B08-BA3B-CA9B92D76FC4 amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2025-12-23T18:55:40.811750Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:505: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:824:2793], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2025-12-23T18:55:40.811791Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:708: [Export] [s3] Finish: self# [1:824:2793], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-12-23T18:55:40.811882Z node 1 :DATASHARD_BACKUP DEBUG: 
export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:823:2791], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-12-23T18:55:40.828485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-12-23T18:55:40.828544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:55:40.828691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-12-23T18:55:40.828763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-12-23T18:55:40.828808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:40.828833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:40.828857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:55:40.828892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:55:40.829030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:40.830618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:40.830727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:55:40.830786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:55:40.830855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:55:40.830898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:40.830931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:55:40.830960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:40.830990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:55:40.831071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:743:2721] message: TxId: 102 2025-12-23T18:55:40.831128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:55:40.831168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:55:40.831199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:55:40.831295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:55:40.832522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:55:40.832554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:808:2778] TestWaitNotification: OK eventTxId 102 |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiers >> MoveTable::WithData-Reboot >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] >> MoveTable::WithCommitInProgress-Reboot >> MoveTable::WithData+Reboot >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn >> TColumnShardTestSchema::ExportAfterFail >> MoveTable::WithData-Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:30.764640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:30.835647Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:30.840131Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:30.840416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:30.840491Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:31.163758Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:31.236948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:31.237083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:31.294053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:31.395270Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004917/r3tmp/tmpyzvfAk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 31362, node 1 2025-12-23T18:55:31.995693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:31.996397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:31.996425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:31.996449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:31.996612Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:32.058899Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:8723 2025-12-23T18:55:32.567977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:34.686798Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:34.691528Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:34.693900Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:34.716728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:34.716805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:34.753965Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:34.755397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:34.975002Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.975459Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.976082Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.976527Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.976715Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.976794Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.976911Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.977057Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.977148Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:35.085513Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:55:35.096971Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:35.097080Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:35.109892Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:35.248608Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:35.282878Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:35.282994Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:35.302205Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:35.303292Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:35.303433Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:35.303470Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column 
count# 0 2025-12-23T18:55:35.303502Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:35.303545Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:35.303587Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:35.303629Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:35.303892Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:35.340239Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:35.340333Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:35.349648Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:35.356284Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:35.357074Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:35.361567Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:55:35.384112Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2249:2656] Owner: [2:2248:2655]. Describe result: PathErrorUnknown 2025-12-23T18:55:35.384166Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2249:2656] Owner: [2:2248:2655]. Creating table 2025-12-23T18:55:35.384222Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2249:2656] Owner: [2:2248:2655]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:55:35.400007Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2352:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:35.408056Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:35.414454Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2249:2656] Owner: [2:2248:2655]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:35.414585Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2249:2656] Owner: [2:2248:2655]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:35.423460Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2249:2656] Owner: [2:2248:2655]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:35.522514Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:35.841279Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2249:2656] Owner: [2:2248:2655]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:35.955848Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2249:2656] Owner: [2:2248:2655]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:35.955930Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2249:2656] Owner: [2:2248:2655]. C ... PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:40.659559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:40.659921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4440:3833], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:40.659975Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:40.660873Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:4443:3836]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:40.661057Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:40.661223Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-12-23T18:55:40.661264Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:4446:3839] 2025-12-23T18:55:40.661314Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:4446:3839] 2025-12-23T18:55:40.661763Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4447:3935] 2025-12-23T18:55:40.662010Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:4446:3839], server id = [2:4447:3935], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:40.662203Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4447:3935], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:40.662252Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-12-23T18:55:40.662436Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-12-23T18:55:40.662486Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:4443:3836], StatRequests.size() = 1 2025-12-23T18:55:40.675041Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:55:40.675156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4451:3843], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:40.675219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:40.675586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4455:3847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:40.675642Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:40.675740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4458:3850], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:40.680263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:40.863337Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:55:40.863403Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:55:40.895048Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:4446:3839], schemeshard count = 1 2025-12-23T18:55:41.151111Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:4460:3852], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-12-23T18:55:41.303780Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:4574:3925] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:41.313833Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:4597:3941]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:41.313997Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:41.314021Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:4597:3941], StatRequests.size() = 1 2025-12-23T18:55:41.422889Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72075186224037905, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:41.714172Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4939:4005]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:41.714360Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:41.714661Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2025-12-23T18:55:41.714698Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-12-23T18:55:41.714997Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2025-12-23T18:55:41.715048Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:4939:4005], StatRequests.size() = 1 2025-12-23T18:55:41.726145Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:55:41.734781Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4948:4014]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:41.735017Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T18:55:41.735048Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [1:4948:4014], StatRequests.size() = 1 2025-12-23T18:55:41.838083Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4993:4183]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T18:55:41.843027Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:41.843099Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:41.843788Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 
] 2025-12-23T18:55:41.843842Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-12-23T18:55:41.843892Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:41.855809Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-12-23T18:55:41.864901Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-12-23T18:55:41.865303Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-12-23T18:55:41.865526Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2025-12-23T18:55:41.865686Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2025-12-23T18:55:41.866168Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5021:4199]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T18:55:41.870295Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:41.870369Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:41.871174Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:41.871233Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-12-23T18:55:41.871298Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:41.871510Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-12-23T18:55:41.875005Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2025-12-23T18:55:41.875280Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 2025-12-23T18:55:41.875975Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2025-12-23T18:55:41.876370Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithData-Reboot [GOOD] Test command err: 2025-12-23T18:55:42.582474Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:42.605236Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 
2025-12-23T18:55:42.606125Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:42.617507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:42.617745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:42.617903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:42.617987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:42.618060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:42.618131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:42.618237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:42.618306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:42.618414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:42.618485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:42.618542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:42.618624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:42.618691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:42.641716Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:42.641843Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:42.641887Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:42.642044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:42.643573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:42.643629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:42.643663Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:42.643729Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:42.643797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:42.643825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:42.643848Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:42.643980Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:42.644042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:42.644069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:55:42.644086Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:42.644166Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:42.644203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:42.644229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-12-23T18:55:42.644246Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:42.644280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:42.644302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:42.644321Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:42.644346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:42.644398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:42.644438Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:42.644668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:42.644740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:42.644777Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:42.644890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:42.644942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:42.644971Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:55:42.645020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:55:42.645059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:55:42.645087Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2025-12-23T18:55:42.645129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:55:42.645167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:55:42.645213Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:55:42.645356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:55:42.645401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... age=data_format;batch_size=0;num_rows=100;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-12-23T18:55:43.513046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:331:2343];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-12-23T18:55:43.513218Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:43.513371Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:43.513548Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:43.513763Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:55:43.513928Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:43.514164Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:43.514438Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:337:2349] finished for tablet 9437184 2025-12-23T18:55:43.515009Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:331:2343];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.016},{"events":["l_ProduceResults","f_Finish"],"t":0.019},{"events":["l_ack","l_processing","l_Finish"],"t":0.02}],"full":{"a":1514831,"name":"_full_task","f":1514831,"d_finished":0,"c":0,"l":1534868,"d":20037},"events":[{"name":"bootstrap","f":1515137,"d_finished":4704,"c":1,"l":1519841,"d":4704},{"a":1534117,"name":"ack","f":1531597,"d_finished":2363,"c":1,"l":1533960,"d":3114},{"a":1534105,"name":"processing","f":1520189,"d_finished":5748,"c":3,"l":1533963,"d":6511},{"name":"ProduceResults","f":1518991,"d_finished":2805,"c":6,"l":1534567,"d":2805},{"a":1534572,"name":"Finish","f":1534572,"d_finished":0,"c":0,"l":1534868,"d":296},{"name":"task_result","f":1520211,"d_finished":3326,"c":2,"l":1531482,"d":3326}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:43.515109Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:331:2343];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:55:43.515605Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:331:2343];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.016},{"events":["l_ProduceResults","f_Finish"],"t":0.019},{"events":["l_ack","l_processing","l_Finish"],"t":0.02}],"full":{"a":1514831,"name":"_full_task","f":1514831,"d_finished":0,"c":0,"l":1535534,"d":20703},"events":[{"name":"bootstrap","f":1515137,"d_finished":4704,"c":1,"l":1519841,"d":4704},{"a":1534117,"name":"ack","f":1531597,"d_finished":2363,"c":1,"l":1533960,"d":3780},{"a":1534105,"name":"processing","f":1520189,"d_finished":5748,"c":3,"l":1533963,"d":7177},{"name":"ProduceResults","f":1518991,"d_finished":2805,"c":6,"l":1534567,"d":2805},{"a":1534572,"name":"Finish","f":1534572,"d_finished":0,"c":0,"l":1535534,"d":962},{"name":"task_result","f":1520211,"d_finished":3326,"c":2,"l":1531482,"d":3326}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:43.515681Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:55:43.432017Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-12-23T18:55:43.515739Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:55:43.515897Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:337:2349];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2025-12-23T18:55:43.516636Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2025-12-23T18:55:43.516999Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {1766516143537:12} readable: {1766516143537:max} at tablet 9437184 2025-12-23T18:55:43.517141Z node 1 :TX_COLUMNSHARD DEBUG: 
ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2025-12-23T18:55:43.517199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1766516143537:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:55:43.517279Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1766516143537:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithCommitInProgress-Reboot [GOOD] >> MoveTable::WithData+Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithCommitInProgress-Reboot [GOOD] Test command err: 2025-12-23T18:55:43.316677Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:43.336228Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:55:43.336383Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:43.341357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:43.341524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:43.341685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:43.341768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:43.341843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:43.341911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:43.342024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:43.342080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:43.342175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:43.342241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:43.342303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:43.342380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:43.342457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:43.361173Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:43.361284Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:43.361320Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:43.361451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:43.361547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:43.361616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:43.361645Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:43.361708Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:43.361753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:43.361780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:43.361797Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:43.361923Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:43.361959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:43.361993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:55:43.362018Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:43.362077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:43.362129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:43.362158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:43.362175Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:43.362210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:43.362251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:43.362274Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:43.362300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:43.362336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:43.362366Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:43.362509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:43.362547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:43.362566Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:43.362658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:43.362685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:43.362703Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:55:43.362729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:55:43.362765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:55:43.362784Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:55:43.362808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:55:43.362834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:55:43.362850Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:55:43.362950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:55:43.362982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ata_format;batch_size=0;num_rows=100;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-12-23T18:55:44.269764Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:339:2351];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-12-23T18:55:44.269882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.269980Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.270110Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.270268Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:55:44.270389Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.270567Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.270776Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:345:2357] finished for tablet 9437184 2025-12-23T18:55:44.271183Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:339:2351];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.009}],"full":{"a":1271416,"name":"_full_task","f":1271416,"d_finished":0,"c":0,"l":1281357,"d":9941},"events":[{"name":"bootstrap","f":1271633,"d_finished":1771,"c":1,"l":1273404,"d":1771},{"a":1280787,"name":"ack","f":1279661,"d_finished":1010,"c":1,"l":1280671,"d":1580},{"a":1280776,"name":"processing","f":1273681,"d_finished":3257,"c":3,"l":1280674,"d":3838},{"name":"ProduceResults","f":1272871,"d_finished":1814,"c":6,"l":1281123,"d":1814},{"a":1281127,"name":"Finish","f":1281127,"d_finished":0,"c":0,"l":1281357,"d":230},{"name":"task_result","f":1273693,"d_finished":2210,"c":2,"l":1279589,"d":2210}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.271255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:339:2351];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:55:44.271638Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:339:2351];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":1271416,"name":"_full_task","f":1271416,"d_finished":0,"c":0,"l":1281857,"d":10441},"events":[{"name":"bootstrap","f":1271633,"d_finished":1771,"c":1,"l":1273404,"d":1771},{"a":1280787,"name":"ack","f":1279661,"d_finished":1010,"c":1,"l":1280671,"d":2080},{"a":1280776,"name":"processing","f":1273681,"d_finished":3257,"c":3,"l":1280674,"d":4338},{"name":"ProduceResults","f":1272871,"d_finished":1814,"c":6,"l":1281123,"d":1814},{"a":1281127,"name":"Finish","f":1281127,"d_finished":0,"c":0,"l":1281857,"d":730},{"name":"task_result","f":1273693,"d_finished":2210,"c":2,"l":1279589,"d":2210}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.271694Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:55:44.217565Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-12-23T18:55:44.271721Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:55:44.271827Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:345:2357];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2025-12-23T18:55:44.272316Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2025-12-23T18:55:44.272639Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {18446744073709551615:12} readable: {18446744073709551615:max} at tablet 9437184 2025-12-23T18:55:44.272724Z node 1 :TX_COLUMNSHARD 
DEBUG: ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2025-12-23T18:55:44.272766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:55:44.272834Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithData+Reboot [GOOD] Test command err: 2025-12-23T18:55:43.757044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:43.775771Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:55:43.775943Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:43.781845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:43.782116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:43.782326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:43.782481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:43.782601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:43.782724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:43.782918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:43.783041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:43.783173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:43.783298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:43.783404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:43.783542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:43.783658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:43.811680Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:43.811792Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:43.811825Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:43.811936Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:43.812039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:43.812095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:43.812123Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:43.812207Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:43.812268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:43.812298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:43.812329Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:43.812465Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:43.812505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:43.812529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:55:43.812548Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:43.812609Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:43.812642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:43.812680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:43.812702Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:43.812738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:43.812763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:43.812780Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:43.812812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:43.812862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:43.812894Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:43.813036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:43.813066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:43.813083Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:43.813161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:43.813190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:43.813206Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:55:43.813234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:55:43.813256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:55:43.813275Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:55:43.813297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:55:43.813322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:55:43.813340Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:55:43.813411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:55:43.813432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
age=data_format;batch_size=0;num_rows=100;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-12-23T18:55:44.690701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:380:2381];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-12-23T18:55:44.690862Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.690991Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.691144Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.691327Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:55:44.691470Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.691590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.691789Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:387:2387] finished for tablet 9437184 2025-12-23T18:55:44.692311Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:380:2381];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":1296431,"name":"_full_task","f":1296431,"d_finished":0,"c":0,"l":1306509,"d":10078},"events":[{"name":"bootstrap","f":1296728,"d_finished":1634,"c":1,"l":1298362,"d":1634},{"a":1305990,"name":"ack","f":1304748,"d_finished":1145,"c":1,"l":1305893,"d":1664},{"a":1305981,"name":"processing","f":1298585,"d_finished":3355,"c":3,"l":1305896,"d":3883},{"name":"ProduceResults","f":1298010,"d_finished":1920,"c":6,"l":1306282,"d":1920},{"a":1306288,"name":"Finish","f":1306288,"d_finished":0,"c":0,"l":1306509,"d":221},{"name":"task_result","f":1298596,"d_finished":2174,"c":2,"l":1304660,"d":2174}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.692430Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:380:2381];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:55:44.692924Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:380:2381];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":1296431,"name":"_full_task","f":1296431,"d_finished":0,"c":0,"l":1307157,"d":10726},"events":[{"name":"bootstrap","f":1296728,"d_finished":1634,"c":1,"l":1298362,"d":1634},{"a":1305990,"name":"ack","f":1304748,"d_finished":1145,"c":1,"l":1305893,"d":2312},{"a":1305981,"name":"processing","f":1298585,"d_finished":3355,"c":3,"l":1305896,"d":4531},{"name":"ProduceResults","f":1298010,"d_finished":1920,"c":6,"l":1306282,"d":1920},{"a":1306288,"name":"Finish","f":1306288,"d_finished":0,"c":0,"l":1307157,"d":869},{"name":"task_result","f":1298596,"d_finished":2174,"c":2,"l":1304660,"d":2174}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:55:44.692987Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:55:44.632207Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-12-23T18:55:44.693018Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:55:44.693142Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:387:2387];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2025-12-23T18:55:44.693660Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2025-12-23T18:55:44.693916Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {1766516144744:12} readable: {1766516144744:max} at tablet 9437184 2025-12-23T18:55:44.694019Z node 1 :TX_COLUMNSHARD DEBUG: 
ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2025-12-23T18:55:44.694058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:335:2344];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1766516144744:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:55:44.694130Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:335:2344];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1766516144744:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn >> TStorageBalanceTest::TestScenario2 [GOOD] >> TStorageBalanceTest::TestScenario3 >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn >> TColumnShardTestSchema::EnableColdTiersAfterTtl >> TColumnShardTestSchema::ForgetAfterFail >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:113:2143] Leader for TabletID 72057594046447617 is [1:132:2155] sender: [1:136:2058] recipient: [1:114:2144] Leader for TabletID 72057594046316545 is [1:139:2160] sender: [1:141:2058] recipient: [1:115:2145] 2025-12-23T18:54:45.053311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:45.053411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:45.053451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:45.053481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:45.053516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:45.053562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:45.053623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:45.053678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:45.054457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:45.054751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:45.158921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:54:45.158986Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:45.159746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:132:2155] sender: [1:178:2058] recipient: [1:15:2062] 2025-12-23T18:54:45.169309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:45.169401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:45.169545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:45.179061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:45.179635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:45.180170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:45.180420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:45.184830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:45.185045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:45.187173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:45.187242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:45.187544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:45.187638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-12-23T18:54:45.187697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:45.187890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:45.353214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.354201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.354328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.354413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.354484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.354542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.354640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:54:45.354708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.354814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.354958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.355085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.355187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:45.355264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [51:211:2211], at schemeshard: 72057594046678944, txId: 1003, path id: 39 2025-12-23T18:55:47.777158Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 724 } } CommitVersion { Step: 5000040 TxId: 1003 } 2025-12-23T18:55:47.777282Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 724 } } CommitVersion { Step: 5000040 TxId: 1003 } debug: NTableState::TProposedWaitParts operationId# 1003:0 2025-12-23T18:55:47.777679Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:55:47.777726Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:55:47.778227Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 733 RawX2: 219043334814 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-12-23T18:55:47.778267Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-12-23T18:55:47.778371Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 733 RawX2: 219043334814 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-12-23T18:55:47.778420Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-12-23T18:55:47.778503Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 733 RawX2: 219043334814 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-12-23T18:55:47.778566Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:47.778602Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:55:47.778647Z node 51 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:55:47.778687Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 129 -> 240 2025-12-23T18:55:47.779240Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-12-23T18:55:47.779323Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-12-23T18:55:47.779360Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-12-23T18:55:47.779397Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2025-12-23T18:55:47.779432Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2025-12-23T18:55:47.779506Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-12-23T18:55:47.781153Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:55:47.782639Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:55:47.782978Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:55:47.783023Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-12-23T18:55:47.783120Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2025-12-23T18:55:47.783153Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-12-23T18:55:47.783192Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2025-12-23T18:55:47.783224Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-12-23T18:55:47.783261Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-12-23T18:55:47.783300Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-12-23T18:55:47.783337Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2025-12-23T18:55:47.783368Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: 
RemoveTx for txid 1003:0 2025-12-23T18:55:47.783476Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:55:47.783838Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-12-23T18:55:47.785858Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-12-23T18:55:47.785902Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-12-23T18:55:47.786240Z node 51 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-12-23T18:55:47.786313Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-12-23T18:55:47.786349Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:854:2825] TestWaitNotification: OK eventTxId 1003 2025-12-23T18:55:47.786793Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:55:47.786996Z node 51 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 248us result status StatusSuccess 2025-12-23T18:55:47.787531Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] Test command err: 2025-12-23T18:52:54.081845Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128339030796577:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:54.082763Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003941/r3tmp/tmpQefpRv/pdisk_1.dat 2025-12-23T18:52:54.302609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:52:54.328074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:54.328141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:54.383841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:54.385451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21337, node 1 2025-12-23T18:52:54.434586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:54.434620Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:54.434638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:54.434731Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2025-12-23T18:52:54.489670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27785 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766515974447 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:52:54.642682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:52:55.093927Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:52:56.463866Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2025-12-23T18:52:56.468583Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128347620732397:2321], Start check tables existence, number paths: 2 2025-12-23T18:52:56.469347Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2025-12-23T18:52:56.469383Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enanbled 2025-12-23T18:52:56.472362Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128347620732397:2321], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-12-23T18:52:56.472430Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128347620732397:2321], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-12-23T18:52:56.472452Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7587128347620732397:2321], Successfully finished 2025-12-23T18:52:56.472578Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-12-23T18:52:56.472642Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, noode count: 1 2025-12-23T18:52:56.475237Z node 1 :KQP_SESSION 
DEBUG: kqp_session_actor.cpp:227: SessionId: ydb://session/3?node_id=1&id=ZTRhYjY4YTktZDFjYTVhMzItNWFiNjk5MWUtMTQ1YjhkNmQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZTRhYjY4YTktZDFjYTVhMzItNWFiNjk5MWUtMTQ1YjhkNmQ= (tmp dir name: 46462f97-4c65-3e59-fb7b-03bab50ef4c3) 2025-12-23T18:52:56.475383Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:231: SessionId: ydb://session/3?node_id=1&id=ZTRhYjY4YTktZDFjYTVhMzItNWFiNjk5MWUtMTQ1YjhkNmQ=, ActorId: [1:7587128347620732420:2329], ActorState: unknown state, session actor bootstrapped 2025-12-23T18:52:56.515855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:56.528941Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587128346101690812:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:56.529030Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:52:56.540586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:56.540662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:56.543287Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:52:56.543940Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-dedicated/.metadata/script_executions 2025-12-23T18:52:56.544217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:56.574316Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:56.574574Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:56.574705Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:56.574849Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:56.574996Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:56.575085Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:56.575197Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:56.575287Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:56.575462Z node 3 :HIVE WARN: 
tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:56.648596Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:56.648697Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:56.652027Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:56.723531Z node 3 :STATISTICS WARN: tx_init.cpp:298: [72075186224037894] TTxInit::Complete. EnableColumnStatistics=false 2025-12-23T18:52:56.730277Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9282, node 3 2025-12-23T18:52:56.864066Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:56.864085Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:56.864089Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:56.864142Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:56.897894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:56.911713Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587128344809789523:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:52:56.911763Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:52:56.919933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:56.920021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:56.921627Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:56.972000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileSta ... 
Start pool fetching 2025-12-23T18:54:21.648255Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128713951305776:2448], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-12-23T18:54:21.648315Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root 2025-12-23T18:54:21.785286Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:294: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7587128701066403386:2330], DatabaseId: /Root, PoolId: sample_pool_id, Got delete notification 2025-12-23T18:54:21.785364Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-12-23T18:54:21.785396Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-12-23T18:54:21.785432Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128713951305792:2451], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-12-23T18:54:21.785742Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128713951305792:2451], DatabaseId: /Root, PoolId: sample_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions } 2025-12-23T18:54:21.785856Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool sample_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions } 2025-12-23T18:54:21.791203Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:294: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7587128713951305757:2445], DatabaseId: /Root, PoolId: default, Got delete notification 2025-12-23T18:54:21.791293Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-12-23T18:54:21.791330Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-12-23T18:54:21.791357Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128713951305812:2452], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-12-23T18:54:21.791585Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7587128713951305812:2452], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:21.791680Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:54:21.795878Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=8&id=YmFjODI1NzUtNmMzZDY3MGEtNTlkZTBmMWYtNGU4ZDExZGM=, ActorId: [8:7587128701066403310:2324], ActorState: ExecuteState, TraceId: 01kd68zxef0e2pj17k95kc1hmn, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [8:7587128713951305778:2324] WorkloadServiceCleanup: 0 2025-12-23T18:54:21.799544Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=8&id=YmFjODI1NzUtNmMzZDY3MGEtNTlkZTBmMWYtNGU4ZDExZGM=, ActorId: [8:7587128701066403310:2324], ActorState: CleanupState, TraceId: 01kd68zxef0e2pj17k95kc1hmn, EndCleanup, isFinal: 0 2025-12-23T18:54:21.799633Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2733: SessionId: ydb://session/3?node_id=8&id=YmFjODI1NzUtNmMzZDY3MGEtNTlkZTBmMWYtNGU4ZDExZGM=, ActorId: [8:7587128701066403310:2324], ActorState: CleanupState, TraceId: 01kd68zxef0e2pj17k95kc1hmn, Sent query response back to proxy, proxyRequestId: 18, proxyId: [8:7587128683886533503:2264] Wait pool handlers 0.000012s: number handlers = 2 Wait pool handlers 1.000117s: number handlers = 2 Wait pool handlers 2.000242s: number handlers = 2 Wait pool handlers 3.000362s: number handlers = 2 Wait pool handlers 4.000480s: number handlers = 2 Wait pool handlers 5.000590s: number handlers = 2 Wait pool handlers 6.000719s: number handlers = 2 Wait pool handlers 7.000852s: number handlers = 2 Wait pool handlers 8.000977s: number handlers = 2 2025-12-23T18:54:29.836941Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:54:29.836977Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Wait pool handlers 9.001116s: number handlers = 2 Wait pool handlers 10.001245s: number handlers = 2 Wait pool handlers 11.001375s: number handlers = 2 Wait pool handlers 12.001568s: number handlers = 2 Wait pool handlers 13.001706s: number handlers = 2 2025-12-23T18:54:35.200711Z node 8 :KQP_WORKLOAD_SERVICE TRACE: pool_handlers_actors.cpp:689: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7587128701066403386:2330], DatabaseId: /Root, PoolId: sample_pool_id, Try to start scheduled refresh Wait pool handlers 14.001856s: number handlers = 2 Wait pool handlers 15.001996s: number handlers = 2 Wait pool handlers 16.002165s: number handlers = 2 Wait pool handlers 17.004597s: number handlers = 2 Wait pool handlers 18.004706s: number handlers = 2 Wait pool handlers 19.004847s: number handlers = 2 Wait pool handlers 20.004967s: number handlers = 2 Wait pool handlers 21.005076s: number handlers = 2 Wait pool handlers 22.005181s: number handlers = 2 Wait pool handlers 23.005300s: number handlers = 2 Wait pool handlers 24.005431s: number handlers = 2 Wait pool handlers 25.005564s: number handlers = 2 Wait pool handlers 26.005644s: number handlers = 2 Wait pool handlers 27.005817s: number handlers = 2 Wait pool handlers 28.005952s: number handlers = 2 Wait pool handlers 29.006082s: number handlers = 2 Wait pool handlers 30.006206s: number handlers = 2 Wait pool handlers 31.006327s: number handlers = 2 Wait pool handlers 32.006469s: number handlers = 2 Wait pool handlers 33.006599s: number handlers = 2 Wait pool handlers 34.006744s: number handlers = 2 Wait pool handlers 35.006874s: number handlers = 2 Wait pool handlers 36.006997s: number handlers = 2 Wait pool handlers 37.007119s: number 
handlers = 2 Wait pool handlers 38.007247s: number handlers = 2 Wait pool handlers 39.007372s: number handlers = 2 Wait pool handlers 40.007505s: number handlers = 2 Wait pool handlers 41.007632s: number handlers = 2 Wait pool handlers 42.007766s: number handlers = 2 Wait pool handlers 43.007925s: number handlers = 2 Wait pool handlers 44.008060s: number handlers = 2 Wait pool handlers 45.008176s: number handlers = 2 Wait pool handlers 46.008276s: number handlers = 2 Wait pool handlers 47.008397s: number handlers = 2 Wait pool handlers 48.008521s: number handlers = 2 Wait pool handlers 49.008638s: number handlers = 2 Wait pool handlers 50.008768s: number handlers = 2 Wait pool handlers 51.008913s: number handlers = 2 Wait pool handlers 52.008994s: number handlers = 2 Wait pool handlers 53.009108s: number handlers = 2 Wait pool handlers 54.009214s: number handlers = 2 Wait pool handlers 55.009331s: number handlers = 2 Wait pool handlers 56.009440s: number handlers = 2 Wait pool handlers 57.009567s: number handlers = 2 Wait pool handlers 58.009683s: number handlers = 2 Wait pool handlers 59.009801s: number handlers = 2 Wait pool handlers 60.009962s: number handlers = 2 Wait pool handlers 61.010084s: number handlers = 2 Wait pool handlers 62.010188s: number handlers = 2 Wait pool handlers 63.010314s: number handlers = 2 Wait pool handlers 64.010462s: number handlers = 2 Wait pool handlers 65.010586s: number handlers = 2 Wait pool handlers 66.010691s: number handlers = 2 Wait pool handlers 67.010807s: number handlers = 2 Wait pool handlers 68.010937s: number handlers = 2 Wait pool handlers 69.010997s: number handlers = 2 Wait pool handlers 70.011114s: number handlers = 2 Wait pool handlers 71.011243s: number handlers = 2 Wait pool handlers 72.011354s: number handlers = 2 Wait pool handlers 73.011456s: number handlers = 2 Wait pool handlers 74.011570s: number handlers = 2 Wait pool handlers 75.011679s: number handlers = 2 Wait pool handlers 76.011820s: number handlers = 2 Wait pool handlers 77.011890s: number handlers = 2 Wait pool handlers 78.012006s: number handlers = 2 Wait pool handlers 79.012110s: number handlers = 2 Wait pool handlers 80.012224s: number handlers = 2 Wait pool handlers 81.012334s: number handlers = 2 Wait pool handlers 82.012437s: number handlers = 2 Wait pool handlers 83.012553s: number handlers = 2 Wait pool handlers 84.012659s: number handlers = 2 Wait pool handlers 85.012755s: number handlers = 2 Wait pool handlers 86.012854s: number handlers = 2 2025-12-23T18:55:48.546796Z node 8 :KQP_WORKLOAD_SERVICE INFO: pool_handlers_actors.cpp:178: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7587128713951305757:2445], DatabaseId: /Root, PoolId: default, Got stop pool handler request, waiting for 0 requests 2025-12-23T18:55:48.546809Z node 8 :KQP_WORKLOAD_SERVICE INFO: pool_handlers_actors.cpp:178: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7587128701066403386:2330], DatabaseId: /Root, PoolId: sample_pool_id, Got stop pool handler request, waiting for 0 requests 2025-12-23T18:55:48.546921Z node 8 :KQP_COMPUTE_SCHEDULER ERROR: kqp_compute_scheduler_service.cpp:121: Trying to remove unknown pool: /Root/default 2025-12-23T18:55:48.546936Z node 8 :KQP_COMPUTE_SCHEDULER ERROR: kqp_compute_scheduler_service.cpp:121: Trying to remove unknown pool: /Root/sample_pool_id 2025-12-23T18:55:48.546943Z node 8 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:432: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: default 
2025-12-23T18:55:48.546962Z node 8 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:432: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: sample_pool_id 2025-12-23T18:55:48.821392Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2778: SessionId: ydb://session/3?node_id=8&id=YmFjODI1NzUtNmMzZDY3MGEtNTlkZTBmMWYtNGU4ZDExZGM=, ActorId: [8:7587128701066403310:2324], ActorState: ReadyState, Session closed due to explicit close event 2025-12-23T18:55:48.821438Z node 8 :KQP_SESSION INFO: kqp_session_actor.cpp:2932: SessionId: ydb://session/3?node_id=8&id=YmFjODI1NzUtNmMzZDY3MGEtNTlkZTBmMWYtNGU4ZDExZGM=, ActorId: [8:7587128701066403310:2324], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-12-23T18:55:48.821459Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3002: SessionId: ydb://session/3?node_id=8&id=YmFjODI1NzUtNmMzZDY3MGEtNTlkZTBmMWYtNGU4ZDExZGM=, ActorId: [8:7587128701066403310:2324], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-12-23T18:55:48.821478Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3014: SessionId: ydb://session/3?node_id=8&id=YmFjODI1NzUtNmMzZDY3MGEtNTlkZTBmMWYtNGU4ZDExZGM=, ActorId: [8:7587128701066403310:2324], ActorState: unknown state, Cleanup temp tables: 0 2025-12-23T18:55:48.821534Z node 8 :KQP_SESSION DEBUG: kqp_session_actor.cpp:3106: SessionId: ydb://session/3?node_id=8&id=YmFjODI1NzUtNmMzZDY3MGEtNTlkZTBmMWYtNGU4ZDExZGM=, ActorId: [8:7587128701066403310:2324], ActorState: unknown state, Session actor destroyed |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |98.1%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.1%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::RebootColdTiers >> MoveTable::EmptyTable >> MoveTable::EmptyTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::EmptyTable [GOOD] Test command err: 2025-12-23T18:55:52.867653Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:52.887504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:55:52.887722Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:52.892904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:52.893097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:52.893274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:52.893357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:52.893430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:52.893491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:52.893597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:52.893700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:52.893786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:52.893874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:52.893939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:52.894017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:52.894110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:52.913251Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:52.913377Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:52.913415Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:52.913534Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:52.913643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:52.913702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:52.913731Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:52.913802Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:52.913851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:52.913889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:52.913915Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:52.914060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:52.914113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:52.914142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:55:52.914161Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:52.914227Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:52.914284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:52.914323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:52.914344Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:52.914397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:52.914422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:52.914443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:52.914470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:52.914507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:52.914543Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:52.914688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:52.914729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:52.914766Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:52.914851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:52.914899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:52.914920Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:55:52.914947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:55:52.914970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:55:52.914986Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:55:52.915014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:55:52.915038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:55:52.915056Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:55:52.915163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:55:52.915199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... d_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=39; 2025-12-23T18:55:53.178508Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=27; 2025-12-23T18:55:53.178567Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=21; 2025-12-23T18:55:53.178615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=2825; 2025-12-23T18:55:53.178765Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:55:53.178840Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:55:53.178940Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:55:53.179271Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:55:53.179330Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2025-12-23T18:55:53.179397Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.102000s; 2025-12-23T18:55:53.179677Z node 
1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:55:53.179767Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:55:53.179821Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:55:53.179863Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2025-12-23T18:55:53.180007Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.032000s; 2025-12-23T18:55:53.180064Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2025-12-23T18:55:53.469617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136962515851136;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1766516153850;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:55:53.469691Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136962515851136;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1766516153850;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2025-12-23T18:55:53.481567Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1766516153850;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136962515851136;op_tx=10:TX_KIND_SCHEMA;min=1766516153850;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1766516153850;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137168676989312;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-12-23T18:55:53.481634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1766516153850;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136962515851136;op_tx=10:TX_KIND_SCHEMA;min=1766516153850;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1766516153850;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137168676989312;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:158:2181]; 2025-12-23T18:55:53.481668Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1766516153850;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136962515851136;op_tx=10:TX_KIND_SCHEMA;min=1766516153850;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1766516153850;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137168676989312;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2025-12-23T18:55:53.481940Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:55:53.482071Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516153850 at tablet 9437184, mediator 0 2025-12-23T18:55:53.482116Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2025-12-23T18:55:53.482448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:55:53.482547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:55:53.482601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:55:53.482699Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2025-12-23T18:55:53.489414Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1766516153850;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2025-12-23T18:55:53.489496Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:55:53.489603Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2025-12-23T18:55:53.489684Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:145;event=RegisterTable;path_id=1000000185; 2025-12-23T18:55:53.489885Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2025-12-23T18:55:53.504269Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:532;event=OnTieringModified;path_id=1000000185; 2025-12-23T18:55:53.528206Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2025-12-23T18:55:53.529217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136962515900864;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1766516153853;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=1;dst=2; 2025-12-23T18:55:53.529286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136962515900864;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1766516153853;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-12-23T18:55:53.541077Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1766516153853;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136962515900864;op_tx=11:TX_KIND_SCHEMA;min=1766516153853;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_op_tx=11:TX_KIND_SCHEMA;min=1766516153853;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_this=137168677057472;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-12-23T18:55:53.541150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1766516153853;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136962515900864;op_tx=11:TX_KIND_SCHEMA;min=1766516153853;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_op_tx=11:TX_KIND_SCHEMA;min=1766516153853;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_this=137168677057472;method=TTxController::FinishProposeOnComplete;tx_id=11;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2025-12-23T18:55:53.541193Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1766516153853;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136962515900864;op_tx=11:TX_KIND_SCHEMA;min=1766516153853;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_op_tx=11:TX_KIND_SCHEMA;min=1766516153853;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_this=137168677057472;method=TTxController::FinishProposeOnComplete;tx_id=11;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=11; 2025-12-23T18:55:53.541428Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:55:53.541539Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516153853 at tablet 9437184, mediator 0 2025-12-23T18:55:53.541577Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] execute at tablet 9437184 2025-12-23T18:55:53.541815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=11;event=move_table_progress;old_path_id=1;new_path_id=2;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2025-12-23T18:55:53.553359Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] complete at tablet 9437184 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:19.674563Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:19.777201Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:19.781846Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:19.782111Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:19.782203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:20.141393Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.243594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.243787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:20.278433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:20.351962Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004965/r3tmp/tmp38un7t/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30745, node 1 2025-12-23T18:55:21.195951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.196713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.196764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.196797Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.197335Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.262200Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:62807 2025-12-23T18:55:21.816934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:24.359289Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:24.365148Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:24.369910Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:24.403431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.403534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.441593Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:24.443485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.597958Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.598536Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.599169Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.599759Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.599882Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.599980Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.600081Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.600215Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.600300Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.740047Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:24.795103Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.795192Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.808208Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.012428Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.053906Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:25.054045Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:25.088115Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:25.089704Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:25.089945Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:25.090008Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:25.090067Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:25.090150Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:25.090201Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:25.090251Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:25.091561Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:25.212025Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.212159Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.223945Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:55:25.225191Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:55:25.333467Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:55:25.333930Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:25.336520Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:55:25.336575Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:55:25.336656Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:25.340411Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.343748Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:25.349355Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:25.349463Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:25.360809Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:25.494591Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:25.842046Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:25.949810Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:25.949897Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2 ... 037894] Loaded traversal table database: 2025-12-23T18:55:54.650894Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-12-23T18:55:54.650946Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:55:54.651014Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-12-23T18:55:54.651078Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:54.651128Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:54.651170Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:54.651274Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:55:54.651929Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:54.652544Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-12-23T18:55:54.652591Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-12-23T18:55:54.652679Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5665:4814] Owner: [2:5664:4813]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:54.652727Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5665:4814] Owner: [2:5664:4813]. Column diff is empty, finishing 2025-12-23T18:55:54.653660Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2025-12-23T18:55:54.653714Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2025-12-23T18:55:54.654985Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-12-23T18:55:54.670637Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5674:4821] 2025-12-23T18:55:54.670766Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5629:4793], server id = [2:5674:4821], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:54.670924Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5675:4822] 2025-12-23T18:55:54.671011Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5674:4821], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-12-23T18:55:54.671118Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5675:4822], schemeshard id = 72075186224037897 2025-12-23T18:55:54.729604Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:55:54.729771Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2025-12-23T18:55:54.730250Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5617:4782], server id = [2:5621:4786], tablet id = 72075186224037900 2025-12-23T18:55:54.730286Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:54.730416Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5618:4783], server id = [2:5622:4787], tablet id = 72075186224037901 2025-12-23T18:55:54.730432Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:54.730860Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5680:4827], server id = [2:5684:4831], tablet id = 72075186224037899, status = OK 2025-12-23T18:55:54.730946Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5680:4827], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:54.731176Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5681:4828], server id = [2:5685:4832], tablet id = 72075186224037900, status = OK 2025-12-23T18:55:54.731210Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5681:4828], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:54.731943Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5682:4829], server id = [2:5686:4833], tablet id = 72075186224037901, status = OK 2025-12-23T18:55:54.731998Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5682:4829], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:54.732416Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5683:4830], server id = [2:5687:4834], tablet id = 72075186224037902, status = OK 2025-12-23T18:55:54.732451Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5683:4830], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:54.733406Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-12-23T18:55:54.733872Z node 2 :STATISTICS DEBUG: 
service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5680:4827], server id = [2:5684:4831], tablet id = 72075186224037899 2025-12-23T18:55:54.733896Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:54.734492Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-12-23T18:55:54.735108Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5683:4830], server id = [2:5687:4834], tablet id = 72075186224037902 2025-12-23T18:55:54.735143Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:54.736842Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:55:54.737073Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5681:4828], server id = [2:5685:4832], tablet id = 72075186224037900 2025-12-23T18:55:54.737093Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:54.737278Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-12-23T18:55:54.737310Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:55:54.737591Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-12-23T18:55:54.737751Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:55:54.737927Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5696:4841], ActorId: [2:5697:4842], Starting query actor #1 [2:5698:4843] 2025-12-23T18:55:54.737969Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5697:4842], ActorId: [2:5698:4843], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:55:54.740349Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5682:4829], server id = [2:5686:4833], tablet id = 72075186224037901 2025-12-23T18:55:54.740375Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:54.741844Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5697:4842], ActorId: [2:5698:4843], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZDkyNjNmZDctZWQ0NWZkNS04MTU0NDk5OS1mYTRiNTA0MQ==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:55:54.826044Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5707:4852]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:54.826247Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:54.826285Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5707:4852], StatRequests.size() = 1 2025-12-23T18:55:54.947462Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5697:4842], ActorId: [2:5698:4843], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDkyNjNmZDctZWQ0NWZkNS04MTU0NDk5OS1mYTRiNTA0MQ==, TxId: 2025-12-23T18:55:54.947540Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5697:4842], ActorId: [2:5698:4843], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDkyNjNmZDctZWQ0NWZkNS04MTU0NDk5OS1mYTRiNTA0MQ==, TxId: 2025-12-23T18:55:54.947809Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5696:4841], ActorId: [2:5697:4842], Got response [2:5698:4843] SUCCESS 2025-12-23T18:55:54.948091Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:55:54.961858Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:55:54.961913Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
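The RunDataQuery text in the preceding entry is the statistics-save statement, but its generic type arguments did not survive log capture (the angle-bracketed parts of the List declarations are missing, leaving "List;" and "List>;"). A minimal reconstruction is sketched below in YQL: the column list, the $to_struct lambda and the UPSERT target are taken from the log, while the element types (Uint32 for stat types and column tags, String for the serialized sketch data) are assumptions, not confirmed by the log.

-- Sketch of the save query, reassembled from the log entry above.
-- Element types in the DECLARE statements are assumptions.
DECLARE $owner_id AS Uint64;
DECLARE $local_path_id AS Uint64;
DECLARE $stat_types AS List<Uint32>;
DECLARE $column_tags AS List<Uint32>;
DECLARE $data AS List<String>;

$to_struct = ($t) -> {
    RETURN <|
        owner_id: $owner_id,
        local_path_id: $local_path_id,
        stat_type: $t.0,
        column_tag: $t.1,
        data: $t.2,
    |>;
};

UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
SELECT owner_id, local_path_id, stat_type, column_tag, data
FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct));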
2025-12-23T18:55:55.037027Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5724:4860]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:55.037328Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:55.037375Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:55.037616Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:55.037656Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:55:55.037698Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:55.040392Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] >> AnalyzeDatashard::DropTableNavigateError [GOOD] >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:23.179102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:23.286386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:23.293279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:23.293700Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:23.293801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:23.712759Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:23.792531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:23.792640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:23.850190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:23.960753Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004938/r3tmp/tmpKLscre/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13801, node 1 2025-12-23T18:55:24.604049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:24.605194Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:24.605250Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:24.605281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:24.605506Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:24.671216Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28713 2025-12-23T18:55:25.214397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:27.665376Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:27.671789Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:27.674327Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:27.697999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:27.698090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:27.736035Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:27.738004Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:27.972684Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.973302Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.973941Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.974493Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.974711Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.974840Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.975005Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.975157Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.975308Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:28.087262Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:28.099107Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:28.099213Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:28.145466Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:28.271329Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:28.319523Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:28.319626Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:28.344687Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:28.344850Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:28.345002Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:28.345060Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:28.345112Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:28.345161Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:28.345197Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:28.345232Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:28.345597Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:28.386560Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:28.386670Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2185:2603], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:28.389625Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2601] 2025-12-23T18:55:28.410226Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:28.412122Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2249:2651] 2025-12-23T18:55:28.412343Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2249:2651], schemeshard id = 72075186224037897 2025-12-23T18:55:28.434759Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:28.434823Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Creating table 2025-12-23T18:55:28.434919Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:28.456589Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2348:2695], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:28.462012Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:28.474604Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:28.474758Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:28.487375Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:28.598843Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:28.941745Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:29.049121Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:29.049217Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2 ... :55:31.421072Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:55:31.584913Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3185:3258] 2025-12-23T18:55:31.588067Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3183:3467] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2025-12-23T18:55:31.588144Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `operationId', DatabaseName: `', Types: 1 2025-12-23T18:55:31.588187Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `operationId', PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: 2025-12-23T18:55:31.677763Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2025-12-23T18:55:31.715464Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037899 not found 2025-12-23T18:55:31.716230Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037900 not found 2025-12-23T18:55:31.717078Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037902 not found 2025-12-23T18:55:31.717201Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037901 not found 2025-12-23T18:55:31.787644Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:55:31.787716Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:55:31.840503Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3083:3440], schemeshard count = 1 2025-12-23T18:55:34.623423Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:55:34.623715Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 1 2025-12-23T18:55:34.623982Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2025-12-23T18:55:36.687313Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] 
EvPropagateTimeout 2025-12-23T18:55:38.064713Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:55:38.065026Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 2 2025-12-23T18:55:38.065233Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 2 2025-12-23T18:55:40.191704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:55:40.191764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:40.375236Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:55:42.158682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:55:42.158728Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:42.225678Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:55:42.226028Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 3 2025-12-23T18:55:42.226286Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2025-12-23T18:55:44.885961Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:55:46.618116Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:55:46.618452Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 4 2025-12-23T18:55:46.618617Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2025-12-23T18:55:49.069732Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:55:50.765218Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:55:50.765687Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2025-12-23T18:55:50.765885Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2025-12-23T18:55:53.561019Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:55:53.561112Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:55:53.561145Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:55:53.561178Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:55:53.687040Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:55:55.707621Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:55:55.708176Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 
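The DELETE and UPSERT statements quoted in the log both target `.metadata/_statistics`, and the table-creator entries report the table with exactly five columns. A rough sketch of the implied schema follows; the five column names come from those queries, while the value types and the primary-key layout are assumptions that the log does not state.

-- Hypothetical shape of .metadata/_statistics, inferred from the column names
-- used by the queries above; types and PRIMARY KEY are assumptions.
CREATE TABLE `.metadata/_statistics` (
    owner_id Uint64,
    local_path_id Uint64,
    stat_type Uint32,
    column_tag Uint32,
    data String,
    PRIMARY KEY (owner_id, local_path_id, stat_type, column_tag)
);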
2025-12-23T18:55:55.708419Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-12-23T18:55:55.740746Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:55:55.740822Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:55:55.741068Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-12-23T18:55:55.754563Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:55:56.634866Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T18:55:56.634952Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:667: [72075186224037894] ScheduleNextAnalyze. table [OwnerId: 72075186224037897, LocalPathId: 34] was deleted, deleting its statistics 2025-12-23T18:55:56.635203Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4124:3621], ActorId: [2:4125:3622], Starting query actor #1 [2:4126:3623] 2025-12-23T18:55:56.635255Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4125:3622], ActorId: [2:4126:3623], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:55:56.637927Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4125:3622], ActorId: [2:4126:3623], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OThhYzYwMTUtYjJjNDk0YjMtZGQzZjVlZGEtMzQ3Y2I4MTM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-12-23T18:55:56.698440Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4135:3632]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:56.698641Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:56.698701Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4137:3634] 2025-12-23T18:55:56.698758Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4137:3634] 2025-12-23T18:55:56.699064Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4137:3634], server id = [2:4138:3635], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:56.699113Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4138:3635] 2025-12-23T18:55:56.699174Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4138:3635], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:56.699223Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:55:56.699323Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:55:56.699367Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = 
[2:4135:3632], StatRequests.size() = 1 2025-12-23T18:55:56.699470Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:55:56.795914Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4125:3622], ActorId: [2:4126:3623], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OThhYzYwMTUtYjJjNDk0YjMtZGQzZjVlZGEtMzQ3Y2I4MTM=, TxId: 2025-12-23T18:55:56.795980Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4125:3622], ActorId: [2:4126:3623], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OThhYzYwMTUtYjJjNDk0YjMtZGQzZjVlZGEtMzQ3Y2I4MTM=, TxId: 2025-12-23T18:55:56.796280Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4124:3621], ActorId: [2:4125:3622], Got response [2:4126:3623] SUCCESS 2025-12-23T18:55:56.796440Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:55:56.809420Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:55:56.809488Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3183:3467] 2025-12-23T18:55:56.809951Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4160:3647]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T18:55:56.811807Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:56.811871Z node 2 :STATISTICS ERROR: service_impl.cpp:799: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2025-12-23T18:55:56.811903Z node 2 :STATISTICS DEBUG: service_impl.cpp:1329: ReplyFailed(), request id = 2 >> TColumnShardTestSchema::OneColdTier >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2144] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:131:2058] recipient: [1:113:2143] Leader for TabletID 72057594046447617 is [1:132:2155] sender: [1:136:2058] recipient: [1:114:2144] Leader for TabletID 72057594046316545 is [1:139:2160] sender: [1:141:2058] recipient: [1:115:2145] 2025-12-23T18:54:42.165903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, 
Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:42.165991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:42.166046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:42.166080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:42.166112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:42.166151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:42.166223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:42.166284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:42.167108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:42.167391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:42.261394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8099: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-12-23T18:54:42.261470Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:42.262408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:132:2155] sender: [1:178:2058] recipient: [1:15:2062] 2025-12-23T18:54:42.272460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:42.272541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:42.272675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:42.282048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:42.282789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:42.283543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:42.283889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:42.288726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-12-23T18:54:42.288949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:42.290796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:42.290853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:42.291086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:42.291160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:42.291206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:42.291382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:42.410325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:42.411301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:42.411404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:42.411451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:42.411492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
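The SysViewsRosterUpdate transactions in this log register system views under /MyRoot/.sys (auth_permissions, auth_effective_permissions, auth_users and pg_tables so far, with more following). Once created, such views are read with ordinary YQL; a minimal sketch, assuming a caller that is allowed to read the auth views:

-- Query two of the system views whose registration is shown above.
-- View names are taken from the CreateSysView transactions; the result columns
-- are whatever the views define, so SELECT * is used rather than assuming names.
SELECT * FROM `.sys/auth_users` LIMIT 10;
SELECT * FROM `.sys/auth_permissions` LIMIT 10;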
2025-12-23T18:54:42.411526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:42.411594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:42.411633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:42.411673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:42.411759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:42.411841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:42.411902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:42.411945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:215:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 3T18:55:57.389064Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:57.389092Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [62:211:2212], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-12-23T18:55:57.389128Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [62:211:2212], at schemeshard: 72057594046678944, txId: 1003, path id: 39 2025-12-23T18:55:57.389568Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:55:57.389609Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:55:57.389676Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:55:57.389705Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:55:57.389736Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 129 -> 240 2025-12-23T18:55:57.390349Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1003 2025-12-23T18:55:57.390419Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1003 2025-12-23T18:55:57.390445Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-12-23T18:55:57.390481Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-12-23T18:55:57.390514Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-12-23T18:55:57.391453Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-12-23T18:55:57.391522Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 
1003 2025-12-23T18:55:57.391557Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-12-23T18:55:57.391587Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2025-12-23T18:55:57.391613Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:55:57.391670Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-12-23T18:55:57.393898Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-12-23T18:55:57.393940Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:55:57.393981Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 39], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2025-12-23T18:55:57.394056Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2025-12-23T18:55:57.394081Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-12-23T18:55:57.394110Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2025-12-23T18:55:57.394134Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-12-23T18:55:57.394164Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-12-23T18:55:57.394196Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-12-23T18:55:57.394226Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2025-12-23T18:55:57.394249Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 1003:0 2025-12-23T18:55:57.394343Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2025-12-23T18:55:57.394370Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2025-12-23T18:55:57.394775Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:55:57.394812Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 
72057594046678944 2025-12-23T18:55:57.394865Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:55:57.395396Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-12-23T18:55:57.395847Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-12-23T18:55:57.397498Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-12-23T18:55:57.397700Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-12-23T18:55:57.397731Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-12-23T18:55:57.398025Z node 62 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-12-23T18:55:57.398084Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-12-23T18:55:57.398109Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:883:2855] TestWaitNotification: OK eventTxId 1003 2025-12-23T18:55:57.398446Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:55:57.398609Z node 62 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 187us result status StatusSuccess 2025-12-23T18:55:57.399106Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TraverseColumnShard::TraverseColumnTable [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] >> AnalyzeDatashard::AnalyzeOneTable [GOOD] >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:19.659725Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:19.781306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:19.787839Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:19.788193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:19.788301Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:20.150441Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.238396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.238574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:20.272578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:20.344288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004a25/r3tmp/tmppMf8HL/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13806, node 1 2025-12-23T18:55:21.194954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.195597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.195636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.195658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.196131Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.260549Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21441 2025-12-23T18:55:21.824152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:24.339837Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:24.346132Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:24.349498Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:24.378511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.378605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.417762Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:24.419614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.593308Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.594091Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.594778Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.595802Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.596114Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.596296Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.596510Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.596739Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.596962Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.771032Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:24.792659Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.792731Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.806034Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.987653Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.029920Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:25.030013Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:25.057946Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:25.059242Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:25.059419Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:25.059482Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:25.059523Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:25.059576Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:25.059611Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:25.059649Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:25.060451Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:25.177374Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.177478Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.191337Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:55:25.192661Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:55:25.296716Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:55:25.297168Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:25.299604Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:55:25.299646Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:55:25.299724Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:25.303257Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.307472Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:25.313716Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:25.313825Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:25.324045Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:25.467728Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:25.847038Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:25.944422Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:25.944495Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2 ... ATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:55:56.266848Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5538:4744], schemeshard count = 1 2025-12-23T18:55:57.283937Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2025-12-23T18:55:57.284006Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2025-12-23T18:55:57.284043Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:55:57.287267Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-12-23T18:55:57.317178Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-12-23T18:55:57.317702Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2025-12-23T18:55:57.317788Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2025-12-23T18:55:57.318730Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-12-23T18:55:57.343128Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:55:57.343431Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-12-23T18:55:57.344186Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5620:4787], server id = [2:5624:4791], tablet id = 72075186224037899, status = OK 2025-12-23T18:55:57.344470Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5620:4787], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.345281Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5621:4788], server id = [2:5625:4792], tablet id = 72075186224037900, status = OK 2025-12-23T18:55:57.345339Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5621:4788], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.345558Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5622:4789], server id = [2:5626:4793], tablet id = 72075186224037901, status = OK 2025-12-23T18:55:57.345608Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5622:4789], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.346319Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5623:4790], server id = [2:5628:4795], tablet id = 72075186224037902, status = OK 2025-12-23T18:55:57.346370Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5623:4790], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.350706Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-12-23T18:55:57.351628Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5620:4787], server id = [2:5624:4791], tablet id = 72075186224037899 2025-12-23T18:55:57.351673Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.352220Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:55:57.352955Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5621:4788], server id = [2:5625:4792], tablet id = 72075186224037900 2025-12-23T18:55:57.352988Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.353581Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-12-23T18:55:57.353971Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5622:4789], server id = [2:5626:4793], tablet id = 72075186224037901 2025-12-23T18:55:57.354000Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.354136Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-12-23T18:55:57.354168Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:55:57.354305Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2025-12-23T18:55:57.354444Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:55:57.354778Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5640:4803], ActorId: [2:5641:4804], Starting query actor #1 [2:5642:4805] 2025-12-23T18:55:57.354839Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5641:4804], ActorId: [2:5642:4805], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:55:57.356685Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5623:4790], server id = [2:5628:4795], tablet id = 72075186224037902 2025-12-23T18:55:57.356717Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.357309Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5641:4804], ActorId: [2:5642:4805], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NWQ5Mzk3YzUtZTc1OWQ0OTctNTc5YTI2ZWQtY2U0ZTJiNTQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:55:57.456931Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5651:4814]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:57.457161Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:57.457198Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5651:4814], StatRequests.size() = 1 2025-12-23T18:55:57.570975Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5641:4804], ActorId: [2:5642:4805], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWQ5Mzk3YzUtZTc1OWQ0OTctNTc5YTI2ZWQtY2U0ZTJiNTQ=, TxId: 2025-12-23T18:55:57.571041Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5641:4804], ActorId: [2:5642:4805], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWQ5Mzk3YzUtZTc1OWQ0OTctNTc5YTI2ZWQtY2U0ZTJiNTQ=, TxId: 2025-12-23T18:55:57.571532Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5640:4803], ActorId: [2:5641:4804], Got response [2:5642:4805] SUCCESS ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-12-23T18:55:57.572021Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:55:57.573195Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2897:3059];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:984;event=tablet_die; 2025-12-23T18:55:57.615550Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:55:57.615601Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. 
No ActorId to send reply. 2025-12-23T18:55:57.672111Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:5672:4825];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=20; 2025-12-23T18:55:57.680981Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } UpsertIndexes { Id: 3 Name: "cms_key" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 1 } } Options { SchemeNeedActualization: false } ; 2025-12-23T18:55:57.681349Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; 2025-12-23T18:55:57.681595Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 DefaultCompression { } UpsertIndexes { Id: 4 Name: "cms_value" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 2 } } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; 2025-12-23T18:55:57.902765Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5780:4918]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:57.903170Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:57.903217Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:57.903461Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:57.903516Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:55:57.903590Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:57.906917Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:55:19.323139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:19.428107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:19.442388Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:19.442686Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:19.442792Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:19.930636Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.040481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.040683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:20.075581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:20.145556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004955/r3tmp/tmpyDUxOa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1759, node 1 2025-12-23T18:55:21.195950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.196742Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.196797Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.196832Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.197395Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.261929Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7235 2025-12-23T18:55:21.810408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:24.864269Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:24.871831Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:24.876239Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:24.902295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.902394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.939769Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:24.941489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.108992Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.109663Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.110278Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.111152Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.111440Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.111585Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.111778Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.112019Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.112200Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.247567Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:25.302533Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:25.302610Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.315210Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.517037Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.557222Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:25.557296Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:25.580502Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:25.581642Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:25.581817Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:25.581899Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:25.581938Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:25.582001Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:25.582054Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:25.582091Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:25.582916Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:25.699481Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.699566Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.711663Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:55:25.712756Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:55:25.820354Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:55:25.820671Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:25.822803Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:55:25.822851Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:55:25.822948Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:25.825907Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.830592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:25.836880Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:25.836989Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:25.848802Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:25.993575Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:26.366994Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:26.463891Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:26.463972Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:269 ... DFlZA==, TxId: 2025-12-23T18:55:56.347870Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5530:4730], ActorId: [2:5531:4731], Got response [2:5532:4732] SUCCESS 2025-12-23T18:55:56.348069Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:55:56.361544Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2025-12-23T18:55:56.361601Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-12-23T18:55:56.414735Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:55:56.414859Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:55:56.479525Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5543:4743], schemeshard count = 1 2025-12-23T18:55:57.556985Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2025-12-23T18:55:57.557093Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2025-12-23T18:55:57.557150Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:55:57.562787Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-12-23T18:55:57.595212Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-12-23T18:55:57.595968Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2025-12-23T18:55:57.596078Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2025-12-23T18:55:57.597428Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-12-23T18:55:57.612435Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:55:57.612809Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-12-23T18:55:57.614251Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5627:4786], server id = [2:5631:4790], tablet id = 72075186224037899, status = OK 2025-12-23T18:55:57.614621Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5627:4786], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.615093Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5628:4787], server id = [2:5632:4791], tablet id = 72075186224037900, status = OK 2025-12-23T18:55:57.615187Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5628:4787], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.616074Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5629:4788], server id = [2:5634:4793], tablet id = 72075186224037901, status = OK 2025-12-23T18:55:57.616141Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5629:4788], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.616381Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5630:4789], server id = [2:5633:4792], tablet id = 72075186224037902, status = OK 2025-12-23T18:55:57.616442Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5630:4789], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.622661Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-12-23T18:55:57.623118Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5627:4786], server id = [2:5631:4790], tablet id = 72075186224037899 2025-12-23T18:55:57.623169Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.624714Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:55:57.625321Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5628:4787], server id = [2:5632:4791], tablet id = 72075186224037900 2025-12-23T18:55:57.625362Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.625777Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-12-23T18:55:57.626502Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5630:4789], server id = [2:5633:4792], tablet id = 72075186224037902 2025-12-23T18:55:57.626539Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.626944Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-12-23T18:55:57.627009Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:55:57.627293Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2025-12-23T18:55:57.627405Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:55:57.627631Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5629:4788], server id = [2:5634:4793], tablet id = 72075186224037901 2025-12-23T18:55:57.627660Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.628336Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-12-23T18:55:57.663419Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:55:57.663587Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-12-23T18:55:57.664139Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5649:4804], server id = [2:5650:4805], tablet id = 72075186224037900, status = OK 2025-12-23T18:55:57.664212Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5649:4804], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.665110Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:55:57.665159Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:55:57.665247Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-12-23T18:55:57.665355Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:55:57.665648Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5652:4807], ActorId: [2:5653:4808], Starting query actor #1 [2:5654:4809] 2025-12-23T18:55:57.665699Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5653:4808], ActorId: [2:5654:4809], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:55:57.667412Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5649:4804], server id = [2:5650:4805], tablet id = 72075186224037900 2025-12-23T18:55:57.667441Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.668037Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5653:4808], ActorId: [2:5654:4809], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NGQ1ZjU4MTEtMTQ4Njc2NjQtZjRhYTQyMGItNTQxOWI5ZDc=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:55:57.752676Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5663:4818]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:57.752861Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:57.752895Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5663:4818], StatRequests.size() = 1 2025-12-23T18:55:57.867159Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5653:4808], ActorId: [2:5654:4809], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGQ1ZjU4MTEtMTQ4Njc2NjQtZjRhYTQyMGItNTQxOWI5ZDc=, TxId: 2025-12-23T18:55:57.867231Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5653:4808], ActorId: [2:5654:4809], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGQ1ZjU4MTEtMTQ4Njc2NjQtZjRhYTQyMGItNTQxOWI5ZDc=, TxId: 2025-12-23T18:55:57.867520Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5652:4807], ActorId: [2:5653:4808], Got response [2:5654:4809] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-12-23T18:55:57.867843Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5677:4824]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:57.868040Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:55:57.868458Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:57.868516Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:57.869164Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:57.869229Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:55:57.869272Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:57.872695Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 probe = 4 >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:21.427814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:21.519725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:21.527703Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:21.528107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:21.528198Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:21.934406Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:22.022950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:22.023078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:22.080754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:22.169020Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004951/r3tmp/tmpsHZ7x4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10990, node 1 2025-12-23T18:55:22.824557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:22.825681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:22.825737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:22.825769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:22.826004Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:22.892263Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5943 2025-12-23T18:55:23.451838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:26.147999Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:26.152967Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:26.156507Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:26.180217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:26.180300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:26.217992Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:26.219912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:26.456288Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.456796Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.457439Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.457985Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.458204Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.458301Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.458460Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.458564Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.458666Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.527965Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:26.584221Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:26.584343Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:26.597396Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:26.772561Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:26.813572Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:26.813669Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:26.837358Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:26.838525Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:26.838689Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:26.838762Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:26.838803Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:26.838852Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:26.838918Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:26.838957Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:26.839275Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:26.879407Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:26.879498Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:26.893872Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:26.903726Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:26.904740Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:26.909139Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:26.941602Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:26.941679Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:55:26.941768Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:26.968409Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:26.979727Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:26.988883Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:26.989024Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:27.001808Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:27.121974Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:27.449800Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:27.566008Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:27.566083Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:26 ... 543Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5494:4722] 2025-12-23T18:55:56.554805Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5493:4721], server id = [2:5494:4722], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:56.554894Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5494:4722], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:56.554974Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:55:56.555206Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:55:56.555334Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5491:4719], StatRequests.size() = 1 2025-12-23T18:55:56.555427Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:55:56.681645Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5481:4709], ActorId: [2:5482:4710], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTllNzg0MWUtN2RjYjQ1ZDAtZGY4NDAyNjktN2I3YzFmZTY=, TxId: 2025-12-23T18:55:56.681713Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5481:4709], ActorId: [2:5482:4710], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTllNzg0MWUtN2RjYjQ1ZDAtZGY4NDAyNjktN2I3YzFmZTY=, TxId: 2025-12-23T18:55:56.682012Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5480:4708], ActorId: [2:5481:4709], Got response [2:5482:4710] SUCCESS 2025-12-23T18:55:56.682261Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:55:56.695668Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2025-12-23T18:55:56.695719Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-12-23T18:55:56.748893Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:55:56.748964Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:55:56.791506Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5493:4721], schemeshard count = 1 2025-12-23T18:55:57.865748Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2025-12-23T18:55:57.865826Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2025-12-23T18:55:57.865862Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:55:57.870534Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-12-23T18:55:57.902770Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-12-23T18:55:57.903313Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2025-12-23T18:55:57.903398Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2025-12-23T18:55:57.904252Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-12-23T18:55:57.918072Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:55:57.918298Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-12-23T18:55:57.919285Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5577:4764], server id = [2:5581:4768], tablet id = 72075186224037899, status = OK 2025-12-23T18:55:57.919601Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5577:4764], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.919847Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5578:4765], server id = [2:5582:4769], tablet id = 72075186224037900, status = OK 2025-12-23T18:55:57.919894Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5578:4765], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.920582Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5579:4766], server id = [2:5583:4770], tablet id = 72075186224037901, status = OK 2025-12-23T18:55:57.920628Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5579:4766], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.920899Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5580:4767], server id = [2:5584:4771], tablet id = 72075186224037902, status = OK 2025-12-23T18:55:57.920938Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5580:4767], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:57.925483Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 
2025-12-23T18:55:57.956287Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5577:4764], server id = [2:5581:4768], tablet id = 72075186224037899 2025-12-23T18:55:57.956352Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.957559Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:55:57.958143Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5578:4765], server id = [2:5582:4769], tablet id = 72075186224037900 2025-12-23T18:55:57.958174Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.958874Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-12-23T18:55:57.959261Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5579:4766], server id = [2:5583:4770], tablet id = 72075186224037901 2025-12-23T18:55:57.959284Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.959395Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-12-23T18:55:57.959431Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:55:57.959565Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-12-23T18:55:57.959682Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:55:57.960005Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5597:4780], ActorId: [2:5598:4781], Starting query actor #1 [2:5599:4782] 2025-12-23T18:55:57.960056Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5598:4781], ActorId: [2:5599:4782], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:55:57.961551Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5580:4767], server id = [2:5584:4771], tablet id = 72075186224037902 2025-12-23T18:55:57.961575Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:57.962163Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5598:4781], ActorId: [2:5599:4782], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OTE1ZThmZWItODgxNGE4ODUtNDc2ZmRlYTEtOWZiMDUzNDg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:55:58.041362Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5608:4791]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:58.041584Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:58.041638Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5608:4791], StatRequests.size() = 1 2025-12-23T18:55:58.150795Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5598:4781], ActorId: [2:5599:4782], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTE1ZThmZWItODgxNGE4ODUtNDc2ZmRlYTEtOWZiMDUzNDg=, TxId: 2025-12-23T18:55:58.150857Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5598:4781], ActorId: [2:5599:4782], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTE1ZThmZWItODgxNGE4ODUtNDc2ZmRlYTEtOWZiMDUzNDg=, TxId: 2025-12-23T18:55:58.151169Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5597:4780], ActorId: [2:5598:4781], Got response [2:5599:4782] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-12-23T18:55:58.151450Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5622:4797]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:58.151846Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:58.151900Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:58.152066Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:55:58.152746Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:58.152786Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:55:58.152827Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:58.155606Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:21.104983Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:21.201071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:21.206813Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:21.207249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:21.207360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:21.621771Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:21.725021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:21.725133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:21.783284Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:21.891495Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00494f/r3tmp/tmpRa72h0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10595, node 1 2025-12-23T18:55:22.517012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:22.517740Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:22.517775Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:22.517795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:22.517971Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:22.580839Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22860 2025-12-23T18:55:23.089969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:25.686897Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:25.692137Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:25.694730Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:25.721647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:25.721732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.759241Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:25.760944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.988614Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.989144Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.989704Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.990215Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.990447Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.990556Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.990686Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.990830Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.990964Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.102204Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:26.113481Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:26.113606Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:26.126115Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:26.275245Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:26.311601Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:26.311673Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:26.334692Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:26.334861Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:26.335088Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:26.335141Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:26.335180Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:26.335217Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:26.335266Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:26.335323Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:26.335806Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:26.373445Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:26.373560Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2185:2603], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:26.376086Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2183:2601] 2025-12-23T18:55:26.394632Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:26.396709Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2249:2651] 2025-12-23T18:55:26.396980Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2249:2651], schemeshard id = 72075186224037897 2025-12-23T18:55:26.423097Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:26.423138Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Creating table 2025-12-23T18:55:26.423189Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:26.441831Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2348:2695], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:26.446940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:26.458334Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:26.458457Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:26.468195Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:26.571975Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:26.908879Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:27.017426Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:27.017512Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2251:2653] Owner: [2:2250:2 ... T18:55:58.315758Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2025-12-23T18:55:58.315801Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1766516158215958 2025-12-23T18:55:58.315834Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-12-23T18:55:58.315872Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2025-12-23T18:55:58.315908Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2025-12-23T18:55:58.315996Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-12-23T18:55:58.316062Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:55:58.316163Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-12-23T18:55:58.316239Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:58.316297Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:58.316355Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:58.316495Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:55:58.317773Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:58.318308Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5601:4781] Owner: [2:5600:4780]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:58.318370Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5601:4781] Owner: [2:5600:4780]. Column diff is empty, finishing 2025-12-23T18:55:58.318768Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-12-23T18:55:58.318835Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-12-23T18:55:58.319827Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2025-12-23T18:55:58.319892Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2025-12-23T18:55:58.321726Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-12-23T18:55:58.339547Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5610:4788] 2025-12-23T18:55:58.339792Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5610:4788], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-12-23T18:55:58.339901Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5566:4760], server id = [2:5610:4788], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:58.340079Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5611:4789] 2025-12-23T18:55:58.340178Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5611:4789], schemeshard id = 72075186224037897 2025-12-23T18:55:58.388371Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:55:58.388574Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-12-23T18:55:58.389283Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5616:4794], server id = [2:5620:4798], tablet id = 72075186224037899, status = OK 2025-12-23T18:55:58.389548Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5616:4794], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:58.389797Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5617:4795], server id = [2:5621:4799], tablet id = 72075186224037900, status = OK 2025-12-23T18:55:58.389841Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5617:4795], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:58.391128Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5618:4796], server id = [2:5623:4801], tablet id = 72075186224037901, status = OK 2025-12-23T18:55:58.391171Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5618:4796], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:58.391393Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5619:4797], server id = [2:5622:4800], tablet id = 72075186224037902, status = OK 2025-12-23T18:55:58.391427Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5619:4797], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:58.395335Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-12-23T18:55:58.395661Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5616:4794], server id = [2:5620:4798], tablet id = 72075186224037899 2025-12-23T18:55:58.395694Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:58.396570Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:55:58.396906Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5617:4795], server id = [2:5621:4799], tablet id = 72075186224037900 
2025-12-23T18:55:58.396928Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:58.397217Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-12-23T18:55:58.397556Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5618:4796], server id = [2:5623:4801], tablet id = 72075186224037901 2025-12-23T18:55:58.397585Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:58.397896Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-12-23T18:55:58.397928Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:55:58.398090Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-12-23T18:55:58.398215Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:55:58.398419Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5636:4810], ActorId: [2:5637:4811], Starting query actor #1 [2:5638:4812] 2025-12-23T18:55:58.398463Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5637:4811], ActorId: [2:5638:4812], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:55:58.400596Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5619:4797], server id = [2:5622:4800], tablet id = 72075186224037902 2025-12-23T18:55:58.400630Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:58.401190Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5637:4811], ActorId: [2:5638:4812], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODhjZGEyYTktMWI4ZGQ3OTEtZjczYzVhMjYtOWQ5MTY4ODU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:55:58.480115Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5647:4821]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:58.480372Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:58.480411Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5647:4821], StatRequests.size() = 1 2025-12-23T18:55:58.596266Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5637:4811], ActorId: [2:5638:4812], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODhjZGEyYTktMWI4ZGQ3OTEtZjczYzVhMjYtOWQ5MTY4ODU=, TxId: 2025-12-23T18:55:58.596327Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5637:4811], ActorId: [2:5638:4812], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODhjZGEyYTktMWI4ZGQ3OTEtZjczYzVhMjYtOWQ5MTY4ODU=, TxId: 
2025-12-23T18:55:58.596604Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5636:4810], ActorId: [2:5637:4811], Got response [2:5638:4812] SUCCESS 2025-12-23T18:55:58.596878Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:55:58.622990Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:55:58.623041Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-12-23T18:55:58.689515Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5664:4829]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:58.689927Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:58.690006Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:58.690352Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:58.690407Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:55:58.690462Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:58.694327Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TColumnShardTestSchema::Drop-Reboots+GenerateInternalPathId |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:19.825932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:19.945514Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:19.953984Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:19.954245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:19.954321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:20.346086Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.450121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.450326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:20.485595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:20.566469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004a27/r3tmp/tmpZMunLH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11402, node 1 2025-12-23T18:55:21.202879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.203632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.203686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.203718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.204269Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.269431Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5903 2025-12-23T18:55:21.824342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:24.553916Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:24.562005Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:24.566719Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:24.593191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.593288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.631213Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:24.632566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.811035Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.811740Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.812381Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.813167Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.813425Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.813541Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.813723Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.813896Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.814004Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.957969Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:25.013264Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:25.013344Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.026073Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.154856Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.234379Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:25.234494Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:25.265161Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:25.266598Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:25.266843Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:25.266916Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:25.267041Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:25.267101Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:25.267172Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:25.267226Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:25.268226Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:25.385726Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.385828Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.399871Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:55:25.401207Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:55:25.518604Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:55:25.519024Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:25.521370Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:55:25.521428Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:55:25.521521Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:25.525341Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.529769Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:25.542970Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:25.543122Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:25.556532Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:25.705387Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:26.160130Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:26.258508Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:26.258575Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:26 ... 040Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5535:4744], server id = [2:5536:4745], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:57.496102Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5536:4745] 2025-12-23T18:55:57.496250Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5536:4745], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:57.496319Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:55:57.496454Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:55:57.496534Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5533:4742], StatRequests.size() = 1 2025-12-23T18:55:57.496610Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:55:57.604442Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5523:4732], ActorId: [2:5524:4733], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWFkMmI5MzEtMmIwY2Y4ODgtNzE1NDc1ODAtYTkyM2Q1Nzg=, TxId: 2025-12-23T18:55:57.604525Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5523:4732], ActorId: [2:5524:4733], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWFkMmI5MzEtMmIwY2Y4ODgtNzE1NDc1ODAtYTkyM2Q1Nzg=, TxId: 2025-12-23T18:55:57.604861Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5522:4731], ActorId: [2:5523:4732], Got response [2:5524:4733] SUCCESS 2025-12-23T18:55:57.605114Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:55:57.618647Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2025-12-23T18:55:57.618697Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-12-23T18:55:57.683043Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:55:57.683122Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:55:57.748299Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5535:4744], schemeshard count = 1 2025-12-23T18:55:58.783627Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2025-12-23T18:55:58.783710Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2025-12-23T18:55:58.783741Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:55:58.787263Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-12-23T18:55:58.815922Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-12-23T18:55:58.816502Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2025-12-23T18:55:58.816570Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2025-12-23T18:55:58.817460Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-12-23T18:55:58.842423Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:55:58.842793Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-12-23T18:55:58.843617Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5617:4787], server id = [2:5621:4791], tablet id = 72075186224037899, status = OK 2025-12-23T18:55:58.843942Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5617:4787], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:58.844240Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5618:4788], server id = [2:5622:4792], tablet id = 72075186224037900, status = OK 2025-12-23T18:55:58.844285Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5618:4788], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:58.845146Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5619:4789], server id = [2:5623:4793], tablet id = 72075186224037901, status = OK 2025-12-23T18:55:58.845225Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5619:4789], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:58.846128Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5620:4790], server id = [2:5624:4794], tablet id = 72075186224037902, status = OK 2025-12-23T18:55:58.846175Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5620:4790], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:58.850337Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 
2025-12-23T18:55:58.850634Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5617:4787], server id = [2:5621:4791], tablet id = 72075186224037899 2025-12-23T18:55:58.850665Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:58.851977Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:55:58.852519Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5618:4788], server id = [2:5622:4792], tablet id = 72075186224037900 2025-12-23T18:55:58.852546Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:58.852851Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-12-23T18:55:58.853056Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-12-23T18:55:58.853091Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:55:58.853224Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-12-23T18:55:58.853342Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:55:58.853704Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5637:4803], ActorId: [2:5638:4804], Starting query actor #1 [2:5639:4805] 2025-12-23T18:55:58.853748Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5638:4804], ActorId: [2:5639:4805], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:55:58.855350Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5619:4789], server id = [2:5623:4793], tablet id = 72075186224037901 2025-12-23T18:55:58.855389Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:58.855986Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5620:4790], server id = [2:5624:4794], tablet id = 72075186224037902 2025-12-23T18:55:58.856023Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:58.856411Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5638:4804], ActorId: [2:5639:4805], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODY1ZDZiZmUtMTQ1ZDZlMi0xYjA4MDM2Zi00OWI5M2VlMA==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:55:58.936794Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5648:4814]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:58.937020Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:58.937052Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: 
ReplySuccess(), request id = 2, ReplyToActorId = [2:5648:4814], StatRequests.size() = 1 2025-12-23T18:55:59.047742Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5638:4804], ActorId: [2:5639:4805], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODY1ZDZiZmUtMTQ1ZDZlMi0xYjA4MDM2Zi00OWI5M2VlMA==, TxId: 2025-12-23T18:55:59.047818Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5638:4804], ActorId: [2:5639:4805], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODY1ZDZiZmUtMTQ1ZDZlMi0xYjA4MDM2Zi00OWI5M2VlMA==, TxId: 2025-12-23T18:55:59.048154Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5637:4803], ActorId: [2:5638:4804], Got response [2:5639:4805] SUCCESS ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-12-23T18:55:59.048411Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5662:4820]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:59.048767Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:59.048816Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:59.048979Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:55:59.049647Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:59.049688Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:55:59.049727Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:59.052564Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:19.345589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:19.460845Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:19.467038Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:19.467390Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:19.467511Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:19.934248Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.030691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.030877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:20.064737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:20.186854Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00495d/r3tmp/tmpDIdx9z/pdisk_1.dat TServer::EnableGrpc on GrpcPort 1540, node 1 2025-12-23T18:55:21.195695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.196642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.196696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.196732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.197320Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.263270Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:1056 2025-12-23T18:55:21.829123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:24.559599Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:24.564916Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:24.568338Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:24.593261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.593364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.631611Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:24.633606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.790343Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.790927Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.791509Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.792230Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.792439Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.792522Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.792649Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.792843Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.792983Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.909548Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:24.987333Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.987456Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.000886Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.170555Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.206164Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:25.206282Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:25.239131Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:25.240712Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:25.241008Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:25.241074Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:25.241130Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:25.241190Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:25.241248Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:25.241299Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:25.242344Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:25.358097Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.358224Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.371077Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:55:25.372750Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:55:25.489642Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:55:25.489937Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:25.492181Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:55:25.492235Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:55:25.492319Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:25.495931Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.500137Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:25.508225Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:25.508374Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:25.521938Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:25.663887Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:26.059662Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:26.167561Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:26.167643Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:269 ... } 2025-12-23T18:55:50.245094Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:50.249988Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4065:3566], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:50.252237Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:50.290271Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4047:3560], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-12-23T18:55:50.434678Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4132:3607], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:50.468855Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4131:3606] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:50.703626Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4153:3620]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:50.703820Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:50.703886Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4155:3622] 2025-12-23T18:55:50.703950Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4155:3622] 2025-12-23T18:55:50.704251Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4155:3622], server id = [2:4156:3623], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:50.704299Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4156:3623] 2025-12-23T18:55:50.704353Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4156:3623], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:50.704401Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:55:50.704472Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:55:50.704540Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4153:3620], StatRequests.size() = 1 2025-12-23T18:55:50.704635Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:55:55.897455Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4010:2468], ActorId: [2:4020:3546], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:55:55.897613Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4010:2468], ActorId: [2:4020:3546], Start read next stream part 2025-12-23T18:55:55.898639Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4010:2468], ActorId: [2:4239:3682], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:55:55.900559Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33140, txId: 281474976720660] shutting down 2025-12-23T18:55:55.900703Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4010:2468], ActorId: [2:4239:3682], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table` 2025-12-23T18:55:55.900765Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4010:2468], ActorId: [2:4239:3682], Start read next stream part 2025-12-23T18:55:55.902643Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4010:2468], ActorId: [2:4020:3546], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:55:55.902683Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4010:2468], ActorId: [2:4020:3546], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzAxMTBkY2MtMWM5OTRhMzEtNmRhZjg1OWItY2Q4MGNkYWU=, TxId: 2025-12-23T18:55:55.971933Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4264:3703]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:55.972087Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:55.972113Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4264:3703], StatRequests.size() = 1 2025-12-23T18:55:58.899492Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4010:2468], ActorId: [2:4239:3682], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:55:58.899578Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4010:2468], ActorId: [2:4239:3682], Start read next stream part 2025-12-23T18:55:58.900422Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33160, txId: 281474976720663] shutting down 2025-12-23T18:55:58.901002Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4335:3752], ActorId: [2:4337:3753], Starting query actor #1 [2:4338:3754] 2025-12-23T18:55:58.901063Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4337:3753], ActorId: [2:4338:3754], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:55:58.903343Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4010:2468], ActorId: [2:4239:3682], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:55:58.903386Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4010:2468], ActorId: [2:4239:3682], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzhjMDc4YzUtMWMzNjMyNTQtZjVlZWZkYTktYzI3NjY5MjI=, TxId: 2025-12-23T18:55:58.903846Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4337:3753], ActorId: [2:4338:3754], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=M2Q4YzU5MDMtZDg4OGM5NGMtOGNhODc3MTktMjkzOWMzYTU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:55:58.947319Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4354:3768]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:58.947486Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:55:58.947512Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4354:3768], StatRequests.size() = 1 2025-12-23T18:55:59.079440Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4337:3753], ActorId: [2:4338:3754], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2Q4YzU5MDMtZDg4OGM5NGMtOGNhODc3MTktMjkzOWMzYTU=, TxId: 2025-12-23T18:55:59.079505Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4337:3753], ActorId: [2:4338:3754], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2Q4YzU5MDMtZDg4OGM5NGMtOGNhODc3MTktMjkzOWMzYTU=, TxId: 2025-12-23T18:55:59.079721Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4335:3752], ActorId: [2:4337:3753], Got response [2:4338:3754] SUCCESS 2025-12-23T18:55:59.079961Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:55:59.093121Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:55:59.093182Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3116:3444] 2025-12-23T18:55:59.093612Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4377:3782]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T18:55:59.093859Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T18:55:59.093895Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:55:59.094080Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T18:55:59.094121Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:55:59.094160Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:55:59.094410Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-12-23T18:55:59.097710Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-12-23T18:55:59.098256Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2025-12-23T18:55:59.098359Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] >> TColumnShardTestSchema::RebootExportWithLostAnswer >> MoveTable::RenameToItself_Negative >> TColumnShardTestSchema::CreateTable+Reboots-GenerateInternalPathId >> MoveTable::WithCommitInProgress+Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:19.848641Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:19.973818Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:19.979990Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:19.980309Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:19.980462Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:20.329708Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.428067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.428256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:20.462935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:20.585737Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0049f3/r3tmp/tmpzaD5BQ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18760, node 1 2025-12-23T18:55:21.195348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.196124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.196183Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.196218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.196786Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.261991Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:23137 2025-12-23T18:55:21.810399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:24.295054Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:24.303099Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:24.307731Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:24.348872Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.349026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.389669Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:24.391367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.564645Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.565149Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.565629Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.566293Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.566515Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.566594Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.566716Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.566890Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.566977Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.734089Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:24.756135Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.756243Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.768911Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.893189Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:24.985678Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:24.985790Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:25.017782Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:25.020559Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:25.020799Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:25.020891Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:25.020956Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:25.021011Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:25.021069Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:25.021124Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:25.022507Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:25.140899Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.141009Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.154844Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:55:25.156195Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:55:25.273539Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:55:25.273949Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:25.276559Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:55:25.276635Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:55:25.276724Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:25.280571Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.286932Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:25.296060Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:25.296197Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:25.311051Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:25.451766Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:25.851214Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:25.959303Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:25.959374Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2 ... T18:55:59.759593Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2025-12-23T18:55:59.759633Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1766516159647746 2025-12-23T18:55:59.759669Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-12-23T18:55:59.759704Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2025-12-23T18:55:59.759738Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2025-12-23T18:55:59.759834Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-12-23T18:55:59.759894Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:55:59.760005Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-12-23T18:55:59.760070Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:59.760137Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:59.760201Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:59.760347Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:55:59.761444Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:59.762016Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-12-23T18:55:59.762095Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-12-23T18:55:59.762578Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5684:4824] Owner: [2:5683:4823]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:59.762652Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5684:4824] Owner: [2:5683:4823]. Column diff is empty, finishing 2025-12-23T18:55:59.763362Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2025-12-23T18:55:59.763437Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2025-12-23T18:55:59.765818Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-12-23T18:55:59.784360Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5693:4831] 2025-12-23T18:55:59.784520Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5649:4803], server id = [2:5693:4831], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:59.784837Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5693:4831], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-12-23T18:55:59.785164Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5694:4832] 2025-12-23T18:55:59.785322Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5694:4832], schemeshard id = 72075186224037897 2025-12-23T18:55:59.858036Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:55:59.858196Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-12-23T18:55:59.859158Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5699:4837], server id = [2:5703:4841], tablet id = 72075186224037899, status = OK 2025-12-23T18:55:59.859274Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5699:4837], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:59.859689Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5700:4838], server id = [2:5704:4842], tablet id = 72075186224037900, status = OK 2025-12-23T18:55:59.859755Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5700:4838], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:59.860607Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5701:4839], server id = [2:5706:4844], tablet id = 72075186224037901, status = OK 2025-12-23T18:55:59.860669Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5701:4839], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:59.861287Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5702:4840], server id = [2:5705:4843], tablet id = 72075186224037902, status = OK 2025-12-23T18:55:59.861344Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5702:4840], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:55:59.862614Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-12-23T18:55:59.863451Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5699:4837], server id = [2:5703:4841], tablet id = 72075186224037899 2025-12-23T18:55:59.863498Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:59.863702Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:55:59.863977Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-12-23T18:55:59.864260Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: 
EvClientDestroyed, node id = 2, client id = [2:5700:4838], server id = [2:5704:4842], tablet id = 72075186224037900 2025-12-23T18:55:59.864288Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:59.864506Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-12-23T18:55:59.864560Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:55:59.864827Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-12-23T18:55:59.864974Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:55:59.865218Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5711:4849], ActorId: [2:5712:4850], Starting query actor #1 [2:5713:4851] 2025-12-23T18:55:59.865286Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5712:4850], ActorId: [2:5713:4851], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:55:59.868234Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5701:4839], server id = [2:5706:4844], tablet id = 72075186224037901 2025-12-23T18:55:59.868272Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:59.868752Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5702:4840], server id = [2:5705:4843], tablet id = 72075186224037902 2025-12-23T18:55:59.868787Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:55:59.869120Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5712:4850], ActorId: [2:5713:4851], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NWVkZTZkNC00NzM2ZGFkOC03NDA4YjIzYi01YzUyNTEzOQ==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:55:59.984130Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5722:4860]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:59.984478Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:59.984535Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5722:4860], StatRequests.size() = 1 2025-12-23T18:56:00.137376Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5712:4850], ActorId: [2:5713:4851], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWVkZTZkNC00NzM2ZGFkOC03NDA4YjIzYi01YzUyNTEzOQ==, TxId: 2025-12-23T18:56:00.137479Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5712:4850], ActorId: [2:5713:4851], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWVkZTZkNC00NzM2ZGFkOC03NDA4YjIzYi01YzUyNTEzOQ==, TxId: 2025-12-23T18:56:00.137915Z node 
2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5711:4849], ActorId: [2:5712:4850], Got response [2:5713:4851] SUCCESS 2025-12-23T18:56:00.138390Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:56:00.154206Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:56:00.154274Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-12-23T18:56:00.210174Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5739:4868]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:00.210686Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:56:00.210782Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:56:00.211152Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:56:00.211221Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:56:00.211283Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:56:00.215388Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:19.452771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:19.567793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:19.577788Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:19.578147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:19.578270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:19.974330Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.065003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.065186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:20.099689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:20.112964Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00495e/r3tmp/tmp913Hyb/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11392, node 1 2025-12-23T18:55:21.195602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.196344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.196395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.196427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.196978Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.261926Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28836 2025-12-23T18:55:21.810385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:24.309393Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:24.315402Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:24.319704Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:24.350534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.350637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.389590Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:24.391217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.549510Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.554240Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.554929Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.555681Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.555922Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.556026Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.556242Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.556457Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.556571Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.690629Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:55:24.756260Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.756384Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.769552Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.971165Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.001372Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:25.001454Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:25.024180Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:25.025245Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:25.025538Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:25.025622Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column 
count# 0 2025-12-23T18:55:25.025689Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:25.025747Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:25.025809Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:25.025854Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:25.026508Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:25.140166Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.140280Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.154970Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:55:25.156372Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:55:25.273973Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:55:25.274358Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:55:25.276855Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:55:25.276917Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:55:25.277000Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:55:25.281048Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.287029Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:25.295642Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:25.295794Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:25.310438Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:25.462345Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:25.869219Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:25.957293Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:25.957391Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. ... = 1 2025-12-23T18:55:59.095599Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:55:59.095700Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6253:5148], StatRequests.size() = 1 2025-12-23T18:55:59.095763Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:55:59.229245Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6243:5138], ActorId: [2:6244:5139], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzJiOWFkNTMtZWEzZDJmNmMtZTYzN2Q5M2ItMmI0N2Y5ZjU=, TxId: 2025-12-23T18:55:59.229316Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6243:5138], ActorId: [2:6244:5139], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzJiOWFkNTMtZWEzZDJmNmMtZTYzN2Q5M2ItMmI0N2Y5ZjU=, TxId: 2025-12-23T18:55:59.229593Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6242:5137], ActorId: [2:6243:5138], Got response [2:6244:5139] SUCCESS 2025-12-23T18:55:59.229902Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:55:59.245155Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2025-12-23T18:55:59.245224Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-12-23T18:55:59.322705Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:55:59.322824Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:55:59.366765Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:6255:5150], schemeshard count = 1 2025-12-23T18:55:59.769694Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2025-12-23T18:55:59.769774Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.769000s, at schemeshard: 72075186224037899 2025-12-23T18:55:59.769945Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2025-12-23T18:55:59.784890Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:56:00.534679Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2025-12-23T18:56:00.534769Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 32] is column table. 2025-12-23T18:56:00.534822Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 32] 2025-12-23T18:56:00.538196Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-12-23T18:56:00.566975Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-12-23T18:56:00.567697Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2025-12-23T18:56:00.567772Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2025-12-23T18:56:00.568757Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-12-23T18:56:00.593518Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:56:00.593809Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-12-23T18:56:00.594419Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6342:5195], server id = [2:6346:5199], tablet id = 72075186224037905, status = OK 2025-12-23T18:56:00.594777Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6342:5195], path = { OwnerId: 72075186224037899 LocalId: 32 } 2025-12-23T18:56:00.595941Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6343:5196], server id = [2:6347:5200], tablet id = 72075186224037906, status = OK 2025-12-23T18:56:00.596002Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6343:5196], path = { OwnerId: 72075186224037899 LocalId: 32 } 2025-12-23T18:56:00.596284Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6344:5197], server id = [2:6348:5201], tablet id = 72075186224037907, status = OK 2025-12-23T18:56:00.596326Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6344:5197], path = { OwnerId: 72075186224037899 LocalId: 32 } 2025-12-23T18:56:00.596986Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6345:5198], server id = [2:6349:5202], tablet id = 72075186224037908, status = OK 2025-12-23T18:56:00.597041Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6345:5198], path = { OwnerId: 72075186224037899 LocalId: 32 } 2025-12-23T18:56:00.601151Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-12-23T18:56:00.601420Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6342:5195], server id = [2:6346:5199], tablet id = 72075186224037905 2025-12-23T18:56:00.601459Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:00.602727Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-12-23T18:56:00.603166Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6343:5196], server id = [2:6347:5200], tablet id = 72075186224037906 2025-12-23T18:56:00.603192Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:00.604044Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-12-23T18:56:00.604353Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6345:5198], server id = [2:6349:5202], tablet id = 72075186224037908 2025-12-23T18:56:00.604377Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:00.604572Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-12-23T18:56:00.604605Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:56:00.604729Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2025-12-23T18:56:00.604853Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:56:00.605269Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:6362:5211], ActorId: [2:6363:5212], Starting query actor #1 [2:6364:5213] 2025-12-23T18:56:00.605336Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6363:5212], ActorId: [2:6364:5213], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2025-12-23T18:56:00.606870Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6344:5197], server id = [2:6348:5201], tablet id = 72075186224037907 2025-12-23T18:56:00.606917Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:00.607499Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:6363:5212], ActorId: [2:6364:5213], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NzlkMDhjOGUtMmMzZmY2YzktOTc2NWNkYjYtMjg2NDNmMWQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:56:00.685936Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6373:5222]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:00.686189Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:00.686226Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:6373:5222], StatRequests.size() = 1 2025-12-23T18:56:00.793435Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6363:5212], ActorId: [2:6364:5213], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzlkMDhjOGUtMmMzZmY2YzktOTc2NWNkYjYtMjg2NDNmMWQ=, TxId: 2025-12-23T18:56:00.793507Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6363:5212], ActorId: [2:6364:5213], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzlkMDhjOGUtMmMzZmY2YzktOTc2NWNkYjYtMjg2NDNmMWQ=, TxId: 2025-12-23T18:56:00.793824Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6362:5211], ActorId: [2:6363:5212], Got response [2:6364:5213] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-12-23T18:56:00.794202Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:6387:5228]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:00.794365Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:56:00.795072Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:56:00.795168Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:56:00.795941Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:56:00.796018Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-12-23T18:56:00.796085Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:56:00.801632Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> THiveTest::TestLockTabletExecutionBadUnlock [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock >> TColumnShardTestSchema::RebootOneColdTier >> MoveTable::RenameToItself_Negative [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::RenameToItself_Negative [GOOD] Test command err: 2025-12-23T18:56:02.391124Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:02.425187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:02.425432Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:02.432239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:02.432436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:02.432591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:02.432671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:02.432744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:02.432820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:02.432916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:02.432985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:02.433078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:02.433149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:02.433219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:02.433306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:02.433377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:02.452618Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:02.452742Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:02.452778Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:02.452914Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:02.453031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:02.453084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:02.453113Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:02.453178Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:02.453235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:02.453262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:02.453280Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:02.453419Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:02.453472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:02.453515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:02.453541Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:02.453603Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:02.453635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:02.453677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:02.453699Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:02.453744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:02.453766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:02.453792Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:02.453819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:02.453860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:02.453892Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:02.454040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:02.454069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:02.454086Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:02.454154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:02.454178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:02.454195Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:02.454224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:02.454249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:02.454278Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:02.454309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:02.454343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:02.454364Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:02.454456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:02.454486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... mon_data.cpp:29;EXECUTE:storages_managerLoadingTime=205; 2025-12-23T18:56:02.723382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-12-23T18:56:02.723418Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-12-23T18:56:02.723453Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-12-23T18:56:02.723552Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=24; 2025-12-23T18:56:02.723580Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-12-23T18:56:02.723648Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=37; 2025-12-23T18:56:02.723678Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2025-12-23T18:56:02.723718Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=20; 2025-12-23T18:56:02.723786Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=37; 2025-12-23T18:56:02.723838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=16; 2025-12-23T18:56:02.723879Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=2116; 2025-12-23T18:56:02.723983Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:56:02.724037Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:56:02.724102Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:02.724347Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:02.724383Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2025-12-23T18:56:02.724440Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.099000s; 2025-12-23T18:56:02.724630Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:56:02.724694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:56:02.724731Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:02.724761Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2025-12-23T18:56:02.724887Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.094000s; 2025-12-23T18:56:02.724939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2025-12-23T18:56:03.014783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=137111714584448;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1766516163362;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:56:03.014850Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=137111714584448;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1766516163362;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2025-12-23T18:56:03.026757Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1766516163362;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=137111714584448;op_tx=10:TX_KIND_SCHEMA;min=1766516163362;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1766516163362;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137317875722624;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-12-23T18:56:03.026838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1766516163362;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=137111714584448;op_tx=10:TX_KIND_SCHEMA;min=1766516163362;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1766516163362;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137317875722624;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:158:2181]; 2025-12-23T18:56:03.026928Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1766516163362;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=137111714584448;op_tx=10:TX_KIND_SCHEMA;min=1766516163362;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1766516163362;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137317875722624;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2025-12-23T18:56:03.027212Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:56:03.027304Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516163362 at tablet 9437184, mediator 0 2025-12-23T18:56:03.027340Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2025-12-23T18:56:03.027596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:56:03.027670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:56:03.027704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:56:03.027780Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2025-12-23T18:56:03.035047Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1766516163362;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2025-12-23T18:56:03.035136Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:56:03.035247Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2025-12-23T18:56:03.035292Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:145;event=RegisterTable;path_id=1000000185; 2025-12-23T18:56:03.035478Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2025-12-23T18:56:03.047629Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:532;event=OnTieringModified;path_id=1000000185; 2025-12-23T18:56:03.071462Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2025-12-23T18:56:03.072212Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=137111714634176;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1766516163365;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=1;dst=1; 2025-12-23T18:56:03.072273Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=137111714634176;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1766516163365;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tx_controller.cpp:350;error=problem on start;message=Rename to existing table; 2025-12-23T18:56:03.084165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1766516163365;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=137111714634176;op_tx=11:TX_KIND_SCHEMA;min=1766516163365;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2025-12-23T18:56:03.084238Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1766516163365;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=137111714634176;op_tx=11:TX_KIND_SCHEMA;min=1766516163365;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:23;message=Rename to existing table;tablet_id=9437184;tx_id=11; |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop-Reboots+GenerateInternalPathId [GOOD] |98.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::CreateTable+Reboots-GenerateInternalPathId [GOOD] >> TColumnShardTestSchema::ExportWithLostAnswer >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop-Reboots+GenerateInternalPathId [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> MoveTable::WithCommitInProgress+Reboot [GOOD] Test command err: 2025-12-23T18:56:01.308989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:01.339730Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:01.339953Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:01.345641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:01.345831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:01.345988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:01.346068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:01.346170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:01.346238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:01.346334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:01.346397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:01.346465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:01.346539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:01.346627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:01.346708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:01.346806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:01.365945Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:01.366106Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:01.366147Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:01.366260Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:01.366366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:01.366419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:01.366449Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:01.366539Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:01.366580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:01.366607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:01.366627Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:01.366765Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:01.366825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:01.366852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2025-12-23T18:56:01.366873Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:01.366954Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:01.366993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:01.367031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:01.367054Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:01.367099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:01.367143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:01.367163Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:01.367192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:01.367226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:01.367255Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:01.367391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:01.367420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:01.367440Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:01.367531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:01.367561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:01.367581Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:01.367611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:01.367634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:01.367652Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:01.367676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:01.367699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:01.367717Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:01.367790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:01.367820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
log.cpp:841: tablet_id=9437184;request_tx=104:TX_KIND_SCHEMA;min=1766516162404;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136495119911296;op_tx=104:TX_KIND_SCHEMA;min=1766516162404;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=104:TX_KIND_SCHEMA;min=1766516162404;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=136701260128256;method=TTxController::FinishProposeOnComplete;tx_id=104;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=104; 2025-12-23T18:56:03.733670Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:56:03.733763Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516162404 at tablet 9437184, mediator 0 2025-12-23T18:56:03.733800Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] execute at tablet 9437184 2025-12-23T18:56:03.733986Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2025-12-23T18:56:03.745746Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] complete at tablet 9437184 2025-12-23T18:56:03.746195Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766516162404:max} readable: {1766516162404:max} at tablet 9437184 2025-12-23T18:56:03.746306Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:56:03.752409Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516162404:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:03.752494Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516162404:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:03.753086Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516162404:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:56:03.754442Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516162404:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:56:03.795754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516162404:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:563:2575];trace_detailed=; 2025-12-23T18:56:03.796720Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:56:03.796903Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:56:03.797190Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:03.797307Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:03.797489Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:03.797620Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:03.797776Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:03.797943Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:563:2575] finished for tablet 9437184 2025-12-23T18:56:03.798228Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:557:2569];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":2924863,"name":"_full_task","f":2924863,"d_finished":0,"c":0,"l":2927150,"d":2287},"events":[{"name":"bootstrap","f":2925129,"d_finished":1368,"c":1,"l":2926497,"d":1368},{"a":2926627,"name":"ack","f":2926627,"d_finished":0,"c":0,"l":2927150,"d":523},{"a":2926615,"name":"processing","f":2926615,"d_finished":0,"c":0,"l":2927150,"d":535},{"name":"ProduceResults","f":2926231,"d_finished":552,"c":2,"l":2926968,"d":552},{"a":2926972,"name":"Finish","f":2926972,"d_finished":0,"c":0,"l":2927150,"d":178}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:03.798301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:557:2569];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:03.798562Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:557:2569];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":2924863,"name":"_full_task","f":2924863,"d_finished":0,"c":0,"l":2927500,"d":2637},"events":[{"name":"bootstrap","f":2925129,"d_finished":1368,"c":1,"l":2926497,"d":1368},{"a":2926627,"name":"ack","f":2926627,"d_finished":0,"c":0,"l":2927500,"d":873},{"a":2926615,"name":"processing","f":2926615,"d_finished":0,"c":0,"l":2927500,"d":885},{"name":"ProduceResults","f":2926231,"d_finished":552,"c":2,"l":2926968,"d":552},{"a":2926972,"name":"Finish","f":2926972,"d_finished":0,"c":0,"l":2927500,"d":528}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:03.798626Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:03.754402Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:56:03.798657Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:03.798796Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> THiveTest::TestLockTabletExecutionLocalGone >> TColumnShardTestSchema::RebootForgetAfterFail >> test_canonical_records.py::test_kill_tablet_using_developer_ui [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable+Reboots-GenerateInternalPathId [GOOD] Test command err: 2025-12-23T18:56:02.684149Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:02.705322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:02.705528Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 
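
The scan records above end with a stats={...} JSON blob listing per-stage counters (bootstrap, ack, processing, ProduceResults, Finish) with f/l/d fields; the units are not documented in this log, so the numbers are best read relative to each other. A small sketch for extracting those blobs from the raw text, assuming the stats=...;iterator= framing seen above; the helper name is made up for illustration.

    import json
    import re

    # The scan actor prints its timing as `stats={...};iterator=...` in the
    # scan_finish / scan_finished records above. Field meaning is inferred from
    # the text only: "events" entries carry f (first), l (last), d (duration).
    STATS_RE = re.compile(r"stats=(\{.*?\});iterator=", re.DOTALL)

    def scan_stage_durations(log_text: str):
        """Yield (stats_id, stage_name, d) for every stats blob found in the text."""
        for raw in STATS_RE.findall(log_text):
            blob = json.loads(raw)
            for ev in blob.get("events", []):
                yield blob.get("id", "?"), ev["name"], ev.get("d")

    if __name__ == "__main__":
        import sys
        for stats_id, name, d in scan_stage_durations(sys.stdin.read()):
            print(f"{stats_id:<14} {name:<15} d={d}")
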
2025-12-23T18:56:02.711279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:02.711485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:02.711665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:02.711751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:02.711826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:02.711889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:02.711966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:02.712067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:02.712141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:02.712216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:02.712317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:02.712394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:02.712512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:02.733090Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:02.733326Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:02.733372Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:02.733484Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:02.733611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:02.733668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:02.733705Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:02.733810Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:02.733858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:02.733885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:02.733906Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:02.734025Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:02.734068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:02.734095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:02.734113Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:02.734189Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:02.734232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:02.734275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-12-23T18:56:02.734297Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:02.734338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:02.734364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:02.734388Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:02.734429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:02.734466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:02.734510Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:02.734640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:02.734677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:02.734698Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:02.734813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:02.734846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:02.734865Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:02.734910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:02.734936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:02.734954Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2025-12-23T18:56:02.734982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:02.735009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:02.735043Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:02.735167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:02.735226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... :StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=1766516163772;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2025-12-23T18:56:04.096772Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1766516163772;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136728182342176;op_tx=119:TX_KIND_SCHEMA;min=1766516163772;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1766516163772;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136934343894464;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-12-23T18:56:04.096857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1766516163772;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136728182342176;op_tx=119:TX_KIND_SCHEMA;min=1766516163772;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1766516163772;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136934343894464;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:56:04.096897Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1766516163772;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136728182342176;op_tx=119:TX_KIND_SCHEMA;min=1766516163772;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1766516163772;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136934343894464;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2025-12-23T18:56:04.097189Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:56:04.097323Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516163772 at tablet 9437184, mediator 0 2025-12-23T18:56:04.097362Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] execute at tablet 9437184 2025-12-23T18:56:04.097624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 
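
The propose/plan records in this dump (tx 104 earlier, tx 119 below) interleave with scan and normalizer output, which makes a single schema transaction hard to follow. One way to read such logs is to regroup records by their tx_id field; a rough sketch under the assumption that every record of interest carries a tx_id=N pair (helper names are illustrative):

    import re
    from collections import defaultdict

    TS_RE = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z")
    TX_RE = re.compile(r"\btx_id=(\d+)\b")
    EVENT_RE = re.compile(r"event=([A-Za-z_]+)")

    def tx_timeline(log_text: str):
        """Group records by tx_id: {tx_id: [(timestamp, event_or_None), ...]}."""
        timeline = defaultdict(list)
        # Split the flat text into individual records at each timestamp.
        stamps = TS_RE.findall(log_text)
        bodies = TS_RE.split(log_text)[1:]
        for ts, body in zip(stamps, bodies):
            tx = TX_RE.search(body)
            if tx:
                ev = EVENT_RE.search(body)
                timeline[int(tx.group(1))].append((ts, ev.group(1) if ev else None))
        return timeline

    if __name__ == "__main__":
        import sys
        for tx_id, records in sorted(tx_timeline(sys.stdin.read()).items()):
            print(tx_id, "->", [ev or "?" for _ts, ev in records])
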
2025-12-23T18:56:04.097686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2025-12-23T18:56:04.097772Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 20, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2025-12-23T18:56:04.097859Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=20; 2025-12-23T18:56:04.097923Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:145;event=RegisterTable;path_id=20; 2025-12-23T18:56:04.098121Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2025-12-23T18:56:04.098635Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:532;event=OnTieringModified;path_id=20; 2025-12-23T18:56:04.111178Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-12-23T18:56:04.112941Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=136728182345088;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1766516163775;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-12-23T18:56:04.125472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1766516163775;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136728182345088;op_tx=120:TX_KIND_SCHEMA;min=1766516163775;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:56:04.125591Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1766516163775;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136728182345088;op_tx=120:TX_KIND_SCHEMA;min=1766516163775;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-12-23T18:56:04.127267Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=136728182346880;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1766516163776;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-12-23T18:56:04.140036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1766516163776;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136728182346880;op_tx=121:TX_KIND_SCHEMA;min=1766516163776;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:56:04.140120Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1766516163776;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136728182346880;op_tx=121:TX_KIND_SCHEMA;min=1766516163776;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type 
NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-12-23T18:56:04.141732Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=136728182348672;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1766516163778;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-12-23T18:56:04.154335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1766516163778;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136728182348672;op_tx=122:TX_KIND_SCHEMA;min=1766516163778;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:56:04.154431Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1766516163778;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136728182348672;op_tx=122:TX_KIND_SCHEMA;min=1766516163778;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithCommitInProgress+Reboot [GOOD] Test command err: 2025-12-23T18:56:02.782696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:02.811147Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:02.811334Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:02.816913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:02.817109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:02.817306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:02.817388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:02.817465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:02.817538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:02.817640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:02.817704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:02.817785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:02.817905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:02.818006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:02.818116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:02.818192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:02.838202Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:02.838328Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:02.838366Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:02.838517Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:02.838642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:02.838711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:02.838759Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:02.838855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:02.838929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:02.838959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:02.838979Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:02.839115Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:02.839157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:02.839192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:02.839219Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:02.839276Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:02.839313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:02.839377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:02.839399Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:02.839462Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:02.839494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:02.839517Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:02.839545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:02.839594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:02.839631Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:02.839778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:02.839813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:02.839831Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:02.839910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:02.839946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:02.839964Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:02.839995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:02.840019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:02.840036Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:02.840060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:02.840084Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:02.840101Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:02.840180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:02.840202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=100;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-12-23T18:56:04.543922Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:530:2495];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-12-23T18:56:04.544195Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:04.544358Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:04.544560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:04.544794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:04.545017Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:04.545215Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:04.545496Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:537:2501] finished for tablet 9437184 2025-12-23T18:56:04.546045Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:530:2495];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":2209312,"name":"_full_task","f":2209312,"d_finished":0,"c":0,"l":2223013,"d":13701},"events":[{"name":"bootstrap","f":2209791,"d_finished":2195,"c":1,"l":2211986,"d":2195},{"a":2222220,"name":"ack","f":2220491,"d_finished":1566,"c":1,"l":2222057,"d":2359},{"a":2222207,"name":"processing","f":2212170,"d_finished":4567,"c":3,"l":2222060,"d":5373},{"name":"ProduceResults","f":2211478,"d_finished":2673,"c":6,"l":2222694,"d":2673},{"a":2222700,"name":"Finish","f":2222700,"d_finished":0,"c":0,"l":2223013,"d":313},{"name":"task_result","f":2212208,"d_finished":2928,"c":2,"l":2220389,"d":2928}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:04.546183Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:530:2495];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:04.546719Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:530:2495];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":2209312,"name":"_full_task","f":2209312,"d_finished":0,"c":0,"l":2223695,"d":14383},"events":[{"name":"bootstrap","f":2209791,"d_finished":2195,"c":1,"l":2211986,"d":2195},{"a":2222220,"name":"ack","f":2220491,"d_finished":1566,"c":1,"l":2222057,"d":3041},{"a":2222207,"name":"processing","f":2212170,"d_finished":4567,"c":3,"l":2222060,"d":6055},{"name":"ProduceResults","f":2211478,"d_finished":2673,"c":6,"l":2222694,"d":2673},{"a":2222700,"name":"Finish","f":2222700,"d_finished":0,"c":0,"l":2223695,"d":995},{"name":"task_result","f":2212208,"d_finished":2928,"c":2,"l":2220389,"d":2928}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:04.546843Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:04.487149Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-12-23T18:56:04.546906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:04.547123Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:537:2501];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2025-12-23T18:56:04.548019Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2025-12-23T18:56:04.548448Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {18446744073709551615:12} readable: {18446744073709551615:max} at tablet 9437184 2025-12-23T18:56:04.548603Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: 
TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2025-12-23T18:56:04.548663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:485:2458];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:56:04.548781Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:485:2458];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:21.301885Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:21.383950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:21.389003Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:21.389341Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:21.389450Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:21.757625Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:21.851210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:21.851356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:21.909558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:22.040761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00494a/r3tmp/tmpDAoMPj/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64918, node 1 2025-12-23T18:55:22.680663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:22.681743Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:22.681806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:22.681837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:22.682084Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:22.747691Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:16443 2025-12-23T18:55:23.302775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:25.730454Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:25.736048Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:25.738816Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:25.762694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:25.762807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.800109Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:25.801814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:26.057609Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.058387Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.059298Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.059804Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.060005Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.060083Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.060207Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.060298Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.060425Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.172115Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:26.183563Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:26.183688Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:26.196678Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:26.374005Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:26.418142Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:26.418243Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:26.439802Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:26.440958Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:26.441144Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:26.441204Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:26.441245Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:26.441285Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:26.441327Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:26.441365Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:26.441655Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:26.480427Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:26.480524Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:26.490442Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:26.497810Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:26.498673Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:26.502939Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:26.528649Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:26.528699Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:55:26.528761Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:26.547323Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:26.555859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:26.562060Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:26.562166Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:26.571387Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:26.680156Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:27.052590Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:27.155725Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:27.155825Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2 ... node 2 :STATISTICS DEBUG: tx_init.cpp:69: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-12-23T18:56:03.131004Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2025-12-23T18:56:03.131041Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1766516163048675 2025-12-23T18:56:03.131066Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-12-23T18:56:03.131090Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2025-12-23T18:56:03.131156Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-12-23T18:56:03.131206Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:56:03.131271Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-12-23T18:56:03.131316Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:56:03.131368Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:56:03.131417Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:56:03.131522Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:56:03.132147Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:56:03.133206Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-12-23T18:56:03.133279Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-12-23T18:56:03.133673Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5612:4786] Owner: [2:5611:4785]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:56:03.133735Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5612:4786] Owner: [2:5611:4785]. Column diff is empty, finishing 2025-12-23T18:56:03.134842Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2025-12-23T18:56:03.134912Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2025-12-23T18:56:03.136391Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-12-23T18:56:03.153554Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5621:4793] 2025-12-23T18:56:03.153788Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5621:4793], schemeshard id = 72075186224037897 2025-12-23T18:56:03.153890Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5578:4766], server id = [2:5622:4794], tablet id = 72075186224037894, status = OK 2025-12-23T18:56:03.154024Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5622:4794] 2025-12-23T18:56:03.154105Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5622:4794], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-12-23T18:56:03.214127Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:56:03.214272Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-12-23T18:56:03.215172Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5627:4799], server id = [2:5631:4803], tablet id = 72075186224037899, status = OK 2025-12-23T18:56:03.215505Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5627:4799], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:03.215723Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5628:4800], server id = [2:5632:4804], tablet id = 72075186224037900, status = OK 2025-12-23T18:56:03.215760Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5628:4800], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:03.216545Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5629:4801], server id = [2:5633:4805], tablet id = 72075186224037901, status = OK 2025-12-23T18:56:03.216589Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5629:4801], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:03.217190Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5630:4802], server id = [2:5634:4806], tablet id = 72075186224037902, status = OK 2025-12-23T18:56:03.217232Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5630:4802], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:03.221984Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-12-23T18:56:03.222319Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5627:4799], server id = [2:5631:4803], tablet id = 72075186224037899 2025-12-23T18:56:03.222353Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:03.222956Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:56:03.223679Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5628:4800], server id = [2:5632:4804], tablet id = 72075186224037900 
2025-12-23T18:56:03.223708Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:03.224321Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-12-23T18:56:03.224492Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5629:4801], server id = [2:5633:4805], tablet id = 72075186224037901 2025-12-23T18:56:03.224511Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:03.224941Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-12-23T18:56:03.224989Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:56:03.225155Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-12-23T18:56:03.225264Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:56:03.225473Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5647:4815], ActorId: [2:5648:4816], Starting query actor #1 [2:5649:4817] 2025-12-23T18:56:03.225542Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5648:4816], ActorId: [2:5649:4817], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:56:03.228248Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5630:4802], server id = [2:5634:4806], tablet id = 72075186224037902 2025-12-23T18:56:03.228276Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:03.228551Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5648:4816], ActorId: [2:5649:4817], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MThmZjg0YS0xM2QxNGU2Zi00NWFiODNlOC0yMjVjNzViMA==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:56:03.342698Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5658:4826]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:03.343189Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:03.343255Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5658:4826], StatRequests.size() = 1 2025-12-23T18:56:03.459564Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5648:4816], ActorId: [2:5649:4817], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MThmZjg0YS0xM2QxNGU2Zi00NWFiODNlOC0yMjVjNzViMA==, TxId: 2025-12-23T18:56:03.459647Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5648:4816], ActorId: [2:5649:4817], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MThmZjg0YS0xM2QxNGU2Zi00NWFiODNlOC0yMjVjNzViMA==, TxId: 
2025-12-23T18:56:03.459973Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5647:4815], ActorId: [2:5648:4816], Got response [2:5649:4817] SUCCESS 2025-12-23T18:56:03.460252Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:56:03.496764Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:56:03.496833Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-12-23T18:56:03.552500Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5675:4834]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:03.552946Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:56:03.553018Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:56:03.553350Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:56:03.553433Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:56:03.553498Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:56:03.557312Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> THiveTest::TestLockTabletExecutionLocalGone [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] |98.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] >> TColumnShardTestSchema::RebootHotTiersAfterTtl >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:55:25.055874Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:25.136767Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:25.140671Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:25.140961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:25.141030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:25.480274Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.574265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:25.574396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.632563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.729312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004933/r3tmp/tmpcqyZ41/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28657, node 1 2025-12-23T18:55:26.379980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:26.380819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:26.380856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:26.380887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:26.381124Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:26.445420Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:61938 2025-12-23T18:55:26.954177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:29.364183Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:29.369466Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:29.372240Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:29.403930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:29.404034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:29.443526Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:29.445406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:29.685045Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.685593Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.686358Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.686921Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.687145Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.687247Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.687369Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.687462Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.687568Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.798606Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:29.810251Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:29.810382Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:29.823854Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:29.962164Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:30.002041Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:30.002153Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:30.026534Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:30.027743Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:30.027973Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:30.028032Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:30.028071Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:30.028128Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:30.028175Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:30.028220Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:30.028815Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:30.068707Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:30.068805Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:30.079823Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:30.087738Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:30.088767Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:30.093240Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:30.122051Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:30.122105Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:55:30.122183Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:30.141928Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:30.150565Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:30.157138Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:30.157252Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:30.167804Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:30.273491Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:30.593243Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:30.687333Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:30.687413Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2 ... ed schemeshards count = 1 2025-12-23T18:56:03.112217Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:56:03.112393Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:56:03.112486Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5494:4724], StatRequests.size() = 1 2025-12-23T18:56:03.112568Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:56:03.276474Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5484:4714], ActorId: [2:5485:4715], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjI0OTY3YTMtNjAzMDdhZjktODNmOTc3ZDQtNWZmNTE5MWE=, TxId: 2025-12-23T18:56:03.276578Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5484:4714], ActorId: [2:5485:4715], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjI0OTY3YTMtNjAzMDdhZjktODNmOTc3ZDQtNWZmNTE5MWE=, TxId: 2025-12-23T18:56:03.277123Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5483:4713], ActorId: [2:5484:4714], Got response [2:5485:4715] SUCCESS 2025-12-23T18:56:03.277603Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:56:03.293150Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2025-12-23T18:56:03.293227Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-12-23T18:56:03.348779Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:56:03.348885Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:56:03.392823Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5496:4726], schemeshard count = 1 2025-12-23T18:56:04.461619Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2025-12-23T18:56:04.461741Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 
2025-12-23T18:56:04.461804Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:56:04.466686Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-12-23T18:56:04.499442Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-12-23T18:56:04.500371Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2025-12-23T18:56:04.500480Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2025-12-23T18:56:04.502071Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 2 2025-12-23T18:56:04.502139Z node 2 :STATISTICS WARN: tx_response_tablet_distribution.cpp:63: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2025-12-23T18:56:04.502205Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:56:05.520821Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-12-23T18:56:05.534585Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:56:05.534848Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-12-23T18:56:05.535484Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5611:4781], server id = [2:5615:4785], tablet id = 72075186224037899, status = OK 2025-12-23T18:56:05.535812Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5611:4781], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:05.537227Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5612:4782], server id = [2:5616:4786], tablet id = 72075186224037900, status = OK 2025-12-23T18:56:05.537322Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5612:4782], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:05.537724Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5613:4783], server id = [2:5617:4787], tablet id = 72075186224037901, status = OK 2025-12-23T18:56:05.537764Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5613:4783], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:05.538530Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5614:4784], server id = [2:5618:4788], tablet id = 72075186224037902, status = OK 2025-12-23T18:56:05.538575Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5614:4784], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:05.542655Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-12-23T18:56:05.543405Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5611:4781], server id = [2:5615:4785], tablet id = 72075186224037899 2025-12-23T18:56:05.543447Z node 2 
:STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:05.544115Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:56:05.544289Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5612:4782], server id = [2:5616:4786], tablet id = 72075186224037900 2025-12-23T18:56:05.544309Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:05.545313Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-12-23T18:56:05.545668Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5613:4783], server id = [2:5617:4787], tablet id = 72075186224037901 2025-12-23T18:56:05.545690Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:05.545844Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-12-23T18:56:05.545895Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:56:05.546027Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-12-23T18:56:05.546131Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:56:05.546391Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5631:4797], ActorId: [2:5632:4798], Starting query actor #1 [2:5633:4799] 2025-12-23T18:56:05.546440Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5632:4798], ActorId: [2:5633:4799], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:56:05.548733Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5614:4784], server id = [2:5618:4788], tablet id = 72075186224037902 2025-12-23T18:56:05.548762Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:05.549373Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5632:4798], ActorId: [2:5633:4799], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NzYwYzc4ZDctMmE5OWNkZmMtYTlkYmJlYWUtYmVkN2Q0Yw==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:56:05.634789Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5642:4808]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:05.635145Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:05.635193Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5642:4808], StatRequests.size() = 1 2025-12-23T18:56:05.762077Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5632:4798], ActorId: [2:5633:4799], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzYwYzc4ZDctMmE5OWNkZmMtYTlkYmJlYWUtYmVkN2Q0Yw==, TxId: 2025-12-23T18:56:05.762145Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5632:4798], ActorId: [2:5633:4799], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzYwYzc4ZDctMmE5OWNkZmMtYTlkYmJlYWUtYmVkN2Q0Yw==, TxId: 2025-12-23T18:56:05.762473Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5631:4797], ActorId: [2:5632:4798], Got response [2:5633:4799] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-12-23T18:56:05.762845Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5656:4814]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:05.763117Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:56:05.763547Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:56:05.763586Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:56:05.764149Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:56:05.764258Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:56:05.764302Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:56:05.768619Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:55:37.989082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:37.989162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:37.989221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:37.989259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:37.989290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:37.989308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:37.989357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:37.989412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:55:37.990024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:37.990203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:38.046628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:55:38.046672Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:38.050685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:38.050842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:38.050957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:38.052936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:38.053072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:38.053539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:38.053705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:38.055020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:38.055151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:38.056053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:38.056104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:38.056200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:38.056241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:38.056282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:38.056388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:38.147170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:55:38.149850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.149942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.149980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.150056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.150116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.150153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.150203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.150247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.150280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.150336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.150373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.150441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.150490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.150535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
7.157161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2025-12-23T18:56:07.157237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2025-12-23T18:56:07.157356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:56:07.164910Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:791: [Export] [s3] Bootstrap: self# [1:3867:5828], attempt# 0 2025-12-23T18:56:07.181369Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:445: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3867:5828], sender# [1:3866:5827] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:10115 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 46276D8C-E6DF-4C93-9D28-9004D0FE52A7 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2025-12-23T18:56:07.190279Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:405: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3867:5828], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2025-12-23T18:56:07.193870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:56:07.193922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:56:07.194159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:56:07.194198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 38 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:10115 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7775B151-F775-4E64-8659-9355E8907F51 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD 2025-12-23T18:56:07.194912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-12-23T18:56:07.194969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-12-23T18:56:07.195894Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:310: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3867:5828], 
result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-12-23T18:56:07.196203Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3866:5827] 2025-12-23T18:56:07.196611Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:463: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3867:5828], sender# [1:3866:5827], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-12-23T18:56:07.196889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:56:07.196966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:56:07.196998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:56:07.197030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2025-12-23T18:56:07.197059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:56:07.197129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:10115 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 04F0BE4E-658B-4C86-833F-DCAE140F4839 amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2025-12-23T18:56:07.200649Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:505: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3867:5828], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2025-12-23T18:56:07.200699Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:708: [Export] [s3] Finish: self# [1:3867:5828], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-12-23T18:56:07.201090Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3866:5827], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-12-23T18:56:07.211867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:56:07.238353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 717 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 
RowsProcessed: 1000 } 2025-12-23T18:56:07.238427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:56:07.238635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 717 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-12-23T18:56:07.238721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 717 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-12-23T18:56:07.238795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:56:07.238824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:56:07.238858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:56:07.238909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:56:07.239082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:56:07.243328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:56:07.244026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:56:07.244089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:56:07.244201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:56:07.244238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:56:07.244279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:56:07.244317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:56:07.244355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:56:07.244430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:745:2723] message: TxId: 102 2025-12-23T18:56:07.244478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:56:07.244516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:56:07.244550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:56:07.244673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:56:07.249515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:56:07.249587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3852:5814] TestWaitNotification: OK eventTxId 102 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2025-12-23T18:54:15.345167Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:15.371450Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:15.371688Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:15.372229Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:15.372497Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-12-23T18:54:15.373262Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-12-23T18:54:15.373302Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:15.373965Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2025-12-23T18:54:15.374009Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:15.374090Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:15.374181Z node 1 :BS_NODE DEBUG: 
{NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:15.383724Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:15.383762Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:15.385274Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.385418Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.385495Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.385581Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.385677Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.385758Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.385867Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.385883Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:15.385943Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2025-12-23T18:54:15.385966Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2025-12-23T18:54:15.385996Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:15.386035Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:15.386512Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:15.386813Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:15.394415Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-12-23T18:54:15.394465Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.395765Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:15.395931Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.395960Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-12-23T18:54:15.396111Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 
2025-12-23T18:54:15.396450Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:54:15.396508Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:54:15.396534Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:54:15.396578Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:15.396877Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-12-23T18:54:15.396960Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2025-12-23T18:54:15.396983Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2025-12-23T18:54:15.397102Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2025-12-23T18:54:15.397118Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:52:2092] 2025-12-23T18:54:15.397138Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:15.397236Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:52:2092] 2025-12-23T18:54:15.397269Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:15.397405Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:15.397472Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-12-23T18:54:15.397495Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2025-12-23T18:54:15.397534Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2025-12-23T18:54:15.401396Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2025-12-23T18:54:15.402193Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:15.402413Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-12-23T18:54:15.402458Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-12-23T18:54:15.402532Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: 
ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:15.402589Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2025-12-23T18:54:15.402707Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2025-12-23T18:54:15.402762Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2025-12-23T18:54:15.406119Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-12-23T18:54:15.406187Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-12-23T18:54:15.406291Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-12-23T18:54:15.406587Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:15.406774Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:15.406905Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-12-23T18:54:15.413157Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-12-23T18:54:15.413200Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-12-23T18:54:15.413234Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-12-23T18:54:15.413276Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:15.413404Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-12-23T18:54:15.41 ... 
72057594046678944 leader: [44:334:2203] followers: 0 2025-12-23T18:56:07.514615Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 45 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594046678944 followers: 0 countLeader 1 allowFollowers 0 winner: [44:334:2203] 2025-12-23T18:56:07.514764Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72057594046678944] forward result remote node 44 [45:556:2161] 2025-12-23T18:56:07.514996Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72057594046678944] remote node connected [45:556:2161] 2025-12-23T18:56:07.515122Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594046678944]::SendEvent [45:556:2161] 2025-12-23T18:56:07.515506Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594046678944] Accept Connect Originator# [45:556:2161] 2025-12-23T18:56:07.515977Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594046678944] connected with status OK role: Leader [45:556:2161] 2025-12-23T18:56:07.516055Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594046678944] send queued [45:556:2161] 2025-12-23T18:56:07.516187Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046678944] send [45:556:2161] 2025-12-23T18:56:07.516224Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046678944] push event to server [45:556:2161] 2025-12-23T18:56:07.516299Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594046678944]::SendEvent [45:556:2161] 2025-12-23T18:56:07.516442Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594046678944] Push Sender# [45:555:2161] EventType# 271122945 2025-12-23T18:56:07.516609Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2025-12-23T18:56:07.516702Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:56:07.517025Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:56:07.517139Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:56:07.518911Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [45:562:2162] 2025-12-23T18:56:07.518969Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [45:562:2162] 2025-12-23T18:56:07.519028Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [45:563:2163] 2025-12-23T18:56:07.519071Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [45:563:2163] 2025-12-23T18:56:07.519305Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [44:333:2202] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:56:07.519385Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 45 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 
0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [44:333:2202] 2025-12-23T18:56:07.519499Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [45:562:2162] 2025-12-23T18:56:07.519574Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72075186224037888] queue send [45:563:2163] 2025-12-23T18:56:07.519892Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:56:07.520183Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72057594037927937] forward result remote node 44 [45:562:2162] 2025-12-23T18:56:07.520372Z node 45 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:56:07.520494Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72057594037927937] remote node connected [45:562:2162] 2025-12-23T18:56:07.520562Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [45:562:2162] 2025-12-23T18:56:07.521431Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-12-23T18:56:07.521570Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-12-23T18:56:07.521619Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-12-23T18:56:07.521746Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [45:562:2162] 2025-12-23T18:56:07.522030Z node 45 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [44:474:2305] CurrentLeaderTablet: [44:490:2316] CurrentGeneration: 1 CurrentStep: 0} 2025-12-23T18:56:07.522114Z node 45 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [44:474:2305] CurrentLeaderTablet: [44:490:2316] CurrentGeneration: 1 CurrentStep: 0} 2025-12-23T18:56:07.522195Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [44:474:2305] followers: 0 2025-12-23T18:56:07.522288Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 45 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [44:474:2305] 2025-12-23T18:56:07.522362Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037888] forward result remote node 44 [45:563:2163] 2025-12-23T18:56:07.522857Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037888] remote node connected [45:563:2163] 2025-12-23T18:56:07.522920Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037888]::SendEvent [45:563:2163] 2025-12-23T18:56:07.523274Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with 
status OK role: Leader [45:562:2162] 2025-12-23T18:56:07.523316Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [45:562:2162] 2025-12-23T18:56:07.523407Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [45:562:2162] 2025-12-23T18:56:07.523532Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [45:562:2162] 2025-12-23T18:56:07.523846Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037888] Accept Connect Originator# [45:563:2163] 2025-12-23T18:56:07.524422Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [45:559:2162] EventType# 268959744 2025-12-23T18:56:07.524582Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037888] connected with status OK role: Leader [45:563:2163] 2025-12-23T18:56:07.524645Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037888] send queued [45:563:2163] 2025-12-23T18:56:07.524682Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [45:563:2163] 2025-12-23T18:56:07.524744Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037888]::SendEvent [45:563:2163] 2025-12-23T18:56:07.524918Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-12-23T18:56:07.525051Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:56:07.525336Z node 44 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(45, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:07.525484Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 204b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-12-23T18:56:07.525608Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:56:07.525938Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72075186224037888] Push Sender# [45:560:2163] EventType# 268959744 2025-12-23T18:56:07.526183Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-12-23T18:56:07.526314Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:56:07.526468Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:56:07.526568Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:56:07.526801Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-12-23T18:56:07.526900Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:56:07.527090Z node 44 :HIVE WARN: node_info.cpp:25: 
HIVE#72075186224037888 Node(45, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:56:07.527231Z node 44 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(45, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:07.527307Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-12-23T18:56:07.527355Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:56:07.527656Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-12-23T18:56:07.527714Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:56:07.527820Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:56:07.527882Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} >> TColumnShardTestSchema::ColdTiers |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn >> MoveTable::WithUncomittedData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:55:36.675807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:36.675866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:36.675924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:36.675959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:36.676011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:36.676031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:36.676077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:36.676139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:55:36.676703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:36.676899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:36.733236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:55:36.733325Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:36.737811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:36.737928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:36.738027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:36.739929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:36.740054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:36.740471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:36.740627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:36.741837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:36.741975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:36.742683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:36.742721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:36.742836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:36.742890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:36.742926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:36.743028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:36.834529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:55:36.835196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:36.835895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:64951 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C4004DD6-64ED-44AE-968F-A333379D46C3 amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2025-12-23T18:56:08.170449Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:596: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3867:5828], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2025-12-23T18:56:08.170625Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3866:5827] 2025-12-23T18:56:08.170700Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:463: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3867:5828], sender# [1:3866:5827], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:64951 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 43DE3409-C23E-4726-AF98-FD2A94F495AA amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2025-12-23T18:56:08.173375Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:596: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3867:5828], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-12-23T18:56:08.173432Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:708: [Export] [s3] Finish: self# [1:3867:5828], success# 1, error# , multipart# 1, uploadId# 1 2025-12-23T18:56:08.178716Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:530: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3867:5828], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:64951 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
DF0F714B-07CE-48F8-A39A-ABC274A8C8F7 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2025-12-23T18:56:08.191543Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:627: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3867:5828], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2025-12-23T18:56:08.191846Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3866:5827], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-12-23T18:56:08.213639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 717 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-12-23T18:56:08.213709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:56:08.213871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 717 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-12-23T18:56:08.213980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 717 RawX2: 4294969999 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-12-23T18:56:08.214043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:56:08.214078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:56:08.214113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:56:08.214153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:56:08.214291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:56:08.217345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:56:08.217837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:56:08.217885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:56:08.217992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:56:08.218022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:56:08.218057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:56:08.218085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:56:08.218115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:56:08.218189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:745:2723] message: TxId: 102 2025-12-23T18:56:08.218235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:56:08.218268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:56:08.218297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:56:08.218445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:56:08.221655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:56:08.221704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3852:5814] TestWaitNotification: OK eventTxId 102 >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> TColumnShardTestSchema::Drop+Reboots-GenerateInternalPathId >> MoveTable::WithUncomittedData [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithUncomittedData [GOOD] Test command err: 2025-12-23T18:56:09.618904Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:09.639754Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:09.639959Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:09.645223Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:09.645398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:09.645583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:09.645667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:09.645743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:09.645804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:09.645920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:09.645988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:09.646076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:09.646154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:09.646215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:09.646307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:09.646389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:09.666780Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:09.666939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 
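[Editor's note] The TTxInitSchema / TTxUpdateSchema lines above record a fixed chain of schema normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, ...) being registered and then executed strictly one after another, each reporting how many chunks it had to touch. The sketch below is an editorial illustration of that "register, then run sequentially" pattern only; every name in it (INormalizer, TNormalizerChain, TNoopNormalizer) is invented for the example and is not YDB's actual code.

```cpp
// Editorial sketch of the sequential-normalizer pattern seen in the log.
// All class and function names are hypothetical, not the real YDB types.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

// One normalization step; the tablet log prints its CLASS_NAME and a sequence id.
struct INormalizer {
    virtual ~INormalizer() = default;
    virtual std::string Name() const = 0;
    // Returns how many chunks it had to fix (the log prints "N chunks found").
    virtual size_t Run() = 0;
};

// A normalizer with nothing to repair, mirroring the "0 chunks found" lines.
class TNoopNormalizer final : public INormalizer {
public:
    explicit TNoopNormalizer(std::string name) : Name_(std::move(name)) {}
    std::string Name() const override { return Name_; }
    size_t Run() override { return 0; }
private:
    std::string Name_;
};

// Registers normalizers in a fixed order and executes them one by one,
// logging start and finish for each, like the normalizer_register /
// normalizer_switched / normalizer_finished events above.
class TNormalizerChain {
public:
    void Register(std::unique_ptr<INormalizer> n) {
        std::cout << "normalizer_register: " << n->Name() << "\n";
        Normalizers_.push_back(std::move(n));
    }
    void RunAll() {
        for (const auto& n : Normalizers_) {
            const size_t chunks = n->Run();  // each step completes before the next starts
            std::cout << "normalizer_finished: " << n->Name()
                      << " (" << chunks << " chunks found)\n";
        }
    }
private:
    std::vector<std::unique_ptr<INormalizer>> Normalizers_;
};

int main() {
    TNormalizerChain chain;
    for (const char* name : {"Granules", "Chunks", "TablesCleaner", "CleanGranuleId"}) {
        chain.Register(std::make_unique<TNoopNormalizer>(name));
    }
    chain.RunAll();
    return 0;
}
```

The point of the pattern, as the log shows, is that on an empty tablet every step still runs and reports "0 chunks found" before the chain switches to the next normalizer; only after the last one finishes does TTxUpdateSchema complete. (Original log output continues below.)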
2025-12-23T18:56:09.666980Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:09.667123Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:09.667223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:09.667280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:09.667322Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:09.667411Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:09.667455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:09.667483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:09.667509Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:09.667646Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:09.667691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:09.667729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:09.667752Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:09.667812Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:09.667854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:09.667891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:09.667918Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:09.667955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:09.667978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:09.667999Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:09.668024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:09.668075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:09.668104Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:09.668292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:09.668370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:09.668394Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:09.668504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:09.668535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:09.668553Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:09.668583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:09.668606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:09.668624Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:09.668656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:09.668686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:09.668707Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:09.668784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:09.668808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ,"id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"1,2,3,4,5,6,7,8,9,10","t":"FetchOriginalData"},"w":20,"id":21},"8":{"p":{"i":"4","p":{"address":{"name":"uid","id":4}},"o":"4","t":"AssembleOriginalData"},"w":25,"id":8},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":25,"id":2},"18":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":25,"id":18},"0":{"p":{"i":"1,2,3,4,5,6,7,8,9,10","t":"Projection"},"w":250,"id":0},"4":{"p":{"i":"2","p":{"address":{"name":"resource_type","id":2}},"o":"2","t":"AssembleOriginalData"},"w":25,"id":4},"20":{"p":{"i":"10","p":{"address":{"name":"request_id","id":10}},"o":"10","t":"AssembleOriginalData"},"w":25,"id":20},"16":{"p":{"i":"8","p":{"address":{"name":"ingested_at","id":8}},"o":"8","t":"AssembleOriginalData"},"w":25,"id":16},"14":{"p":{"i":"7","p":{"address":{"name":"json_payload","id":7}},"o":"7","t":"AssembleOriginalData"},"w":25,"id":14},"10":{"p":{"i":"5","p":{"address":{"name":"level","id":5}},"o":"5","t":"AssembleOriginalData"},"w":25,"id":10},"6":{"p":{"i":"3","p":{"address":{"name":"resource_id","id":3}},"o":"3","t":"AssembleOriginalData"},"w":25,"id":6},"22":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"resource_type","id":2},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"0","t":"ReserveMemory"},"w":0,"id":22},"12":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":25,"id":12}}}; 2025-12-23T18:56:10.358082Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=11;scan_id=0;gen=0;table=;snapshot={1766516170596:11};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:56:10.399415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=11;scan_id=0;gen=0;table=;snapshot={1766516170596:11};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:330:2342];trace_detailed=; 2025-12-23T18:56:10.400367Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
fline=context.cpp:83;ff_first=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);; 2025-12-23T18:56:10.400560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2025-12-23T18:56:10.400899Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:10.401029Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:10.401226Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:10.401382Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:10.401518Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:10.401679Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:330:2342] finished for tablet 9437184 2025-12-23T18:56:10.402052Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:324:2336];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1172486,"name":"_full_task","f":1172486,"d_finished":0,"c":0,"l":1174870,"d":2384},"events":[{"name":"bootstrap","f":1172710,"d_finished":1496,"c":1,"l":1174206,"d":1496},{"a":1174329,"name":"ack","f":1174329,"d_finished":0,"c":0,"l":1174870,"d":541},{"a":1174317,"name":"processing","f":1174317,"d_finished":0,"c":0,"l":1174870,"d":553},{"name":"ProduceResults","f":1173887,"d_finished":597,"c":2,"l":1174663,"d":597},{"a":1174666,"name":"Finish","f":1174666,"d_finished":0,"c":0,"l":1174870,"d":204}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:10.402111Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:324:2336];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:10.402408Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:324:2336];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1172486,"name":"_full_task","f":1172486,"d_finished":0,"c":0,"l":1175264,"d":2778},"events":[{"name":"bootstrap","f":1172710,"d_finished":1496,"c":1,"l":1174206,"d":1496},{"a":1174329,"name":"ack","f":1174329,"d_finished":0,"c":0,"l":1175264,"d":935},{"a":1174317,"name":"processing","f":1174317,"d_finished":0,"c":0,"l":1175264,"d":947},{"name":"ProduceResults","f":1173887,"d_finished":597,"c":2,"l":1174663,"d":597},{"a":1174666,"name":"Finish","f":1174666,"d_finished":0,"c":0,"l":1175264,"d":598}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:10.402474Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:10.358052Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:56:10.402503Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:10.402642Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:330:2342];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:55:38.007326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:55:38.007410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:38.007472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:55:38.007513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:55:38.007564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:55:38.007584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:55:38.007622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:55:38.007684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:55:38.008310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:55:38.008561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:55:38.063896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:55:38.063964Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:38.068162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:55:38.068285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:55:38.068385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:55:38.070935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:55:38.071094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:55:38.071568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:55:38.071755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:55:38.073058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:38.073206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:55:38.074033Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:55:38.074074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:55:38.074174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:55:38.074212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:55:38.074248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:55:38.074354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:55:38.171236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.171891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.171959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.172011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.172093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.172141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.172174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.172206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.172244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.172290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.172354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.172395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.172447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.172505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:55:38.172546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 3859:5820], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:16176 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 64AA5AD6-6757-4069-820C-B65E83409089 amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2025-12-23T18:56:09.477757Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:596: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3860:5821], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2025-12-23T18:56:09.477898Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3859:5820] 2025-12-23T18:56:09.477961Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:463: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3860:5821], sender# [1:3859:5820], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:16176 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C63FC40D-22CD-45FC-901D-87B4A90C3F93 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2025-12-23T18:56:09.481155Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:596: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3860:5821], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-12-23T18:56:09.481209Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:708: [Export] [s3] Finish: self# [1:3860:5821], success# 1, error# , multipart# 1, uploadId# 1 2025-12-23T18:56:09.486500Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:530: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3860:5821], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:16176 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
B006984D-5404-403A-8E84-5E28C7967532 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-12-23T18:56:09.494687Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:627: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3860:5821], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2025-12-23T18:56:09.495154Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3859:5820], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-12-23T18:56:09.507270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5925: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 711 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-12-23T18:56:09.507340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:56:09.507507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 711 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-12-23T18:56:09.507639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 711 RawX2: 4294969993 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-12-23T18:56:09.507735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-12-23T18:56:09.507780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:56:09.507839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-12-23T18:56:09.507893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2025-12-23T18:56:09.508065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:56:09.511689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:56:09.512085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:56:09.512145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:56:09.512245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:56:09.512283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:56:09.512322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:56:09.512375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:56:09.512409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:56:09.512478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:739:2717] message: TxId: 102 2025-12-23T18:56:09.512527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:56:09.512562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:56:09.512587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:56:09.512702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:56:09.516800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:56:09.516859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3843:5805] TestWaitNotification: OK eventTxId 102 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId |98.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |98.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |98.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD] >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl >> TColumnShardTestSchema::RebootHotTiersTtl >> MoveTable::RenameAbsentTable_Negative >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2025-12-23T18:55:43.785450Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:55:43.788569Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:55:43.788860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:43.809169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:55:43.809346Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:43.814660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:43.814863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:43.815020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:43.815085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:43.815155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:43.815268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:43.815346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:43.815430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:43.815513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:43.815582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:43.815653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:43.815735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:43.815848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:43.817695Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:55:43.837294Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:43.837462Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:43.837510Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:43.837689Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:43.837858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:43.837934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:43.837987Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:43.838081Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:43.838135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:43.838177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:43.838203Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:43.838394Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:43.838457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:43.838493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:55:43.838531Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:43.838627Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:43.838672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:43.838708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:43.838733Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:43.838818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:43.838866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:43.838921Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:43.838976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:43.839013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:43.839041Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:43.839231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:43.839281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:43.839310Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:43.839413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:43.839447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:43.839479Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:55:43.839536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:55:43.839576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:55:43.839606Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:55:43.839643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:55:43.839676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
2.157317Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:56:12.157362Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:56:12.157517Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:12.157682Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.157725Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:12.157883Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2025-12-23T18:56:12.157936Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2025-12-23T18:56:12.158170Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=16000;rows=2000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2025-12-23T18:56:12.158333Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.158448Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.158598Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.158729Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:12.158855Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.158951Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.159207Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:618:2623] finished for tablet 9437184 2025-12-23T18:56:12.159671Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:617:2622];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.007},{"events":["f_ack"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":28705341,"name":"_full_task","f":28705341,"d_finished":0,"c":0,"l":28716602,"d":11261},"events":[{"name":"bootstrap","f":28705604,"d_finished":1251,"c":1,"l":28706855,"d":1251},{"a":28716054,"name":"ack","f":28713480,"d_finished":2373,"c":2,"l":28715963,"d":2921},{"a":28716046,"name":"processing","f":28706987,"d_finished":5262,"c":5,"l":28715967,"d":5818},{"name":"ProduceResults","f":28706393,"d_finished":3325,"c":9,"l":28716304,"d":3325},{"a":28716308,"name":"Finish","f":28716308,"d_finished":0,"c":0,"l":28716602,"d":294},{"name":"task_result","f":28707009,"d_finished":2802,"c":3,"l":28713300,"d":2802}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.159756Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:12.160236Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:617:2622];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.007},{"events":["f_ack"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":28705341,"name":"_full_task","f":28705341,"d_finished":0,"c":0,"l":28717136,"d":11795},"events":[{"name":"bootstrap","f":28705604,"d_finished":1251,"c":1,"l":28706855,"d":1251},{"a":28716054,"name":"ack","f":28713480,"d_finished":2373,"c":2,"l":28715963,"d":3455},{"a":28716046,"name":"processing","f":28706987,"d_finished":5262,"c":5,"l":28715967,"d":6352},{"name":"ProduceResults","f":28706393,"d_finished":3325,"c":9,"l":28716304,"d":3325},{"a":28716308,"name":"Finish","f":28716308,"d_finished":0,"c":0,"l":28717136,"d":828},{"name":"task_result","f":28707009,"d_finished":2802,"c":3,"l":28713300,"d":2802}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.160327Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:12.146038Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2025-12-23T18:56:12.160377Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:12.160533Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> TColumnShardTestSchema::Drop+Reboots-GenerateInternalPathId [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2025-12-23T18:55:46.058209Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:55:46.061506Z node 1 :TX_COLUMNSHARD TRACE: 
columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:55:46.061862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:46.082134Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:55:46.082331Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:46.087911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:46.088111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:46.088292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:46.088391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:46.088454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:46.088562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:46.088626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:46.088699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:46.088803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:46.088887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:46.088960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:46.089040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:46.089112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:46.091057Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:55:46.108900Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:46.109051Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:46.109091Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:46.109231Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:46.109343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:46.109403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:46.109446Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:46.109520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:46.109583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:46.109620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:46.109641Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:46.109777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:46.109824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:46.109861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:55:46.109885Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:46.109955Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:46.110001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:46.110031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:46.110051Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:46.110099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:46.110138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:46.110168Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:46.110206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:46.110233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:46.110253Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:46.110396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:46.110432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:46.110465Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:46.110563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:46.110595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:46.110617Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:55:46.110668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:55:46.110706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:55:46.110732Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:55:46.110781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:55:46.110805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... xtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:12.926403Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:56:12.926464Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:56:12.926678Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:12.926948Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.927002Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:12.927201Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2025-12-23T18:56:12.927277Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2025-12-23T18:56:12.927608Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=16000;rows=2000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2025-12-23T18:56:12.927815Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.927973Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.928171Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.928358Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:12.928469Z node 5 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.928569Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.928898Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:618:2623] finished for tablet 9437184 2025-12-23T18:56:12.929530Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:617:2622];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":27202455,"name":"_full_task","f":27202455,"d_finished":0,"c":0,"l":27216102,"d":13647},"events":[{"name":"bootstrap","f":27202741,"d_finished":1455,"c":1,"l":27204196,"d":1455},{"a":27215464,"name":"ack","f":27212103,"d_finished":3069,"c":2,"l":27215330,"d":3707},{"a":27215450,"name":"processing","f":27204387,"d_finished":6585,"c":5,"l":27215335,"d":7237},{"name":"ProduceResults","f":27203672,"d_finished":4188,"c":9,"l":27215715,"d":4188},{"a":27215721,"name":"Finish","f":27215721,"d_finished":0,"c":0,"l":27216102,"d":381},{"name":"task_result","f":27204409,"d_finished":3389,"c":3,"l":27211871,"d":3389}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.929622Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:12.930212Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:617:2622];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":27202455,"name":"_full_task","f":27202455,"d_finished":0,"c":0,"l":27216804,"d":14349},"events":[{"name":"bootstrap","f":27202741,"d_finished":1455,"c":1,"l":27204196,"d":1455},{"a":27215464,"name":"ack","f":27212103,"d_finished":3069,"c":2,"l":27215330,"d":4409},{"a":27215450,"name":"processing","f":27204387,"d_finished":6585,"c":5,"l":27215335,"d":7939},{"name":"ProduceResults","f":27203672,"d_finished":4188,"c":9,"l":27215715,"d":4188},{"a":27215721,"name":"Finish","f":27215721,"d_finished":0,"c":0,"l":27216804,"d":1083},{"name":"task_result","f":27204409,"d_finished":3389,"c":3,"l":27211871,"d":3389}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:12.930337Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:12.913052Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2025-12-23T18:56:12.930392Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:12.930601Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop+Reboots-GenerateInternalPathId [GOOD] Test command err: 2025-12-23T18:56:10.367029Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:10.400753Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:10.400994Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute 
at tablet 9437184 2025-12-23T18:56:10.408195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:10.408472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:10.408709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:10.408837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:10.408998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:10.409114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:10.409257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:10.409367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:10.409473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:10.409579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:10.409713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:10.409847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:10.409961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:10.440317Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:10.440524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:10.440589Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:10.440779Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:10.440938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:10.441038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:10.441092Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:10.441187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:10.441249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:10.441300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:10.441333Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:10.441543Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:10.441625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:10.441677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:10.441712Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:10.441800Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:10.441849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:10.441900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-12-23T18:56:10.441931Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:10.441989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:10.442023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:10.442075Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:10.442127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:10.442189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:10.442231Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:10.442454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:10.442507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:10.442538Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:10.442662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:10.442720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:10.442772Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:10.442826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:10.442865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:10.442911Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2025-12-23T18:56:10.442952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:10.443018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:10.443054Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:10.443199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:10.443243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... rd_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:12.984342Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2025-12-23T18:56:12.984398Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:12.985118Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.038000s; 2025-12-23T18:56:12.985169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2025-12-23T18:56:13.081903Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766516171514:max} readable: {1766516171514:max} at tablet 9437184 2025-12-23T18:56:13.082089Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:56:13.085418Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516171514:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:13.085519Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516171514:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:13.086110Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516171514:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:56:13.087709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516171514:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:56:13.144942Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516171514:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:665:2653];trace_detailed=; 2025-12-23T18:56:13.146083Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:56:13.146301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:56:13.146604Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:13.146772Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:13.147125Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:13.147305Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:13.147459Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:13.147631Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:665:2653] finished for tablet 9437184 2025-12-23T18:56:13.147995Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:658:2647];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":3225727,"name":"_full_task","f":3225727,"d_finished":0,"c":0,"l":3228542,"d":2815},"events":[{"name":"bootstrap","f":3226028,"d_finished":1639,"c":1,"l":3227667,"d":1639},{"a":3227957,"name":"ack","f":3227957,"d_finished":0,"c":0,"l":3228542,"d":585},{"a":3227937,"name":"processing","f":3227937,"d_finished":0,"c":0,"l":3228542,"d":605},{"name":"ProduceResults","f":3227336,"d_finished":654,"c":2,"l":3228360,"d":654},{"a":3228365,"name":"Finish","f":3228365,"d_finished":0,"c":0,"l":3228542,"d":177}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:13.148057Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:658:2647];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:13.148367Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:658:2647];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":3225727,"name":"_full_task","f":3225727,"d_finished":0,"c":0,"l":3228958,"d":3231},"events":[{"name":"bootstrap","f":3226028,"d_finished":1639,"c":1,"l":3227667,"d":1639},{"a":3227957,"name":"ack","f":3227957,"d_finished":0,"c":0,"l":3228958,"d":1001},{"a":3227937,"name":"processing","f":3227937,"d_finished":0,"c":0,"l":3228958,"d":1021},{"name":"ProduceResults","f":3227336,"d_finished":654,"c":2,"l":3228360,"d":654},{"a":3228365,"name":"Finish","f":3228365,"d_finished":0,"c":0,"l":3228958,"d":593}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:13.148447Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:13.087670Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:56:13.148494Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:13.148626Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> MoveTable::RenameAbsentTable_Negative [GOOD] >> THealthCheckTest::TestStateStorageYellow [GOOD] >> THealthCheckTest::TestStateStorageRed |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::RenameAbsentTable_Negative [GOOD] Test command err: 2025-12-23T18:56:13.253434Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:13.275650Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 
2025-12-23T18:56:13.275871Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:13.281937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:13.282151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:13.282319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:13.282403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:13.282482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:13.282590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:13.282784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:13.282910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:13.283005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:13.283086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:13.283158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:13.283253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:13.283327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:13.304549Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:13.304779Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:13.304823Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:13.304971Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:13.305102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:13.305182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:13.305220Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:13.305292Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:13.305365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:13.305408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:13.305431Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:13.305582Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:13.305637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:13.305669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:13.305695Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:13.305763Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:13.305802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:13.305836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-12-23T18:56:13.305869Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:13.305915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:13.305941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:13.305961Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:13.305999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:13.306045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:13.306082Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:13.306238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:13.306272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:13.306292Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:13.306382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:13.306411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:13.306431Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:13.306462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:13.306487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:13.306505Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2025-12-23T18:56:13.306534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:13.306561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:13.306582Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:13.306679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:13.306710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ata.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-12-23T18:56:13.579933Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:56:13.580097Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2025-12-23T18:56:13.580161Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-12-23T18:56:13.580273Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=71; 2025-12-23T18:56:13.580328Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-12-23T18:56:13.580400Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2025-12-23T18:56:13.580482Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=38; 2025-12-23T18:56:13.580553Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=25; 2025-12-23T18:56:13.580612Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3275; 2025-12-23T18:56:13.580785Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:56:13.580871Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:56:13.580969Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:13.581333Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:13.581429Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2025-12-23T18:56:13.581517Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.112000s; 2025-12-23T18:56:13.581812Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:56:13.581934Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:56:13.581999Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:13.582053Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2025-12-23T18:56:13.582243Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.125000s; 2025-12-23T18:56:13.582297Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2025-12-23T18:56:13.874157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136996675311488;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1766516174232;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:56:13.874263Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136996675311488;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1766516174232;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2025-12-23T18:56:13.886638Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1766516174232;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136996675311488;op_tx=10:TX_KIND_SCHEMA;min=1766516174232;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1766516174232;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137202836449664;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-12-23T18:56:13.886720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1766516174232;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136996675311488;op_tx=10:TX_KIND_SCHEMA;min=1766516174232;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1766516174232;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137202836449664;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:158:2181]; 2025-12-23T18:56:13.886778Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1766516174232;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;this=136996675311488;op_tx=10:TX_KIND_SCHEMA;min=1766516174232;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1766516174232;max=18446744073709551615;plan=0;src=[1:158:2181];cookie=00:0;;int_this=137202836449664;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2025-12-23T18:56:13.887105Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:56:13.887205Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516174232 at tablet 9437184, mediator 0 2025-12-23T18:56:13.887244Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2025-12-23T18:56:13.887543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:56:13.887644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:56:13.887700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:56:13.887801Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2025-12-23T18:56:13.897576Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1766516174232;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2025-12-23T18:56:13.897714Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:56:13.897871Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2025-12-23T18:56:13.897948Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:145;event=RegisterTable;path_id=1000000185; 2025-12-23T18:56:13.898241Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2025-12-23T18:56:13.915483Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:532;event=OnTieringModified;path_id=1000000185; 2025-12-23T18:56:13.939738Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2025-12-23T18:56:13.940601Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136996675361216;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1766516174236;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=111;dst=2; 2025-12-23T18:56:13.940655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136996675361216;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1766516174236;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=111;result=not_found; 2025-12-23T18:56:13.940694Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136996675361216;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1766516174236;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tx_controller.cpp:350;error=problem on start;message=No such table; 2025-12-23T18:56:13.952734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1766516174236;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136996675361216;op_tx=11:TX_KIND_SCHEMA;min=1766516174236;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2025-12-23T18:56:13.952807Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1766516174236;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136996675361216;op_tx=11:TX_KIND_SCHEMA;min=1766516174236;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:23;message=No such table;tablet_id=9437184;tx_id=11; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId [GOOD] >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId [GOOD] Test command err: 2025-12-23T18:56:11.976546Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:11.996406Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:11.996599Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:12.001819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:12.001998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:12.002155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:12.002242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:12.002345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:12.002436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:12.002522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:12.002610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:12.002698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:12.002796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:12.002887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:12.002984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:12.003053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:12.023025Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:12.023208Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:12.023264Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:12.023452Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:12.023619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:12.023707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:12.023758Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:12.023867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:12.023939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:12.023984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:12.024015Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:12.024230Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:12.024323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:12.024374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:12.024402Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:12.024498Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:12.024546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:12.024618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:12.024675Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:12.024744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:12.024787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:12.024816Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:12.024859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:12.024910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:12.024963Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:12.025192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:12.025244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:12.025289Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:12.025425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:12.025471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:12.025505Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:12.025551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:12.025588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:12.025614Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:12.025654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:12.025697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:12.025732Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:12.025881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:12.025928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... und=cleanup;skip_reason=no_changes; 2025-12-23T18:56:14.488384Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2025-12-23T18:56:14.488424Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:14.489503Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.145000s; 2025-12-23T18:56:14.489571Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2025-12-23T18:56:14.577181Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766516173135:max} readable: {1766516173135:max} at tablet 9437184 2025-12-23T18:56:14.577375Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:56:14.581592Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516173135:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:14.581733Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516173135:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:14.582456Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516173135:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:56:14.584317Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516173135:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:56:14.643941Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:611:2608];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516173135:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:665:2653];trace_detailed=; 2025-12-23T18:56:14.645358Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:56:14.645587Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:56:14.645919Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:14.646074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:14.646404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:14.646540Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:14.646694Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:14.646906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:665:2653] finished for tablet 9437184 2025-12-23T18:56:14.647315Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:658:2647];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":3047350,"name":"_full_task","f":3047350,"d_finished":0,"c":0,"l":3050447,"d":3097},"events":[{"name":"bootstrap","f":3047683,"d_finished":1914,"c":1,"l":3049597,"d":1914},{"a":3049865,"name":"ack","f":3049865,"d_finished":0,"c":0,"l":3050447,"d":582},{"a":3049846,"name":"processing","f":3049846,"d_finished":0,"c":0,"l":3050447,"d":601},{"name":"ProduceResults","f":3049239,"d_finished":649,"c":2,"l":3050218,"d":649},{"a":3050222,"name":"Finish","f":3050222,"d_finished":0,"c":0,"l":3050447,"d":225}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:14.647382Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:658:2647];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:14.647747Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:658:2647];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":3047350,"name":"_full_task","f":3047350,"d_finished":0,"c":0,"l":3050911,"d":3561},"events":[{"name":"bootstrap","f":3047683,"d_finished":1914,"c":1,"l":3049597,"d":1914},{"a":3049865,"name":"ack","f":3049865,"d_finished":0,"c":0,"l":3050911,"d":1046},{"a":3049846,"name":"processing","f":3049846,"d_finished":0,"c":0,"l":3050911,"d":1065},{"name":"ProduceResults","f":3049239,"d_finished":649,"c":2,"l":3050218,"d":649},{"a":3050222,"name":"Finish","f":3050222,"d_finished":0,"c":0,"l":3050911,"d":689}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:14.647823Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:14.584279Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:56:14.647910Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:14.648030Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:665:2653];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn >> Initializer::Simple [GOOD] >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:06.559382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:06.694230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:06.721056Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:06.721550Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:06.721642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:07.185443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:07.185560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:07.252217Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516104190924 != 1766516104190928 2025-12-23T18:55:07.270665Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:07.321276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:07.422763Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004876/r3tmp/tmpydN27W/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6816, node 1 TClient is connected to server localhost:8894 2025-12-23T18:55:08.369946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:08.371290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:08.371392Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:08.371427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:08.372327Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:08.384508Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:55:18.579077Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:897:2763], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:18.579200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:907:2768], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:18.579288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:18.584467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:913:2773], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:18.584687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:18.591027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:18.675279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:911:2771], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-12-23T18:55:18.743685Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:968:2809] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:19.152526Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:977:2817], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:55:19.160583Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=ZGI3MzU4MzctNTI5N2MzMTQtYjAzODk4NGEtN2Q2MTQ4ZA==, ActorId: [1:893:2760], ActorState: ExecuteState, TraceId: 01kd691n1804xqk1kr3g21eh9a, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/test]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2025-12-23T18:55:19.239299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:20.648251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:55:21.072813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:21.769463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) Initialization finished REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-12-23T18:55:43.662808Z node 1 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [1:1542:3225] txid# 281474976715678, Access denied for root@builtin on path /Root/.metadata/test, with access RemoveSchema 2025-12-23T18:55:43.663017Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1542:3225] txid# 281474976715678, issues: { message: "Access denied for root@builtin on path /Root/.metadata/test" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:946 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2025-12-23T18:56:15.385768Z node 1 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [1:1715:3348] txid# 281474976715686, Access denied for root@builtin on path /Root/.metadata/initialization/migrations, with access RemoveSchema 2025-12-23T18:56:15.385947Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1715:3348] txid# 281474976715686, issues: { message: "Access denied for root@builtin on path /Root/.metadata/initialization/migrations" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 |98.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2025-12-23T18:55:46.829463Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:55:46.832464Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:55:46.832776Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:46.851749Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:55:46.851939Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:46.856762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:46.856949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:46.857106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:46.857175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:46.857242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:46.857338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:46.857392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:46.857467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:46.857550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:46.857626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:46.857696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:46.857768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:46.857850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:46.859681Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:55:46.876529Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:46.876662Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:46.876697Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:46.876824Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:46.876958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:46.877014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:46.877051Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:46.877116Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:46.877151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:46.877179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:46.877196Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:46.877324Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:46.877378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:46.877407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:55:46.877428Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:46.877501Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:46.877532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:46.877559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:46.877575Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:46.877619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:46.877652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:46.877685Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:46.877713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:46.877742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:46.877762Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:46.877893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:46.877925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:46.877945Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:46.878048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:46.878076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:46.878093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:55:46.878138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:55:46.878167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:55:46.878191Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:55:46.878217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:55:46.878238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
.297869Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:56:16.297933Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:56:16.298176Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:16.298412Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:16.298475Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:16.298700Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2025-12-23T18:56:16.298801Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2025-12-23T18:56:16.299148Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:719:2700];bytes=16000;rows=2000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2025-12-23T18:56:16.299398Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:16.299563Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:16.299770Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:16.300002Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:16.300135Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:16.300250Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:16.300602Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:720:2701] finished for tablet 9437184 2025-12-23T18:56:16.301315Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:719:2700];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":29799683,"name":"_full_task","f":29799683,"d_finished":0,"c":0,"l":29814974,"d":15291},"events":[{"name":"bootstrap","f":29800047,"d_finished":1735,"c":1,"l":29801782,"d":1735},{"a":29814279,"name":"ack","f":29810563,"d_finished":3401,"c":2,"l":29814133,"d":4096},{"a":29814263,"name":"processing","f":29802000,"d_finished":7230,"c":5,"l":29814140,"d":7941},{"name":"ProduceResults","f":29801140,"d_finished":4615,"c":9,"l":29814571,"d":4615},{"a":29814577,"name":"Finish","f":29814577,"d_finished":0,"c":0,"l":29814974,"d":397},{"name":"task_result","f":29802034,"d_finished":3689,"c":3,"l":29810297,"d":3689}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:16.301429Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:719:2700];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:16.302105Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:719:2700];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":29799683,"name":"_full_task","f":29799683,"d_finished":0,"c":0,"l":29815789,"d":16106},"events":[{"name":"bootstrap","f":29800047,"d_finished":1735,"c":1,"l":29801782,"d":1735},{"a":29814279,"name":"ack","f":29810563,"d_finished":3401,"c":2,"l":29814133,"d":4911},{"a":29814263,"name":"processing","f":29802000,"d_finished":7230,"c":5,"l":29814140,"d":8756},{"name":"ProduceResults","f":29801140,"d_finished":4615,"c":9,"l":29814571,"d":4615},{"a":29814577,"name":"Finish","f":29814577,"d_finished":0,"c":0,"l":29815789,"d":1212},{"name":"task_result","f":29802034,"d_finished":3689,"c":3,"l":29810297,"d":3689}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:16.302221Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:16.282722Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2025-12-23T18:56:16.302290Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:16.302505Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:720:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke >> TColumnShardTestSchema::CreateTable-Reboots-GenerateInternalPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:55:30.591525Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:30.666799Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:30.672360Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:30.672610Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:30.672676Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:30.946724Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:31.020132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:31.020241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:31.076882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:31.124159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004918/r3tmp/tmpiV1ql3/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19641, node 1 2025-12-23T18:55:31.767066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:31.767768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:31.767798Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:31.767820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:31.767942Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:31.829739Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:23808 2025-12-23T18:55:32.330182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:34.475524Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:34.480282Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:34.482638Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:34.505996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:34.506071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:34.542936Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:34.544511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:34.770454Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.770986Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.771559Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.772020Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.772242Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.772323Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.772446Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.772535Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.772648Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:34.868715Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:34.891847Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:34.891921Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:34.904553Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:35.047261Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:35.084410Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:35.084482Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:35.105296Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:35.106399Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:35.106569Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:35.106612Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:35.106654Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:35.106694Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:35.106731Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:35.106778Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:35.107056Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:35.146380Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:35.146460Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2188:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:35.149128Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2185:2602] 2025-12-23T18:55:35.165532Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:35.168950Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2258:2658] 2025-12-23T18:55:35.169151Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2258:2658], schemeshard id = 72075186224037897 2025-12-23T18:55:35.190212Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Describe result: PathErrorUnknown 2025-12-23T18:55:35.190270Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Creating table 2025-12-23T18:55:35.190327Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:35.209695Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2356:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:35.215324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:35.221142Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:35.221230Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:35.230526Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:35.321752Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:35.623405Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:35.712497Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2651]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:35.712588Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2248:2652] Owner: [2:2247:2 ... rd count = 1 2025-12-23T18:56:11.677526Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2025-12-23T18:56:11.677595Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2025-12-23T18:56:11.677632Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:56:11.682541Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2025-12-23T18:56:11.714222Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2025-12-23T18:56:11.714842Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2025-12-23T18:56:11.714955Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2025-12-23T18:56:11.715982Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-12-23T18:56:11.729809Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:56:11.730047Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-12-23T18:56:11.730988Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5575:4766], server id = [2:5579:4770], tablet id = 72075186224037899, status = OK 2025-12-23T18:56:11.731321Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5575:4766], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:11.731586Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5576:4767], server id = [2:5580:4771], tablet id = 72075186224037900, status = OK 2025-12-23T18:56:11.731628Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5576:4767], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:11.732307Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5577:4768], server id = [2:5581:4772], tablet id = 72075186224037901, status = OK 2025-12-23T18:56:11.732353Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5577:4768], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:11.732904Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5578:4769], server id = [2:5582:4773], tablet id = 72075186224037902, status = OK 2025-12-23T18:56:11.732946Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5578:4769], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:11.737462Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-12-23T18:56:11.738168Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5575:4766], server id = [2:5579:4770], tablet id = 72075186224037899 2025-12-23T18:56:11.738211Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:11.738701Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:56:11.739061Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5576:4767], server id = [2:5580:4771], tablet id = 72075186224037900 2025-12-23T18:56:11.739087Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:11.739283Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-12-23T18:56:11.739685Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5577:4768], server id = [2:5581:4772], tablet id = 72075186224037901 2025-12-23T18:56:11.739719Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:11.740090Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-12-23T18:56:11.740124Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:56:11.740260Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2025-12-23T18:56:11.740576Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5578:4769], server id = [2:5582:4773], tablet id = 72075186224037902 2025-12-23T18:56:11.740600Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:11.765245Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:56:11.765453Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-12-23T18:56:12.290768Z node 2 :STATISTICS DEBUG: service_impl.cpp:254: Event round 2 is different from the current 3 2025-12-23T18:56:12.290848Z node 2 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2025-12-23T18:56:15.362500Z node 2 :STATISTICS INFO: service_impl.cpp:418: Node 3 is unavailable 2025-12-23T18:56:15.362599Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:56:15.362755Z node 2 :STATISTICS DEBUG: service_impl.cpp:254: Event round 3 is different from the current 0 2025-12-23T18:56:15.362792Z node 2 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2025-12-23T18:56:15.362875Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-12-23T18:56:15.362970Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:56:15.363596Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-12-23T18:56:15.377789Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-12-23T18:56:15.377963Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-12-23T18:56:15.378608Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5697:4827], server id = [2:5698:4828], tablet id = 72075186224037900, status = OK 2025-12-23T18:56:15.378694Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5697:4827], path = { OwnerId: 72075186224037897 LocalId: 34 } 2025-12-23T18:56:15.379769Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-12-23T18:56:15.379816Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2025-12-23T18:56:15.380012Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5697:4827], server id = [2:5698:4828], tablet id = 72075186224037900 2025-12-23T18:56:15.380033Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2025-12-23T18:56:15.380088Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-12-23T18:56:15.380190Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-12-23T18:56:15.380463Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5700:4830], ActorId: [2:5701:4831], Starting query actor #1 [2:5702:4832] 2025-12-23T18:56:15.380510Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5701:4831], ActorId: 
[2:5702:4832], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:56:15.382760Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5701:4831], ActorId: [2:5702:4832], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NWMyZGIxYzctZTY3NzBhOWItMTY5ZWJlYjMtNTA5ZmVlMjk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:56:15.452561Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5711:4841]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:15.452755Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:15.452781Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5711:4841], StatRequests.size() = 1 2025-12-23T18:56:15.572843Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5701:4831], ActorId: [2:5702:4832], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWMyZGIxYzctZTY3NzBhOWItMTY5ZWJlYjMtNTA5ZmVlMjk=, TxId: 2025-12-23T18:56:15.572912Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5701:4831], ActorId: [2:5702:4832], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWMyZGIxYzctZTY3NzBhOWItMTY5ZWJlYjMtNTA5ZmVlMjk=, TxId: 2025-12-23T18:56:15.573334Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5700:4830], ActorId: [2:5701:4831], Got response [2:5702:4832] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-12-23T18:56:15.573723Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5725:4847]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:15.574276Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:56:15.574331Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:56:15.574533Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:56:15.575329Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:56:15.575382Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:56:15.575426Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:56:15.581019Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 probe = 4 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.2%| [TA] $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId [GOOD] |98.2%| [TA] {RESULT} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_kill_tablet_using_developer_ui [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId [GOOD] Test command err: 2025-12-23T18:56:15.373897Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:15.397615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:15.397794Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:15.402886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:15.403104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:15.403297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:15.403383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:15.403487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:15.403555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:15.403669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:15.403757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:15.403821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:15.403893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:15.403976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:15.404063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:15.404137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:15.423278Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:15.423431Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:15.423501Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:15.423621Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:15.423732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:15.423791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:15.423836Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:15.423943Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:15.423991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:15.424018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:15.424040Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:15.424159Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:15.424200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:15.424239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2025-12-23T18:56:15.424258Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:15.424343Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:15.424383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:15.424415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:15.424435Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:15.424471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:15.424495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:15.424513Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:15.424554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:15.424594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:15.424647Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:15.424793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:15.424825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:15.424843Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:15.424929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:15.424972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:15.424991Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:15.425021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:15.425044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:15.425064Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:15.425091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:15.425116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:15.425134Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:15.425207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:15.425230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
37184;request_tx=104:TX_KIND_SCHEMA;min=1766516176479;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136316595652992;op_tx=104:TX_KIND_SCHEMA;min=1766516176479;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=104:TX_KIND_SCHEMA;min=1766516176479;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=136522735869952;method=TTxController::FinishProposeOnComplete;tx_id=104;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=104; 2025-12-23T18:56:17.809954Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:56:17.810057Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516176479 at tablet 9437184, mediator 0 2025-12-23T18:56:17.810102Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] execute at tablet 9437184 2025-12-23T18:56:17.810310Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2025-12-23T18:56:17.822756Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] complete at tablet 9437184 2025-12-23T18:56:17.823414Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766516176479:max} readable: {1766516176479:max} at tablet 9437184 2025-12-23T18:56:17.823584Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:56:17.827890Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516176479:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:17.827993Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516176479:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:17.828750Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516176479:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:56:17.830091Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516176479:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:56:17.879717Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516176479:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:563:2575];trace_detailed=; 2025-12-23T18:56:17.881037Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:56:17.881288Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:56:17.881594Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:17.881730Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:17.881992Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:17.882168Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:17.882320Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:17.882523Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:563:2575] finished for tablet 9437184 2025-12-23T18:56:17.882958Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:557:2569];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":2907502,"name":"_full_task","f":2907502,"d_finished":0,"c":0,"l":2910429,"d":2927},"events":[{"name":"bootstrap","f":2907854,"d_finished":1756,"c":1,"l":2909610,"d":1756},{"a":2909797,"name":"ack","f":2909797,"d_finished":0,"c":0,"l":2910429,"d":632},{"a":2909781,"name":"processing","f":2909781,"d_finished":0,"c":0,"l":2910429,"d":648},{"name":"ProduceResults","f":2909314,"d_finished":615,"c":2,"l":2910205,"d":615},{"a":2910209,"name":"Finish","f":2910209,"d_finished":0,"c":0,"l":2910429,"d":220}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:17.883051Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:557:2569];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:17.883407Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:557:2569];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":2907502,"name":"_full_task","f":2907502,"d_finished":0,"c":0,"l":2910957,"d":3455},"events":[{"name":"bootstrap","f":2907854,"d_finished":1756,"c":1,"l":2909610,"d":1756},{"a":2909797,"name":"ack","f":2909797,"d_finished":0,"c":0,"l":2910957,"d":1160},{"a":2909781,"name":"processing","f":2909781,"d_finished":0,"c":0,"l":2910957,"d":1176},{"name":"ProduceResults","f":2909314,"d_finished":615,"c":2,"l":2910205,"d":615},{"a":2910209,"name":"Finish","f":2910209,"d_finished":0,"c":0,"l":2910957,"d":748}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:17.883492Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:17.830052Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:56:17.883542Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:17.883666Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:563:2575];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable+Reboots+GenerateInternalPathId >> TColumnShardTestSchema::CreateTable-Reboots-GenerateInternalPathId [GOOD] >> TColumnShardTestSchema::RebootExportAfterFail >> TColumnShardTestSchema::DropWriteRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable-Reboots-GenerateInternalPathId [GOOD] Test command err: 2025-12-23T18:56:17.826214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:17.848464Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:17.848669Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:17.854800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:17.855031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:17.855228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:17.855312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:17.855382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:17.855445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:17.855526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:17.855635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:17.855711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:17.855802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:17.855886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:17.855970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:17.856055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:17.878054Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2025-12-23T18:56:17.878362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:17.878417Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:17.878577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:17.878720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:17.878828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:17.878885Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:17.878978Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:17.879030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:17.879063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:17.879084Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:17.879233Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:17.879284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:17.879325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:17.879375Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:17.879446Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:17.879486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:17.879525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:17.879561Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:17.879617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:17.879648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:17.879671Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:17.879719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:17.879749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:17.879799Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:17.879980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:17.880022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:17.880044Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:17.880146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:17.880180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:17.880203Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:17.880242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:17.880274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:17.880297Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:17.880328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:17.880354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:17.880385Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:17.880533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:17.880588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... tartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=1766516178858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2025-12-23T18:56:19.004015Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1766516178858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137138412479680;op_tx=119:TX_KIND_SCHEMA;min=1766516178858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1766516178858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137344573759680;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-12-23T18:56:19.004092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1766516178858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137138412479680;op_tx=119:TX_KIND_SCHEMA;min=1766516178858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1766516178858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137344573759680;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:56:19.004153Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1766516178858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137138412479680;op_tx=119:TX_KIND_SCHEMA;min=1766516178858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1766516178858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137344573759680;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2025-12-23T18:56:19.004534Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:56:19.004687Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516178858 at tablet 9437184, mediator 0 2025-12-23T18:56:19.004751Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] execute at tablet 9437184 2025-12-23T18:56:19.005039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2025-12-23T18:56:19.005089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2025-12-23T18:56:19.005166Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 20, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2025-12-23T18:56:19.005246Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=20; 2025-12-23T18:56:19.005317Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:145;event=RegisterTable;path_id=20; 2025-12-23T18:56:19.005624Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2025-12-23T18:56:19.006285Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:532;event=OnTieringModified;path_id=20; 2025-12-23T18:56:19.018131Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-12-23T18:56:19.019427Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=137138412482592;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1766516178861;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-12-23T18:56:19.031324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1766516178861;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137138412482592;op_tx=120:TX_KIND_SCHEMA;min=1766516178861;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:56:19.031385Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1766516178861;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137138412482592;op_tx=120:TX_KIND_SCHEMA;min=1766516178861;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-12-23T18:56:19.032377Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=137138412484384;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1766516178863;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-12-23T18:56:19.044174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1766516178863;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137138412484384;op_tx=121:TX_KIND_SCHEMA;min=1766516178863;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:56:19.044231Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1766516178863;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137138412484384;op_tx=121:TX_KIND_SCHEMA;min=1766516178863;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type 
NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-12-23T18:56:19.045278Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=137138412486176;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1766516178864;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-12-23T18:56:19.056848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1766516178864;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137138412486176;op_tx=122:TX_KIND_SCHEMA;min=1766516178864;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:56:19.056906Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1766516178864;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137138412486176;op_tx=122:TX_KIND_SCHEMA;min=1766516178864;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] >> TColumnShardTestSchema::HotTiersTtl >> TColumnShardTestSchema::CreateTable+Reboots+GenerateInternalPathId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] Test command err: 2025-12-23T18:56:19.847292Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:19.878129Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:19.878348Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:19.884223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:19.884447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:19.884632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:19.884724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:19.884893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:19.884964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:19.885049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:19.885124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:19.885189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:19.885269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:19.885341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:19.885437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:19.885514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:19.909385Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2025-12-23T18:56:19.909535Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:19.909579Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:19.909718Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:19.909842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:19.909909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:19.909948Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:19.910015Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:19.910056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:19.910083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:19.910101Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:19.910242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:19.910302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:19.910347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:19.910369Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:19.910443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:19.910486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:19.910516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:19.910535Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:19.910575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:19.910598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:19.910618Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:19.910660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:19.910708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:19.910762Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:19.910934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:19.910973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:19.911001Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:19.911083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:19.911132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:19.911155Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:19.911191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:19.911221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:19.911241Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:19.911272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:19.911301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:19.911324Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:19.911417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:19.911448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... n;tablet_id=9437184;tx_id=101;this=136703749314432;method=TTxController::StartProposeOnExecute;tx_info=101:TX_KIND_SCHEMA;min=1766516180817;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2025-12-23T18:56:20.496045Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1766516180817;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136703749314432;op_tx=101:TX_KIND_SCHEMA;min=1766516180817;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1766516180817;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=136909910452288;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-12-23T18:56:20.496122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1766516180817;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136703749314432;op_tx=101:TX_KIND_SCHEMA;min=1766516180817;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1766516180817;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=136909910452288;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2025-12-23T18:56:20.496180Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1766516180817;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136703749314432;op_tx=101:TX_KIND_SCHEMA;min=1766516180817;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1766516180817;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=136909910452288;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=101; 2025-12-23T18:56:20.496482Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:56:20.496643Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516180817 at tablet 9437184, mediator 0 2025-12-23T18:56:20.496696Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 
2025-12-23T18:56:20.497008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:56:20.497137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:56:20.497189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:56:20.497261Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2025-12-23T18:56:20.504657Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1766516180817;tx_id=101;;switch_optimizer=0;switch_accessors=0; 2025-12-23T18:56:20.504751Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:56:20.504854Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2025-12-23T18:56:20.504937Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine.h:145;event=RegisterTable;path_id=1000000185; 2025-12-23T18:56:20.505149Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2025-12-23T18:56:20.517491Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:532;event=OnTieringModified;path_id=1000000185; 2025-12-23T18:56:20.540688Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6120;columns=10; 2025-12-23T18:56:20.544315Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=1;last=1; 2025-12-23T18:56:20.544384Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=6120;operation_id=11d16ed2-e03111f0-b3e802ff-65cea840;in_flight=1;size_in_flight=6120; 2025-12-23T18:56:20.555478Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=1;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8392;count=1;actions=__DEFAULT,;waiting=1;; 2025-12-23T18:56:20.557410Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6120;event=data_write_finished;writing_id=11d16ed2-e03111f0-b3e802ff-65cea840; 2025-12-23T18:56:20.557726Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=78;data_size=59;sum=78;count=1; 2025-12-23T18:56:20.557804Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=174;data_size=171;sum=174;count=2;size_of_meta=112; 2025-12-23T18:56:20.557882Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=254;data_size=251;sum=254;count=1;size_of_portion=192; 2025-12-23T18:56:20.558511Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-12-23T18:56:20.558714Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=2;operation_id=1; 2025-12-23T18:56:20.570557Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-12-23T18:56:20.570796Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:56:20.595974Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1766516180825;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136703749665888;op_tx=103:TX_KIND_SCHEMA;min=1766516180825;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1766516180825;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=136909910564288;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-12-23T18:56:20.596071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1766516180825;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136703749665888;op_tx=103:TX_KIND_SCHEMA;min=1766516180825;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1766516180825;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=136909910564288;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2025-12-23T18:56:20.596130Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1766516180825;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136703749665888;op_tx=103:TX_KIND_SCHEMA;min=1766516180825;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1766516180825;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=136909910564288;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=103; 2025-12-23T18:56:20.596399Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:56:20.596503Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516180825 at tablet 9437184, mediator 0 2025-12-23T18:56:20.596545Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] execute at tablet 9437184 2025-12-23T18:56:20.596760Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2025-12-23T18:56:20.608474Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] complete at tablet 9437184 2025-12-23T18:56:20.608760Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516180826 at tablet 9437184, mediator 0 2025-12-23T18:56:20.608827Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[8] execute at tablet 9437184 2025-12-23T18:56:20.609071Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=abstract.h:88;progress_tx_id=102;lock_id=1;broken=0; 2025-12-23T18:56:20.620794Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[8] complete at tablet 9437184 2025-12-23T18:56:20.620916Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:101;progress_tx_id=102;lock_id=1;broken=0; 2025-12-23T18:56:20.621068Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=102;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2025-12-23T18:56:20.621108Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=102;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=1; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable+Reboots+GenerateInternalPathId [GOOD] Test command err: 2025-12-23T18:56:19.281360Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:19.302463Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:19.302663Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:19.308473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:19.308700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:19.308910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:19.308996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:19.309064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:19.309135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:19.309257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:19.309315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:19.309395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:19.309494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:19.309572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:19.309649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:19.309720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:19.329826Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2025-12-23T18:56:19.330227Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:19.330305Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:19.330516Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:19.330760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:19.330868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:19.330944Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:19.331048Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:19.331124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:19.331171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:19.331205Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:19.331432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:19.331516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:19.331565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:19.331600Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:19.331719Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:19.331829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:19.331897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:19.331931Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:19.332001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:19.332070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:19.332116Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:19.332169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:19.332218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:19.332278Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:19.332525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:19.332594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:19.332629Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:19.332800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:19.332863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:19.332899Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:19.332956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:19.333003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:19.333033Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:19.333077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:19.333117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:19.333166Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:19.333333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:19.333385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 19:TX_KIND_SCHEMA;min=1766516180370;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2025-12-23T18:56:20.815557Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1766516180370;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136229535171104;op_tx=119:TX_KIND_SCHEMA;min=1766516180370;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1766516180370;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136435696712512;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-12-23T18:56:20.815638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1766516180370;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136229535171104;op_tx=119:TX_KIND_SCHEMA;min=1766516180370;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1766516180370;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136435696712512;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:56:20.815738Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1766516180370;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136229535171104;op_tx=119:TX_KIND_SCHEMA;min=1766516180370;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1766516180370;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136435696712512;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2025-12-23T18:56:20.816032Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:56:20.816152Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516180370 at tablet 9437184, mediator 0 2025-12-23T18:56:20.816197Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] execute at tablet 9437184 2025-12-23T18:56:20.816486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2025-12-23T18:56:20.816536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2025-12-23T18:56:20.816602Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000202, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2025-12-23T18:56:20.816670Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000202; 2025-12-23T18:56:20.816722Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:145;event=RegisterTable;path_id=1000000202; 2025-12-23T18:56:20.816952Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2025-12-23T18:56:20.817595Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:532;event=OnTieringModified;path_id=1000000202; 2025-12-23T18:56:20.829551Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-12-23T18:56:20.830791Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=136229535174016;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1766516180373;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-12-23T18:56:20.842762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1766516180373;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136229535174016;op_tx=120:TX_KIND_SCHEMA;min=1766516180373;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:56:20.842836Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1766516180373;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136229535174016;op_tx=120:TX_KIND_SCHEMA;min=1766516180373;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-12-23T18:56:20.843891Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=136229535175808;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1766516180374;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-12-23T18:56:20.855775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1766516180374;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136229535175808;op_tx=121:TX_KIND_SCHEMA;min=1766516180374;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:56:20.855834Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1766516180374;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136229535175808;op_tx=121:TX_KIND_SCHEMA;min=1766516180374;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type 
NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-12-23T18:56:20.857122Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=136229535177600;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1766516180376;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-12-23T18:56:20.868986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1766516180376;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136229535177600;op_tx=122:TX_KIND_SCHEMA;min=1766516180376;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2025-12-23T18:56:20.869083Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1766516180376;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136229535177600;op_tx=122:TX_KIND_SCHEMA;min=1766516180376;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; >> TColumnShardTestSchema::ExportAfterFail [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516743.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516743.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515543.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-12-23T18:55:44.779758Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:44.797873Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:55:44.798074Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:44.803146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:44.803352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:44.803512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:44.803585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:44.803661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:44.803747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:44.803813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:44.803871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:44.803936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:44.803995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:44.804073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:44.804130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:44.804190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:44.822444Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:44.822591Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:44.822637Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:44.822765Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:44.822895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:44.822950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:44.822978Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:44.823038Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:44.823079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:44.823106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:44.823139Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:44.823253Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:44.823292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:44.823317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2025-12-23T18:55:44.823335Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:44.823393Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:44.823429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:44.823455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:44.823482Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:44.823531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:44.823556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:44.823573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:44.823603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:44.823626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:44.823642Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:44.823775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:44.823806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:44.823843Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:44.823936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:44.823965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:44.823983Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:55:44.824013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:55:44.824039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:55:44.824058Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:55:44.824084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... ];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:21.501394Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:21.501433Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:21.501536Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=80000; 2025-12-23T18:56:21.501577Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=80000;batch_columns=timestamp; 2025-12-23T18:56:21.501769Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:832:2801];bytes=1280000;rows=160000;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:833:2802]->[1:832:2801] 
2025-12-23T18:56:21.501875Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:21.501994Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:21.502074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:21.502183Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:21.502247Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:21.502323Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:21.502491Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:833:2802] finished for tablet 9437184 2025-12-23T18:56:21.502818Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:832:2801];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.08},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.082}],"full":{"a":37970872,"name":"_full_task","f":37970872,"d_finished":0,"c":0,"l":38053503,"d":82631},"events":[{"name":"bootstrap","f":37971043,"d_finished":879,"c":1,"l":37971922,"d":879},{"a":38053148,"name":"ack","f":38051180,"d_finished":1822,"c":2,"l":38053084,"d":2177},{"a":38053140,"name":"processing","f":37972037,"d_finished":4199,"c":5,"l":38053086,"d":4562},{"name":"ProduceResults","f":37971635,"d_finished":2537,"c":9,"l":38053320,"d":2537},{"a":38053323,"name":"Finish","f":38053323,"d_finished":0,"c":0,"l":38053503,"d":180},{"name":"task_result","f":37972049,"d_finished":2277,"c":3,"l":38050963,"d":2277}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:21.502885Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:832:2801];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:21.503210Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:832:2801];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.08},{"events":["l_ProduceResults","f_Finish"],"t":0.082},{"events":["l_ack","l_processing","l_Finish"],"t":0.083}],"full":{"a":37970872,"name":"_full_task","f":37970872,"d_finished":0,"c":0,"l":38053899,"d":83027},"events":[{"name":"bootstrap","f":37971043,"d_finished":879,"c":1,"l":37971922,"d":879},{"a":38053148,"name":"ack","f":38051180,"d_finished":1822,"c":2,"l":38053084,"d":2573},{"a":38053140,"name":"processing","f":37972037,"d_finished":4199,"c":5,"l":38053086,"d":4958},{"name":"ProduceResults","f":37971635,"d_finished":2537,"c":9,"l":38053320,"d":2537},{"a":38053323,"name":"Finish","f":38053323,"d_finished":0,"c":0,"l":38053899,"d":576},{"name":"task_result","f":37972049,"d_finished":2277,"c":3,"l":38050963,"d":2277}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:833:2802]->[1:832:2801] 2025-12-23T18:56:21.503291Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:21.418794Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9739224;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9739224;selected_rows=0; 2025-12-23T18:56:21.503338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:21.503431Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:833:2802];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 160000/9739224 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] >> test_canonical_records.py::test_create_drop_and_alter_database [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:25.928035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:25.998945Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:26.002890Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:26.003157Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:26.003223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:26.321896Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:26.393636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:26.393738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:26.449612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:26.537646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004932/r3tmp/tmpooZJiF/pdisk_1.dat TServer::EnableGrpc on GrpcPort 65178, node 1 2025-12-23T18:55:27.188695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:27.189445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:27.189480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:27.189503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:27.189699Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:27.252727Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22431 2025-12-23T18:55:27.745932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:29.951160Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:29.955792Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:29.958092Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:29.978646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:29.978727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:30.015847Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:30.017483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:30.244418Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:30.245022Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:30.245629Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:30.246095Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:30.246303Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:30.246379Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:30.246490Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:30.246594Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:30.246734Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:30.353768Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:30.364986Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:30.365088Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:30.377535Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:30.523007Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:30.558179Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:30.558250Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:30.578437Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:30.579596Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:30.579757Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:30.579797Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:30.579837Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:30.579873Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:30.579904Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:30.579936Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:30.580197Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:30.617122Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:30.617229Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:30.627403Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:30.634097Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:30.634893Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:30.638881Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:30.661952Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:30.662003Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:55:30.662068Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:30.679338Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:30.687130Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:30.692902Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:30.692987Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:30.701269Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:30.794467Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:31.079922Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:31.184093Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:31.184160Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2 ... TAtNTViYzAwZTEtODgwMzM5MDItNDgxMWZkNWM=, TxId: 2025-12-23T18:56:15.179807Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4644:3965], ActorId: [2:4645:3966], Got response [2:4646:3967] SUCCESS 2025-12-23T18:56:15.180032Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:56:15.194082Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:56:15.194157Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:49: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3388:3496] 2025-12-23T18:56:15.227490Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:56:15.227592Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:56:15.281654Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4465:3835], schemeshard count = 1 2025-12-23T18:56:16.100335Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T18:56:16.104614Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4723:4006], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:56:16.107226Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4723:4006], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table2` 2025-12-23T18:56:16.107334Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4723:4006], Start read next stream part 2025-12-23T18:56:16.142205Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4740:4016]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:16.142408Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T18:56:16.142444Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4740:4016], StatRequests.size() = 1 2025-12-23T18:56:20.949078Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4723:4006], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:56:20.949211Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4723:4006], Start read next stream part 2025-12-23T18:56:20.949470Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4826:4078], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:56:20.952127Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4826:4078], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table2` 2025-12-23T18:56:20.952262Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4826:4078], Start read next stream part 2025-12-23T18:56:20.952404Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4723:4006], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:56:20.952450Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4723:4006], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTI1M2ZlY2YtMTQ0ZjZhMS04NWU2ZDc4Yy02N2U0NDY5MQ==, TxId: 2025-12-23T18:56:20.952559Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 34130, txId: 281474976720666] shutting down 2025-12-23T18:56:20.991196Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4845:4093]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:20.991394Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2025-12-23T18:56:20.991422Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:4845:4093], StatRequests.size() = 1 2025-12-23T18:56:23.922176Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: 
[2:4713:2473], ActorId: [2:4826:4078], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:56:23.922306Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4826:4078], Start read next stream part 2025-12-23T18:56:23.923763Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4915:4141], ActorId: [2:4916:4142], Starting query actor #1 [2:4917:4143] 2025-12-23T18:56:23.923814Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4916:4142], ActorId: [2:4917:4143], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:56:23.925738Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4826:4078], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:56:23.925781Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4713:2473], ActorId: [2:4826:4078], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmQ4YThiNzQtYmEyNGRhODAtNmFiZDMzNTEtZTNmZDFhYzc=, TxId: 2025-12-23T18:56:23.925860Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 34140, txId: 281474976720669] shutting down 2025-12-23T18:56:23.926684Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4916:4142], ActorId: [2:4917:4143], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NTdmNzA4NWEtOTc5MDJmZGYtOTAxMzFkMmQtNDE0NTQ1ZDM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:56:23.960197Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4916:4142], ActorId: [2:4917:4143], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTdmNzA4NWEtOTc5MDJmZGYtOTAxMzFkMmQtNDE0NTQ1ZDM=, TxId: 2025-12-23T18:56:23.960273Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4916:4142], ActorId: [2:4917:4143], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTdmNzA4NWEtOTc5MDJmZGYtOTAxMzFkMmQtNDE0NTQ1ZDM=, TxId: 2025-12-23T18:56:23.960593Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4915:4141], ActorId: [2:4916:4142], Got response [2:4917:4143] SUCCESS 2025-12-23T18:56:23.960900Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:56:23.986774Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 35] 2025-12-23T18:56:23.986858Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3388:3496] 2025-12-23T18:56:23.987718Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [2:4949:4164]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T18:56:23.988182Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2025-12-23T18:56:23.988250Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:56:23.988607Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2025-12-23T18:56:23.988674Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 6 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:56:23.988737Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:56:23.989022Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-12-23T18:56:23.994632Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-12-23T18:56:23.995521Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 6 2025-12-23T18:56:23.995710Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 6 2025-12-23T18:56:23.996279Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 7 ], ReplyToActorId[ [2:4963:4178]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T18:56:23.996626Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] 2025-12-23T18:56:23.996693Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T18:56:23.997042Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] 2025-12-23T18:56:23.997109Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 7 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T18:56:23.997177Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T18:56:23.997434Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-12-23T18:56:24.000145Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2025-12-23T18:56:24.000998Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 7 2025-12-23T18:56:24.001750Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 7 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> Backup::ProposeBackup >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes >> 
Normalizers::SubColumnsPortionsCleanerNormalizer+useSubcolumns >> Normalizers::PortionsNormalizer >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot >> Backup::ProposeBackup [GOOD] >> EvWrite::AbortInTransaction >> Normalizers::PortionsNormalizer [GOOD] >> Normalizers::RemoveDeleteFlagNormalizer >> EvWrite::AbortInTransaction [GOOD] >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] >> TColumnShardTestReadWrite::WriteReadExoticTypes >> Normalizers::RemoveDeleteFlagNormalizer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2025-12-23T18:56:27.409670Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:27.434673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:27.435579Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:27.448854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:27.449062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:27.449230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:27.449317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:27.449408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:27.449484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:27.449553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:27.449645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:27.449711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:27.449773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:27.449848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:27.449934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:27.450040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:27.472082Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:27.472432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:27.472475Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:27.472674Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:27.474365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:27.474453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:27.474517Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:27.474618Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:27.474716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:27.474778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:27.474801Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:27.474956Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:27.475002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:27.475031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:27.475054Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:27.475145Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:27.475198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:27.475228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:27.475245Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:27.475282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:27.475304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:27.475324Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:27.475351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:27.475375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:27.475398Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:27.475562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:27.475612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:27.475636Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:27.475710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:27.475754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:27.475785Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:27.475837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:27.475868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:27.475888Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:27.475915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:27.475937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:27.475954Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:27.476047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:27.476080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
er.h:56: PlanStep 1766516190035 at tablet 9437184, mediator 0 2025-12-23T18:56:30.098977Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[5] execute at tablet 9437184 2025-12-23T18:56:30.099018Z node 2 :TX_COLUMNSHARD ERROR: ctor_logger.h:56: TxPlanStep[5] Ignore old txIds [112] for step 1766516190035 last planned step 1766516190035 at tablet 9437184 2025-12-23T18:56:30.110719Z node 2 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tx_id=0;lock_id=222;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2025-12-23T18:56:30.110793Z node 2 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tx_id=0;lock_id=222;fline=manager.cpp:197;event=remove_operation;operation_id=1; 2025-12-23T18:56:30.110863Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[5] complete at tablet 9437184 2025-12-23T18:56:30.111178Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766516190035:max} readable: {1766516190035:max} at tablet 9437184 2025-12-23T18:56:30.111284Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:56:30.111462Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516190035:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-12-23T18:56:30.111541Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516190035:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-12-23T18:56:30.112142Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516190035:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"1,2","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"2","p":{"address":{"name":"field","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,2","t":"Projection"},"w":18,"id":0}}}; 2025-12-23T18:56:30.113567Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516190035:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:56:30.118779Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516190035:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[2:182:2194];trace_detailed=; 2025-12-23T18:56:30.119430Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:83;ff_first=(column_ids=1,2;column_names=field,key;);; 2025-12-23T18:56:30.119587Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2025-12-23T18:56:30.119840Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:30.119961Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:30.120102Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:30.120253Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:30.120383Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:30.120536Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:182:2194] finished for tablet 9437184 2025-12-23T18:56:30.120839Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:181:2193];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":3506734,"name":"_full_task","f":3506734,"d_finished":0,"c":0,"l":3508619,"d":1885},"events":[{"name":"bootstrap","f":3506963,"d_finished":1065,"c":1,"l":3508028,"d":1065},{"a":3508125,"name":"ack","f":3508125,"d_finished":0,"c":0,"l":3508619,"d":494},{"a":3508113,"name":"processing","f":3508113,"d_finished":0,"c":0,"l":3508619,"d":506},{"name":"ProduceResults","f":3507771,"d_finished":532,"c":2,"l":3508447,"d":532},{"a":3508450,"name":"Finish","f":3508450,"d_finished":0,"c":0,"l":3508619,"d":169}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:30.120903Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:181:2193];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:30.121232Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:181:2193];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":3506734,"name":"_full_task","f":3506734,"d_finished":0,"c":0,"l":3508993,"d":2259},"events":[{"name":"bootstrap","f":3506963,"d_finished":1065,"c":1,"l":3508028,"d":1065},{"a":3508125,"name":"ack","f":3508125,"d_finished":0,"c":0,"l":3508993,"d":868},{"a":3508113,"name":"processing","f":3508113,"d_finished":0,"c":0,"l":3508993,"d":880},{"name":"ProduceResults","f":3507771,"d_finished":532,"c":2,"l":3508447,"d":532},{"a":3508450,"name":"Finish","f":3508450,"d_finished":0,"c":0,"l":3508993,"d":543}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:30.121331Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:30.113542Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:56:30.121376Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:30.121480Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:182:2194];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SubColumnsPortionsCleanerNormalizer+useSubcolumns [GOOD] >> Normalizers::SubColumnsPortionsCleanerNormalizer-useSubcolumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] Test command err: 2025-12-23T18:56:27.409675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:27.434829Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:27.435689Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:27.448917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:27.449222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:27.449425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:27.449560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:27.449678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:27.449789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:27.449921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:27.450036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:27.450135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:27.450252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:27.450362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:27.450482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:27.450591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:27.481222Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:27.481371Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:27.481435Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:27.481608Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:27.481743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:27.481808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:27.481851Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:27.481953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:27.482006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:27.482051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:27.482083Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:27.482270Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:27.482353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:27.482401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:27.482429Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:27.482518Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:27.482568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:27.482610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:27.482638Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:27.482694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:27.482771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:27.482809Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:27.482881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:27.482942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:27.482983Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:27.483204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:27.483255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:27.483283Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:27.483439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:27.483482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:27.483512Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:27.483555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:27.483600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:27.483630Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:27.483679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:27.483715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:27.483743Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:27.483861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:27.483901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"i
nc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id
":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"
count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2025-12-23T18:56:31.669469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::RemoveDeleteFlagNormalizer [GOOD] Test command err: 2025-12-23T18:56:27.444712Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:27.476728Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:27.477029Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:27.484655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-12-23T18:56:27.484956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=LeakedBlobsNormalizer; 2025-12-23T18:56:27.485095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:27.485330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:27.485490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:27.485591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:27.485697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:27.485775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:27.485866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:27.485952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:27.486034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:27.486106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:27.486181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:27.486250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:27.486323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:27.507054Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:27.507331Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=14;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-12-23T18:56:27.507382Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-12-23T18:56:27.507657Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2025-12-23T18:56:27.507775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:27.507835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:27.507869Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-12-23T18:56:27.508100Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-12-23T18:56:27.508181Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-12-23T18:56:27.508268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-12-23T18:56:27.508332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-12-23T18:56:27.508391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:27.508440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-12-23T18:56:27.508470Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-12-23T18:56:27.508609Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:27.508724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:27.508759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:27.508780Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-12-23T18:56:27.508846Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:27.508896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:27.508927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:27.508948Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:27.509081Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:27.509134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:27.509160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:27.509198Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:27.509270Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:27.509310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:27.509335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:27.509354Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:27.509394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:27.509421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:27.509438Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:27.509502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:27.509536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:27.509557Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:27.509752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline ... {class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-12-23T18:56:32.078651Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2025-12-23T18:56:32.078680Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:56:32.078705Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:56:32.079110Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:32.079199Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:32.079229Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:32.079323Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2025-12-23T18:56:32.079358Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2025-12-23T18:56:32.079485Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:326:2327];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-12-23T18:56:32.079559Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:32.079702Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:32.079820Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:32.079899Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:32.079956Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:32.080023Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:32.080265Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:328:2328] finished for tablet 9437184 2025-12-23T18:56:32.080662Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:326:2327];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.046},{"events":["l_task_result"],"t":0.593},{"events":["l_ProduceResults","f_Finish"],"t":0.594},{"events":["l_ack","l_processing","l_Finish"],"t":0.595}],"full":{"a":4432429,"name":"_full_task","f":4432429,"d_finished":0,"c":0,"l":5027433,"d":595004},"events":[{"name":"bootstrap","f":4432797,"d_finished":1283,"c":1,"l":4434080,"d":1283},{"a":5026999,"name":"ack","f":4479251,"d_finished":236044,"c":421,"l":5026953,"d":236478},{"a":5026992,"name":"processing","f":4434240,"d_finished":504529,"c":843,"l":5026954,"d":504970},{"name":"ProduceResults","f":4433673,"d_finished":408164,"c":1266,"l":5027150,"d":408164},{"a":5027160,"name":"Finish","f":5027160,"d_finished":0,"c":0,"l":5027433,"d":273},{"name":"task_result","f":4434254,"d_finished":260713,"c":422,"l":5025826,"d":260713}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:32.080724Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:326:2327];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:32.081071Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:326:2327];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.046},{"events":["l_task_result"],"t":0.593},{"events":["l_ProduceResults","f_Finish"],"t":0.594},{"events":["l_ack","l_processing","l_Finish"],"t":0.595}],"full":{"a":4432429,"name":"_full_task","f":4432429,"d_finished":0,"c":0,"l":5027865,"d":595436},"events":[{"name":"bootstrap","f":4432797,"d_finished":1283,"c":1,"l":4434080,"d":1283},{"a":5026999,"name":"ack","f":4479251,"d_finished":236044,"c":421,"l":5026953,"d":236910},{"a":5026992,"name":"processing","f":4434240,"d_finished":504529,"c":843,"l":5026954,"d":505402},{"name":"ProduceResults","f":4433673,"d_finished":408164,"c":1266,"l":5027150,"d":408164},{"a":5027160,"name":"Finish","f":5027160,"d_finished":0,"c":0,"l":5027865,"d":705},{"name":"task_result","f":4434254,"d_finished":260713,"c":422,"l":5025826,"d":260713}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:32.081129Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:31.484024Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2025-12-23T18:56:32.081161Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:32.081280Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> THeavyPerfTest::TTestLoadEverything [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes >> Normalizers::SubColumnsPortionsCleanerNormalizer-useSubcolumns [GOOD] >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> 
TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2025-12-23T18:56:07.656635Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:56:07.661026Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:56:07.661438Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:07.692411Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:07.692661Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:07.699632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:07.699897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:07.700103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:07.700208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:07.700320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:07.700466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:07.700565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:07.700689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:07.700987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:07.701104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:07.701240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:07.701361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:07.701477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:07.704925Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:56:07.730316Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:07.730454Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:07.730503Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:07.730649Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:07.730785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:07.730900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:07.730932Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:07.730990Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:07.731031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:07.731060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:07.731093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:07.731241Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:07.731287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:07.731321Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:07.731347Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:07.731405Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:07.731439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:07.731468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:07.731488Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:07.731535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:07.731568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:07.731601Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:07.731633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:07.731656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:07.731674Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:07.731877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:07.731938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:07.731965Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:07.732053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:07.732104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:07.732129Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:07.732193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:07.732218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:07.732242Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:07.732279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:07.732301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... =[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:56:36.279607Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:56:36.279916Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:36.280188Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:36.280265Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:36.280476Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2025-12-23T18:56:36.280558Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2025-12-23T18:56:36.280932Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:484:2488];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-12-23T18:56:36.281168Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:36.281337Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:36.281547Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:36.281814Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:36.282038Z 
node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:36.282238Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:36.282637Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:485:2489] finished for tablet 9437184 2025-12-23T18:56:36.283407Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:484:2488];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.006},{"events":["f_ack"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":29001546,"name":"_full_task","f":29001546,"d_finished":0,"c":0,"l":29011632,"d":10086},"events":[{"name":"bootstrap","f":29001859,"d_finished":1548,"c":1,"l":29003407,"d":1548},{"a":29010689,"name":"ack","f":29008792,"d_finished":1721,"c":1,"l":29010513,"d":2664},{"a":29010667,"name":"processing","f":29003601,"d_finished":4140,"c":3,"l":29010517,"d":5105},{"name":"ProduceResults","f":29002813,"d_finished":2971,"c":6,"l":29011185,"d":2971},{"a":29011191,"name":"Finish","f":29011191,"d_finished":0,"c":0,"l":29011632,"d":441},{"name":"task_result","f":29003628,"d_finished":2336,"c":2,"l":29008539,"d":2336}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:36.283510Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:484:2488];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:36.284101Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:484:2488];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.006},{"events":["f_ack"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":29001546,"name":"_full_task","f":29001546,"d_finished":0,"c":0,"l":29012473,"d":10927},"events":[{"name":"bootstrap","f":29001859,"d_finished":1548,"c":1,"l":29003407,"d":1548},{"a":29010689,"name":"ack","f":29008792,"d_finished":1721,"c":1,"l":29010513,"d":3505},{"a":29010667,"name":"processing","f":29003601,"d_finished":4140,"c":3,"l":29010517,"d":5946},{"name":"ProduceResults","f":29002813,"d_finished":2971,"c":6,"l":29011185,"d":2971},{"a":29011191,"name":"Finish","f":29011191,"d_finished":0,"c":0,"l":29012473,"d":1282},{"name":"task_result","f":29003628,"d_finished":2336,"c":2,"l":29008539,"d":2336}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:36.284222Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:36.270666Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2025-12-23T18:56:36.284278Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:36.284467Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] Test command err: 2025-12-23T18:56:32.209889Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:32.230455Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:32.230647Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:32.236090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:32.236296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:32.236462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:32.236577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:32.236672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:32.236739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:32.236819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:32.236894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:32.236962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:32.237039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:32.237112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:32.237192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:32.237269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:32.257431Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:32.257604Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:32.257646Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:32.257791Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:32.257905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:32.257958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:32.257993Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:32.258055Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:32.258099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:32.258143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:32.258167Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:32.258320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:32.258371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:32.258397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:32.258414Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:32.258486Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:32.258523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:32.258554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:32.258581Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:32.258623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:32.258661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:32.258684Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:32.258727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:32.258787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:32.258826Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:32.258996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:32.259043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:32.259062Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:32.259158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:32.259186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:32.259202Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:32.259232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:32.259255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:32.259275Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:32.259303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:32.259342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:32.259364Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:32.259466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:32.259494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... (column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-12-23T18:56:35.129866Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-12-23T18:56:35.130106Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=2791;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-12-23T18:56:35.130262Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:35.130377Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:35.130505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:35.130658Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:35.130832Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:35.130976Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:35.131269Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:477:2488] finished for tablet 9437184 2025-12-23T18:56:35.131665Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:476:2487];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":3459890,"name":"_full_task","f":3459890,"d_finished":0,"c":0,"l":3471178,"d":11288},"events":[{"name":"bootstrap","f":3460132,"d_finished":1217,"c":1,"l":3461349,"d":1217},{"a":3470492,"name":"ack","f":3469227,"d_finished":1151,"c":1,"l":3470378,"d":1837},{"a":3470484,"name":"processing","f":3461459,"d_finished":3331,"c":3,"l":3470380,"d":4025},{"name":"ProduceResults","f":3460990,"d_finished":2027,"c":6,"l":3470848,"d":2027},{"a":3470852,"name":"Finish","f":3470852,"d_finished":0,"c":0,"l":3471178,"d":326},{"name":"task_result","f":3461471,"d_finished":2142,"c":2,"l":3469078,"d":2142}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:35.131725Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:35.132056Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:476:2487];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":3459890,"name":"_full_task","f":3459890,"d_finished":0,"c":0,"l":3471610,"d":11720},"events":[{"name":"bootstrap","f":3460132,"d_finished":1217,"c":1,"l":3461349,"d":1217},{"a":3470492,"name":"ack","f":3469227,"d_finished":1151,"c":1,"l":3470378,"d":2269},{"a":3470484,"name":"processing","f":3461459,"d_finished":3331,"c":3,"l":3470380,"d":4457},{"name":"ProduceResults","f":3460990,"d_finished":2027,"c":6,"l":3470848,"d":2027},{"a":3470852,"name":"Finish","f":3470852,"d_finished":0,"c":0,"l":3471610,"d":758},{"name":"task_result","f":3461471,"d_finished":2142,"c":2,"l":3469078,"d":2142}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:35.132115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:35.117445Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2025-12-23T18:56:35.132154Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:35.132343Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SubColumnsPortionsCleanerNormalizer-useSubcolumns [GOOD] Test command err: 
2025-12-23T18:56:27.409696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:27.437694Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:27.437957Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:27.445382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanSubColumnsPortions; 2025-12-23T18:56:27.445598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:27.445815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:27.445948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:27.446029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:27.446130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:27.446217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:27.446291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:27.446353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:27.446429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:27.446508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:27.446592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:27.446662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:27.446791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:27.480274Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:27.480619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=CleanSubColumnsPortions; 2025-12-23T18:56:27.480675Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-12-23T18:56:27.481010Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=60; 2025-12-23T18:56:27.481108Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-12-23T18:56:27.481215Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=56; 2025-12-23T18:56:27.481327Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2025-12-23T18:56:27.481492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanSubColumnsPortions;id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:27.481595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-12-23T18:56:27.481650Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-12-23T18:56:27.481821Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:27.481919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:27.481979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:27.482015Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-12-23T18:56:27.482110Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:27.482168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:27.482208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:27.482239Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:27.482423Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:27.482491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:27.482539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:27.482574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:27.482676Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:27.482762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:27.482832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:27.482866Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:27.482962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:27.483006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:27.483038Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:27.483102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:27.483167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:27.483201Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:27.483431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:27.483477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:27.483520Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:27.483733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:27.483785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:27.483831Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstr ... ds=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-12-23T18:56:34.892668Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=138;finished=1; 2025-12-23T18:56:34.892687Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:56:34.892706Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:56:34.892944Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:34.893050Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:138;schema=id: uint64 json_payload: binary;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:34.893073Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:34.893135Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=138; 2025-12-23T18:56:34.893167Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=138;batch_columns=id,json_payload; 2025-12-23T18:56:34.893246Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:288:2289];bytes=283016;rows=4162;faults=0;finished=0;fault=0;schema=id: uint64 json_payload: binary; 2025-12-23T18:56:34.893324Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:34.893397Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:34.893512Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:34.893575Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:34.893626Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:34.893692Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:34.893895Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:290:2290] finished for tablet 9437184 2025-12-23T18:56:34.894247Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:288:2289];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.026},{"events":["l_task_result"],"t":0.312},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.313}],"full":{"a":7740226,"name":"_full_task","f":7740226,"d_finished":0,"c":0,"l":8053782,"d":313556},"events":[{"name":"bootstrap","f":7740479,"d_finished":1294,"c":1,"l":7741773,"d":1294},{"a":8053407,"name":"ack","f":7766398,"d_finished":124736,"c":251,"l":8053371,"d":125111},{"a":8053402,"name":"processing","f":7741958,"d_finished":267441,"c":503,"l":8053373,"d":267821},{"name":"ProduceResults","f":7741342,"d_finished":214219,"c":756,"l":8053553,"d":214219},{"a":8053558,"name":"Finish","f":8053558,"d_finished":0,"c":0,"l":8053782,"d":224},{"name":"task_result","f":7741973,"d_finished":138418,"c":252,"l":8052557,"d":138418}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:34.894307Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:288:2289];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:34.894666Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:288:2289];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.026},{"events":["l_task_result"],"t":0.312},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.313}],"full":{"a":7740226,"name":"_full_task","f":7740226,"d_finished":0,"c":0,"l":8054202,"d":313976},"events":[{"name":"bootstrap","f":7740479,"d_finished":1294,"c":1,"l":7741773,"d":1294},{"a":8053407,"name":"ack","f":7766398,"d_finished":124736,"c":251,"l":8053371,"d":125531},{"a":8053402,"name":"processing","f":7741958,"d_finished":267441,"c":503,"l":8053373,"d":268241},{"name":"ProduceResults","f":7741342,"d_finished":214219,"c":756,"l":8053553,"d":214219},{"a":8053558,"name":"Finish","f":8053558,"d_finished":0,"c":0,"l":8054202,"d":644},{"name":"task_result","f":7741973,"d_finished":138418,"c":252,"l":8052557,"d":138418}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:34.894724Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:34.579148Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=1409432;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=1409432;selected_rows=0; 2025-12-23T18:56:34.894776Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:34.894900Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:290:2290];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;; >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_database [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] >> 
TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD] >> TSchemeShardTest::RmDirTwice |98.2%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2025-12-23T18:56:09.518988Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:56:09.523666Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:56:09.524159Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:09.557961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:09.558192Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:09.565498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:09.565769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:09.565997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:09.566114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:09.566243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:09.566435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:09.566539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:09.566674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:09.566817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:09.566954Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:09.567088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:09.567193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:09.567318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:09.570154Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:56:09.598421Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:09.598602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:09.598653Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:09.598918Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:09.599155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:09.599249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:09.599295Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:09.599383Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:09.599445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:09.599526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:09.599561Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:09.599792Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:09.599865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:09.599937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:09.599971Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:09.600067Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:09.600125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:09.600168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:09.600202Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:09.600278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:09.600334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:09.600383Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:09.600435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:09.600478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:09.600510Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:09.600734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:09.600794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:09.600832Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:09.600972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:09.601038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:09.601080Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:09.601133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:09.601172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:09.601206Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:09.601249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:09.601283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:56:40.996928Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:56:40.997266Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:40.997534Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:40.997602Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:40.997885Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2025-12-23T18:56:40.997971Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2025-12-23T18:56:40.998341Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:588:2568];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-12-23T18:56:40.998595Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:40.998790Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:40.999036Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:40.999310Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:40.999552Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:40.999819Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:41.000185Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:589:2569] finished for tablet 9437184 2025-12-23T18:56:41.000930Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:588:2568];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":31881989,"name":"_full_task","f":31881989,"d_finished":0,"c":0,"l":31893013,"d":11024},"events":[{"name":"bootstrap","f":31882308,"d_finished":1691,"c":1,"l":31883999,"d":1691},{"a":31892016,"name":"ack","f":31889958,"d_finished":1870,"c":1,"l":31891828,"d":2867},{"a":31891991,"name":"processing","f":31884220,"d_finished":4550,"c":3,"l":31891833,"d":5572},{"name":"ProduceResults","f":31883363,"d_finished":3292,"c":6,"l":31892599,"d":3292},{"a":31892607,"name":"Finish","f":31892607,"d_finished":0,"c":0,"l":31893013,"d":406},{"name":"task_result","f":31884251,"d_finished":2581,"c":2,"l":31889691,"d":2581}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:41.001046Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:588:2568];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:41.001732Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:588:2568];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":31881989,"name":"_full_task","f":31881989,"d_finished":0,"c":0,"l":31893852,"d":11863},"events":[{"name":"bootstrap","f":31882308,"d_finished":1691,"c":1,"l":31883999,"d":1691},{"a":31892016,"name":"ack","f":31889958,"d_finished":1870,"c":1,"l":31891828,"d":3706},{"a":31891991,"name":"processing","f":31884220,"d_finished":4550,"c":3,"l":31891833,"d":6411},{"name":"ProduceResults","f":31883363,"d_finished":3292,"c":6,"l":31892599,"d":3292},{"a":31892607,"name":"Finish","f":31892607,"d_finished":0,"c":0,"l":31893852,"d":1245},{"name":"task_result","f":31884251,"d_finished":2581,"c":2,"l":31889691,"d":2581}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:41.001861Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:40.987159Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2025-12-23T18:56:41.001919Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:41.002116Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:589:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 192 112 28 48 32 24 16 24 56 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.2%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} |98.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode >> TColumnShardTestReadWrite::WriteReadStandalone >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart >> TSchemeShardTest::Restart [GOOD] >> TSchemeShardTest::SchemeErrors >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] >> TSchemeShardTest::SchemeErrors [GOOD] >> TSchemeShardTest::SerializedCellVec >> TSchemeShardTest::SerializedCellVec [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate >> TColumnShardTestSchema::OneColdTier [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516758.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=166516758.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516758.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=146516758.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515558.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=146515558.000000s;Name=;Codec=}; 2025-12-23T18:56:00.223443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:00.254605Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:00.254863Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:00.261679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:00.261938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:00.262170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:00.262289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:00.262424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:00.262552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:00.262655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:00.262783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:00.262901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:00.263009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:00.263114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:00.263224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:00.263330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:00.294627Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:00.294822Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:00.294877Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:00.295064Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:00.295231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:00.295334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:00.295381Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:00.295477Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:00.295543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:00.295587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:00.295617Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:00.295778Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:00.295839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:00.295880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:00.295910Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:00.296002Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:00.296052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:00.296093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:00.296142Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:00.296226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:00.296267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:00.296299Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:00.296343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:00.296403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:00.296438Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:00.296658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:00.296734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:00.296772Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:00.296887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:00.296932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:00.296961Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:00.297008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:00.297048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:00.297077Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:00.297125Z node 1 :TX_COLUM ... 
; 2025-12-23T18:56:46.270980Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:876:2834];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:56:46.271929Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 7 at tablet 9437184 2025-12-23T18:56:46.272195Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766516196371:max} readable: {1766516196371:max} at tablet 9437184 2025-12-23T18:56:46.272351Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:56:46.272563Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516196371:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:46.272673Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516196371:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:46.273186Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516196371:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:56:46.274978Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516196371:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:56:46.275964Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516196371:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:884:2842];trace_detailed=; 2025-12-23T18:56:46.276495Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:56:46.276709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:56:46.277012Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:46.277208Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:46.277525Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:46.277661Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:46.277777Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:46.278048Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:884:2842] finished for tablet 9437184 2025-12-23T18:56:46.278543Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:883:2841];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":47603038,"name":"_full_task","f":47603038,"d_finished":0,"c":0,"l":47605264,"d":2226},"events":[{"name":"bootstrap","f":47603315,"d_finished":1083,"c":1,"l":47604398,"d":1083},{"a":47604653,"name":"ack","f":47604653,"d_finished":0,"c":0,"l":47605264,"d":611},{"a":47604633,"name":"processing","f":47604633,"d_finished":0,"c":0,"l":47605264,"d":631},{"name":"ProduceResults","f":47604026,"d_finished":625,"c":2,"l":47604967,"d":625},{"a":47604978,"name":"Finish","f":47604978,"d_finished":0,"c":0,"l":47605264,"d":286}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:46.278650Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:883:2841];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:46.279174Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:883:2841];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":47603038,"name":"_full_task","f":47603038,"d_finished":0,"c":0,"l":47605859,"d":2821},"events":[{"name":"bootstrap","f":47603315,"d_finished":1083,"c":1,"l":47604398,"d":1083},{"a":47604653,"name":"ack","f":47604653,"d_finished":0,"c":0,"l":47605859,"d":1206},{"a":47604633,"name":"processing","f":47604633,"d_finished":0,"c":0,"l":47605859,"d":1226},{"name":"ProduceResults","f":47604026,"d_finished":625,"c":2,"l":47604967,"d":625},{"a":47604978,"name":"Finish","f":47604978,"d_finished":0,"c":0,"l":47605859,"d":881}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:884:2842]->[1:883:2841] 2025-12-23T18:56:46.279286Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:46.274945Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:56:46.279344Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:46.279479Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate >> TColumnShardTestReadWrite::RebootWriteReadStandalone >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] >> TColumnShardTestReadWrite::WriteOverload-InStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=166516748.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516748.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516748.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=166516748.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516748.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516748.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=166516748.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515548.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146516748.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=146516748.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515548.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146515548.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=146515548.000000s;Name=;Codec=}; 2025-12-23T18:55:49.034753Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:49.054021Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:55:49.054212Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:49.059255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:49.059458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:49.059614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:49.059719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:49.059791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:49.059867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:49.059929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:49.059987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:49.060046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:49.060108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:49.060182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:49.060239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:49.060301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:49.080496Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:49.080624Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:49.080662Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:49.080809Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:49.080922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:49.080986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:49.081014Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:49.081070Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:49.081110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:49.081134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:49.081151Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:49.081292Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:49.081336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:49.081367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2025-12-23T18:55:49.081385Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:49.081443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:49.081471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:49.081508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:49.081530Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:49.081572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:49.081594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:49.081612Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:49.081639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:49.081661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:49.081680Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:49.081828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:49.081858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:49.081875Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:49.081945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:49.081970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:49.081988Z 
nod ... nshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:46.714387Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:46.714504Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766516197369:max} readable: {1766516197369:max} at tablet 9437184 2025-12-23T18:56:46.714598Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:56:46.714731Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516197369:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:46.714796Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516197369:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:46.715152Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516197369:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:56:46.716759Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516197369:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:56:46.717693Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516197369:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1451:3396];trace_detailed=; 2025-12-23T18:56:46.718176Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:56:46.718395Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:56:46.718658Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:46.718885Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:46.719322Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:46.719440Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:46.719542Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:46.719789Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1451:3396] finished for tablet 9437184 2025-12-23T18:56:46.720332Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1450:3395];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":59877120,"name":"_full_task","f":59877120,"d_finished":0,"c":0,"l":59879396,"d":2276},"events":[{"name":"bootstrap","f":59877429,"d_finished":1056,"c":1,"l":59878485,"d":1056},{"a":59878838,"name":"ack","f":59878838,"d_finished":0,"c":0,"l":59879396,"d":558},{"a":59878823,"name":"processing","f":59878823,"d_finished":0,"c":0,"l":59879396,"d":573},{"name":"ProduceResults","f":59878076,"d_finished":626,"c":2,"l":59879104,"d":626},{"a":59879109,"name":"Finish","f":59879109,"d_finished":0,"c":0,"l":59879396,"d":287}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:46.720429Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1450:3395];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:46.720916Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1450:3395];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":59877120,"name":"_full_task","f":59877120,"d_finished":0,"c":0,"l":59880025,"d":2905},"events":[{"name":"bootstrap","f":59877429,"d_finished":1056,"c":1,"l":59878485,"d":1056},{"a":59878838,"name":"ack","f":59878838,"d_finished":0,"c":0,"l":59880025,"d":1187},{"a":59878823,"name":"processing","f":59878823,"d_finished":0,"c":0,"l":59880025,"d":1202},{"name":"ProduceResults","f":59878076,"d_finished":626,"c":2,"l":59879104,"d":626},{"a":59879109,"name":"Finish","f":59879109,"d_finished":0,"c":0,"l":59880025,"d":916}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1451:3396]->[1:1450:3395] 2025-12-23T18:56:46.721034Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:46.716737Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:56:46.721090Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:46.721226Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1451:3396];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> AnalyzeColumnshard::AnalyzeEmptyTable [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2025-12-23T18:56:16.067751Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:56:16.071818Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:56:16.072346Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:16.096229Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:16.096485Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:16.102673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:16.102950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:16.103136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:16.103215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:16.103300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:16.103413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:16.103481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:16.103596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:16.103680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:16.103759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:16.103871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:16.103989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:16.104105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:16.106756Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:56:16.127503Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:16.127677Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:16.127741Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:16.127908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:16.128090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:16.128225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:16.128291Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:16.128424Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:16.128520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:16.128595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:16.128634Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:16.128894Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:16.128982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:16.129027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:16.129097Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:16.129224Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:16.129312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:16.129404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:16.129447Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:16.129591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:16.129668Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:16.129741Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:16.129801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:16.129840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:16.129878Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:16.130052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:16.130094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:16.130126Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:16.130243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:16.130276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:16.130311Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:16.130356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:16.130387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:16.130411Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:16.130441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:16.130467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
inished=1; 2025-12-23T18:56:47.335584Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:56:47.335615Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:56:47.335864Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:47.336063Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:47.336112Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:47.336276Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2025-12-23T18:56:47.336362Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2025-12-23T18:56:47.336644Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:758:2738];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2025-12-23T18:56:47.336863Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:47.336981Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:47.337127Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:47.337292Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:47.337458Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:47.337607Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:47.337897Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:759:2739] finished for tablet 9437184 2025-12-23T18:56:47.338370Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:758:2738];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.008}],"full":{"a":31647631,"name":"_full_task","f":31647631,"d_finished":0,"c":0,"l":31656494,"d":8863},"events":[{"name":"bootstrap","f":31647931,"d_finished":1719,"c":1,"l":31649650,"d":1719},{"a":31655805,"name":"ack","f":31654375,"d_finished":1316,"c":1,"l":31655691,"d":2005},{"a":31655791,"name":"processing","f":31649906,"d_finished":3521,"c":3,"l":31655694,"d":4224},{"name":"ProduceResults","f":31648972,"d_finished":2607,"c":6,"l":31656162,"d":2607},{"a":31656166,"name":"Finish","f":31656166,"d_finished":0,"c":0,"l":31656494,"d":328},{"name":"task_result","f":31649942,"d_finished":2120,"c":2,"l":31654167,"d":2120}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:47.338443Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:758:2738];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:47.338971Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:758:2738];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":31647631,"name":"_full_task","f":31647631,"d_finished":0,"c":0,"l":31657019,"d":9388},"events":[{"name":"bootstrap","f":31647931,"d_finished":1719,"c":1,"l":31649650,"d":1719},{"a":31655805,"name":"ack","f":31654375,"d_finished":1316,"c":1,"l":31655691,"d":2530},{"a":31655791,"name":"processing","f":31649906,"d_finished":3521,"c":3,"l":31655694,"d":4749},{"name":"ProduceResults","f":31648972,"d_finished":2607,"c":6,"l":31656162,"d":2607},{"a":31656166,"name":"Finish","f":31656166,"d_finished":0,"c":0,"l":31657019,"d":853},{"name":"task_result","f":31649942,"d_finished":2120,"c":2,"l":31654167,"d":2120}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:47.339054Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:47.326959Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59288;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59288;selected_rows=0; 2025-12-23T18:56:47.339099Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:47.339250Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2025-12-23T18:56:43.142171Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:43.163599Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 
2025-12-23T18:56:43.163852Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:43.169613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:43.169915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:43.170133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:43.170281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:43.170418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:43.170530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:43.170649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:43.170787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:43.170907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:43.171049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:43.171158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:43.171249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:43.171397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:43.192213Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:43.192342Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:43.192379Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:43.192521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:43.192635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:43.192680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:43.192713Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:43.192771Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:43.192812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:43.192862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:43.192888Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:43.193038Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:43.193079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:43.193117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:43.193137Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:43.193197Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:43.193227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:43.193266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-12-23T18:56:43.193288Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:43.193327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:43.193350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:43.193367Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:43.193413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:43.193442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:43.193493Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:43.193650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:43.193683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:43.193701Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:43.193787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:43.193819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:43.193837Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:43.193873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:43.193897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:43.193914Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2025-12-23T18:56:43.193942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:43.193962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:43.193986Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:43.194067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:43.194097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-12-23T18:56:45.905395Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-12-23T18:56:45.905612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-12-23T18:56:45.905766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:45.905873Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:45.905988Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:45.906142Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:45.906265Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:45.906379Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:45.906627Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:477:2488] finished for tablet 9437184 2025-12-23T18:56:45.907038Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:476:2487];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":3136035,"name":"_full_task","f":3136035,"d_finished":0,"c":0,"l":3146442,"d":10407},"events":[{"name":"bootstrap","f":3136254,"d_finished":1188,"c":1,"l":3137442,"d":1188},{"a":3145898,"name":"ack","f":3144745,"d_finished":1031,"c":1,"l":3145776,"d":1575},{"a":3145887,"name":"processing","f":3137545,"d_finished":3039,"c":3,"l":3145778,"d":3594},{"name":"ProduceResults","f":3137065,"d_finished":1812,"c":6,"l":3146170,"d":1812},{"a":3146174,"name":"Finish","f":3146174,"d_finished":0,"c":0,"l":3146442,"d":268},{"name":"task_result","f":3137558,"d_finished":1964,"c":2,"l":3144606,"d":1964}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:45.907092Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:45.907381Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:476:2487];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":3136035,"name":"_full_task","f":3136035,"d_finished":0,"c":0,"l":3146896,"d":10861},"events":[{"name":"bootstrap","f":3136254,"d_finished":1188,"c":1,"l":3137442,"d":1188},{"a":3145898,"name":"ack","f":3144745,"d_finished":1031,"c":1,"l":3145776,"d":2029},{"a":3145887,"name":"processing","f":3137545,"d_finished":3039,"c":3,"l":3145778,"d":4048},{"name":"ProduceResults","f":3137065,"d_finished":1812,"c":6,"l":3146170,"d":1812},{"a":3146174,"name":"Finish","f":3146174,"d_finished":0,"c":0,"l":3146896,"d":722},{"name":"task_result","f":3137558,"d_finished":1964,"c":2,"l":3144606,"d":1964}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:45.907436Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:45.893799Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-12-23T18:56:45.907469Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:45.907635Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig [GOOD] >> TSchemeShardTest::RejectAlterSolomon ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> 
AnalyzeColumnshard::AnalyzeEmptyTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:29.248791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:29.317458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:29.321083Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:29.321350Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:29.321415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:29.630016Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:29.704545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:29.704644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:29.761259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:29.849567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00492e/r3tmp/tmpNOZRBp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 65010, node 1 2025-12-23T18:55:30.448768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:30.449493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:30.449527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:30.449549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:30.449722Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:30.510618Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:16261 2025-12-23T18:55:31.015679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:33.304274Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:33.309239Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:33.311847Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:33.333622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:33.333699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:33.370105Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:33.371673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:33.513490Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:33.513609Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:33.528022Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:33.605568Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.606156Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.606652Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.607036Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.607327Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.607558Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.607632Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.607701Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.607789Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:33.781023Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:33.810021Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:33.810089Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:33.828795Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:33.829928Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:33.830073Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:33.830119Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:55:33.830156Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:33.830192Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: 
table count# 0 2025-12-23T18:55:33.830229Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:33.830261Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:33.830982Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:33.842275Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:33.842374Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2133:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:33.861948Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2202:2640] 2025-12-23T18:55:33.862097Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2202:2640], schemeshard id = 72075186224037897 2025-12-23T18:55:33.925780Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:33.950298Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2280:2670] 2025-12-23T18:55:33.952637Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:33.960812Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2288:2676] Owner: [2:2287:2675]. Describe result: PathErrorUnknown 2025-12-23T18:55:33.960855Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2288:2676] Owner: [2:2287:2675]. Creating table 2025-12-23T18:55:33.960915Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2288:2676] Owner: [2:2287:2675]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:33.967652Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2346:2702], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:33.970687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:33.975598Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2288:2676] Owner: [2:2287:2675]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:33.975728Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2288:2676] Owner: [2:2287:2675]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:33.983610Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2288:2676] Owner: [2:2287:2675]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:34.087990Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:34.388299Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2288:2676] Owner: [2:2287:2675]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:34.476835Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2288:2676] Owner: [2:2287:2675]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:34.476931Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2288:2676] Owner: [2:2287:2 ... N: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:55:43.495174Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:45.239533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:55:45.239606Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:55.813459Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:55:55.813526Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:55:55.813562Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:55:55.813588Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:55:57.714954Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2025-12-23T18:55:57.715047Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:55:57.715290Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2025-12-23T18:55:57.728863Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:55:57.989765Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T18:55:57.993822Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4428:2465], ActorId: [2:4438:3997], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:55:58.006435Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4428:2465], ActorId: [2:4438:3997], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2025-12-23T18:55:58.006601Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4428:2465], ActorId: [2:4438:3997], Start read next stream part 2025-12-23T18:55:58.014907Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4450:4003], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:58.015009Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4462:4008], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:58.015156Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:58.016263Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4466:4012], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:58.016373Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:58.021020Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4483:4017], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:58.023745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:58.066217Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4465:4011], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-12-23T18:55:58.181218Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4549:4060], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:58.238581Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4548:4059] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:58.487658Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4570:4073]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:58.487883Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:58.487951Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4572:4075] 2025-12-23T18:55:58.488011Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4572:4075] 2025-12-23T18:55:58.488332Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4573:4076] 2025-12-23T18:55:58.488422Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4572:4075], server id = [2:4573:4076], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:58.488488Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4573:4076], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:58.488546Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:55:58.488677Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:55:58.488754Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4570:4073], StatRequests.size() = 1 2025-12-23T18:55:58.488847Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:56:47.041301Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4428:2465], ActorId: [2:4438:3997], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:56:47.041503Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4428:2465], ActorId: [2:4438:3997], Start read next stream part 2025-12-23T18:56:47.041992Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd692vhp57fyazjvedkkpzr2", SessionId: ydb://session/3?node_id=2&id=OGUxMjg5MTUtMTkyM2FiMmQtOGQ2MGQyLTMyZGIzNjdk, Slow query, duration: 49.029969s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:56:47.042646Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4757:4187], ActorId: [2:4758:4188], Starting query actor #1 [2:4759:4189] 2025-12-23T18:56:47.042718Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4758:4188], ActorId: [2:4759:4189], 
Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:56:47.045172Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2025-12-23T18:56:47.045747Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4758:4188], ActorId: [2:4759:4189], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MTQ2ZWRkNWItMTllZThhNWYtZTlmYTk4NWYtMTZmY2I1NTI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:56:47.046609Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4428:2465], ActorId: [2:4438:3997], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:56:47.046660Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4428:2465], ActorId: [2:4438:3997], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTNhZjVhMmMtNWEzMWYzYzgtMmQ5YWY4Y2YtYzJmMTkzZjc=, TxId: 2025-12-23T18:56:47.114809Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4777:4204]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:47.115039Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:47.115077Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4777:4204], StatRequests.size() = 1 2025-12-23T18:56:47.240133Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4758:4188], ActorId: [2:4759:4189], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTQ2ZWRkNWItMTllZThhNWYtZTlmYTk4NWYtMTZmY2I1NTI=, TxId: 2025-12-23T18:56:47.240212Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4758:4188], ActorId: [2:4759:4189], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTQ2ZWRkNWItMTllZThhNWYtZTlmYTk4NWYtMTZmY2I1NTI=, TxId: 2025-12-23T18:56:47.240482Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4757:4187], ActorId: [2:4758:4188], Got response [2:4759:4189] SUCCESS 2025-12-23T18:56:47.240700Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:56:47.254398Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:56:47.254514Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3505:3448] |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TSchemeShardTest::RejectAlterSolomon [GOOD] >> TSchemeShardTest::SimultaneousDropForceDrop >> TColumnShardTestSchema::RebootHotTiers [GOOD] >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn [GOOD] >> TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-false >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516744.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516744.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=166516744.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516744.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516744.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=166516744.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515544.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146516744.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=146516744.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515544.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146515544.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=146515544.000000s;Name=;Codec=}; 2025-12-23T18:55:44.295915Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:44.313517Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:55:44.313704Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:44.318750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:44.318984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:44.319137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:44.319212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:44.319287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:44.319357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:44.319420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:44.319484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:44.319545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:44.319609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:44.319667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:44.319720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:44.319785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:44.337723Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:44.337854Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:44.337892Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:44.338015Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:44.338116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:44.338168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:44.338197Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:44.338254Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:44.338298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:44.338324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:44.338355Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:44.338471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:44.338526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:44.338564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:55:44.338594Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:44.338683Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:44.338719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:44.338765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:44.338800Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:44.338854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:44.338894Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:44.338917Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:44.338952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:44.338986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:44.339012Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:44.339131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:44.339160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:44.339177Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:44.339253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:44.339278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:44.339 ... 
17; 2025-12-23T18:56:49.904600Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=79; 2025-12-23T18:56:49.904629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=5518; 2025-12-23T18:56:49.904664Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=5618; 2025-12-23T18:56:49.904712Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2025-12-23T18:56:49.904771Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=28; 2025-12-23T18:56:49.904801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=6166; 2025-12-23T18:56:49.904916Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=72; 2025-12-23T18:56:49.905010Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=59; 2025-12-23T18:56:49.905121Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=74; 2025-12-23T18:56:49.905229Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=75; 2025-12-23T18:56:49.907828Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2553; 2025-12-23T18:56:49.910783Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2888; 2025-12-23T18:56:49.910855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-12-23T18:56:49.910908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-12-23T18:56:49.910957Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:56:49.911041Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2025-12-23T18:56:49.911077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-12-23T18:56:49.911150Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2025-12-23T18:56:49.911184Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-12-23T18:56:49.911240Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2025-12-23T18:56:49.911316Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=44; 2025-12-23T18:56:49.911546Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=199; 2025-12-23T18:56:49.911577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=19689; 2025-12-23T18:56:49.911681Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:56:49.911769Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:56:49.911814Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:56:49.911865Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:49.925184Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=1; 2025-12-23T18:56:49.925328Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:49.925400Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 
2025-12-23T18:56:49.925454Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515894136;tx_id=18446744073709551615;;current_snapshot_ts=1766516145615; 2025-12-23T18:56:49.925500Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:56:49.925537Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:49.925577Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:49.925643Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:49.925846Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.056000s; 2025-12-23T18:56:49.926823Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:56:49.927517Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:56:49.927558Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:49.927630Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2025-12-23T18:56:49.927680Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515894136;tx_id=18446744073709551615;;current_snapshot_ts=1766516145615; 2025-12-23T18:56:49.927724Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:56:49.927759Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:49.927789Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:49.927850Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:49.928179Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.181000s; 2025-12-23T18:56:49.928211Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2025-12-23T18:56:11.293619Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:56:11.298044Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:56:11.298539Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:11.332848Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:11.333125Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:11.340565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:11.340842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:11.341055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:11.341196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:11.341320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:11.341466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:11.341588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:11.341721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:11.341843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:11.341974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:11.342098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:11.342227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:11.342346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:11.345302Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:56:11.373190Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:11.373362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:11.373410Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:11.373598Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:11.373792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:11.373870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:11.373910Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:11.373993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:11.374048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:11.374089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:11.374124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:11.374322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:11.374399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:11.374438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:11.374465Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:11.374557Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:11.374605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:11.374655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:11.374681Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:11.374771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:11.374827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-12-23T18:56:11.374877Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:11.374939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:11.374976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:11.375009Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:11.375211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:11.375256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:11.375288Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:11.375428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:11.375470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:11.375501Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:11.375543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:11.375577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:11.375612Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:11.375651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:11.375684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
6:50.305617Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:56:50.305654Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:56:50.305983Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:50.306260Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.306335Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:50.306603Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2025-12-23T18:56:50.306695Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2025-12-23T18:56:50.307096Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2595];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-12-23T18:56:50.307369Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.307560Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.307801Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.308106Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:50.308361Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.308582Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.308972Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:618:2596] finished for tablet 9437184 2025-12-23T18:56:50.309702Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:617:2595];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":39397423,"name":"_full_task","f":39397423,"d_finished":0,"c":0,"l":39408971,"d":11548},"events":[{"name":"bootstrap","f":39397780,"d_finished":1933,"c":1,"l":39399713,"d":1933},{"a":39407949,"name":"ack","f":39405818,"d_finished":1928,"c":1,"l":39407746,"d":2950},{"a":39407916,"name":"processing","f":39399960,"d_finished":4494,"c":3,"l":39407752,"d":5549},{"name":"ProduceResults","f":39399047,"d_finished":3292,"c":6,"l":39408493,"d":3292},{"a":39408501,"name":"Finish","f":39408501,"d_finished":0,"c":0,"l":39408971,"d":470},{"name":"task_result","f":39400004,"d_finished":2463,"c":2,"l":39405557,"d":2463}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.309812Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2595];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:50.310451Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:617:2595];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.011},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":39397423,"name":"_full_task","f":39397423,"d_finished":0,"c":0,"l":39409755,"d":12332},"events":[{"name":"bootstrap","f":39397780,"d_finished":1933,"c":1,"l":39399713,"d":1933},{"a":39407949,"name":"ack","f":39405818,"d_finished":1928,"c":1,"l":39407746,"d":3734},{"a":39407916,"name":"processing","f":39399960,"d_finished":4494,"c":3,"l":39407752,"d":6333},{"name":"ProduceResults","f":39399047,"d_finished":3292,"c":6,"l":39408493,"d":3292},{"a":39408501,"name":"Finish","f":39408501,"d_finished":0,"c":0,"l":39409755,"d":1254},{"name":"task_result","f":39400004,"d_finished":2463,"c":2,"l":39405557,"d":2463}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.310608Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:50.295376Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2025-12-23T18:56:50.310698Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:50.310974Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-false [GOOD] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-true >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516762.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=166516762.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516762.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=146516762.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515562.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=146515562.000000s;Name=;Codec=}; 2025-12-23T18:56:04.269502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:04.291485Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:04.291730Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:04.298007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:04.298231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:04.298410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:04.298488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:04.298579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:04.298661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:04.298777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:04.298856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:04.298945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:04.299017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:04.299114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:04.299191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:04.299277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:04.319139Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:04.319278Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:04.319325Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:04.319461Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:04.319611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:04.319679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:04.319714Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:04.319797Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:04.319848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:04.319880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:04.319902Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:04.320035Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:04.320113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:04.320154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:04.320178Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:04.320246Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:04.320288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:04.320317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:04.320337Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:04.320384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:04.320412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:04.320437Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:04.320471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:04.320497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:04.320517Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:04.320682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:04.320730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:04.320757Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:04.320847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:04.320900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:04.320927Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:04.320967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:04.321000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:04.321022Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:04.321050Z node 1 :TX_COLUM ... ; 2025-12-23T18:56:50.916532Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:874:2832];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:56:50.917405Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 7 at tablet 9437184 2025-12-23T18:56:50.917727Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766516200418:max} readable: {1766516200418:max} at tablet 9437184 2025-12-23T18:56:50.917851Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:56:50.918024Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516200418:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:50.918117Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516200418:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:50.918631Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516200418:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:56:50.920344Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516200418:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:56:50.921348Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516200418:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:882:2840];trace_detailed=; 2025-12-23T18:56:50.921788Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:56:50.922037Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:56:50.922305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.922479Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.922789Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:50.922942Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.923074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.923326Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:882:2840] finished for tablet 9437184 2025-12-23T18:56:50.923800Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:881:2839];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":48204427,"name":"_full_task","f":48204427,"d_finished":0,"c":0,"l":48206535,"d":2108},"events":[{"name":"bootstrap","f":48204662,"d_finished":1005,"c":1,"l":48205667,"d":1005},{"a":48205917,"name":"ack","f":48205917,"d_finished":0,"c":0,"l":48206535,"d":618},{"a":48205881,"name":"processing","f":48205881,"d_finished":0,"c":0,"l":48206535,"d":654},{"name":"ProduceResults","f":48205343,"d_finished":613,"c":2,"l":48206259,"d":613},{"a":48206265,"name":"Finish","f":48206265,"d_finished":0,"c":0,"l":48206535,"d":270}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:50.923904Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:881:2839];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:50.924345Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:881:2839];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":48204427,"name":"_full_task","f":48204427,"d_finished":0,"c":0,"l":48207110,"d":2683},"events":[{"name":"bootstrap","f":48204662,"d_finished":1005,"c":1,"l":48205667,"d":1005},{"a":48205917,"name":"ack","f":48205917,"d_finished":0,"c":0,"l":48207110,"d":1193},{"a":48205881,"name":"processing","f":48205881,"d_finished":0,"c":0,"l":48207110,"d":1229},{"name":"ProduceResults","f":48205343,"d_finished":613,"c":2,"l":48206259,"d":613},{"a":48206265,"name":"Finish","f":48206265,"d_finished":0,"c":0,"l":48207110,"d":845}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:882:2840]->[1:881:2839] 2025-12-23T18:56:50.924439Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:50.920314Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:56:50.924500Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:50.924632Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> THealthCheckTest::TestStateStorageRed [GOOD] >> 
TColumnShardTestSchema::RebootOneColdTier [GOOD] >> TColumnShardTestReadWrite::WriteOverload-InStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516764.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=166516764.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516764.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=146516764.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515564.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=146515564.000000s;Name=;Codec=}; 2025-12-23T18:56:05.777566Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:05.807906Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:05.808119Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:05.814716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:05.814993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:05.815208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:05.815321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:05.815453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:05.815575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:05.815681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:05.815782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:05.815894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:05.816005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:05.816109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:05.816217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:05.816320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:05.843792Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:05.843909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:05.843942Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:05.844059Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:05.844165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:05.844234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:05.844272Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:05.844339Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:05.844385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:05.844417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:05.844437Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:05.844538Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:05.844584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:05.844609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:05.844626Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:05.844691Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:05.844732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:05.844761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:05.844790Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:05.844835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:05.844859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:05.844876Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:05.844904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:05.844927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:05.844946Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:05.845084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:05.845117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:05.845138Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:05.845213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:05.845239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:05.845259Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:05.845294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:05.845332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:05.845361Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:05.845397Z node 1 :TX_COLUM ... ; 2025-12-23T18:56:51.450701Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:876:2834];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:56:51.451659Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 7 at tablet 9437184 2025-12-23T18:56:51.451958Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766516201928:max} readable: {1766516201928:max} at tablet 9437184 2025-12-23T18:56:51.452104Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:56:51.452306Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516201928:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:51.452385Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516201928:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:56:51.452916Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516201928:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:56:51.454622Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516201928:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:56:51.455598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516201928:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:884:2842];trace_detailed=; 2025-12-23T18:56:51.456147Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:56:51.456358Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:56:51.456643Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:51.456859Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:51.457114Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:51.457245Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:51.457361Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:51.457602Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:884:2842] finished for tablet 9437184 2025-12-23T18:56:51.458157Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:883:2841];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":47239554,"name":"_full_task","f":47239554,"d_finished":0,"c":0,"l":47241717,"d":2163},"events":[{"name":"bootstrap","f":47239854,"d_finished":1085,"c":1,"l":47240939,"d":1085},{"a":47241137,"name":"ack","f":47241137,"d_finished":0,"c":0,"l":47241717,"d":580},{"a":47241120,"name":"processing","f":47241120,"d_finished":0,"c":0,"l":47241717,"d":597},{"name":"ProduceResults","f":47240561,"d_finished":629,"c":2,"l":47241439,"d":629},{"a":47241444,"name":"Finish","f":47241444,"d_finished":0,"c":0,"l":47241717,"d":273}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:51.458247Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:883:2841];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:51.458677Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:883:2841];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":47239554,"name":"_full_task","f":47239554,"d_finished":0,"c":0,"l":47242346,"d":2792},"events":[{"name":"bootstrap","f":47239854,"d_finished":1085,"c":1,"l":47240939,"d":1085},{"a":47241137,"name":"ack","f":47241137,"d_finished":0,"c":0,"l":47242346,"d":1209},{"a":47241120,"name":"processing","f":47241120,"d_finished":0,"c":0,"l":47242346,"d":1226},{"name":"ProduceResults","f":47240561,"d_finished":629,"c":2,"l":47241439,"d":629},{"a":47241444,"name":"Finish","f":47241444,"d_finished":0,"c":0,"l":47242346,"d":902}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:884:2842]->[1:883:2841] 2025-12-23T18:56:51.458844Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:51.454591Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:56:51.458896Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:51.459041Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:884:2842];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-true [GOOD] >> 
TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::SplitAlterCopy >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] >> TColumnShardTestSchema::ForgetAfterFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516762.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=166516762.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516762.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=146516762.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515562.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=146515562.000000s;Name=;Codec=}; 2025-12-23T18:56:03.441637Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:03.475380Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:03.475603Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:03.482649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:03.482958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:03.483200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:03.483330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:03.483480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:03.483613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:03.483724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 
2025-12-23T18:56:03.483846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:03.483958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:03.484077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:03.484188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:03.484302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:03.484412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:03.514124Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:03.514288Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:03.514334Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:03.514499Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:03.514651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:03.514759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:03.514803Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:03.514904Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:03.514971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:03.515016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:03.515046Z node 1 
:TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:03.515223Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:03.515293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:03.515334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:03.515369Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:03.515465Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:03.515522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:03.515579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:03.515615Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:03.515680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:03.515717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:03.515746Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:03.515794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:03.515831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:03.515862Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:03.516085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:03.516154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:03.516188Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:03.516308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:03.516354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:03.516385Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:03.516431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:03.516475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:03.516503Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:03.516547Z node 1 :TX_COLUM ... rd;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2025-12-23T18:56:51.391342Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=71; 2025-12-23T18:56:51.391377Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=3249; 2025-12-23T18:56:51.391428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=3387; 2025-12-23T18:56:51.391508Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2025-12-23T18:56:51.391570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=23; 2025-12-23T18:56:51.391602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=3995; 2025-12-23T18:56:51.391710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=67; 2025-12-23T18:56:51.391809Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=58; 2025-12-23T18:56:51.391915Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=69; 2025-12-23T18:56:51.392008Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=51; 2025-12-23T18:56:51.394038Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1980; 2025-12-23T18:56:51.396195Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2077; 2025-12-23T18:56:51.396271Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-12-23T18:56:51.396315Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-12-23T18:56:51.396350Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-12-23T18:56:51.396410Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=32; 2025-12-23T18:56:51.396458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-12-23T18:56:51.396540Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2025-12-23T18:56:51.396583Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-12-23T18:56:51.396636Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2025-12-23T18:56:51.396701Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=37; 
2025-12-23T18:56:51.396956Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=189; 2025-12-23T18:56:51.397000Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=18287; 2025-12-23T18:56:51.397109Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:56:51.397213Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:56:51.397269Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:56:51.397325Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:51.407715Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=1; 2025-12-23T18:56:51.407830Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:51.407924Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2025-12-23T18:56:51.407988Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515900973;tx_id=18446744073709551615;;current_snapshot_ts=1766516188860; 2025-12-23T18:56:51.408030Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:56:51.408069Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:51.408103Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:51.408174Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:51.408348Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.195000s; 2025-12-23T18:56:51.410215Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 
2025-12-23T18:56:51.410353Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:56:51.410393Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:51.410471Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2025-12-23T18:56:51.410522Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515900973;tx_id=18446744073709551615;;current_snapshot_ts=1766516188860; 2025-12-23T18:56:51.410557Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:56:51.410594Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:51.410631Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:51.410701Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:51.411386Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.100000s; 2025-12-23T18:56:51.411432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516763.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=166516763.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516763.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=146516763.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515563.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=146515563.000000s;Name=;Codec=}; 2025-12-23T18:56:04.555280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:04.577965Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:04.578154Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:04.583991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:04.584194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:04.584391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:04.584492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:04.584590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:04.584674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:04.584753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:04.584828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:04.584897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:04.584965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:04.585034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:04.585103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:04.585168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:04.604355Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:04.604483Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:04.604528Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:04.604671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:04.604823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:04.604898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:04.604940Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:04.605017Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:04.605069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:04.605102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:04.605124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:04.605251Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:04.605296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:04.605329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:04.605351Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:04.605418Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:04.605457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:04.605486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:04.605518Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:04.605572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:04.605603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:04.605632Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:04.605683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:04.605712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:04.605732Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:04.605894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:04.605935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:04.605966Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:04.606070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:04.606107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:04.606128Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:04.606162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:04.606191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:04.606215Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:04.606246Z node 1 :TX_COLUM ... ard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=7; 2025-12-23T18:56:51.751710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=75; 2025-12-23T18:56:51.751736Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2467; 2025-12-23T18:56:51.751764Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2559; 2025-12-23T18:56:51.751801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=7; 2025-12-23T18:56:51.751857Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=19; 2025-12-23T18:56:51.751885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2990; 2025-12-23T18:56:51.751973Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=52; 2025-12-23T18:56:51.752043Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=41; 2025-12-23T18:56:51.752137Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=63; 2025-12-23T18:56:51.752202Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=38; 2025-12-23T18:56:51.753526Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1275; 2025-12-23T18:56:51.754818Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1215; 2025-12-23T18:56:51.754879Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-12-23T18:56:51.754908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-12-23T18:56:51.754950Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-12-23T18:56:51.754996Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=25; 2025-12-23T18:56:51.755027Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2025-12-23T18:56:51.755081Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=35; 2025-12-23T18:56:51.755106Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2025-12-23T18:56:51.755145Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=19; 2025-12-23T18:56:51.755217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=49; 2025-12-23T18:56:51.755369Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=126; 
2025-12-23T18:56:51.755397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=13124; 2025-12-23T18:56:51.755496Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:56:51.755587Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:56:51.755640Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:56:51.755722Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:51.762986Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=1; 2025-12-23T18:56:51.763129Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:51.763222Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2025-12-23T18:56:51.763275Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515902089;tx_id=18446744073709551615;;current_snapshot_ts=1766516189976; 2025-12-23T18:56:51.763306Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:56:51.763342Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:51.763383Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:51.763483Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:51.763660Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.114000s; 2025-12-23T18:56:51.764876Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:56:51.764985Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:56:51.765028Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:51.765094Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2025-12-23T18:56:51.765157Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515902089;tx_id=18446744073709551615;;current_snapshot_ts=1766516189976; 2025-12-23T18:56:51.765224Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:56:51.765265Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:51.765296Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:51.765358Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:51.766149Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.034000s; 2025-12-23T18:56:51.766186Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1334:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516747.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516747.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515547.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-12-23T18:55:48.242345Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:48.279397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:55:48.279653Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:48.286630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:48.286913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:48.287149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:48.287268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:48.287385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:48.287524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:48.287641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:48.287756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:48.287868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-12-23T18:55:48.287977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:48.288089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:48.288200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:48.288317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:48.316832Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:48.317001Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:48.317052Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:48.317225Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:48.317378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:48.317460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:48.317502Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:48.317592Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:48.317653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:48.317695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:48.317725Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:48.317907Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:48.317979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:48.318019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:55:48.318047Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:48.318136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:48.318186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:48.318228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:48.318261Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:48.318321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:48.318367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:48.318401Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:48.318463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:48.318531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:48.318564Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:48.318799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:48.318857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:48.318900Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:48.319037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:48.319089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:48.319120Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:55:48.319168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:55:48.319236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:55:48.319271Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:55:48.319322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:52.162636Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:2254;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.162675Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:52.162833Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=2254; 2025-12-23T18:56:52.162909Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=2254;batch_columns=timestamp; 2025-12-23T18:56:52.163151Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1235:3173];bytes=1280000;rows=160000;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:1236:3174]->[1:1235:3173] 2025-12-23T18:56:52.163338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.163467Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.163596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.163729Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:52.163836Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.163967Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.164234Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1236:3174] finished for tablet 9437184 2025-12-23T18:56:52.164699Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1235:3173];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.029},{"events":["l_task_result"],"t":0.183},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.185}],"full":{"a":64985331,"name":"_full_task","f":64985331,"d_finished":0,"c":0,"l":65171298,"d":185967},"events":[{"name":"bootstrap","f":64985554,"d_finished":1135,"c":1,"l":64986689,"d":1135},{"a":65170722,"name":"ack","f":65015178,"d_finished":68055,"c":71,"l":65170632,"d":68631},{"a":65170710,"name":"processing","f":64986824,"d_finished":143400,"c":143,"l":65170635,"d":143988},{"name":"ProduceResults","f":64986246,"d_finished":116669,"c":216,"l":65171000,"d":116669},{"a":65171005,"name":"Finish","f":65171005,"d_finished":0,"c":0,"l":65171298,"d":293},{"name":"task_result","f":64986841,"d_finished":72769,"c":72,"l":65169252,"d":72769}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.164766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1235:3173];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:52.165217Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1235:3173];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.029},{"events":["l_task_result"],"t":0.183},{"events":["l_ProduceResults","f_Finish"],"t":0.185},{"events":["l_ack","l_processing","l_Finish"],"t":0.186}],"full":{"a":64985331,"name":"_full_task","f":64985331,"d_finished":0,"c":0,"l":65171812,"d":186481},"events":[{"name":"bootstrap","f":64985554,"d_finished":1135,"c":1,"l":64986689,"d":1135},{"a":65170722,"name":"ack","f":65015178,"d_finished":68055,"c":71,"l":65170632,"d":69145},{"a":65170710,"name":"processing","f":64986824,"d_finished":143400,"c":143,"l":65170635,"d":144502},{"name":"ProduceResults","f":64986246,"d_finished":116669,"c":216,"l":65171000,"d":116669},{"a":65171005,"name":"Finish","f":65171005,"d_finished":0,"c":0,"l":65171812,"d":807},{"name":"task_result","f":64986841,"d_finished":72769,"c":72,"l":65169252,"d":72769}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1236:3174]->[1:1235:3173] 2025-12-23T18:56:52.165307Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:51.977000Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10565848;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10565848;selected_rows=0; 2025-12-23T18:56:52.165378Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:52.165509Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1236:3174];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/10565848 160000/10565848 0/0 160000/10565848 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestReadWrite::ReadWithProgram |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> 
TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> EvWrite::WriteWithSplit >> TStorageTenantTest::Empty [GOOD] >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestStateStorageRed [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:54:00.314501Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:00.435068Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:00.442889Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:00.443400Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:00.443523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:00.860014Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:00.952055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:00.952205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:00.986658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:01.056354Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf4/r3tmp/tmpxENGRs/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18227, node 1 TClient is connected to server localhost:7550 2025-12-23T18:54:01.860279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:01.860361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:01.860393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:01.860812Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... blocking NKikimr::NSchemeShard::TEvSchemeShard::TEvDescribeScheme from MONITORING_REQUEST to FLAT_SCHEMESHARD_ACTOR cookie 1 2025-12-23T18:54:01.911596Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:54:10.919029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:54:10.919140Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:20.318870Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:20.319000Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:20.325401Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:20.327223Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:20.327752Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:846:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:20.328038Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:20.328223Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:20.329394Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:841:2350], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:20.329645Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:20.329712Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:20.576978Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:20.691255Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:20.691398Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:20.691845Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:20.691915Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:20.750225Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:54:20.750809Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:20.751283Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:20.788195Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:20.868048Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf4/r3tmp/tmpJHyP2B/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30808, node 3 TClient is connected to server localhost:19483 2025-12-23T18:54:23.689113Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:23.692826Z node 3 :HIVE DEBUG: hive_impl.cpp:764: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 3: Status: 2 2025-12-23T18:54:23.692933Z node 3 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(3)::Execute 2025-12-23T18:54:23.692981Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-12-23T18:54:23.693074Z node 3 :HIVE DEBUG: tx__status.cpp:66: HIVE#72057594037968897 THive::TTxStatus(3)::Complete 2025-12-23T18:54:23.695763Z node 3 :HIVE DEBUG: tx__kill_node.cpp:22: HIVE#72057594037968897 THive::TTxKillNode(3)::Execute 2025-12-23T18:54:23.695889Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:54:23.695930Z node 3 :HIVE TRACE: hive_domains.cpp:16: Node(3) DeregisterInDomains (72057594046644480:1) : 2 -> 1 2025-12-23T18:54:23.695974Z node 3 :HIVE DEBUG: hive_impl.cpp:2875: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(1, 3) 2025-12-23T18:54:23.696021Z node 3 :HIVE TRACE: tx__kill_node.cpp:50: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [3:1222:2633] 2025-12-23T18:54:23.696065Z node 3 :HIVE DEBUG: hive_impl.cpp:133: HIVE#72057594037968897 TryToDeleteNode(3): waiting 3600.000000s 2025-12-23T18:54:23.696662Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:23.696701Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:23.696733Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:23.697379Z node 3 :HIVE TRACE: hive_impl.cpp:150: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([3:871:2416]) [3:1222:2633] 2025-12-23T18:54:23.697705Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:54:23.700699Z node 3 :HIVE TRACE: hive_impl.cpp:142: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([3:1707:2998]) [3:1941:3002] 2025-12-23T18:54:23.700985Z node 3 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2025-12-23T18:54:23.715578Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2025-12-23T18:54:23.715685Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-12-23T18:54:23.715922Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 
2025-12-23T18:54:23.716000Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-12-23T18:54:23.716158Z node 3 :HIVE DEBUG: hive_impl.cpp:2888: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888.Leader.0 2025-12-23T18:54:23.716238Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-12-23T18:54:23.716371Z node 3 :HIVE DEBUG: hive_impl.cpp:1108: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2025-12-23T18:54:23.716974Z node 3 :HIVE DEBUG: hive_impl.cpp:764: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 4: Status: 2 2025-12-23T18:54:23.717041Z node 3 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(4)::Execute 2025-12-23T18:54:23.717073Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(st ... ath=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:56:45.420685Z node 43 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:56:45.785140Z node 35 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:56:45.966065Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:56:45.966190Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:45.966941Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:56:45.967014Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:45.967296Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(38, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:56:45.967360Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(38, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:45.967591Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:56:45.967640Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:45.967846Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(40, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:56:45.967895Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(40, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:45.968048Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(41, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:56:45.968093Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(41, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:45.968249Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(42, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-12-23T18:56:45.968301Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(42, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:45.968513Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:56:45.968567Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:45.968774Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(39, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:56:45.968821Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(39, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:46.055191Z node 35 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 36 Cookie 36 2025-12-23T18:56:46.055878Z node 35 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 37 Cookie 37 2025-12-23T18:56:46.056157Z node 35 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 38 Cookie 38 2025-12-23T18:56:46.056322Z node 35 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 39 Cookie 39 2025-12-23T18:56:46.056520Z node 35 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 40 Cookie 40 2025-12-23T18:56:46.056663Z node 35 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 41 Cookie 41 2025-12-23T18:56:46.056841Z node 35 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 42 Cookie 42 2025-12-23T18:56:46.056979Z node 35 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 43 Cookie 43 2025-12-23T18:56:46.057380Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:56:46.058125Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:56:46.058431Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(38, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:56:46.059035Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:56:46.059355Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(43, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:56:46.059605Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(40, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:56:46.059836Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(41, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:56:46.060074Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(42, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:56:46.060351Z node 35 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(39, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:56:46.078601Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2025-12-23T18:56:46.097004Z node 41 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:56:46.201189Z node 38 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:56:46.219435Z node 43 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:56:46.409585Z node 39 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:56:46.459954Z node 42 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:56:46.521280Z node 40 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:56:46.538946Z node 36 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:56:46.571365Z node 35 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002bf4/r3tmp/tmpVZiAFq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8819, node 35 TClient is connected to server localhost:27645 2025-12-23T18:56:47.468122Z node 35 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:56:47.468177Z node 35 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:56:47.468200Z node 35 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:56:47.469191Z node 35 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:56:47.548024Z node 38 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:56:47.548150Z node 39 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:56:47.548210Z node 40 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:56:47.548269Z node 41 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:56:47.548327Z node 42 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:56:47.548682Z node 43 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:56:47.551644Z node 36 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:56:47.551750Z node 35 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:56:47.551832Z node 37 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; self_check_result: EMERGENCY issue_log { id: "RED-7831" status: RED message: "There is not enough functional rings" type: "STATE_STORAGE" level: 1 } issue_log { id: "RED-ccd4-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-b30b-1-35" type: "STATE_STORAGE_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-b30b-1-35" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 35 } } } } type: "STATE_STORAGE_NODE" level: 3 listed: 6 count: 6 } issue_log { id: "RED-568c" status: RED message: "There is not enough functional rings" type: "SCHEME_BOARD" level: 1 } issue_log { id: "RED-9bdc-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-106b-1-35" type: "SCHEME_BOARD_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-106b-1-35" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 35 } } } } type: "SCHEME_BOARD_NODE" level: 3 listed: 6 count: 6 } issue_log { id: "RED-18c3" status: RED message: "There is not enough functional rings" type: "BOARD" level: 1 } issue_log { id: "RED-aaf7-1" status: RED 
message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-0632-1-35" type: "BOARD_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-0632-1-35" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 35 } } } } type: "BOARD_NODE" level: 3 listed: 6 count: 6 } location { id: 35 host: "::1" port: 12001 } |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload-InStore [GOOD] Test command err: 2025-12-23T18:56:47.934516Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:47.968087Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:47.968341Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:47.975483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:47.975749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:47.976022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:47.976152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:47.976290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:47.976412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:47.976536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:47.976654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:47.976757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:47.976879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:47.976985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:47.977123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:47.977248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:48.007001Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:48.007187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:48.007240Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:48.007408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:48.007585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:48.007663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:48.007711Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:48.007818Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:48.007890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:48.007930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:48.007960Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:48.008151Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:48.008224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:48.008264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:48.008299Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:48.008387Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:48.008432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:48.008489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:48.008527Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:48.008589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:48.008630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:48.008682Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:48.008759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:48.008813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:48.008849Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:48.009082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:48.009139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:48.009184Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:48.009320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:48.009362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:48.009391Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:48.009438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:48.009478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:48.009509Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:48.009554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:48.009593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:48.009622Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:48.009756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:48.009794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
G: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-12-23T18:56:51.286889Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:56:51.299048Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=233723ba-e03111f0-a33cf0e5-437e1608; 2025-12-23T18:56:51.299236Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=188;count=3; 2025-12-23T18:56:51.299277Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=380;count=4;size_of_meta=112; 2025-12-23T18:56:51.299318Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=540;count=2;size_of_portion=192; 2025-12-23T18:56:51.299674Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-12-23T18:56:51.299745Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=3;operation_id=2; 2025-12-23T18:56:51.311297Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-12-23T18:56:51.311480Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=5;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:56:51.313014Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=237bcbbe-e03111f0-a5452872-d38e1d35; 2025-12-23T18:56:51.313244Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=282;count=5; 2025-12-23T18:56:51.313302Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=570;count=6;size_of_meta=112; 2025-12-23T18:56:51.313358Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=810;count=3;size_of_portion=192; 2025-12-23T18:56:51.313851Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-12-23T18:56:51.313950Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=4;operation_id=3; 2025-12-23T18:56:51.325438Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-12-23T18:56:51.325633Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:56:51.327591Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=23be266c-e03111f0-a64d50f4-df8ed9d6; 2025-12-23T18:56:51.327818Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=376;count=7; 2025-12-23T18:56:51.327867Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=760;count=8;size_of_meta=112; 2025-12-23T18:56:51.327918Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1080;count=4;size_of_portion=192; 2025-12-23T18:56:51.328321Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-12-23T18:56:51.328391Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=5;operation_id=4; 2025-12-23T18:56:51.340070Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-12-23T18:56:51.340209Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:56:51.355601Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=5;last=5; 2025-12-23T18:56:51.355677Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=6330728;operation_id=242edc18-e03111f0-80614dc7-977871a4;in_flight=1;size_in_flight=6330728; 2025-12-23T18:56:51.887456Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=5;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2025-12-23T18:56:51.924141Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=242edc18-e03111f0-80614dc7-977871a4; 2025-12-23T18:56:51.924351Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=470;count=9; 2025-12-23T18:56:51.924401Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=950;count=10;size_of_meta=112; 2025-12-23T18:56:51.924458Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1350;count=5;size_of_portion=192; 2025-12-23T18:56:51.924914Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-12-23T18:56:51.924998Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=6;operation_id=5; 2025-12-23T18:56:51.936669Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-12-23T18:56:51.936825Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] Test command err: 2025-12-23T18:56:47.733832Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:47.753365Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:47.753532Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:47.758536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:47.758767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:47.758971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:47.759091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:47.759214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:47.759284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:47.759349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:47.759421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:47.759493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:47.759569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:47.759634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:47.759712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:47.759786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:47.778618Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:47.778781Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:47.778827Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:47.778958Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:47.779069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:47.779120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:47.779148Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:47.779215Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:47.779265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:47.779309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:47.779337Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:47.779504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:47.779554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:47.779579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:47.779600Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:47.779667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:47.779704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:47.779735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:47.779752Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:47.779785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:47.779810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:47.779830Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-12-23T18:56:47.779870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:47.779896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:47.779925Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:47.780073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:47.780103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:47.780120Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:47.780200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:47.780224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:47.780240Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:47.780265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:47.780286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:47.780306Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:47.780335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:47.780355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:47.780373Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:47.780457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2025-12-23T18:56:47.780484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... =1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-12-23T18:56:52.453627Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-12-23T18:56:52.453933Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-12-23T18:56:52.454115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.454253Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.454418Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.454628Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:52.454833Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.455037Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.455419Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1059:2926] finished for tablet 9437184 2025-12-23T18:56:52.455897Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1058:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.033},{"events":["f_ack"],"t":0.034},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.036}],"full":{"a":5141708,"name":"_full_task","f":5141708,"d_finished":0,"c":0,"l":5178437,"d":36729},"events":[{"name":"bootstrap","f":5141959,"d_finished":1682,"c":1,"l":5143641,"d":1682},{"a":5177545,"name":"ack","f":5175915,"d_finished":1462,"c":1,"l":5177377,"d":2354},{"a":5177532,"name":"processing","f":5143839,"d_finished":3944,"c":3,"l":5177382,"d":4849},{"name":"ProduceResults","f":5143196,"d_finished":2525,"c":6,"l":5178002,"d":2525},{"a":5178007,"name":"Finish","f":5178007,"d_finished":0,"c":0,"l":5178437,"d":430},{"name":"task_result","f":5143854,"d_finished":2429,"c":2,"l":5175676,"d":2429}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.455968Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:52.456394Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1058:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.033},{"events":["f_ack"],"t":0.034},{"events":["l_ProduceResults","f_Finish"],"t":0.036},{"events":["l_ack","l_processing","l_Finish"],"t":0.037}],"full":{"a":5141708,"name":"_full_task","f":5141708,"d_finished":0,"c":0,"l":5178950,"d":37242},"events":[{"name":"bootstrap","f":5141959,"d_finished":1682,"c":1,"l":5143641,"d":1682},{"a":5177545,"name":"ack","f":5175915,"d_finished":1462,"c":1,"l":5177377,"d":2867},{"a":5177532,"name":"processing","f":5143839,"d_finished":3944,"c":3,"l":5177382,"d":5362},{"name":"ProduceResults","f":5143196,"d_finished":2525,"c":6,"l":5178002,"d":2525},{"a":5178007,"name":"Finish","f":5178007,"d_finished":0,"c":0,"l":5178950,"d":943},{"name":"task_result","f":5143854,"d_finished":2429,"c":2,"l":5175676,"d":2429}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:52.456465Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:52.415709Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-12-23T18:56:52.456505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:52.456731Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] >> TColumnShardTestReadWrite::Write |98.2%| [TA] 
$(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |98.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |98.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn [GOOD] >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] >> ColumnStatistics::EqWidthHistogram >> TColumnShardTestSchema::RebootColdTiers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516774.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516774.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=166516774.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516774.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516774.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=146516774.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=166516774.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516774.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=146515574.000000s;Name=;Codec=}; 2025-12-23T18:56:15.178254Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:15.198570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:15.198778Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:15.203750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:15.203922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:15.204078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:15.204148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:15.204228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:15.204303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:15.204362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:15.204423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:15.204482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:15.204547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:15.204607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:15.204670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:15.204731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:15.222633Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:15.222770Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:15.222811Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:15.222935Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:15.223030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:15.223090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:15.223127Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:15.223187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:15.223233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:15.223261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:15.223280Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:15.223376Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:15.223412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:15.223435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:15.223455Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:15.223510Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:15.223543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:15.223567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:15.223594Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:15.223639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:15.223667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-12-23T18:56:15.223686Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:15.223715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:15.223738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:15.223756Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:15.223873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:15.223901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:15.223919Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:15.223992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:15.224015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:15.224032Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:15.224059Z node 1 :TX ... 
=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=60; 2025-12-23T18:56:52.606404Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=5176; 2025-12-23T18:56:52.606433Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=5263; 2025-12-23T18:56:52.606475Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=7; 2025-12-23T18:56:52.606528Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=24; 2025-12-23T18:56:52.606557Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=5658; 2025-12-23T18:56:52.606640Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=48; 2025-12-23T18:56:52.606715Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=44; 2025-12-23T18:56:52.606849Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=73; 2025-12-23T18:56:52.606914Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=37; 2025-12-23T18:56:52.609404Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2433; 2025-12-23T18:56:52.611934Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2472; 2025-12-23T18:56:52.611992Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-12-23T18:56:52.612023Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-12-23T18:56:52.612058Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-12-23T18:56:52.612102Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=22; 2025-12-23T18:56:52.612152Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:56:52.612230Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=42; 2025-12-23T18:56:52.612258Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-12-23T18:56:52.612296Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=19; 2025-12-23T18:56:52.612341Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=26; 2025-12-23T18:56:52.612671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=308; 2025-12-23T18:56:52.612698Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=18167; 2025-12-23T18:56:52.612778Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=24365192;raw_bytes=35131129;count=5;records=400000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:56:52.612855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:56:52.612888Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:56:52.612960Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:52.625965Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=1; 2025-12-23T18:56:52.626066Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:52.626133Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 
2025-12-23T18:56:52.626180Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515912908;tx_id=18446744073709551615;;current_snapshot_ts=1766516176500; 2025-12-23T18:56:52.626207Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:56:52.626234Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:52.626259Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:52.626310Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:52.626474Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.126000s; 2025-12-23T18:56:52.626560Z node 1 :TX_TIERING WARN: log.cpp:841: TEST_STEP=3;fline=fetcher.h:165;error=event_undelivered_to_scheme_cache;reason=ActorUnknown; 2025-12-23T18:56:52.628067Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:56:52.628293Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:56:52.628328Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:52.628384Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2025-12-23T18:56:52.628423Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515912908;tx_id=18446744073709551615;;current_snapshot_ts=1766516176500; 2025-12-23T18:56:52.628448Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:56:52.628475Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:52.628498Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:52.628606Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:52.628953Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.066000s; 2025-12-23T18:56:52.628982Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1796:3665];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 80000/4886744 0/0
|98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn [GOOD]
Test command err: Running TestTtl ttlColumnType=Timestamp 2025-12-23T18:56:16.603691Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:56:16.609161Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:56:16.609700Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:16.650443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:16.650771Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:16.659646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:16.659968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:16.660237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:16.660381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841:
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:16.660535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:16.660722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:16.660863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:16.661028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:16.661215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:16.661372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:16.661522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:16.661664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:16.661811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:16.665165Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:56:16.698175Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:16.698386Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:16.698470Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:16.698708Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:16.698959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:16.699061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:16.699115Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:16.699250Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:16.699328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:16.699384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:16.699422Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:16.699653Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:16.699748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:16.699808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:16.699851Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:16.699967Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:16.700034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:16.700098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:16.700134Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:16.700216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:16.700296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-12-23T18:56:16.700347Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:16.700403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:16.700450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:16.700499Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:16.700755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:16.700875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:16.700920Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:16.701077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:16.701139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:16.701177Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:16.701244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:16.701296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:16.701353Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:16.701404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:16.701449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
8:56:55.234430Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:56:55.234483Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:56:55.234858Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:55.235146Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.235230Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:56:55.235510Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2025-12-23T18:56:55.235632Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2025-12-23T18:56:55.236001Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:512:2515];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2025-12-23T18:56:55.236278Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.236454Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.236645Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.236926Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:55.237143Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.237355Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.237735Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:513:2516] finished for tablet 9437184 2025-12-23T18:56:55.238208Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:512:2515];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":39015672,"name":"_full_task","f":39015672,"d_finished":0,"c":0,"l":39026233,"d":10561},"events":[{"name":"bootstrap","f":39016049,"d_finished":1504,"c":1,"l":39017553,"d":1504},{"a":39025316,"name":"ack","f":39023249,"d_finished":1858,"c":1,"l":39025107,"d":2775},{"a":39025292,"name":"processing","f":39017749,"d_finished":4588,"c":3,"l":39025111,"d":5529},{"name":"ProduceResults","f":39017052,"d_finished":3075,"c":6,"l":39025815,"d":3075},{"a":39025824,"name":"Finish","f":39025824,"d_finished":0,"c":0,"l":39026233,"d":409},{"name":"task_result","f":39017778,"d_finished":2643,"c":2,"l":39022938,"d":2643}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.238280Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:512:2515];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:55.238758Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:512:2515];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":39015672,"name":"_full_task","f":39015672,"d_finished":0,"c":0,"l":39026747,"d":11075},"events":[{"name":"bootstrap","f":39016049,"d_finished":1504,"c":1,"l":39017553,"d":1504},{"a":39025316,"name":"ack","f":39023249,"d_finished":1858,"c":1,"l":39025107,"d":3289},{"a":39025292,"name":"processing","f":39017749,"d_finished":4588,"c":3,"l":39025111,"d":6043},{"name":"ProduceResults","f":39017052,"d_finished":3075,"c":6,"l":39025815,"d":3075},{"a":39025824,"name":"Finish","f":39025824,"d_finished":0,"c":0,"l":39026747,"d":923},{"name":"task_result","f":39017778,"d_finished":2643,"c":2,"l":39022938,"d":2643}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.238936Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:55.225137Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2025-12-23T18:56:55.239011Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:55.239227Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;
|98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest
>> TSchemeShardTest::TopicReserveSize [GOOD]
>> TSchemeShardTest::TopicWithAutopartitioningReserveSize
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516753.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516753.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=166516753.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516753.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516753.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=166516753.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515553.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146516753.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=146516753.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515553.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146515553.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=146515553.000000s;Name=;Codec=}; 2025-12-23T18:55:53.665500Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:53.685332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:55:53.685504Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:53.691151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:53.691427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:53.691631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:53.691739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:53.691849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:53.691928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:53.691992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:53.692060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:53.692128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:53.692199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:53.692263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:53.692350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:53.692439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:53.711397Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:53.711537Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:53.711590Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:53.711718Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:53.711856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:53.711946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:53.711986Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:53.712073Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:53.712166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:53.712229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:53.712270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:53.712464Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:53.712545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:53.712601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:55:53.712638Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:53.712744Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:53.712787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:53.712820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:53.712855Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:53.712918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:53.712966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:53.712999Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:53.713047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:53.713088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:53.713122Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:53.713303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:53.713341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:53.713361Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:53.713444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:53.713473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:53.713493Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:55:53.713529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:55:53.713571Z node 1 :TX_COLUMNSHARD WARN: l ... ime=8; 2025-12-23T18:56:55.496305Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=75; 2025-12-23T18:56:55.496372Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=3348; 2025-12-23T18:56:55.496446Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=3486; 2025-12-23T18:56:55.496504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2025-12-23T18:56:55.496568Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=27; 2025-12-23T18:56:55.496603Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=4184; 2025-12-23T18:56:55.496726Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=71; 2025-12-23T18:56:55.496861Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=76; 2025-12-23T18:56:55.497000Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=88; 2025-12-23T18:56:55.497147Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=89; 2025-12-23T18:56:55.500044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2838; 2025-12-23T18:56:55.502570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2427; 2025-12-23T18:56:55.502667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-12-23T18:56:55.502719Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-12-23T18:56:55.502784Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:56:55.502874Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=39; 2025-12-23T18:56:55.502932Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-12-23T18:56:55.503020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2025-12-23T18:56:55.503067Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-12-23T18:56:55.503155Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2025-12-23T18:56:55.503245Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=49; 2025-12-23T18:56:55.503602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=305; 2025-12-23T18:56:55.503682Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=21238; 2025-12-23T18:56:55.503807Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive 
{blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:56:55.503910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:56:55.503968Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:56:55.504027Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:55.525260Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=1; 2025-12-23T18:56:55.525410Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:55.525529Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2025-12-23T18:56:55.525608Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515903319;tx_id=18446744073709551615;;current_snapshot_ts=1766516191206; 2025-12-23T18:56:55.525654Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:56:55.525716Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:55.525759Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:55.525856Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:55.526066Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.102000s; 2025-12-23T18:56:55.527675Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:56:55.527867Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:56:55.527924Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:56:55.528004Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2025-12-23T18:56:55.528066Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515903319;tx_id=18446744073709551615;;current_snapshot_ts=1766516191206; 2025-12-23T18:56:55.528110Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:56:55.528150Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:55.528197Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:56:55.528321Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:56:55.528852Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.122000s; 2025-12-23T18:56:55.528895Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1521:3342];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 80000/4873744 0/0
|98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest
>> Normalizers::InsertedPortionsCleanerNormalizer
>> TColumnShardTestReadWrite::WriteReadZSTD
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD]
Test command err: 2025-12-23T18:56:54.188960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:54.222409Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:54.222650Z node 1 :TX_COLUMNSHARD DEBUG:
ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:54.229890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:54.230170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:54.230380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:54.230494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:54.230621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:54.230757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:54.230875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:54.231015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:54.231137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:54.231242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:54.231339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:54.231464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:54.231601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:54.256165Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:54.256312Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:54.256360Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:54.256484Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:54.256626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:54.256684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:54.256714Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:54.256771Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:54.256825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:54.256858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:54.256881Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:54.257011Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:54.257053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:54.257077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:54.257095Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:54.257154Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:54.257186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:54.257232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-12-23T18:56:54.257254Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:54.257294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:54.257324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:54.257346Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:54.257385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:54.257414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:54.257438Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:54.257594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:54.257628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:54.257646Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:54.257733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:54.257767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:54.257783Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:54.257813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:54.257837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:54.257874Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2025-12-23T18:56:54.257918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:54.257942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:54.257960Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:54.258057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:54.258082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 184;fline=source.cpp:346;source_idx=0; 2025-12-23T18:56:55.330257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_idx=0;tablet_id=9437184;source_idx=0;tablet_id=9437184;fline=source.cpp:346;source_idx=0; 2025-12-23T18:56:55.330375Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_idx=0;tablet_id=9437184;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:47;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2025-12-23T18:56:55.330419Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:47;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2025-12-23T18:56:55.330501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-12-23T18:56:55.330669Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-12-23T18:56:55.330783Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:33;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2025-12-23T18:56:55.330833Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=AssembleOriginalData; 2025-12-23T18:56:55.330991Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=AssembleOriginalData; 2025-12-23T18:56:55.331056Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=Calculation; 2025-12-23T18:56:55.332131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=Filter; 2025-12-23T18:56:55.332223Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:33;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=4; 2025-12-23T18:56:55.332301Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:25;event=empty_result;scan_step_idx=5; 2025-12-23T18:56:55.332424Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:100;event=TEvTaskProcessedResult; 2025-12-23T18:56:55.332480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-12-23T18:56:55.332532Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:22;event=OnSourcePrepared;source_idx=0;prepared=1; 2025-12-23T18:56:55.332578Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:30;event=finish_source;source_idx=0; 2025-12-23T18:56:55.332699Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.332898Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.333099Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:56:55.333206Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.333392Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.333655Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:352:2364] finished for tablet 9437184 2025-12-23T18:56:55.334102Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:351:2363];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.005},{"events":["l_ack","l_processing","l_Finish"],"t":0.006}],"full":{"a":1576325,"name":"_full_task","f":1576325,"d_finished":0,"c":0,"l":1582588,"d":6263},"events":[{"name":"bootstrap","f":1576551,"d_finished":894,"c":1,"l":1577445,"d":894},{"a":1581965,"name":"ack","f":1581965,"d_finished":0,"c":0,"l":1582588,"d":623},{"a":1581955,"name":"processing","f":1577539,"d_finished":1817,"c":2,"l":1581834,"d":2450},{"name":"ProduceResults","f":1577121,"d_finished":1003,"c":4,"l":1582295,"d":1003},{"a":1582299,"name":"Finish","f":1582299,"d_finished":0,"c":0,"l":1582588,"d":289},{"name":"task_result","f":1577549,"d_finished":1774,"c":2,"l":1581831,"d":1774}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.334195Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:351:2363];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:56:55.334565Z 
node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:351:2363];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.005},{"events":["l_ack","l_processing","l_Finish"],"t":0.006}],"full":{"a":1576325,"name":"_full_task","f":1576325,"d_finished":0,"c":0,"l":1583123,"d":6798},"events":[{"name":"bootstrap","f":1576551,"d_finished":894,"c":1,"l":1577445,"d":894},{"a":1581965,"name":"ack","f":1581965,"d_finished":0,"c":0,"l":1583123,"d":1158},{"a":1581955,"name":"processing","f":1577539,"d_finished":1817,"c":2,"l":1581834,"d":2985},{"name":"ProduceResults","f":1577121,"d_finished":1003,"c":4,"l":1582295,"d":1003},{"a":1582299,"name":"Finish","f":1582299,"d_finished":0,"c":0,"l":1583123,"d":824},{"name":"task_result","f":1577549,"d_finished":1774,"c":2,"l":1581831,"d":1774}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:56:55.334654Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:56:55.325692Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-12-23T18:56:55.334703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:56:55.334906Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:352:2364];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> EvWrite::WriteInTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 
72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:56:41.857534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:56:41.857600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:56:41.857624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:56:41.857662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:56:41.857688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:56:41.857707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:56:41.857777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:56:41.857852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:56:41.858516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:56:41.858859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:56:41.926796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:56:41.926849Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:56:41.931454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:56:41.931606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:56:41.931722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:56:41.934109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:56:41.934308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:56:41.934862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:56:41.935066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:56:41.936788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:56:41.936962Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:56:41.937973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:56:41.938016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:56:41.938124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:56:41.938157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:56:41.938186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:56:41.938306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:56:42.042245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:56:42.043764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 2, partId: 0 2025-12-23T18:56:57.804678Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000041 2025-12-23T18:56:57.804722Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000041 2025-12-23T18:56:57.804755Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-12-23T18:56:57.804782Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-12-23T18:56:57.804934Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2025-12-23T18:56:57.805149Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 FAKE_COORDINATOR: Erasing txId 104 2025-12-23T18:56:57.810382Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:56:57.810547Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:56:57.810660Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:56:57.810792Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:56:57.810900Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 
2025-12-23T18:56:57.813610Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:56:57.813924Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:56:57.813957Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:56:57.814233Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:56:57.814265Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [14:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2025-12-23T18:56:57.814351Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-12-23T18:56:57.814394Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-12-23T18:56:57.814558Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:56:57.814608Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:56:57.814670Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2025-12-23T18:56:57.814712Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:56:57.814778Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-12-23T18:56:57.814838Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-12-23T18:56:57.814897Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2025-12-23T18:56:57.814952Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 104:0 2025-12-23T18:56:57.815194Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 10 2025-12-23T18:56:57.815258Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-12-23T18:56:57.815306Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2025-12-23T18:56:57.816635Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:56:57.816709Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-12-23T18:56:57.816742Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-12-23T18:56:57.816806Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:56:57.816864Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2025-12-23T18:56:57.816973Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-12-23T18:56:57.819157Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-12-23T18:56:57.819460Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-12-23T18:56:57.819524Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-12-23T18:56:57.820020Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-12-23T18:56:57.820121Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-12-23T18:56:57.820172Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [14:1436:3261] TestWaitNotification: OK eventTxId 104 2025-12-23T18:56:57.820789Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:56:57.821011Z node 14 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 260us result status StatusSuccess 2025-12-23T18:56:57.821646Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { 
MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot >> TColumnShardTestReadWrite::RebootWriteRead >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader >> Normalizers::InsertedPortionsCleanerNormalizer [GOOD] >> Normalizers::EmptyTablesNormalizer >> TColumnShardTestReadWrite::Write [GOOD] >> EvWrite::WriteInTransaction [GOOD] >> EvWrite::WriteWithLock >> TColumnShardTestReadWrite::ReadSomePrograms >> TColumnShardTestSchema::HotTiersTtl [GOOD] >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> EvWrite::WriteWithSplit [GOOD] >> Normalizers::ChunksV0MetaNormalizer >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] >> TColumnShardTestReadWrite::ReadWithProgramLike ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516782.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516782.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=166516782.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516782.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516782.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=146516782.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=166516782.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516782.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=146515582.000000s;Name=;Codec=}; 2025-12-23T18:56:23.169388Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:23.186830Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:23.187002Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:23.191622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:23.191789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:23.191944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:23.192012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:23.192083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:23.192165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:23.192227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:23.192283Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:23.192344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:23.192402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:23.192462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:23.192534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:23.192599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:23.209984Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:23.210099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:23.210137Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:23.210250Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:23.210372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:23.210424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:23.210452Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:23.210513Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:23.210553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:23.210579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:23.210595Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:23.210714Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:23.210775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:23.210801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:23.210819Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:23.210874Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:23.210920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:23.210945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:23.210973Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:23.211017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:23.211042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:23.211073Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:23.211107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:23.211130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:23.211147Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:23.211257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:23.211298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:23.211327Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:23.211417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:23.211443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:23.211460Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:23.211492Z node 1 :TX ... impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:00.596255Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:00.596500Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766516219502:max} readable: {1766516219502:max} at tablet 9437184 2025-12-23T18:57:00.596649Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:57:00.596853Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516219502:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:57:00.596920Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516219502:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:57:00.597453Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516219502:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:57:00.599324Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516219502:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:57:00.600378Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516219502:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1327:3296];trace_detailed=; 2025-12-23T18:57:00.601012Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:57:00.601274Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:57:00.601549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:00.601735Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:00.602145Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:00.602288Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:00.602430Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:00.602686Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1327:3296] finished for tablet 9437184 2025-12-23T18:57:00.603266Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1326:3295];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":39747669,"name":"_full_task","f":39747669,"d_finished":0,"c":0,"l":39750163,"d":2494},"events":[{"name":"bootstrap","f":39748078,"d_finished":1070,"c":1,"l":39749148,"d":1070},{"a":39749491,"name":"ack","f":39749491,"d_finished":0,"c":0,"l":39750163,"d":672},{"a":39749471,"name":"processing","f":39749471,"d_finished":0,"c":0,"l":39750163,"d":692},{"name":"ProduceResults","f":39748801,"d_finished":633,"c":2,"l":39749843,"d":633},{"a":39749851,"name":"Finish","f":39749851,"d_finished":0,"c":0,"l":39750163,"d":312}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:00.603372Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1326:3295];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:00.603862Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1326:3295];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":39747669,"name":"_full_task","f":39747669,"d_finished":0,"c":0,"l":39750802,"d":3133},"events":[{"name":"bootstrap","f":39748078,"d_finished":1070,"c":1,"l":39749148,"d":1070},{"a":39749491,"name":"ack","f":39749491,"d_finished":0,"c":0,"l":39750802,"d":1311},{"a":39749471,"name":"processing","f":39749471,"d_finished":0,"c":0,"l":39750802,"d":1331},{"name":"ProduceResults","f":39748801,"d_finished":633,"c":2,"l":39749843,"d":633},{"a":39749851,"name":"Finish","f":39749851,"d_finished":0,"c":0,"l":39750802,"d":951}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1327:3296]->[1:1326:3295] 2025-12-23T18:57:00.603987Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:00.599290Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:57:00.604046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:00.604181Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1327:3296];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 80000/4886744 0/0 |98.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> EvWrite::WriteWithLock [GOOD] >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] |98.2%| [TA] 
$(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516779.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516779.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515579.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-12-23T18:56:20.548152Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:20.567969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:20.568160Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:20.573637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:20.573820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:20.573997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:20.574083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:20.574196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:20.574312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:20.574413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:20.574476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:20.574547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:20.574610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:20.574675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:20.574734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:20.574816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:20.594786Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:20.594945Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:20.594984Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:20.595109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:20.595234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:20.595291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:20.595325Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:20.595382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:20.595426Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:20.595456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:20.595491Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:20.595625Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:20.595701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:20.595729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:20.595754Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:20.595816Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:20.595850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:20.595877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:20.595906Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:20.595974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:20.596001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:20.596021Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:20.596053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:20.596078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:20.596096Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:20.596217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:20.596269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:20.596300Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:20.596384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:20.596411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:20.596428Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:20.596460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:20.596486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:20.596507Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:20.596534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
granule/portions;fline=constructor_portion.cpp:44;memory_size=286;data_size=260;sum=5688;count=20;size_of_portion=192; 2025-12-23T18:57:00.814074Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=2891; 2025-12-23T18:57:00.814147Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2025-12-23T18:57:00.814682Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=482; 2025-12-23T18:57:00.814729Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=3710; 2025-12-23T18:57:00.814794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=3865; 2025-12-23T18:57:00.814847Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=10; 2025-12-23T18:57:00.814917Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=32; 2025-12-23T18:57:00.814971Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=4455; 2025-12-23T18:57:00.815091Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=75; 2025-12-23T18:57:00.815205Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=59; 2025-12-23T18:57:00.815325Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=70; 2025-12-23T18:57:00.815428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=59; 2025-12-23T18:57:00.817567Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2074; 2025-12-23T18:57:00.819722Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2080; 2025-12-23T18:57:00.819796Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-12-23T18:57:00.819852Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-12-23T18:57:00.819885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:57:00.819947Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2025-12-23T18:57:00.819983Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:57:00.820105Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=58; 2025-12-23T18:57:00.820211Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-12-23T18:57:00.820284Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2025-12-23T18:57:00.820375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=44; 2025-12-23T18:57:00.820572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=163; 2025-12-23T18:57:00.820607Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=19075; 2025-12-23T18:57:00.820737Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:00.820877Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:00.820941Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:00.821011Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:00.831465Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=1; 2025-12-23T18:57:00.831657Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:00.831747Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:57:00.831809Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:00.831852Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:00.831889Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:00.831972Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:00.832194Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.139000s; 2025-12-23T18:57:00.834008Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:00.834209Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:00.834259Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:00.834357Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:57:00.834413Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:00.834453Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:00.834561Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:00.834645Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:00.835138Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.047000s; 2025-12-23T18:57:00.835180Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1280:3148];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 160000/9739224 |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore >> Normalizers::EmptyTablesNormalizer [GOOD] >> TColumnShardTestReadWrite::WriteReadModifications ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516757.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516757.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=166516757.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516757.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516757.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=166516757.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515557.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146516757.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=146516757.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515557.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146515557.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=146515557.000000s;Name=;Codec=}; 2025-12-23T18:55:57.948432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:55:57.968333Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:55:57.968570Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:55:57.974070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:55:57.974274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:55:57.974443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:55:57.974544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:55:57.974634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:55:57.974710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:55:57.974809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:55:57.974893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:55:57.974958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:55:57.975031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:55:57.975120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:55:57.975192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:55:57.975256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:55:57.994912Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:55:57.995061Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:55:57.995114Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:55:57.995254Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:57.995377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:55:57.995452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:55:57.995483Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:55:57.995541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:55:57.995586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:55:57.995617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:55:57.995637Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:55:57.995799Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:55:57.995855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:55:57.995886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2025-12-23T18:55:57.995906Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:55:57.995967Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:55:57.996013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:55:57.996053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:55:57.996088Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:55:57.996149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:55:57.996193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:55:57.996219Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:55:57.996248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:55:57.996275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:55:57.996298Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:55:57.996427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:55:57.996470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:55:57.996492Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:55:57.996570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:55:57.996599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:55:57.996621Z 
node 1 :TX_ ... ; 2025-12-23T18:57:01.284448Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=98; 2025-12-23T18:57:01.284486Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8571; 2025-12-23T18:57:01.284528Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8687; 2025-12-23T18:57:01.284590Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2025-12-23T18:57:01.284668Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=36; 2025-12-23T18:57:01.284705Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9324; 2025-12-23T18:57:01.284865Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=102; 2025-12-23T18:57:01.285002Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=78; 2025-12-23T18:57:01.285142Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=91; 2025-12-23T18:57:01.285281Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=93; 2025-12-23T18:57:01.289472Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4125; 2025-12-23T18:57:01.294043Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=4458; 2025-12-23T18:57:01.294154Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=14; 2025-12-23T18:57:01.294207Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-12-23T18:57:01.294234Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-12-23T18:57:01.294295Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2025-12-23T18:57:01.294331Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:57:01.294432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=66; 2025-12-23T18:57:01.294470Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-12-23T18:57:01.294532Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2025-12-23T18:57:01.294633Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=62; 2025-12-23T18:57:01.295012Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=328; 2025-12-23T18:57:01.295067Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=26562; 2025-12-23T18:57:01.295196Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:01.295303Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:01.295357Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:01.295422Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:01.315770Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=1; 2025-12-23T18:57:01.315930Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:01.316014Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2025-12-23T18:57:01.316078Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515907789;tx_id=18446744073709551615;;current_snapshot_ts=1766516159268; 2025-12-23T18:57:01.316121Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:01.316171Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:01.316214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:01.316298Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:01.316543Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.047000s; 2025-12-23T18:57:01.318153Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:01.318396Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:01.318459Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:01.318553Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2025-12-23T18:57:01.318624Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515907789;tx_id=18446744073709551615;;current_snapshot_ts=1766516159268; 2025-12-23T18:57:01.318671Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:01.318715Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:01.318783Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:01.318870Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:01.319247Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.061000s; 2025-12-23T18:57:01.319291Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2038:3858];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write [GOOD] Test command err: 2025-12-23T18:56:55.585619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:55.621944Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:55.622276Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:55.630130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:55.630403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:55.630692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:55.630849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:55.631033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:55.631169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:55.631295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:55.631439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:55.631549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:55.631678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:55.631798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:55.631928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:55.632061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:55.664580Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:55.664777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:55.664836Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:55.665050Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:55.665241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:55.665332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:55.665381Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:55.665483Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:55.665552Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:55.665602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:55.665638Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:55.665855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:55.665935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:55.666003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:55.666062Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:55.666171Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:55.666232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:55.666276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:55.666309Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:55.666369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:55.666414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:55.666473Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:55.666551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:55.666598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:55.666647Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:55.666950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:55.667009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:55.667047Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:55.667268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:55.667325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:55.667380Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:55.667441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:55.667486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:55.667525Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:55.667578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:55.667618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:55.667654Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:55.667827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:55.667882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts
":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{
"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"p
k":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2025-12-23T18:57:00.038978Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] |98.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} |98.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] Test command err: 2025-12-23T18:57:00.807685Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:00.830088Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:00.830267Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:00.836757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:00.837079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:00.837331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:00.837502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:00.837664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:00.837797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:00.837956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:00.838090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:00.838252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:00.838386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:00.838525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:00.838654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:00.838815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:00.860112Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:00.860253Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:00.860302Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:00.860443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:00.860555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:00.860605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:00.860634Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:00.860689Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:00.860731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:00.860757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:00.860777Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:00.860904Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:00.860951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:00.860975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:00.860997Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:00.861067Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:00.861102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:00.861143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:00.861176Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:00.861217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:00.861244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:00.861263Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:00.861304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:00.861334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:00.861358Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:00.861509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:00.861542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:00.861562Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:00.861634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:00.861664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:00.861681Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:00.861719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:00.861748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:00.861780Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:00.861815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:00.861840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:00.861858Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:00.861949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:00.861977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 4960;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-12-23T18:57:01.434165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1766516221789;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136531611928512;op_tx=10:TX_KIND_SCHEMA;min=1766516221789;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1766516221789;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=136737773064960;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2025-12-23T18:57:01.434204Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1766516221789;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136531611928512;op_tx=10:TX_KIND_SCHEMA;min=1766516221789;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1766516221789;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=136737773064960;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2025-12-23T18:57:01.434430Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-12-23T18:57:01.434548Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516221789 at tablet 9437184, mediator 0 2025-12-23T18:57:01.434610Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2025-12-23T18:57:01.434897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:57:01.434976Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:57:01.435022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:57:01.435152Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2025-12-23T18:57:01.442502Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1766516221789;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2025-12-23T18:57:01.442600Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:01.442699Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2025-12-23T18:57:01.442789Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:145;event=RegisterTable;path_id=1000000185; 2025-12-23T18:57:01.443016Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2025-12-23T18:57:01.456565Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:532;event=OnTieringModified;path_id=1000000185; 2025-12-23T18:57:01.481169Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3200;columns=5; 2025-12-23T18:57:01.484554Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=1;last=1; 2025-12-23T18:57:01.484660Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=3200;operation_id=2a386aca-e03111f0-8b0b358e-ff2a80cf;in_flight=1;size_in_flight=3200; 2025-12-23T18:57:01.496787Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=1;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=3768;count=1;actions=__DEFAULT,;waiting=1;; 2025-12-23T18:57:01.498524Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=3200;event=data_write_finished;writing_id=2a386aca-e03111f0-8b0b358e-ff2a80cf; 
2025-12-23T18:57:01.498838Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=60;data_size=20;sum=60;count=1; 2025-12-23T18:57:01.498913Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=156;data_size=132;sum=156;count=2;size_of_meta=112; 2025-12-23T18:57:01.499006Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=236;data_size=212;sum=236;count=1;size_of_portion=192; 2025-12-23T18:57:01.499473Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-12-23T18:57:01.499600Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=2;operation_id=1; 2025-12-23T18:57:01.511719Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-12-23T18:57:01.511975Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:01.525280Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516221795 at tablet 9437184, mediator 0 2025-12-23T18:57:01.525376Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] execute at tablet 9437184 2025-12-23T18:57:01.525686Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=abstract.h:88;progress_tx_id=100;lock_id=1;broken=0; 2025-12-23T18:57:01.537482Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] complete at tablet 9437184 2025-12-23T18:57:01.537594Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:101;progress_tx_id=100;lock_id=1;broken=0; 2025-12-23T18:57:01.537777Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2025-12-23T18:57:01.537817Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=1; 2025-12-23T18:57:01.538141Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:01.538191Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:01.538251Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:01.538282Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:01.538332Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:57:01.549531Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:01.549627Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:01.549680Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:01.549775Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:01.550182Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 100 scanId: 0 version: {1766516221795:100} readable: {1766516221795:max} at tablet 9437184 2025-12-23T18:57:01.562452Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2025-12-23T18:57:01.564795Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=100;scan_id=0;gen=0;table=;snapshot={1766516221795:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: Can't parse TOlapProgram protobuf; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD] Test command err: 2025-12-23T18:56:59.145702Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:59.166824Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:59.166988Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:59.172195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:59.172478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:59.172713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:59.172802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:59.172885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:59.172975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:59.173051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:59.173121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:59.173183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:59.173251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:59.173328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:59.173393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:59.173465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:59.194510Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2025-12-23T18:56:59.194897Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:59.194967Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:59.195110Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:59.195233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:59.195282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:59.195320Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:59.195380Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:59.195428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:59.195456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:59.195474Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:59.195609Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:59.195668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:59.195696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:59.195728Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:59.195783Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:59.195820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:59.195849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:59.195866Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:59.195898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:59.195919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:59.195936Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:59.195971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:59.196006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:59.196028Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:59.196186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:59.196228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:59.196249Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:59.196333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:59.196361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:59.196391Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:59.196442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:59.196466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:59.196485Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:59.196512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:59.196548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:59.196568Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:59.196650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:59.196672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-12-23T18:57:01.683527Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=54;finished=1; 2025-12-23T18:57:01.683557Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:57:01.683588Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:57:01.683795Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:01.683920Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:54;schema=key: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:01.683980Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 
2025-12-23T18:57:01.684072Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=54; 2025-12-23T18:57:01.684114Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=54;batch_columns=key,field; 2025-12-23T18:57:01.684223Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:204:2216];bytes=458752;rows=4096;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-12-23T18:57:01.684327Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:01.684453Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:01.684643Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:01.684754Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:01.684847Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:01.684938Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:01.685184Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:205:2217] finished for tablet 9437184 2025-12-23T18:57:01.685703Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:204:2216];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.024},{"events":["l_task_result"],"t":0.155},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.157}],"full":{"a":2819815,"name":"_full_task","f":2819815,"d_finished":0,"c":0,"l":2977289,"d":157474},"events":[{"name":"bootstrap","f":2820023,"d_finished":1111,"c":1,"l":2821134,"d":1111},{"a":2976789,"name":"ack","f":2844258,"d_finished":57507,"c":86,"l":2976724,"d":58007},{"a":2976780,"name":"processing","f":2821242,"d_finished":123285,"c":173,"l":2976728,"d":123794},{"name":"ProduceResults","f":2820740,"d_finished":97833,"c":261,"l":2977006,"d":97833},{"a":2977009,"name":"Finish","f":2977009,"d_finished":0,"c":0,"l":2977289,"d":280},{"name":"task_result","f":2821256,"d_finished":63756,"c":87,"l":2975657,"d":63756}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:01.685811Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:204:2216];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:01.686294Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:204:2216];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.024},{"events":["l_task_result"],"t":0.155},{"events":["l_ProduceResults","f_Finish"],"t":0.157},{"events":["l_ack","l_processing","l_Finish"],"t":0.158}],"full":{"a":2819815,"name":"_full_task","f":2819815,"d_finished":0,"c":0,"l":2977906,"d":158091},"events":[{"name":"bootstrap","f":2820023,"d_finished":1111,"c":1,"l":2821134,"d":1111},{"a":2976789,"name":"ack","f":2844258,"d_finished":57507,"c":86,"l":2976724,"d":58624},{"a":2976780,"name":"processing","f":2821242,"d_finished":123285,"c":173,"l":2976728,"d":124411},{"name":"ProduceResults","f":2820740,"d_finished":97833,"c":261,"l":2977006,"d":97833},{"a":2977009,"name":"Finish","f":2977009,"d_finished":0,"c":0,"l":2977906,"d":897},{"name":"task_result","f":2821256,"d_finished":63756,"c":87,"l":2975657,"d":63756}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:01.686379Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:01.526406Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=474480;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=474480;selected_rows=0; 2025-12-23T18:57:01.686422Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:01.686567Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:205:2217];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ChunksV0MetaNormalizer [GOOD] >> TOlap::StoreStatsQuota [GOOD] >> TOlapNaming::AlterColumnStoreFailed >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2025-12-23T18:56:57.654502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:57.690025Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:57.690281Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:57.697718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertedPortions; 2025-12-23T18:56:57.697978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:57.698261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:57.698444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:57.698575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:57.698684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:57.698845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:57.698984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:57.699106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:57.699215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:57.699343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:57.699446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:57.699556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:57.699662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:57.730572Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:57.730983Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=CleanInsertedPortions; 2025-12-23T18:56:57.731040Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-12-23T18:56:57.731335Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-12-23T18:56:57.731439Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=47; 2025-12-23T18:56:57.731520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-12-23T18:56:57.731637Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=68; 2025-12-23T18:56:57.731821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertedPortions;id=NO_VALUE_OPTIONAL; 2025-12-23T18:56:57.731918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-12-23T18:56:57.731973Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-12-23T18:56:57.732126Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:57.732223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:57.732271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:57.732317Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-12-23T18:56:57.732423Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:57.732494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:57.732536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:57.732575Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:57.732760Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:57.732824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:57.732868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:57.732904Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:57.733059Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:57.733135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:57.733190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:57.733236Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:57.733323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:57.733385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:57.733419Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:57.733463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:57.733531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:57.733565Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:57.733787Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:57.733834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:57.733866Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:57.733985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:57.734027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:57.734058Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cp ... common_data.cpp:29;PRECHARGE:column_enginesLoadingTime=13; 2025-12-23T18:57:02.129359Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:countersLoadingTime=80; 2025-12-23T18:57:02.129502Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:countersLoadingTime=74; 2025-12-23T18:57:02.129592Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:sharding_infoLoadingTime=35; 2025-12-23T18:57:02.129660Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:sharding_infoLoadingTime=26; 2025-12-23T18:57:02.129729Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2025-12-23T18:57:02.129783Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=8; 2025-12-23T18:57:02.129826Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=576; 2025-12-23T18:57:02.129953Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=59; 2025-12-23T18:57:02.130058Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=60; 2025-12-23T18:57:02.130205Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=101; 2025-12-23T18:57:02.130328Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=80; 2025-12-23T18:57:02.130535Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=161; 2025-12-23T18:57:02.141679Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11077; 2025-12-23T18:57:02.141806Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=14; 2025-12-23T18:57:02.141864Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-12-23T18:57:02.141910Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:57:02.142019Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=66; 2025-12-23T18:57:02.142072Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-12-23T18:57:02.142173Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=62; 2025-12-23T18:57:02.142220Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-12-23T18:57:02.142297Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=40; 2025-12-23T18:57:02.142401Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=59; 2025-12-23T18:57:02.142501Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=48; 2025-12-23T18:57:02.142565Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=22383; 2025-12-23T18:57:02.142722Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:02.142872Z node 2 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:02.142963Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:02.143049Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:02.143101Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:02.143162Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:02.143243Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:57:02.143321Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:02.143381Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:02.143432Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:02.143531Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:02.161533Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.033000s; 2025-12-23T18:57:02.161986Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:02.162057Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:02.162109Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:02.162184Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:57:02.162242Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:02.162293Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:02.162335Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:02.162409Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:02.162533Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.129000s; 2025-12-23T18:57:02.162575Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2025-12-23T18:57:02.237840Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 111 scanId: 0 version: {1766516221388:111} readable: {1766516221388:max} at tablet 9437184 2025-12-23T18:57:02.238028Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-12-23T18:57:02.238090Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1766516221388:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-12-23T18:57:02.238193Z node 2 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[2:268:2277];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1766516221388:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2025-12-23T18:57:01.922201Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:01.944196Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:01.944398Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:01.950655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:01.950898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:01.951137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:01.951261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:01.951395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:01.951530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:01.951663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:01.951741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:01.951822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:01.951935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:01.952037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:01.952148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:01.952250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:01.973075Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:01.973250Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:01.973314Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:01.973502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:01.973658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:01.973740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:01.973791Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:01.973891Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:01.973959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:01.974008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:01.974054Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:01.974247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:01.974319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:01.974360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:01.974399Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:01.974496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:01.974551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:01.974608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:01.974639Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:01.974695Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:01.974764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:01.974805Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:01.974864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:01.974906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:01.974955Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:01.975220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:01.975283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:01.975316Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:01.975450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:01.975496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:01.975527Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:01.975573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:01.975615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:01.975660Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:01.975712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:01.975748Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:01.975778Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:01.975873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:01.975898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-12-23T18:57:02.866500Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-12-23T18:57:02.866537Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:57:02.866573Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:57:02.866808Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:02.866982Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: string;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:02.867032Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:57:02.867164Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=10; 
2025-12-23T18:57:02.867227Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=10;batch_columns=message; 2025-12-23T18:57:02.867498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:371:2383];bytes=61;rows=10;faults=0;finished=0;fault=0;schema=message: string; 2025-12-23T18:57:02.867646Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:02.867784Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:02.867932Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:02.868084Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:02.868207Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:02.868324Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:02.868543Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:372:2384] finished for tablet 9437184 2025-12-23T18:57:02.868977Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:371:2383];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.007},{"events":["l_ack","l_processing","l_Finish"],"t":0.008}],"full":{"a":1447662,"name":"_full_task","f":1447662,"d_finished":0,"c":0,"l":1455801,"d":8139},"events":[{"name":"bootstrap","f":1447906,"d_finished":1026,"c":1,"l":1448932,"d":1026},{"a":1455262,"name":"ack","f":1453985,"d_finished":1172,"c":1,"l":1455157,"d":1711},{"a":1455251,"name":"processing","f":1449061,"d_finished":3208,"c":3,"l":1455160,"d":3758},{"name":"ProduceResults","f":1448594,"d_finished":1923,"c":6,"l":1455540,"d":1923},{"a":1455544,"name":"Finish","f":1455544,"d_finished":0,"c":0,"l":1455801,"d":257},{"name":"task_result","f":1449088,"d_finished":1957,"c":2,"l":1453791,"d":1957}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:02.869040Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:371:2383];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:02.869436Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:371:2383];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.007},{"events":["l_ack","l_processing","l_Finish"],"t":0.008}],"full":{"a":1447662,"name":"_full_task","f":1447662,"d_finished":0,"c":0,"l":1456272,"d":8610},"events":[{"name":"bootstrap","f":1447906,"d_finished":1026,"c":1,"l":1448932,"d":1026},{"a":1455262,"name":"ack","f":1453985,"d_finished":1172,"c":1,"l":1455157,"d":2182},{"a":1455251,"name":"processing","f":1449061,"d_finished":3208,"c":3,"l":1455160,"d":4229},{"name":"ProduceResults","f":1448594,"d_finished":1923,"c":6,"l":1455540,"d":1923},{"a":1455544,"name":"Finish","f":1455544,"d_finished":0,"c":0,"l":1456272,"d":728},{"name":"task_result","f":1449088,"d_finished":1957,"c":2,"l":1453791,"d":1957}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:02.869522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:02.858877Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-12-23T18:57:02.869562Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:02.869726Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:372:2384];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] Test command err: 2025-12-23T18:56:58.325106Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:58.359502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:58.359794Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 
2025-12-23T18:56:58.367307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:58.367565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:58.367819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:58.367939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:58.368087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:58.368209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:58.368341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:58.368452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:58.368550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:58.368664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:58.368770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:58.368889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:58.369011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:58.399721Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:58.399910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:58.399959Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:58.400151Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:58.400322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:58.400395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:58.400443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:58.400553Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:58.400623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:58.400668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:58.400705Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:58.400895Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:58.400970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:58.401023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:58.401055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:58.401163Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:58.401226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:58.401267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-12-23T18:56:58.401296Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:58.401366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:58.401418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:58.401447Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:58.401512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:58.401558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:58.401631Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:58.401864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:58.401916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:58.401949Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:58.402080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:58.402134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:58.402161Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:58.402207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:58.402246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:58.402276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2025-12-23T18:56:58.402322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:58.402362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:58.402392Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:58.402635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:58.402687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{
"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"6
1,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p"
:{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2025-12-23T18:57:02.636951Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] >> TOlapNaming::AlterColumnStoreFailed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2025-12-23T18:56:57.672178Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:57.694604Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:57.694830Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:57.700891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:57.701109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:57.701299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:57.701394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:57.701482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:57.701546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:57.701619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:57.701700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:57.701791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:57.701859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:57.701930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:57.702008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:57.702101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:57.723041Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:57.723215Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:57.723263Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:57.723404Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:57.723542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:57.723603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:57.723639Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:57.723710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:57.723774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:57.723817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:57.723841Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:57.723980Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:57.724027Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:57.724062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:57.724109Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:57.724195Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:57.724236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:57.724290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:57.724315Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:57.724359Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:57.724394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:57.724425Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:57.724474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:57.724508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:57.724555Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:57.724710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:57.724746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:57.724769Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:57.724896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:57.724937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:57.724958Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:57.724990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:57.725017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:57.725040Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:57.725072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:57.725095Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:57.725112Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:57.725233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:57.725289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-12-23T18:57:03.400236Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-12-23T18:57:03.400490Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-12-23T18:57:03.400666Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:03.400788Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:03.400930Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:03.401118Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:03.401305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:03.401477Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:03.401796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1059:2926] finished for tablet 9437184 2025-12-23T18:57:03.402223Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1058:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":6149400,"name":"_full_task","f":6149400,"d_finished":0,"c":0,"l":6162896,"d":13496},"events":[{"name":"bootstrap","f":6149660,"d_finished":1455,"c":1,"l":6151115,"d":1455},{"a":6162145,"name":"ack","f":6160683,"d_finished":1321,"c":1,"l":6162004,"d":2072},{"a":6162134,"name":"processing","f":6151291,"d_finished":4077,"c":3,"l":6162006,"d":4839},{"name":"ProduceResults","f":6150687,"d_finished":2348,"c":6,"l":6162558,"d":2348},{"a":6162562,"name":"Finish","f":6162562,"d_finished":0,"c":0,"l":6162896,"d":334},{"name":"task_result","f":6151307,"d_finished":2702,"c":2,"l":6160431,"d":2702}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:03.402301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:03.402647Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1058:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.011},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":6149400,"name":"_full_task","f":6149400,"d_finished":0,"c":0,"l":6163387,"d":13987},"events":[{"name":"bootstrap","f":6149660,"d_finished":1455,"c":1,"l":6151115,"d":1455},{"a":6162145,"name":"ack","f":6160683,"d_finished":1321,"c":1,"l":6162004,"d":2563},{"a":6162134,"name":"processing","f":6151291,"d_finished":4077,"c":3,"l":6162006,"d":5330},{"name":"ProduceResults","f":6150687,"d_finished":2348,"c":6,"l":6162558,"d":2348},{"a":6162562,"name":"Finish","f":6162562,"d_finished":0,"c":0,"l":6163387,"d":825},{"name":"task_result","f":6151307,"d_finished":2702,"c":2,"l":6160431,"d":2702}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:03.402772Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:03.385272Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4512;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4512;selected_rows=0; 2025-12-23T18:57:03.402822Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:03.403066Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516765.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516765.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=166516765.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516765.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516765.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=166516765.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515565.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146516765.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=146516765.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515565.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146515565.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=146515565.000000s;Name=;Codec=}; 2025-12-23T18:56:05.724089Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:05.756667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:05.756953Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:05.764568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:05.764865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:05.765104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:05.765237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:05.765380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:05.765498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:05.765609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:05.765722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:05.765831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:05.765954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:05.766064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:05.766182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:05.766290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:05.796762Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:05.796939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:05.797013Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:05.797217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:05.797390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:05.797485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:05.797533Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:05.797634Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:05.797706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:05.797752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:05.797786Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:05.797960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:05.798025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:05.798070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:05.798104Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:05.798203Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:05.798271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:05.798327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:05.798367Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:05.798434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:05.798476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:05.798511Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:05.798562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:05.798606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:05.798639Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:05.798876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:05.798945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:05.798980Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:05.799109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:05.799163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:05.799217Z node 1 :TX_ ... ground=cleanup;skip_reason=no_changes; 2025-12-23T18:57:05.510834Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:05.511088Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766516214051:max} readable: {1766516214051:max} at tablet 9437184 2025-12-23T18:57:05.511232Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:57:05.511445Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516214051:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:57:05.511549Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516214051:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:57:05.512134Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516214051:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:57:05.514006Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516214051:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:57:05.515105Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516214051:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1457:3401];trace_detailed=; 2025-12-23T18:57:05.515673Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:57:05.515908Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:57:05.516171Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.516359Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.516850Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:05.517026Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.517162Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.517403Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1457:3401] finished for tablet 9437184 2025-12-23T18:57:05.517954Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1456:3400];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":62360693,"name":"_full_task","f":62360693,"d_finished":0,"c":0,"l":62363184,"d":2491},"events":[{"name":"bootstrap","f":62361059,"d_finished":1024,"c":1,"l":62362083,"d":1024},{"a":62362510,"name":"ack","f":62362510,"d_finished":0,"c":0,"l":62363184,"d":674},{"a":62362489,"name":"processing","f":62362489,"d_finished":0,"c":0,"l":62363184,"d":695},{"name":"ProduceResults","f":62361735,"d_finished":646,"c":2,"l":62362876,"d":646},{"a":62362883,"name":"Finish","f":62362883,"d_finished":0,"c":0,"l":62363184,"d":301}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.518045Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1456:3400];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:05.518581Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1456:3400];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":62360693,"name":"_full_task","f":62360693,"d_finished":0,"c":0,"l":62363810,"d":3117},"events":[{"name":"bootstrap","f":62361059,"d_finished":1024,"c":1,"l":62362083,"d":1024},{"a":62362510,"name":"ack","f":62362510,"d_finished":0,"c":0,"l":62363810,"d":1300},{"a":62362489,"name":"processing","f":62362489,"d_finished":0,"c":0,"l":62363810,"d":1321},{"name":"ProduceResults","f":62361735,"d_finished":646,"c":2,"l":62362876,"d":646},{"a":62362883,"name":"Finish","f":62362883,"d_finished":0,"c":0,"l":62363810,"d":927}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1457:3401]->[1:1456:3400] 2025-12-23T18:57:05.518697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:05.513976Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:57:05.518774Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:05.518922Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1457:3401];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] Test command err: 2025-12-23T18:57:03.165219Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:03.196821Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:03.197065Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:03.204594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:03.204835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:03.205096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:03.205221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:03.205350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:03.205482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:03.205593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:03.205702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:03.205807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:03.205924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:03.206040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:03.206165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:03.206271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:03.234896Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:03.235106Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:03.235160Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:03.235376Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:03.235545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:03.235619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:03.235686Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:03.235771Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:03.235832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:03.235875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:03.235921Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:03.236131Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:03.236201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:03.236243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2025-12-23T18:57:03.236273Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:03.236365Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:03.236420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:03.236482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:03.236529Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:03.236594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:03.236636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:03.236663Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:03.236729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:03.236792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:03.236835Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:03.237077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:03.237139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:03.237181Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:03.237327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:03.237371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:03.237405Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:03.237456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:03.237492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:03.237520Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:03.237568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:03.237605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:03.237640Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:03.237849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:03.237897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
en=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-12-23T18:57:04.676504Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=2;fline=abstract.cpp:22;event=OnSourcePrepared;source_idx=2;prepared=1; 2025-12-23T18:57:04.676661Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=4;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=0;SRCS:[{0,12},{1,13},{2,14},{3,15},];}};]};SF:0;PR:0;); 2025-12-23T18:57:04.676729Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-12-23T18:57:04.676781Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:57:04.676822Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:57:04.676942Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:100;event=TEvTaskProcessedResult; 2025-12-23T18:57:04.676979Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-12-23T18:57:04.677053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:22;event=OnSourcePrepared;source_idx=0;prepared=1; 2025-12-23T18:57:04.677113Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:30;event=finish_source;source_idx=0; 2025-12-23T18:57:04.677200Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:30;event=finish_source;source_idx=1; 2025-12-23T18:57:04.677245Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:30;event=finish_source;source_idx=2; 2025-12-23T18:57:04.677278Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:30;event=finish_source;source_idx=3; 2025-12-23T18:57:04.677412Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.677625Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.677913Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:04.678093Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.678266Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.678759Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:458:2470] finished for tablet 9437184 2025-12-23T18:57:04.679375Z 
node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:454:2466];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.02},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.021}],"full":{"a":1986937,"name":"_full_task","f":1986937,"d_finished":0,"c":0,"l":2008610,"d":21673},"events":[{"name":"bootstrap","f":1987237,"d_finished":1792,"c":1,"l":1989029,"d":1792},{"a":2007647,"name":"ack","f":2007647,"d_finished":0,"c":0,"l":2008610,"d":963},{"a":2007608,"name":"processing","f":1989287,"d_finished":8052,"c":5,"l":2007439,"d":9054},{"name":"ProduceResults","f":1988368,"d_finished":2210,"c":7,"l":2008067,"d":2210},{"a":2008073,"name":"Finish","f":2008073,"d_finished":0,"c":0,"l":2008610,"d":537},{"name":"task_result","f":1989328,"d_finished":7938,"c":5,"l":2007436,"d":7938}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.679508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:454:2466];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:04.680079Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:454:2466];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.02},{"events":["l_ProduceResults","f_Finish"],"t":0.021},{"events":["l_ack","l_processing","l_Finish"],"t":0.022}],"full":{"a":1986937,"name":"_full_task","f":1986937,"d_finished":0,"c":0,"l":2009347,"d":22410},"events":[{"name":"bootstrap","f":1987237,"d_finished":1792,"c":1,"l":1989029,"d":1792},{"a":2007647,"name":"ack","f":2007647,"d_finished":0,"c":0,"l":2009347,"d":1700},{"a":2007608,"name":"processing","f":1989287,"d_finished":8052,"c":5,"l":2007439,"d":9791},{"name":"ProduceResults","f":1988368,"d_finished":2210,"c":7,"l":2008067,"d":2210},{"a":2008073,"name":"Finish","f":2008073,"d_finished":0,"c":0,"l":2009347,"d":1274},{"name":"task_result","f":1989328,"d_finished":7938,"c":5,"l":2007436,"d":7938}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.680214Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:04.654646Z;index_granules=0;index_portions=4;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9344;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9344;selected_rows=0; 2025-12-23T18:57:04.680273Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:04.680525Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:458:2470];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:54:36.466801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:54:36.466986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:36.467039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:54:36.467085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:54:36.467143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:54:36.467183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:54:36.467266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:54:36.467397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:54:36.468508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:54:36.469697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:54:36.576204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:54:36.576269Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:36.589524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:54:36.589780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:54:36.590017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:54:36.603774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:54:36.604068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:54:36.604942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:54:36.605204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:54:36.613432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:36.613832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:54:36.632433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:54:36.632534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:54:36.632739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:54:36.632801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:54:36.632882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:54:36.634129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:54:36.778535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.780857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.781982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2025-12-23T18:54:36.782062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:54:36.782147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... nStep5000039 State->FrontStep: 5000039 2025-12-23T18:57:05.802360Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:05.802429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-12-23T18:57:05.802669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:57:05.802865Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:05.802917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-12-23T18:57:05.803003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 39 2025-12-23T18:57:05.803374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:57:05.803432Z node 2 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:461: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-12-23T18:57:05.803503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:487: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-12-23T18:57:05.804340Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:57:05.804456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:57:05.804503Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 
72057594046678944, txId: 102 2025-12-23T18:57:05.804548Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2025-12-23T18:57:05.804598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2025-12-23T18:57:05.805292Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:57:05.805372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:57:05.805402Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:57:05.805432Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2025-12-23T18:57:05.805464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:57:05.805545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-12-23T18:57:05.808310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-12-23T18:57:05.808404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:39 msg type: 268697639 2025-12-23T18:57:05.808493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2025-12-23T18:57:05.808970Z node 2 :HIVE INFO: tablet_helpers.cpp:1623: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2025-12-23T18:57:05.809077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6411: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2025-12-23T18:57:05.809191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2025-12-23T18:57:05.811042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:57:05.811382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:57:05.812799Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:57:05.825435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6585: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2025-12-23T18:57:05.825520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-12-23T18:57:05.825656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:57:05.827999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:57:05.828199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:57:05.828242Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:57:05.828371Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:57:05.828412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:57:05.828453Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:57:05.828488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:57:05.828534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-12-23T18:57:05.828618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:744:2721] message: TxId: 102 2025-12-23T18:57:05.828671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:57:05.828718Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:57:05.828754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:57:05.828917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:57:05.831096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:57:05.831163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:808:2777] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-12-23T18:57:05.834516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore" 
AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "mess age" Type: "Utf8" } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:57:05.834808Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: alter_store.cpp:465: TAlterOlapStore Propose, path: /MyRoot/OlapStore, opId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:57:05.835105Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-12-23T18:57:05.837800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:57:05.838080Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:57:05.838432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:57:05.838482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:57:05.838967Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:57:05.839113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:57:05.839156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:846:2815] TestWaitNotification: OK eventTxId 103 |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ChunksV0MetaNormalizer [GOOD] Test command err: 2025-12-23T18:56:54.473772Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:54.495615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:54.495859Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:54.502071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:54.502261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:54.502518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:54.502603Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:54.502698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:54.502783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:54.502879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:54.502979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:54.503052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:54.503138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:54.503207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:54.503276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:54.503349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:54.525306Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:54.525653Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:54.525701Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:54.525875Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:54.526048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:54.526113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:54.526156Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:54.526229Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:54.526307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:54.526344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:54.526374Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:54.526547Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:54.526599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:54.526629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:54.526658Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:54.526756Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:54.526803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:54.526838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:54.526861Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:54.526936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:54.526977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:54.527007Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-12-23T18:56:54.527052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:54.527091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:54.527120Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:54.527300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:54.527376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:54.527403Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:54.527508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:54.527544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:54.527574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:54.527615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:54.527645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:54.527667Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:54.527704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:54.527729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:54.527749Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:54.527846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2025-12-23T18:56:54.527875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ternal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-12-23T18:57:04.093852Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2025-12-23T18:57:04.093879Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:57:04.093910Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:57:04.094218Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:04.094300Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.094323Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:57:04.094393Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2025-12-23T18:57:04.094424Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2025-12-23T18:57:04.094518Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:318:2319];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-12-23T18:57:04.094614Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.094703Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.094859Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.094961Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:04.095033Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.095095Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.095344Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:320:2320] finished for tablet 9437184 2025-12-23T18:57:04.095749Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:318:2319];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.065},{"events":["l_task_result"],"t":0.758},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.759}],"full":{"a":9269202,"name":"_full_task","f":9269202,"d_finished":0,"c":0,"l":10029088,"d":759886},"events":[{"name":"bootstrap","f":9269470,"d_finished":1558,"c":1,"l":9271028,"d":1558},{"a":10028640,"name":"ack","f":9334969,"d_finished":297299,"c":421,"l":10028575,"d":297747},{"a":10028631,"name":"processing","f":9271293,"d_finished":639273,"c":843,"l":10028577,"d":639730},{"name":"ProduceResults","f":9270533,"d_finished":517900,"c":1266,"l":10028801,"d":517900},{"a":10028805,"name":"Finish","f":10028805,"d_finished":0,"c":0,"l":10029088,"d":283},{"name":"task_result","f":9271318,"d_finished":331810,"c":422,"l":10027617,"d":331810}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.095841Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:318:2319];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:04.096229Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:318:2319];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.065},{"events":["l_task_result"],"t":0.758},{"events":["l_ProduceResults","f_Finish"],"t":0.759},{"events":["l_ack","l_processing","l_Finish"],"t":0.76}],"full":{"a":9269202,"name":"_full_task","f":9269202,"d_finished":0,"c":0,"l":10029577,"d":760375},"events":[{"name":"bootstrap","f":9269470,"d_finished":1558,"c":1,"l":9271028,"d":1558},{"a":10028640,"name":"ack","f":9334969,"d_finished":297299,"c":421,"l":10028575,"d":298236},{"a":10028631,"name":"processing","f":9271293,"d_finished":639273,"c":843,"l":10028577,"d":640219},{"name":"ProduceResults","f":9270533,"d_finished":517900,"c":1266,"l":10028801,"d":517900},{"a":10028805,"name":"Finish","f":10028805,"d_finished":0,"c":0,"l":10029577,"d":772},{"name":"task_result","f":9271318,"d_finished":331810,"c":422,"l":10027617,"d":331810}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:04.096301Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:03.333839Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2025-12-23T18:57:04.096337Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:04.096475Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:320:2320];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2025-12-23T18:56:59.965278Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:59.995254Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 
2025-12-23T18:56:59.995469Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:00.000985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:00.001186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:00.001338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:00.001435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:00.001545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:00.001615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:00.001687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:00.001766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:00.001851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:00.001934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:00.001993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:00.002048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:00.002136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:00.024513Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:00.024685Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:00.024738Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:00.024931Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:00.025101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:00.025170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:00.025214Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:00.025314Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:00.025383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:00.025452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:00.025491Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:00.025709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:00.025790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:00.025861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:00.025898Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:00.025985Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:00.026062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:00.026140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-12-23T18:57:00.026182Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:00.026252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:00.026292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:00.026320Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:00.026397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:00.026439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:00.026471Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:00.026654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:00.026704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:00.026727Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:00.026847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:00.026882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:00.026901Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:00.026948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:00.026971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:00.026994Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2025-12-23T18:57:00.027033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:00.027069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:00.027093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:00.027181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:00.027205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-12-23T18:57:05.337098Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-12-23T18:57:05.337449Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-12-23T18:57:05.337671Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.337840Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.338030Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.338312Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:05.338526Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.338719Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.339206Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1059:2926] finished for tablet 9437184 2025-12-23T18:57:05.339798Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1058:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.034},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.037}],"full":{"a":5791359,"name":"_full_task","f":5791359,"d_finished":0,"c":0,"l":5828958,"d":37599},"events":[{"name":"bootstrap","f":5791643,"d_finished":2063,"c":1,"l":5793706,"d":2063},{"a":5827957,"name":"ack","f":5826050,"d_finished":1690,"c":1,"l":5827740,"d":2691},{"a":5827940,"name":"processing","f":5793919,"d_finished":4848,"c":3,"l":5827744,"d":5866},{"name":"ProduceResults","f":5793112,"d_finished":3027,"c":6,"l":5828450,"d":3027},{"a":5828456,"name":"Finish","f":5828456,"d_finished":0,"c":0,"l":5828958,"d":502},{"name":"task_result","f":5793938,"d_finished":3089,"c":2,"l":5825790,"d":3089}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.339894Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:05.340465Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1058:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.034},{"events":["l_ProduceResults","f_Finish"],"t":0.037},{"events":["l_ack","l_processing","l_Finish"],"t":0.038}],"full":{"a":5791359,"name":"_full_task","f":5791359,"d_finished":0,"c":0,"l":5829616,"d":38257},"events":[{"name":"bootstrap","f":5791643,"d_finished":2063,"c":1,"l":5793706,"d":2063},{"a":5827957,"name":"ack","f":5826050,"d_finished":1690,"c":1,"l":5827740,"d":3349},{"a":5827940,"name":"processing","f":5793919,"d_finished":4848,"c":3,"l":5827744,"d":6524},{"name":"ProduceResults","f":5793112,"d_finished":3027,"c":6,"l":5828450,"d":3027},{"a":5828456,"name":"Finish","f":5828456,"d_finished":0,"c":0,"l":5829616,"d":1160},{"name":"task_result","f":5793938,"d_finished":3089,"c":2,"l":5825790,"d":3089}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.340583Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:05.297577Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-12-23T18:57:05.340651Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:05.340948Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2025-12-23T18:57:01.279419Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:01.312395Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:01.312606Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:01.319868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:01.320166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:01.320374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:01.320515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:01.320649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:01.320761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:01.320872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:01.320988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:01.321115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:01.321226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:01.321339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:01.321455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:01.321578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:01.351960Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:01.352131Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:01.352175Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:01.352340Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:01.352558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:01.352634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:01.352680Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:01.352762Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:01.352821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:01.352881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:01.352919Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:01.353124Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:01.353186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:01.353228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:01.353270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:01.353372Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:01.353424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:01.353461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:01.353494Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:01.353546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:01.353581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:01.353611Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:01.353675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:01.353725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:01.353761Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:01.353995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:01.354044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:01.354075Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:01.354215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:01.354262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:01.354293Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:01.354338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:01.354373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:01.354405Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:01.354451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:01.354485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:01.354514Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:01.354650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:01.354692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... _names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-12-23T18:57:05.273914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-12-23T18:57:05.274294Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=2791;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-12-23T18:57:05.274538Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.274727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.274958Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.275244Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:05.275463Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.275667Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.276092Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:477:2488] finished for tablet 9437184 2025-12-23T18:57:05.276693Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:476:2487];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":4392867,"name":"_full_task","f":4392867,"d_finished":0,"c":0,"l":4410675,"d":17808},"events":[{"name":"bootstrap","f":4393268,"d_finished":1917,"c":1,"l":4395185,"d":1917},{"a":4409708,"name":"ack","f":4407637,"d_finished":1849,"c":1,"l":4409486,"d":2816},{"a":4409684,"name":"processing","f":4395375,"d_finished":5120,"c":3,"l":4409493,"d":6111},{"name":"ProduceResults","f":4394631,"d_finished":3114,"c":6,"l":4410195,"d":3114},{"a":4410202,"name":"Finish","f":4410202,"d_finished":0,"c":0,"l":4410675,"d":473},{"name":"task_result","f":4395394,"d_finished":3203,"c":2,"l":4407377,"d":3203}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.276794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:476:2487];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:05.277343Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:476:2487];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":4392867,"name":"_full_task","f":4392867,"d_finished":0,"c":0,"l":4411345,"d":18478},"events":[{"name":"bootstrap","f":4393268,"d_finished":1917,"c":1,"l":4395185,"d":1917},{"a":4409708,"name":"ack","f":4407637,"d_finished":1849,"c":1,"l":4409486,"d":3486},{"a":4409684,"name":"processing","f":4395375,"d_finished":5120,"c":3,"l":4409493,"d":6781},{"name":"ProduceResults","f":4394631,"d_finished":3114,"c":6,"l":4410195,"d":3114},{"a":4410202,"name":"Finish","f":4410202,"d_finished":0,"c":0,"l":4411345,"d":1143},{"name":"task_result","f":4395394,"d_finished":3203,"c":2,"l":4407377,"d":3203}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:05.277450Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:05.254057Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2025-12-23T18:57:05.277506Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:05.277786Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:477:2488];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::WriteOverload+InStore [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::WriteReadDuplicate |98.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader >> Normalizers::CleanEmptyPortionsNormalizer |98.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |98.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore [GOOD] Test command err: 2025-12-23T18:57:02.465803Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:02.499852Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:02.500068Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:02.507424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:02.507682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:02.507950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:02.508108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:02.508227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:02.508357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:02.508485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:02.508604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:02.508705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-12-23T18:57:02.508821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:02.508933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:02.509058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:02.509169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:02.539600Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:02.539795Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:02.539850Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:02.540045Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:02.540243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:02.540324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:02.540370Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:02.540464Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:02.540528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:02.540572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:02.540633Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:02.540838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:02.540910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:02.540952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:02.540988Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:02.541072Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:02.541126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:02.541194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:02.541227Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:02.541286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:02.541344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:02.541395Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:02.541460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:02.541504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:02.541540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:02.541789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:02.541839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:02.541888Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:02.542031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:02.542074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:02.542105Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:02.542151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:02.542188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:02.542219Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:02.542270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:02.542309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:02.542340Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:02.542495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:02.542542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
G: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-12-23T18:57:06.727588Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:06.760092Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=2be5c3e0-e03111f0-aae9a79c-19fe8bf7; 2025-12-23T18:57:06.760412Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=188;count=3; 2025-12-23T18:57:06.760473Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=380;count=4;size_of_meta=112; 2025-12-23T18:57:06.760530Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=540;count=2;size_of_portion=192; 2025-12-23T18:57:06.761095Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-12-23T18:57:06.761226Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=3;operation_id=2; 2025-12-23T18:57:06.773605Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-12-23T18:57:06.773819Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=5;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:06.776031Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=2c3068b4-e03111f0-a995b4ea-ad92fae8; 2025-12-23T18:57:06.776294Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=282;count=5; 2025-12-23T18:57:06.776358Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=570;count=6;size_of_meta=112; 2025-12-23T18:57:06.776417Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=810;count=3;size_of_portion=192; 2025-12-23T18:57:06.776988Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-12-23T18:57:06.777116Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=4;operation_id=3; 2025-12-23T18:57:06.789412Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2025-12-23T18:57:06.789636Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:06.791798Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=2caab1c8-e03111f0-87062fca-114faf02; 2025-12-23T18:57:06.792067Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=376;count=7; 2025-12-23T18:57:06.792127Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=760;count=8;size_of_meta=112; 2025-12-23T18:57:06.792185Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1080;count=4;size_of_portion=192; 2025-12-23T18:57:06.792726Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-12-23T18:57:06.792851Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=5;operation_id=4; 2025-12-23T18:57:06.805165Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2025-12-23T18:57:06.805403Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:06.828206Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=5;last=5; 2025-12-23T18:57:06.828321Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=6330728;operation_id=2d67c9b6-e03111f0-b7a323f2-17d3576a;in_flight=1;size_in_flight=6330728; 2025-12-23T18:57:07.765408Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=5;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2025-12-23T18:57:07.824924Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=2d67c9b6-e03111f0-b7a323f2-17d3576a; 2025-12-23T18:57:07.825238Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=470;count=9; 2025-12-23T18:57:07.825316Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=950;count=10;size_of_meta=112; 2025-12-23T18:57:07.825391Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1350;count=5;size_of_portion=192; 2025-12-23T18:57:07.826034Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-12-23T18:57:07.826171Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=6;operation_id=5; 2025-12-23T18:57:07.838522Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2025-12-23T18:57:07.838820Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516770.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516770.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=166516770.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516770.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516770.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=166516770.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515570.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146516770.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=146516770.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515570.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146515570.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=146515570.000000s;Name=;Codec=}; 2025-12-23T18:56:10.865055Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:10.898986Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:10.899244Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:10.916908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:10.917364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:10.917715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:10.917871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:10.918021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:10.918142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:10.918275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:10.918398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:10.918520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:10.918658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:10.918802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:10.918937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:10.919058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:10.952012Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:10.952201Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:10.952264Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:10.952471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:10.952649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:10.952740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:10.952786Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:10.952885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:10.952955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:10.953004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:10.953035Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:10.953235Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:10.953306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:10.953354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:10.953391Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:10.953494Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:10.953550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:10.953595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:10.953642Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:10.953722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:10.953769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:10.953808Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:10.953878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:10.953923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:10.953955Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:10.954201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:10.954259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:10.954294Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:10.954460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:10.954514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:10.954547Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:10.954603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:10.954655Z node 1 :TX_COLUMNSHARD WARN: l ... umn_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:57:10.475509Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 9 at tablet 9437184 2025-12-23T18:57:10.475837Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766516219013:max} readable: {1766516219013:max} at tablet 9437184 2025-12-23T18:57:10.475974Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:57:10.476214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516219013:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:57:10.476289Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516219013:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-12-23T18:57:10.476828Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516219013:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-12-23T18:57:10.478590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516219013:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2025-12-23T18:57:10.479590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766516219013:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:963:2908];trace_detailed=; 2025-12-23T18:57:10.480068Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:83;ff_first=(column_ids=1;column_names=timestamp;);; 2025-12-23T18:57:10.480319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:98;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-12-23T18:57:10.480567Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:10.480747Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:10.481069Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:10.481235Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:10.481382Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:10.481619Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:963:2908] finished for tablet 9437184 2025-12-23T18:57:10.482199Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:962:2907];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":61469485,"name":"_full_task","f":61469485,"d_finished":0,"c":0,"l":61471691,"d":2206},"events":[{"name":"bootstrap","f":61469775,"d_finished":992,"c":1,"l":61470767,"d":992},{"a":61471032,"name":"ack","f":61471032,"d_finished":0,"c":0,"l":61471691,"d":659},{"a":61471013,"name":"processing","f":61471013,"d_finished":0,"c":0,"l":61471691,"d":678},{"name":"ProduceResults","f":61470443,"d_finished":623,"c":2,"l":61471393,"d":623},{"a":61471399,"name":"Finish","f":61471399,"d_finished":0,"c":0,"l":61471691,"d":292}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:10.482293Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:962:2907];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:10.482791Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:962:2907];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":61469485,"name":"_full_task","f":61469485,"d_finished":0,"c":0,"l":61472338,"d":2853},"events":[{"name":"bootstrap","f":61469775,"d_finished":992,"c":1,"l":61470767,"d":992},{"a":61471032,"name":"ack","f":61471032,"d_finished":0,"c":0,"l":61472338,"d":1306},{"a":61471013,"name":"processing","f":61471013,"d_finished":0,"c":0,"l":61472338,"d":1325},{"name":"ProduceResults","f":61470443,"d_finished":623,"c":2,"l":61471393,"d":623},{"a":61471399,"name":"Finish","f":61471399,"d_finished":0,"c":0,"l":61472338,"d":939}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:963:2908]->[1:962:2907] 2025-12-23T18:57:10.482923Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:10.478559Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-12-23T18:57:10.482986Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:10.483121Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:963:2908];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 80000/4873744 0/0 >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot >> TColumnShardTestReadWrite::WriteReadNoCompression >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] Test command err: 2025-12-23T18:56:39.832557Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:39.852427Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:39.852662Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:39.857824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:39.858024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:39.858210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2025-12-23T18:56:39.858297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:39.858403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:39.858474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:39.858546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:39.858624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:39.858694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:39.858770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:39.858839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:39.858930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:39.858998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:39.878103Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:39.878246Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:39.878279Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:39.878395Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:39.878516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:39.878576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:39.878606Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:39.878662Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:39.878711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:39.878771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:39.878798Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:39.878936Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:39.878978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:39.879016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:39.879038Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:39.879100Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:39.879133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:39.879168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:39.879188Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:39.879225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:39.879254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:39.879283Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-12-23T18:56:39.879328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:39.879357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:39.879385Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:39.879536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:39.879569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:39.879587Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:39.879725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:39.879757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:39.879777Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:39.879810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:39.879834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:39.879856Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:39.879882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:39.879917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:39.879950Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:39.880038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2025-12-23T18:56:39.880064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... _stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2025-12-23T18:57:09.560941Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=650; 2025-12-23T18:57:09.560987Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=38315; 2025-12-23T18:57:09.561025Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=38445; 2025-12-23T18:57:09.561077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2025-12-23T18:57:09.561362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=249; 2025-12-23T18:57:09.561392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=39142; 2025-12-23T18:57:09.561525Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=85; 2025-12-23T18:57:09.561633Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=63; 2025-12-23T18:57:09.561895Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=227; 2025-12-23T18:57:09.562168Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=231; 2025-12-23T18:57:09.571420Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=9190; 2025-12-23T18:57:09.582562Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11038; 2025-12-23T18:57:09.582667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2025-12-23T18:57:09.582731Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2025-12-23T18:57:09.582791Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:57:09.582876Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2025-12-23T18:57:09.582907Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2025-12-23T18:57:09.583025Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=87; 2025-12-23T18:57:09.583073Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-12-23T18:57:09.583143Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2025-12-23T18:57:09.583240Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2025-12-23T18:57:09.583337Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=59; 2025-12-23T18:57:09.583384Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=66973; 2025-12-23T18:57:09.583509Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:09.583619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:09.583763Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:09.583837Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:09.583904Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:09.584066Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:09.584126Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:09.584162Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:09.584204Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:09.584268Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515902292;tx_id=18446744073709551615;;current_snapshot_ts=1766516201316; 2025-12-23T18:57:09.584310Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:09.584353Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:09.584391Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:09.584471Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:09.584644Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.031000s; 2025-12-23T18:57:09.588324Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:09.588688Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:09.588744Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:09.588805Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:09.588860Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:09.588919Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515902292;tx_id=18446744073709551615;;current_snapshot_ts=1766516201316; 2025-12-23T18:57:09.588967Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:09.589018Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:09.589061Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:09.589133Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:57:09.589181Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:09.589984Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.044000s; 2025-12-23T18:57:09.590030Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516764.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516764.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515564.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-12-23T18:56:06.287052Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:06.319873Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:06.320122Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:06.332244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:06.332515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:06.332752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:06.332881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:06.333026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:06.333166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:06.333278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:06.333394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:06.333517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:06.333651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:06.333758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:06.333873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:06.333985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:06.365047Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2025-12-23T18:56:06.365223Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:06.365273Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:06.365442Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:06.365604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:06.365702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:06.365755Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:06.365854Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:06.365928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:06.365981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:06.366014Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:06.366168Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:06.366226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:06.366269Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:06.366299Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:06.366387Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:06.366449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:06.366497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:06.366528Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:06.366588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:06.366623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:06.366651Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:06.366713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:06.366786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:06.366819Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:06.367040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:06.367110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:06.367149Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:06.367275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:06.367323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:06.367354Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:06.367404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:06.367445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:06.367473Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:06.367515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... CUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=8; 2025-12-23T18:57:11.519453Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=94; 2025-12-23T18:57:11.519483Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=6030; 2025-12-23T18:57:11.519512Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=6115; 2025-12-23T18:57:11.519564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=7; 2025-12-23T18:57:11.519625Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=30; 2025-12-23T18:57:11.519688Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=6532; 2025-12-23T18:57:11.519779Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=52; 2025-12-23T18:57:11.519867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=52; 2025-12-23T18:57:11.519946Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=50; 2025-12-23T18:57:11.520005Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=39; 2025-12-23T18:57:11.521955Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1873; 2025-12-23T18:57:11.524193Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2170; 2025-12-23T18:57:11.524250Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-12-23T18:57:11.524281Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-12-23T18:57:11.524303Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=3; 2025-12-23T18:57:11.524362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=29; 2025-12-23T18:57:11.524395Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-12-23T18:57:11.524465Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=34; 2025-12-23T18:57:11.524554Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-12-23T18:57:11.524603Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=19; 2025-12-23T18:57:11.524655Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=27; 2025-12-23T18:57:11.524803Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=117; 2025-12-23T18:57:11.524826Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=17597; 2025-12-23T18:57:11.524911Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=21099992;raw_bytes=29608900;count=3;records=320000} evicted {blob_bytes=10565848;raw_bytes=16084450;count=1;records=160000} at tablet 9437184 2025-12-23T18:57:11.524995Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:11.525045Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:11.525100Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:11.531924Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=1; 2025-12-23T18:57:11.532064Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:11.532132Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2025-12-23T18:57:11.532179Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515920055;tx_id=18446744073709551615;;current_snapshot_ts=1766516167445; 2025-12-23T18:57:11.532239Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:11.532273Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:11.532299Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:11.532353Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:11.532506Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.085000s; 2025-12-23T18:57:11.533996Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:11.534189Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:11.534235Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:11.534302Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2025-12-23T18:57:11.534360Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515920055;tx_id=18446744073709551615;;current_snapshot_ts=1766516167445; 2025-12-23T18:57:11.534397Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:11.534433Z node 
1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:11.534474Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:11.534528Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:11.534777Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.116000s; 2025-12-23T18:57:11.534819Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1682:3521];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/10565848 160000/10565848 0/0 160000/10565848 |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TColumnShardTestReadWrite::ReadStale |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2025-12-23T18:57:07.946217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:07.970765Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:07.971006Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:07.976479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:07.976645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:07.976798Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:07.976871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:07.976963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:07.977042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:07.977113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:07.977198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:07.977299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:07.977381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:07.977472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:07.977536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:07.977597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:08.008468Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:08.008662Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:08.008719Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:08.008903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:08.009054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:08.009115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:08.009156Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:08.009272Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:08.009336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:08.009376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:08.009403Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:08.009575Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:08.009638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:08.009692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:08.009725Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:08.009842Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:08.009918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:08.009961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:08.009990Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:08.010041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:08.010076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-12-23T18:57:08.010103Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:08.010158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:08.010205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:08.010246Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:08.010463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:08.010511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:08.010540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:08.010653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:08.010704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:08.010756Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:08.010819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:08.010870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:08.010902Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:08.010957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:08.010992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:08.011043Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2025-12-23T18:57:08.011235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:08.011278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tab ... doc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-12-23T18:57:11.333819Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-12-23T18:57:11.333850Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:57:11.333877Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:57:11.334228Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:11.334357Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:11.334386Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:57:11.334474Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2025-12-23T18:57:11.334533Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2025-12-23T18:57:11.334754Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-12-23T18:57:11.334882Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:11.335018Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:11.335109Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:11.335343Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:11.335462Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:11.335551Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:11.335733Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:464:2476] finished for tablet 9437184 2025-12-23T18:57:11.336091Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:463:2475];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.005},{"events":["f_ack"],"t":0.006},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.007}],"full":{"a":3804091,"name":"_full_task","f":3804091,"d_finished":0,"c":0,"l":3811941,"d":7850},"events":[{"name":"bootstrap","f":3804293,"d_finished":866,"c":1,"l":3805159,"d":866},{"a":3811478,"name":"ack","f":3810364,"d_finished":914,"c":1,"l":3811278,"d":1377},{"a":3811468,"name":"processing","f":3805260,"d_finished":2271,"c":3,"l":3811280,"d":2744},{"name":"ProduceResults","f":3804853,"d_finished":1561,"c":6,"l":3811721,"d":1561},{"a":3811725,"name":"Finish","f":3811725,"d_finished":0,"c":0,"l":3811941,"d":216},{"name":"task_result","f":3805271,"d_finished":1319,"c":2,"l":3810045,"d":1319}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:11.336150Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:11.336430Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:463:2475];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.005},{"events":["f_ack"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.007},{"events":["l_ack","l_processing","l_Finish"],"t":0.008}],"full":{"a":3804091,"name":"_full_task","f":3804091,"d_finished":0,"c":0,"l":3812329,"d":8238},"events":[{"name":"bootstrap","f":3804293,"d_finished":866,"c":1,"l":3805159,"d":866},{"a":3811478,"name":"ack","f":3810364,"d_finished":914,"c":1,"l":3811278,"d":1765},{"a":3811468,"name":"processing","f":3805260,"d_finished":2271,"c":3,"l":3811280,"d":3132},{"name":"ProduceResults","f":3804853,"d_finished":1561,"c":6,"l":3811721,"d":1561},{"a":3811725,"name":"Finish","f":3811725,"d_finished":0,"c":0,"l":3812329,"d":604},{"name":"task_result","f":3805271,"d_finished":1319,"c":2,"l":3810045,"d":1319}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:11.336491Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:11.326368Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2025-12-23T18:57:11.336520Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:11.336610Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] >> TColumnShardTestReadWrite::ReadStale [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] 
{asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=166516769.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516769.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516769.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=166516769.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516769.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516769.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=166516769.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515569.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146516769.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=146516769.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515569.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146515569.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=146515569.000000s;Name=;Codec=}; 2025-12-23T18:56:10.020515Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:10.040608Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:10.040826Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:10.046781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:10.047070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:10.047252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:10.047338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:10.047460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:10.047558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:10.047623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:10.047703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:10.047783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:10.047862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:10.047932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:10.048000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:10.048063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:10.068321Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:10.068469Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:10.068528Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:10.068668Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:10.068780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:10.068851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:10.068898Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:10.068963Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:10.069014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:10.069052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:10.069072Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:10.069191Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:10.069235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:10.069261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:10.069281Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:10.069344Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:10.069391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:10.069441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:10.069485Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:10.069543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:10.069571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-12-23T18:56:10.069594Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:10.069625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:10.069657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:10.069680Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:10.069825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:10.069860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:10.069880Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:10.069966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:10.069995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:5 ... 
2025-12-23T18:57:15.350431Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=112; 2025-12-23T18:57:15.350475Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9274; 2025-12-23T18:57:15.350520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9393; 2025-12-23T18:57:15.350582Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2025-12-23T18:57:15.350667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=39; 2025-12-23T18:57:15.350707Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=10012; 2025-12-23T18:57:15.350892Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=107; 2025-12-23T18:57:15.351028Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=72; 2025-12-23T18:57:15.351167Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=91; 2025-12-23T18:57:15.351279Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=72; 2025-12-23T18:57:15.358307Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=6941; 2025-12-23T18:57:15.364868Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6418; 2025-12-23T18:57:15.364996Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2025-12-23T18:57:15.365055Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-12-23T18:57:15.365099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:57:15.365174Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=42; 2025-12-23T18:57:15.365215Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:57:15.365299Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2025-12-23T18:57:15.365340Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-12-23T18:57:15.365411Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2025-12-23T18:57:15.365502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=55; 2025-12-23T18:57:15.365907Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=363; 2025-12-23T18:57:15.365960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=35517; 2025-12-23T18:57:15.366103Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:15.366225Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:15.366283Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:15.366355Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:15.387589Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=1; 2025-12-23T18:57:15.387784Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:15.387888Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 
2025-12-23T18:57:15.387968Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515919856;tx_id=18446744073709551615;;current_snapshot_ts=1766516171335; 2025-12-23T18:57:15.388014Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:15.388062Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:15.388106Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:15.388209Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:15.388463Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.015000s; 2025-12-23T18:57:15.390504Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:15.390721Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:15.390800Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:15.390908Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 2025-12-23T18:57:15.390995Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515919856;tx_id=18446744073709551615;;current_snapshot_ts=1766516171335; 2025-12-23T18:57:15.391045Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:15.391100Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:15.391147Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:15.391253Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:15.391768Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.031000s; 2025-12-23T18:57:15.391815Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2014:3834];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2025-12-23T18:57:10.953346Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:10.977789Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:10.978038Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:10.984665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:10.984934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:10.985139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:10.985281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:10.985449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:10.985586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:10.985693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:10.985812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:10.985931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:10.986019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:10.986125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:10.986209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:10.986307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:11.008273Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:11.008444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:11.008497Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:11.008636Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:11.008786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:11.008868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:11.008907Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:11.008988Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:11.009040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:11.009071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:11.009093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:11.009258Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:11.009330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:11.009362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:11.009382Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:11.009444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:11.009507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:11.009566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:11.009587Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:11.009628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:11.009654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:11.009678Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:11.009719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:11.009752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:11.009778Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:11.010073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:11.010127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:11.010154Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:11.010268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:11.010304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:11.010325Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:11.010370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:11.010454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:11.010487Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:11.010520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:11.010545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:11.010563Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:11.010690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:11.010724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tab ... 
s=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-12-23T18:57:14.285244Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-12-23T18:57:14.285270Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:57:14.285298Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:57:14.285604Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:14.285705Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:14.285730Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:57:14.285805Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2025-12-23T18:57:14.285858Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2025-12-23T18:57:14.286023Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-12-23T18:57:14.286106Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:14.286181Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:14.286253Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:14.286427Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:14.286502Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:14.286583Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:14.286732Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:464:2476] finished for tablet 9437184 2025-12-23T18:57:14.287024Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:463:2475];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["l_task_result"],"t":0.004},{"events":["f_ack"],"t":0.005},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.006}],"full":{"a":3752897,"name":"_full_task","f":3752897,"d_finished":0,"c":0,"l":3759339,"d":6442},"events":[{"name":"bootstrap","f":3753084,"d_finished":710,"c":1,"l":3753794,"d":710},{"a":3758970,"name":"ack","f":3758147,"d_finished":676,"c":1,"l":3758823,"d":1045},{"a":3758962,"name":"processing","f":3753879,"d_finished":1764,"c":3,"l":3758825,"d":2141},{"name":"ProduceResults","f":3753534,"d_finished":1205,"c":6,"l":3759159,"d":1205},{"a":3759162,"name":"Finish","f":3759162,"d_finished":0,"c":0,"l":3759339,"d":177},{"name":"task_result","f":3753887,"d_finished":1057,"c":2,"l":3757868,"d":1057}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:14.287077Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:14.287311Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:463:2475];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["l_task_result"],"t":0.004},{"events":["f_ack"],"t":0.005},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.006}],"full":{"a":3752897,"name":"_full_task","f":3752897,"d_finished":0,"c":0,"l":3759660,"d":6763},"events":[{"name":"bootstrap","f":3753084,"d_finished":710,"c":1,"l":3753794,"d":710},{"a":3758970,"name":"ack","f":3758147,"d_finished":676,"c":1,"l":3758823,"d":1366},{"a":3758962,"name":"processing","f":3753879,"d_finished":1764,"c":3,"l":3758825,"d":2462},{"name":"ProduceResults","f":3753534,"d_finished":1205,"c":6,"l":3759159,"d":1205},{"a":3759162,"name":"Finish","f":3759162,"d_finished":0,"c":0,"l":3759660,"d":498},{"name":"task_result","f":3753887,"d_finished":1057,"c":2,"l":3757868,"d":1057}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:14.287363Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:14.278960Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2025-12-23T18:57:14.287388Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:14.287497Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD] Test command err: 2025-12-23T18:57:13.718323Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:13.740468Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:13.740667Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:13.746619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:13.746845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:13.747046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:13.747126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:13.747206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:13.747266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:13.747356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:13.747430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:13.747517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:13.747589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:13.747646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:13.747723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:13.747812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:13.778665Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:13.778836Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:13.778881Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:13.779035Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:13.779259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:13.779317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:13.779356Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:13.779439Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:13.779494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:13.779526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:13.779573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:13.779714Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:13.779771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:13.779799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:13.779818Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:13.779899Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:13.779940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:13.779971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:13.779997Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:13.780039Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:13.780064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:13.780082Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:13.780130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:13.780172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:13.780211Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:13.780387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:13.780425Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:13.780446Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:13.780557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:13.780589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:13.780612Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:13.780646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:13.780671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:13.780689Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:13.780721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:13.780748Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:13.780769Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:13.780880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:13.780931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2025-12-23T18:57:14.505625Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=1; 2025-12-23T18:57:14.505956Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:14.506022Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:14.506103Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:14.506147Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:14.506217Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:57:14.522526Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:14.522653Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:14.522735Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:14.522880Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:14.523456Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 1 version: {1766515874702:max} readable: {1766516234702:max} at tablet 9437184 2025-12-23T18:57:14.535527Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2025-12-23T18:57:14.537121Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1766515874702:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=constructor.cpp:17;event=overriden_columns;ids=1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043; 2025-12-23T18:57:14.538997Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1766515874702:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 6 } Columns { Id: 7 } Columns { Id: 8 } Columns { Id: 9 } Columns { Id: 10 } Columns { Id: 4294967040 } Columns { Id: 4294967041 } Columns { Id: 4294967042 } Columns { Id: 4294967043 } } } ; 2025-12-23T18:57:14.539135Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1766515874702:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 6 } Columns { Id: 7 } Columns { Id: 8 } Columns { Id: 9 } Columns { Id: 10 } Columns { Id: 4294967040 } Columns { Id: 4294967041 } Columns { Id: 4294967042 } Columns { Id: 4294967043 } } } ; 2025-12-23T18:57:14.541455Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1766515874702:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6},{"from":8},{"from":10},{"from":12},{"from":14},{"from":16},{"from":18},{"from":20},{"from":22},{"from":24},{"from":26},{"from":28}]},{"owner_id":18,"inputs":[{"from":29}]},{"owner_id":2,"inputs":[{"from":29}]},{"owner_id":20,"inputs":[{"from":29}]},{"owner_id":4,"inputs":[{"from":29}]},{"owner_id":22,"inputs":[{"from":29}]},{"owner_id":6,"inputs":[{"from":29}]},{"owner_id":24,"inputs":[{"from":29}]},{"owner_id":8,"inputs":[{"from":29}]},{"owner_id":26,"inputs":[{"from":29}]},{"owner_id":10,"inputs":[{"from":29}]},{"owner_id":28,"inputs":[{"from":29}]},{"owner_id":29,"inputs":[{"from":30}]},{"owner_id":12,"inputs":[{"from":29}]},{"owner_id":30,"inputs":[]},{"owner_id":14,"inputs":[{"from":29}]},{"owner_id":16,"inputs":[{"from":29}]}],"nodes":{"8":{"p":{"i":"4","p":{"address":{"name":"uid","id":4}},"o":"4","t":"AssembleOriginalData"},"w":33,"id":8},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":33,"id":2},"18":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":33,"id":18},"0":{"p":{"i":"1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043","t":"Projection"},"w":462,"id":0},"4":{"p":{"i":"2","p":{"address":{"name":"resource_type","id":2}},"o":"2","t":"AssembleOriginalData"},"w":33,"id":4},"20":{"p":{"i":"10","p":{"address":{"name":"request_id","id":10}},"o":"10","t":"AssembleOriginalData"},"w":33,"id":20},"16":{"p":{"i":"8","p":{"address":{"name":"ingested_at","id":8}},"o":"8","t":"AssembleOriginalData"},"w":33,"id":16},"24":{"p":{"i":"4294967041","p":{"address":{"name":"_yql_tx_id","id":4294967041}},"o":"4294967041","t":"AssembleOriginalData"},"w":33,"id":24},"14":{"p":{"i":"7","p":{"address":{"name":"json_payload","id":7}},"o":"7","t":"AssembleOriginalData"},"w":33,"id":14},"10":{"p":{"i":"5","p":{"address":{"name":"level","id":5}},"o":"5","t":"AssembleOriginalData"},"w":33,"id":10},"29":{"p":{"i":"0","p":{"data":[{"name":"_yql_plan_step","id":4294967040},{"name":"_yql_tx_id","id":4294967041},{"name":"timestamp","id":1},{"name":"_yql_write_id","id":4294967042},{"name":"resource_type","id":2},{"name":"_yql_delete_flag","id":4294967043},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"4294967040,4294967041,1,4294967042,2,4294967043,3,4,5,6,7,8,9,10","t":"FetchOriginalData"},"w":28,"id":29},"6":{"p":{"i":"3","p":{"address":{"name":"resource_id","id":3}},"o":"3","t":"AssembleOriginalData"},"w":33,"id":6},"30":{"p":{"p":{"data":[{"name":"_yql_plan_step","id":4294967040},{"name":"_yql_tx_id","id":4294967041},{"name":"timestamp","id":1},{"name":"_yql_write_id","id":4294967042},{"name":"resource_type","id":2},{"name":"_yql_delete_flag","id":4294967043},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"0","t":"ReserveMemory"},"w":0,"id":30},"22":{"p":{"i":"4294967040","p":{"address":{"name":"_yql_plan_step","id":4294967040}},"o":"4294967040","t":"AssembleOriginalData"},"w":33,"id":2
2},"12":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":33,"id":12},"28":{"p":{"i":"4294967043","p":{"address":{"name":"_yql_delete_flag","id":4294967043}},"o":"4294967043","t":"AssembleOriginalData"},"w":33,"id":28},"26":{"p":{"i":"4294967042","p":{"address":{"name":"_yql_write_id","id":4294967042}},"o":"4294967042","t":"AssembleOriginalData"},"w":33,"id":26}}}; 2025-12-23T18:57:14.542972Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1766515874702:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build metadata;details=Snapshot too old: {1766515874702:max}. CS min read snapshot: {1766515934702:max}. now: 2025-12-23T18:57:14.542911Z; 2025-12-23T18:57:14.557951Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1766515874702:max} readable: {1766516234702:max} at tablet 9437184 2025-12-23T18:57:14.570124Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-12-23T18:57:14.570400Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766515874702:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-12-23T18:57:14.570523Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766515874702:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-12-23T18:57:14.571475Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766515874702:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"1,6","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,6","t":"Projection"},"w":18,"id":0}}}; 2025-12-23T18:57:14.573624Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1766515874702:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build metadata;details=Snapshot too old: {1766515874702:max}. CS min read snapshot: {1766515934702:max}. 
now: 2025-12-23T18:57:14.573557Z; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2025-12-23T18:56:43.582977Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:43.603944Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:43.604121Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:43.609515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:43.609710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:43.609884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:43.609962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:43.610036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:43.610120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:43.610188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:43.610266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:43.610330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:43.610390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:43.610467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:43.610536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:43.610599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:43.631170Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:43.631312Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:43.631347Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:43.631470Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:43.631591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:43.631639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:43.631669Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:43.631726Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:43.631764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:43.631806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:43.631831Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:43.631951Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:43.631995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:43.632021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:43.632043Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:43.632097Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:43.632127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:43.632152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:43.632167Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:43.632208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:43.632250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:43.632271Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:43.632314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:43.632342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:43.632366Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:43.632524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:43.632567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:43.632585Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:43.632668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:43.632697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:43.632713Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:43.632741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:43.632770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:43.632791Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:43.632835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:43.632862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:43.632893Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:43.632979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:43.633000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2025-12-23T18:57:13.745971Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1010; 2025-12-23T18:57:13.746034Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=81437; 2025-12-23T18:57:13.746086Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=81584; 2025-12-23T18:57:13.746158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2025-12-23T18:57:13.746577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=359; 2025-12-23T18:57:13.746634Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=82571; 2025-12-23T18:57:13.746819Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=122; 2025-12-23T18:57:13.746969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=89; 2025-12-23T18:57:13.747427Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=398; 2025-12-23T18:57:13.747863Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=375; 2025-12-23T18:57:13.765057Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=17108; 2025-12-23T18:57:13.780971Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=15778; 2025-12-23T18:57:13.781111Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=16; 2025-12-23T18:57:13.781179Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2025-12-23T18:57:13.781225Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-12-23T18:57:13.781312Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=46; 2025-12-23T18:57:13.781365Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-12-23T18:57:13.781460Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2025-12-23T18:57:13.781503Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-12-23T18:57:13.781578Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2025-12-23T18:57:13.781667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=52; 2025-12-23T18:57:13.781756Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=50; 2025-12-23T18:57:13.781803Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=126758; 2025-12-23T18:57:13.781951Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:13.782064Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:13.782126Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:13.782197Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:13.782247Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:13.782463Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:13.782530Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:13.782572Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2025-12-23T18:57:13.782617Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:13.782691Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515905999;tx_id=18446744073709551615;;current_snapshot_ts=1766516205064; 2025-12-23T18:57:13.782762Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:13.782813Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:13.782855Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:13.782959Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:13.783154Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.013000s; 2025-12-23T18:57:13.786323Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:13.787657Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:13.787727Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:13.787810Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:13.787862Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:13.787931Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515905999;tx_id=18446744073709551615;;current_snapshot_ts=1766516205064; 2025-12-23T18:57:13.788003Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:13.788060Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:13.788105Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:13.788183Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:57:13.788231Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:13.788980Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.048000s; 2025-12-23T18:57:13.789031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] >> Normalizers::CleanUnusedTablesNormalizer |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| 
[TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=166516774.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=166516774.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516774.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=166516774.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146516774.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=166516774.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=166516774.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515574.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146516774.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=146516774.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=146515574.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=146515574.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=146515574.000000s;Name=;Codec=}; 2025-12-23T18:56:14.984810Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:15.002997Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:15.003253Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:15.008998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:15.009245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:15.009415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:15.009500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:15.009589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:15.009665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:15.009731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:15.009801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:15.009865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:15.009940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:15.010005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:15.010074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:15.010138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:15.030211Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:15.030361Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:15.030438Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:15.030612Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:15.030777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:15.030856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:15.030914Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:15.030987Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:15.031039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:15.031071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:15.031092Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:15.031216Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:15.031261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:15.031290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:15.031309Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:15.031375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:15.031424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:15.031465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:15.031487Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:15.031536Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:15.031564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:15.031595Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:15.031641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:15.031690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:15.031718Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:15.031864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:15.031899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:15.031919Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:15.032020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:15.032070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:15.032097Z nod ... 
ime=8; 2025-12-23T18:57:20.235542Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=71; 2025-12-23T18:57:20.235569Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=5012; 2025-12-23T18:57:20.235601Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=5105; 2025-12-23T18:57:20.235646Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2025-12-23T18:57:20.235699Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=23; 2025-12-23T18:57:20.235724Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=5560; 2025-12-23T18:57:20.235820Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=57; 2025-12-23T18:57:20.235898Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=42; 2025-12-23T18:57:20.236011Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=75; 2025-12-23T18:57:20.236112Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=65; 2025-12-23T18:57:20.238575Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2413; 2025-12-23T18:57:20.241120Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2474; 2025-12-23T18:57:20.241191Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-12-23T18:57:20.241230Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-12-23T18:57:20.241258Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=3; 2025-12-23T18:57:20.241304Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=23; 2025-12-23T18:57:20.241330Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2025-12-23T18:57:20.241389Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=34; 2025-12-23T18:57:20.241417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-12-23T18:57:20.241480Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=39; 2025-12-23T18:57:20.241552Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=42; 2025-12-23T18:57:20.241788Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=204; 2025-12-23T18:57:20.241814Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=18356; 2025-12-23T18:57:20.241909Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:20.241988Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:20.242028Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:20.242077Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:20.254316Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=1; 2025-12-23T18:57:20.254476Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:20.254550Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 
2025-12-23T18:57:20.254599Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515924823;tx_id=18446744073709551615;;current_snapshot_ts=1766516176302; 2025-12-23T18:57:20.254629Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:20.254663Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:20.254694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:20.254778Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:20.254984Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.133000s; 2025-12-23T18:57:20.256313Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:20.256513Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:20.256548Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:20.256609Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 2025-12-23T18:57:20.256663Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515924823;tx_id=18446744073709551615;;current_snapshot_ts=1766516176302; 2025-12-23T18:57:20.256701Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:20.256733Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:20.256761Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:20.256821Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:20.257032Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.167000s; 2025-12-23T18:57:20.257073Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2009:3829];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] Test command err: 2025-12-23T18:57:13.376557Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:13.402261Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:13.402491Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:13.408466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:13.408719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:13.408935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:13.409093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:13.409223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:13.409312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:13.409421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:13.409539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:13.409666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:13.409793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:13.409902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:13.410025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:13.410146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:13.434695Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:13.434870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:13.434921Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:13.435068Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:13.435173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:13.435221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:13.435252Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:13.435321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:13.435383Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:13.435430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:13.435468Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:13.435592Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:13.435632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:13.435657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:13.435676Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:13.435748Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:13.435783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:13.435819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:13.435853Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:13.435903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:13.435933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:13.435958Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:13.436011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:13.436038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:13.436076Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:13.436226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:13.436262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:13.436280Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:13.436368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:13.436397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:13.436417Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:13.436445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:13.436468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:13.436492Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:13.436519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:13.436540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:13.436559Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:13.436632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:13.436654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2025-12-23T18:57:18.815717Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-12-23T18:57:18.816080Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-12-23T18:57:18.816371Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:18.816580Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:18.816794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:18.817108Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:18.817351Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:18.817590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:18.818066Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1059:2926] finished for tablet 9437184 2025-12-23T18:57:18.818726Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1058:2925];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.017}],"full":{"a":5870296,"name":"_full_task","f":5870296,"d_finished":0,"c":0,"l":5887310,"d":17014},"events":[{"name":"bootstrap","f":5870658,"d_finished":2182,"c":1,"l":5872840,"d":2182},{"a":5886236,"name":"ack","f":5884028,"d_finished":1967,"c":1,"l":5885995,"d":3041},{"a":5886217,"name":"processing","f":5873094,"d_finished":5322,"c":3,"l":5886000,"d":6415},{"name":"ProduceResults","f":5872223,"d_finished":3391,"c":6,"l":5886797,"d":3391},{"a":5886804,"name":"Finish","f":5886804,"d_finished":0,"c":0,"l":5887310,"d":506},{"name":"task_result","f":5873115,"d_finished":3278,"c":2,"l":5883706,"d":3278}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:18.818857Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1058:2925];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:18.819466Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1058:2925];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.017}],"full":{"a":5870296,"name":"_full_task","f":5870296,"d_finished":0,"c":0,"l":5888079,"d":17783},"events":[{"name":"bootstrap","f":5870658,"d_finished":2182,"c":1,"l":5872840,"d":2182},{"a":5886236,"name":"ack","f":5884028,"d_finished":1967,"c":1,"l":5885995,"d":3810},{"a":5886217,"name":"processing","f":5873094,"d_finished":5322,"c":3,"l":5886000,"d":7184},{"name":"ProduceResults","f":5872223,"d_finished":3391,"c":6,"l":5886797,"d":3391},{"a":5886804,"name":"Finish","f":5886804,"d_finished":0,"c":0,"l":5888079,"d":1275},{"name":"task_result","f":5873115,"d_finished":3278,"c":2,"l":5883706,"d":3278}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:18.819603Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:18.796315Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-12-23T18:57:18.819668Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:18.819984Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1059:2926];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |98.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest 
>> Normalizers::CleanUnusedTablesNormalizer [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:20.506472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:20.590634Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:20.596141Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:20.596470Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:20.596586Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:20.913344Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.999831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.999977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:21.034483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:21.115166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00495b/r3tmp/tmpxdifEt/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14298, node 1 2025-12-23T18:55:21.797993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.798983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.799053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.799086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.799674Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.865061Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12120 2025-12-23T18:55:22.395257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:25.576639Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:25.581717Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:25.584956Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:25.612777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:25.612877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.651120Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:25.652611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.806213Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.806947Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.807625Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.808455Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.808741Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.808906Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.809121Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.809276Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.809385Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.978394Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:26.000327Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:26.000441Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:26.013535Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:26.171419Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:26.201061Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:26.201133Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:26.225422Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:26.226597Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:26.226817Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:26.226899Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:26.226939Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:26.226995Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:26.227041Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:26.227077Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:26.227853Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:26.339089Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:26.339211Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:26.349597Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:55:26.350681Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:55:26.447674Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:55:26.447936Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:26.449666Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:55:26.449712Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:55:26.449771Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:26.452286Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:26.455720Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:26.460874Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:26.460966Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:26.470328Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:26.601071Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:27.025009Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:27.124122Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:27.124198Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2 ... or: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:56.416735Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4223:3755], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:56.416847Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:56.421271Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4240:3760], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:56.423708Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:56.461862Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4222:3754], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-12-23T18:55:56.597091Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4309:3802], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:56.621222Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4308:3801] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:56.835416Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4330:3815]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:56.835675Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:56.835743Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4332:3817] 2025-12-23T18:55:56.835794Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4332:3817] 2025-12-23T18:55:56.836040Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4333:3818] 2025-12-23T18:55:56.836151Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4333:3818], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:56.836206Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:55:56.836320Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4332:3817], server id = [2:4333:3818], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:56.836373Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:55:56.836434Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4330:3815], StatRequests.size() = 1 2025-12-23T18:55:57.546685Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:56:40.282655Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4185:2468], ActorId: [2:4195:3740], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:56:40.282852Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4185:2468], ActorId: [2:4195:3740], Start read next stream part 2025-12-23T18:56:40.283251Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd692szq90r41fq0q5j99vh4", SessionId: ydb://session/3?node_id=2&id=ZTc5ZWJjOTUtZDFiNzNlM2YtOGY2YWQ3ZTUtYjA0MjI0NzU=, Slow query, duration: 43.870888s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:56:40.284404Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4185:2468], ActorId: [2:4508:3917], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:56:40.286212Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 18446744073709551615] shutting down 2025-12-23T18:56:40.286470Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4185:2468], ActorId: [2:4508:3917], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table` 2025-12-23T18:56:40.286547Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4185:2468], ActorId: [2:4508:3917], Start read next stream part 2025-12-23T18:56:40.288762Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4185:2468], ActorId: [2:4195:3740], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:56:40.288814Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4185:2468], ActorId: [2:4195:3740], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTkzYzY0YWQtYjBjYjVlNTItNGUyNmMwYzYtMzI4NWJiYTM=, TxId: 2025-12-23T18:56:40.356852Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4533:3938]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:40.357028Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:40.357059Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4533:3938], StatRequests.size() = 1 2025-12-23T18:57:22.603723Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4185:2468], ActorId: [2:4508:3917], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:57:22.603886Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4185:2468], ActorId: [2:4508:3917], Start read next stream part 2025-12-23T18:57:22.604620Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd6944ty5kw7g621965zrf2k", SessionId: ydb://session/3?node_id=2&id=MzI4MTY1OS03YjExMzY3LWI1Y2NhMTc0LTk3MmZkMWQ4, Slow query, duration: 42.314736s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:57:22.605243Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4687:4021], ActorId: [2:4688:4022], Starting query actor #1 [2:4689:4023] 2025-12-23T18:57:22.605320Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4688:4022], ActorId: [2:4689:4023], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:57:22.610161Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33170, txId: 18446744073709551615] shutting down 2025-12-23T18:57:22.610979Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4688:4022], ActorId: [2:4689:4023], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YTZjYjdjZDgtZTIwMjhhMC04NjIxNjhiLTQ4N2U4MTI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:57:22.613384Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4185:2468], ActorId: [2:4508:3917], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:57:22.613446Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4185:2468], ActorId: [2:4508:3917], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDg4ZGM5NjQtY2I0NzY5YjAtMmYzMzkyOWItOTlhMWY0NDY=, TxId: 2025-12-23T18:57:22.689982Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4706:4037]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:57:22.690287Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:57:22.690328Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4706:4037], StatRequests.size() = 1 2025-12-23T18:57:22.837544Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4688:4022], ActorId: [2:4689:4023], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTZjYjdjZDgtZTIwMjhhMC04NjIxNjhiLTQ4N2U4MTI=, TxId: 2025-12-23T18:57:22.837628Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4688:4022], ActorId: [2:4689:4023], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTZjYjdjZDgtZTIwMjhhMC04NjIxNjhiLTQ4N2U4MTI=, TxId: 2025-12-23T18:57:22.837910Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4687:4021], ActorId: [2:4688:4022], Got response [2:4689:4023] SUCCESS 2025-12-23T18:57:22.838200Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:57:22.875934Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:57:22.876008Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3232:3459] >> Secret::Deactivated |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanUnusedTablesNormalizer [GOOD] Test command err: 2025-12-23T18:57:09.309024Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:09.329288Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:09.329453Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:09.334515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-12-23T18:57:09.334666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:09.334876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:09.335000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:09.335086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:09.335157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:09.335248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:09.335325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:09.335394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:09.335458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:09.335539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:09.335604Z node 
1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:09.335670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:09.335745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:09.355237Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:09.355475Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-12-23T18:57:09.355528Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-12-23T18:57:09.355760Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2025-12-23T18:57:09.355866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:09.355918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-12-23T18:57:09.355947Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-12-23T18:57:09.356060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:09.356119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:09.356145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:09.356180Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-12-23T18:57:09.356256Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:09.356297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:09.356324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:09.356342Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:09.356437Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:09.356469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:09.356516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:09.356540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:09.356636Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:09.356676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:09.356699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:09.356716Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:09.356764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:09.356807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:09.356837Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:09.356881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:09.356918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:09.356938Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:09.357064Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:09.357107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:09.357128Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:09.357200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:09.357223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:09.357254Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:09.357292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:09.357317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksM ... };in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2025-12-23T18:57:22.457657Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2025-12-23T18:57:22.457684Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:57:22.457711Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:57:22.458200Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:22.458352Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:22.458393Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:57:22.458494Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2025-12-23T18:57:22.458540Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2025-12-23T18:57:22.458670Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:326:2327];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-12-23T18:57:22.458818Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:22.458965Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:22.459131Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:22.459249Z 
node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:22.459335Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:22.459424Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:22.459729Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:328:2328] finished for tablet 9437184 2025-12-23T18:57:22.460190Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:326:2327];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.066},{"events":["l_task_result"],"t":0.791},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.793}],"full":{"a":12804290,"name":"_full_task","f":12804290,"d_finished":0,"c":0,"l":13598183,"d":793893},"events":[{"name":"bootstrap","f":12804767,"d_finished":1936,"c":1,"l":12806703,"d":1936},{"a":13597626,"name":"ack","f":12871045,"d_finished":322693,"c":421,"l":13597563,"d":323250},{"a":13597617,"name":"processing","f":12806951,"d_finished":668826,"c":843,"l":13597566,"d":669392},{"name":"ProduceResults","f":12806079,"d_finished":536562,"c":1266,"l":13597837,"d":536562},{"a":13597842,"name":"Finish","f":13597842,"d_finished":0,"c":0,"l":13598183,"d":341},{"name":"task_result","f":12806979,"d_finished":335873,"c":422,"l":13596118,"d":335873}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:22.460266Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:326:2327];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:22.460730Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:326:2327];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.066},{"events":["l_task_result"],"t":0.791},{"events":["l_ProduceResults","f_Finish"],"t":0.793},{"events":["l_ack","l_processing","l_Finish"],"t":0.794}],"full":{"a":12804290,"name":"_full_task","f":12804290,"d_finished":0,"c":0,"l":13598698,"d":794408},"events":[{"name":"bootstrap","f":12804767,"d_finished":1936,"c":1,"l":12806703,"d":1936},{"a":13597626,"name":"ack","f":12871045,"d_finished":322693,"c":421,"l":13597563,"d":323765},{"a":13597617,"name":"processing","f":12806951,"d_finished":668826,"c":843,"l":13597566,"d":669907},{"name":"ProduceResults","f":12806079,"d_finished":536562,"c":1266,"l":13597837,"d":536562},{"a":13597842,"name":"Finish","f":13597842,"d_finished":0,"c":0,"l":13598698,"d":856},{"name":"task_result","f":12806979,"d_finished":335873,"c":422,"l":13596118,"d":335873}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:22.460811Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:21.663621Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2025-12-23T18:57:22.460864Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:22.461028Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:328:2328];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.4%| [TM] {asan, 
default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: 2025-12-23T18:54:15.168377Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:15.184867Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:15.185095Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:15.185647Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:15.185848Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:15.186429Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:51:2076] ControllerId# 72057594037932033 2025-12-23T18:54:15.186456Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:15.186534Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:15.186626Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:15.193930Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:15.193985Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:15.195624Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:60:2080] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.195727Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:61:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.195799Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:62:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.195874Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:63:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.195959Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:64:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.196031Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:65:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.196113Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# 
[2:50:2075] Create Queue# [2:66:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.196132Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:15.196194Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:51:2076] 2025-12-23T18:54:15.196216Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:51:2076] 2025-12-23T18:54:15.196261Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:15.196294Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:15.196525Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:15.198457Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:15.198568Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:15.198806Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:15.198994Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-12-23T18:54:15.199734Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-12-23T18:54:15.199771Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:15.200295Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:75:2077] ControllerId# 72057594037932033 2025-12-23T18:54:15.200319Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:15.200368Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:15.200433Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:15.208709Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:15.208740Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:15.209773Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:83:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.209860Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:84:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.209931Z node 1 
:BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:85:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.209999Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:86:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.210068Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:87:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.210136Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:88:2087] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.210204Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:89:2088] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:15.210218Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:15.210250Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:75:2077] 2025-12-23T18:54:15.210283Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:75:2077] 2025-12-23T18:54:15.210310Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:15.210332Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:15.210887Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:15.211025Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:15.211350Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:51:2076] 2025-12-23T18:54:15.211405Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.211554Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:15.211675Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:15.219108Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2025-12-23T18:54:15.219170Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.219316Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.219488Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.219509Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-12-23T18:54:15.220847Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:15.221282Z node 2 :BS_NODE DEBUG: 
{NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 1 2025-12-23T18:54:15.221325Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-12-23T18:54:15.221405Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:15.221551Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2025-12-23T18:54:15.221572Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:15.221604Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:100:2093] 2025-12-23T18:54:15.221620Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:100:2093] 2025-12-23T18:54:15.221663Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [2:55:2064] 2025-12-23T18:54:15.221677Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [2:55:2064] 2025-12-23T18:54:15.221886Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:54:15.221966Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:54:15.221987Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvRe ... 2960] Result# TEvPutResult {Id# [72057594037927937:2:490:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-12-23T18:57:23.630289Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [409dbd935e372960] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:490:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:57:23.631673Z node 20 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:490:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-12-23T18:57:23.631879Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} commited cookie 1 for step 490 2025-12-23T18:57:23.634283Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} Tx{1486, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-12-23T18:57:23.634360Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} Tx{1486, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:57:23.634633Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} Tx{1486, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{992, redo 335b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-12-23T18:57:23.634697Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:491} Tx{1486, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:57:23.634876Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [20:1355:2259] 2025-12-23T18:57:23.634922Z node 20 :PIPE_CLIENT DEBUG: 
tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [20:1355:2259] 2025-12-23T18:57:23.635002Z node 20 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [20:1291:2223] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.048) *****----------------------------------------------------------------------------------------------- (0.054) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.062) *****----------------------------------------------------------------------------------------------- (0.048) *******--------------------------------------------------------------------------------------------- (0.066) *****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.058) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.058) 2025-12-23T18:57:23.737910Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1487, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-12-23T18:57:23.737991Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1487, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:57:23.738104Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:272: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136655307867040}: tablet 72075186224037935 wasn't changed 2025-12-23T18:57:23.738144Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136655307867040}: tablet 72075186224037935 skipped channel 0 2025-12-23T18:57:23.738231Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136655307867040}: tablet 72075186224037935 skipped channel 1 2025-12-23T18:57:23.738260Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136655307867040}: tablet 72075186224037935 skipped channel 2 2025-12-23T18:57:23.738328Z node 20 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{136655307867040}(72075186224037935)::Execute - TryToBoot was not successfull 2025-12-23T18:57:23.738397Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1487, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{993, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-12-23T18:57:23.738441Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1487, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:57:23.758483Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [728fceb6c2541081] bootstrap ActorId# [20:11722:4481] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:491:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 
2025-12-23T18:57:23.758621Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [728fceb6c2541081] Id# [72057594037927937:2:491:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:57:23.758661Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [728fceb6c2541081] restore Id# [72057594037927937:2:491:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:57:23.758710Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [728fceb6c2541081] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:491:0:0:246:1] Marker# BPG33 2025-12-23T18:57:23.758767Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [728fceb6c2541081] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:491:0:0:246:1] Marker# BPG32 2025-12-23T18:57:23.758897Z node 20 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [20:467:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:491:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:57:23.762822Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [728fceb6c2541081] received {EvVPutResult Status# OK ID# [72057594037927937:2:491:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 508 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 509 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-12-23T18:57:23.762989Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [728fceb6c2541081] Result# TEvPutResult {Id# [72057594037927937:2:491:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-12-23T18:57:23.763053Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [728fceb6c2541081] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:491:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:57:23.764101Z node 20 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:491:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-12-23T18:57:23.764297Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} commited cookie 1 for step 491 2025-12-23T18:57:23.767471Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1488, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-12-23T18:57:23.767552Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1488, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:57:23.767836Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1488, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{994, redo 335b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-12-23T18:57:23.767899Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:492} Tx{1488, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:57:23.768051Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [20:1355:2259] 2025-12-23T18:57:23.768094Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [20:1355:2259] 2025-12-23T18:57:23.768155Z node 
20 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [20:1291:2223] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.048) *****----------------------------------------------------------------------------------------------- (0.054) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.062) *****----------------------------------------------------------------------------------------------- (0.048) *******--------------------------------------------------------------------------------------------- (0.066) *****----------------------------------------------------------------------------------------------- (0.05) ******---------------------------------------------------------------------------------------------- (0.058) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.058) 2025-12-23T18:57:23.870278Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1489, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-12-23T18:57:23.870371Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1489, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:57:23.870510Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:272: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136655307844640}: tablet 72075186224037981 wasn't changed 2025-12-23T18:57:23.870556Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136655307844640}: tablet 72075186224037981 skipped channel 0 2025-12-23T18:57:23.870650Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136655307844640}: tablet 72075186224037981 skipped channel 1 2025-12-23T18:57:23.870689Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136655307844640}: tablet 72075186224037981 skipped channel 2 2025-12-23T18:57:23.870797Z node 20 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{136655307844640}(72075186224037981)::Execute - TryToBoot was not successfull 2025-12-23T18:57:23.870883Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1489, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{995, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-12-23T18:57:23.870952Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:493} Tx{1489, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] 
Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:21.472726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:21.562732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:21.571310Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:21.571767Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:21.571889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:21.895034Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:21.973126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:21.973260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:22.031355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:22.144588Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00494b/r3tmp/tmpMl8Jkp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6864, node 1 2025-12-23T18:55:22.786432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:22.787644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:22.787704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:22.787737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:22.787969Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:22.855005Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18185 2025-12-23T18:55:23.393622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:26.292846Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:26.298120Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:26.300751Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:26.324060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:26.324151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:26.361155Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:26.362802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:26.600912Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.601497Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.602118Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.602601Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.602820Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.602951Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.603091Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.603212Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.603306Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:26.716991Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:26.728440Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:26.728548Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:26.741286Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:26.948977Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:26.992514Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:26.992610Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:27.016792Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:27.018220Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:27.018423Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:27.018473Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:27.018517Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:27.018567Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:27.018622Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:27.018667Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:27.019076Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:27.062307Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:27.062389Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:27.072919Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:27.080809Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:27.081712Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:27.086136Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:27.112848Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:27.112897Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:55:27.112962Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:27.133333Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:27.143357Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:27.157402Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:27.157536Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:27.169870Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:27.276495Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:27.585633Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:27.689114Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:27.689188Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:26 ... blechecking } 2025-12-23T18:55:55.364183Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4266:3783], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:55.388129Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4265:3782] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:55.558045Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4287:3796]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:55.558233Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:55.558288Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4289:3798] 2025-12-23T18:55:55.558342Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4289:3798] 2025-12-23T18:55:55.558820Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4290:3799] 2025-12-23T18:55:55.558914Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4290:3799], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:55.558966Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:55:55.559052Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4289:3798], server id = [2:4290:3799], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:55.559114Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:55:55.559171Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4287:3796], StatRequests.size() = 1 2025-12-23T18:55:55.559329Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 ... blocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR cookie 0 ... 
waiting for TEvKqpScan (done) 2025-12-23T18:56:41.898965Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2831:3025];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:984;event=tablet_die; 2025-12-23T18:56:41.912818Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [2:4376:3879]. Got EvDeliveryProblem, TabletId: 72075186224037899, NotDelivered: 0, Starting 2025-12-23T18:56:41.912899Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [2:4376:3879]. TKqpScanFetcherActor: broken pipe with tablet 72075186224037899, restarting scan from last received key , attempt #1 (total 1) schedule after 0.000000s 2025-12-23T18:56:42.026609Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:4448:3884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=20; ... blocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR cookie 0 2025-12-23T18:56:42.233195Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:56:42.233273Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 ... unblocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to ... unblocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR 2025-12-23T18:56:42.833929Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4144:2473], ActorId: [2:4154:3722], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:56:42.834149Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4144:2473], ActorId: [2:4154:3722], Start read next stream part 2025-12-23T18:56:42.834832Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd692rrp6fra7g87n9wqqf2q", SessionId: ydb://session/3?node_id=2&id=Mjg1NGRhN2MtYWI2N2U1ZjItOWVmODVhN2UtZDYyZTRmMGU=, Slow query, duration: 47.672058s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:56:42.835306Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4144:2473], ActorId: [2:4581:3997], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:56:42.838386Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4144:2473], ActorId: [2:4581:3997], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table` 2025-12-23T18:56:42.838511Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4144:2473], ActorId: [2:4581:3997], Start read next stream part 2025-12-23T18:56:42.838622Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 18446744073709551615] shutting down 2025-12-23T18:56:42.841714Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4144:2473], ActorId: [2:4154:3722], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:56:42.841761Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4144:2473], ActorId: [2:4154:3722], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWRkNGQ1ZjEtNDBkOWQ0ODctOWFjODQwMDItMTE4OGVjNDU=, TxId: 2025-12-23T18:56:42.941239Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4606:4018]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:42.941581Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:42.941636Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4606:4018], StatRequests.size() = 1 2025-12-23T18:57:25.498488Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4144:2473], ActorId: [2:4581:3997], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:57:25.498603Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4144:2473], ActorId: [2:4581:3997], Start read next stream part 2025-12-23T18:57:25.499189Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd6947ape86wtnj6stgq8gbx", SessionId: ydb://session/3?node_id=2&id=YzU0ZTEzZDYtZTY5OGY1MDQtNjBjZDIxODMtY2ZmMzFmNGI=, Slow query, duration: 42.656283s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:57:25.500903Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4763:4103], ActorId: [2:4764:4104], Starting query actor #1 [2:4765:4105] 2025-12-23T18:57:25.500962Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4764:4104], ActorId: [2:4765:4105], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:57:25.503016Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33150, txId: 18446744073709551615] shutting down 2025-12-23T18:57:25.503556Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4764:4104], ActorId: [2:4765:4105], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NjE0N2ZhZGYtNmU5NWE1ZjUtMWNlOThlYjktZjk0MjVlZWM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:57:25.504413Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4144:2473], ActorId: [2:4581:3997], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:57:25.504455Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4144:2473], ActorId: [2:4581:3997], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTg0YzEwMmUtZDUxMGZlNzEtNTAzZWQ0MGYtNTMzMzI2NjY=, TxId: 2025-12-23T18:57:25.565730Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4782:4119]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:57:25.566009Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:57:25.566055Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4782:4119], StatRequests.size() = 1 2025-12-23T18:57:25.756245Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4764:4104], ActorId: [2:4765:4105], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjE0N2ZhZGYtNmU5NWE1ZjUtMWNlOThlYjktZjk0MjVlZWM=, TxId: 2025-12-23T18:57:25.756337Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4764:4104], ActorId: [2:4765:4105], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjE0N2ZhZGYtNmU5NWE1ZjUtMWNlOThlYjktZjk0MjVlZWM=, TxId: 2025-12-23T18:57:25.756632Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4763:4103], ActorId: [2:4764:4104], Got response [2:4765:4105] SUCCESS 2025-12-23T18:57:25.756903Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:57:25.771154Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:57:25.771231Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3190:3444] |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2025-12-23T18:56:28.155994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:28.178647Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:28.178923Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:28.185294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:28.185566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:28.185760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:28.185841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:28.185925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:28.186002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:28.186072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:28.186163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:28.186231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:28.186330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:28.186396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:28.186467Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:28.186585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:28.209673Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:28.209826Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:28.209879Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:28.210034Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:28.210174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:28.210254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:28.210289Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:28.210387Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:28.210446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:28.210481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:28.210504Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:28.210636Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:28.210681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:28.210706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:28.210729Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:28.210804Z 
node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:28.210837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:28.210878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:28.210931Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:28.210979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:28.211023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:28.211045Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:28.211089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:28.211119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:28.211144Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:28.211300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:28.211341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:28.211361Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:28.211456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:28.211487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:28.211518Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:28.211560Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:28.211598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:28.211624Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:28.211671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:28.211697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:28.211716Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:28.211800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:28.211821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2025-12-23T18:57:24.357559Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=684; 2025-12-23T18:57:24.357602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=258324; 2025-12-23T18:57:24.357650Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=258426; 2025-12-23T18:57:24.357707Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2025-12-23T18:57:24.357994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=247; 2025-12-23T18:57:24.358028Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=259132; 2025-12-23T18:57:24.358157Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=82; 2025-12-23T18:57:24.358253Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=58; 2025-12-23T18:57:24.358516Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=226; 2025-12-23T18:57:24.358770Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=215; 2025-12-23T18:57:24.368502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=9668; 2025-12-23T18:57:24.378907Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10289; 2025-12-23T18:57:24.379030Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2025-12-23T18:57:24.379083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-12-23T18:57:24.379115Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-12-23T18:57:24.379182Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2025-12-23T18:57:24.379218Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:57:24.379295Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=46; 2025-12-23T18:57:24.379326Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-12-23T18:57:24.379377Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=26; 2025-12-23T18:57:24.379442Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=39; 2025-12-23T18:57:24.379502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=35; 2025-12-23T18:57:24.379533Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=286432; 
2025-12-23T18:57:24.379651Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:24.379745Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:24.379794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:24.379850Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:24.379884Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:24.380121Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:24.380174Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:24.380206Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:24.380242Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:24.380300Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515892291;tx_id=18446744073709551615;;current_snapshot_ts=1766516189692; 2025-12-23T18:57:24.380339Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:24.380380Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:24.380421Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:24.380518Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:24.380695Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.078000s; 2025-12-23T18:57:24.383488Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:24.383815Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:24.383864Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:24.383930Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:24.383979Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:24.384046Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515892291;tx_id=18446744073709551615;;current_snapshot_ts=1766516189692; 2025-12-23T18:57:24.384083Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:24.384124Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:24.384157Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:24.384223Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2025-12-23T18:57:24.384270Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:24.384910Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.035000s; 2025-12-23T18:57:24.384954Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |98.4%| [TM] 
{BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:19.574898Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:19.674891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:19.680329Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:19.680634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:19.680740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:20.029335Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.113085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.113249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:20.147554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:20.172612Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004962/r3tmp/tmplv2snS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11103, node 1 2025-12-23T18:55:21.195172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.195916Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.195963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.195990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.196561Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.261595Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18006 2025-12-23T18:55:21.810391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:24.345484Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:24.346794Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:24.353222Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:24.378037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.378131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.406122Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:24.408072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.593004Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.593119Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.621102Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.692950Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.694093Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.695188Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.695809Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.696369Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.696627Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.696973Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.697293Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.697454Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.922895Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.003589Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:25.003715Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:25.051593Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:25.052978Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:25.053282Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:25.053357Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:55:25.053418Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:25.053482Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: 
table count# 0 2025-12-23T18:55:25.053544Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:25.053601Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:25.054133Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:25.056401Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1921:2458] 2025-12-23T18:55:25.062142Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:25.070382Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2132:2589] Owner: [2:2131:2588]. Describe result: PathErrorUnknown 2025-12-23T18:55:25.070452Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2132:2589] Owner: [2:2131:2588]. Creating table 2025-12-23T18:55:25.070545Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2132:2589] Owner: [2:2131:2588]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:25.083680Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.083862Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2156:2600], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.117545Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2223:2648], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.123939Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2231:2654] 2025-12-23T18:55:25.125925Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2231:2654], schemeshard id = 72075186224037897 2025-12-23T18:55:25.147103Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2145:2596] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-12-23T18:55:25.151884Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2132:2589] Owner: [2:2131:2588]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-12-23T18:55:25.222110Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:25.278991Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:25.561265Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2132:2589] Owner: [2:2131:2588]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:25.563580Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2550:2872], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.567386Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:25.570918Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2132:2589] Owner: [2:2131:2588]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186 ... pp:1273: SyncNode(), pipe client id = [2:4279:3804] 2025-12-23T18:55:55.261634Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4280:3805] 2025-12-23T18:55:55.261687Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4280:3805], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:55.261741Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:55:55.261828Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4279:3804], server id = [2:4280:3805], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:55.261878Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:55:55.261947Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4277:3802], StatRequests.size() = 1 2025-12-23T18:55:55.262133Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:56:42.355739Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4132:2466], ActorId: [2:4145:3727], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:56:42.355936Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4132:2466], ActorId: [2:4145:3727], Start read next stream part 2025-12-23T18:56:42.356473Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd692rcsfx3bm92wcgm8yv3j", SessionId: ydb://session/3?node_id=2&id=ZjEyNTY3OWYtNjcxOTNiYjItYTdjNTRhZWItNzQ2NWJkNGM=, Slow query, duration: 47.576035s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:56:42.357015Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4132:2466], ActorId: [2:4455:3904], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:56:42.359762Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 18446744073709551615] shutting down 2025-12-23T18:56:42.359885Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4132:2466], ActorId: [2:4455:3904], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table` 2025-12-23T18:56:42.359977Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4132:2466], ActorId: [2:4455:3904], Start read next stream part 2025-12-23T18:56:42.362106Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4132:2466], ActorId: [2:4145:3727], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:56:42.362174Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4132:2466], ActorId: [2:4145:3727], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTc1N2Q0YzgtOWI2ZGE5ZWEtODA2ZTQxN2QtNDk0ZDk4NDY=, TxId: 2025-12-23T18:56:42.435197Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4480:3925]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:42.435409Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:42.435452Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4480:3925], StatRequests.size() = 1 2025-12-23T18:57:26.775143Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4132:2466], ActorId: [2:4455:3904], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:57:26.775297Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4132:2466], ActorId: [2:4455:3904], Start read next stream part 2025-12-23T18:57:26.775949Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd6946vq6t6pnhb2ph30042b", SessionId: ydb://session/3?node_id=2&id=ZjRmZGUxMmEtMTEyZDgzNzEtN2Q5YmNjMzctNTA4YjRmMWQ=, Slow query, duration: 44.412775s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:57:26.778042Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4634:4008], ActorId: [2:4635:4009], Starting query actor #1 [2:4636:4010] 2025-12-23T18:57:26.778110Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4635:4009], ActorId: [2:4636:4010], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:57:26.780553Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33170, txId: 18446744073709551615] shutting down 2025-12-23T18:57:26.781001Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4635:4009], ActorId: [2:4636:4010], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NGIyZmFjYTYtY2Q2MzUxMjYtNWUxZTA3My0yMDA3NmRhNA==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:57:26.782733Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4132:2466], ActorId: [2:4455:3904], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:57:26.782795Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4132:2466], ActorId: [2:4455:3904], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODI2Y2M1NTYtOTI2MDQyZGEtM2Y5MjVjZTgtZGE1OTZhNjM=, TxId: 2025-12-23T18:57:26.844341Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4653:4024]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:57:26.844609Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:57:26.844648Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4653:4024], StatRequests.size() = 1 2025-12-23T18:57:27.002045Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4635:4009], ActorId: [2:4636:4010], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGIyZmFjYTYtY2Q2MzUxMjYtNWUxZTA3My0yMDA3NmRhNA==, TxId: 2025-12-23T18:57:27.002164Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4635:4009], ActorId: [2:4636:4010], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGIyZmFjYTYtY2Q2MzUxMjYtNWUxZTA3My0yMDA3NmRhNA==, TxId: ... blocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from SQL_QUERY to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... cookie 0 ... waiting for TEvSaveStatisticsQueryResponse (done) 2025-12-23T18:57:27.004665Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4670:4032] 2025-12-23T18:57:27.005501Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_IN_PROGRESS 2025-12-23T18:57:27.007139Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4672:4033]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037897
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037897
FastCounter: 2
FastCheckInFlight: 1
FastSchemeShards: 0
FastNodes: 0
CurPropagationSeq: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 0
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037897, LocalPathId: 33], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037897
    [OwnerId: 72075186224037897, LocalPathId: 34], [OwnerId: 72075186224037897, LocalPathId: 33]
ForceTraversals: 1
    1970-01-01T00:00:04Z
NavigatePathId: 
ForceTraversalOperationId: operationId
  CreatedAt: 1970-01-01T00:00:04.040012Z
, ReplyToActorId: [1:1260:3033]
, Types: 1
, Tables size: 1
, Tables: 
    Table[0] PathId: [OwnerId: 72075186224037897, LocalPathId: 34]
        Status: AnalyzeStarted
        AnalyzedShards size: 0
        ColumnTags: 1,2
TraversalStartTime: 2025-12-23T18:55:54Z
TraversalDatabase: 
TraversalPathId: [OwnerId: 72075186224037897, LocalPathId: 34]
TraversalIsColumnTable: 0
TraversalStartKey: 
GlobalTraversalRound: 1
TraversalRound: 0
HiveRequestRound: 0
... unblocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... 2025-12-23T18:57:27.008816Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4634:4008], ActorId: [2:4635:4009], Got response [2:4636:4010] SUCCESS 2025-12-23T18:57:27.009078Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:57:27.036561Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:57:27.036655Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:1260:3033] 2025-12-23T18:57:27.038420Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4676:4036] 2025-12-23T18:57:27.039374Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_NO_OPERATION |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |98.4%| [TA] $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.4%| [TA] {RESULT} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:24.972770Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:25.061086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:25.066702Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:25.067009Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:25.067088Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:25.455789Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.542948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:25.543084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.601037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.693274Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004934/r3tmp/tmp2Sm2fd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28884, node 1 2025-12-23T18:55:26.308902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:26.309607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:26.309635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:26.309657Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:26.309817Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:26.370244Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:23865 2025-12-23T18:55:26.893687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:29.323228Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:29.328821Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:29.331673Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:29.357668Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:29.357769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:29.396938Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:29.398599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:29.629579Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.630144Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.630791Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.631381Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.631667Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.631753Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.631876Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.631962Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.632059Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.698862Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:29.754208Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:29.754331Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:29.767486Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:29.933765Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:29.976863Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:29.976975Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:30.005435Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:30.006575Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:30.006811Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:30.006876Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:30.006919Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:30.006964Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:30.007013Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:30.007058Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:30.007378Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:30.047464Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:30.047609Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:30.059442Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:30.067632Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:30.068587Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:30.073137Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:30.100347Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:30.100399Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:55:30.100475Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:30.121295Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:30.130223Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:30.137599Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:30.137702Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:30.147574Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:30.247889Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:30.641336Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:30.755742Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:30.755850Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2 ... l default not found or you don't have access permissions } 2025-12-23T18:55:57.828845Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4183:3738], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:57.828975Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:55:57.835142Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4200:3743], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:57.838232Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:55:57.882620Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4182:3737], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-12-23T18:55:58.051156Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4267:3784], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:58.111641Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4266:3783] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:55:58.329783Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4288:3797]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:58.329987Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:58.330045Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4290:3799] 2025-12-23T18:55:58.330103Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4290:3799] 2025-12-23T18:55:58.330327Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4291:3800] 2025-12-23T18:55:58.330500Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4291:3800], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:58.330557Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:55:58.330645Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4290:3799], server id = [2:4291:3800], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:58.330706Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:55:58.330802Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4288:3797], StatRequests.size() = 1 2025-12-23T18:55:59.012386Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:56:45.675272Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4145:2473], ActorId: [2:4155:3723], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:56:45.675527Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4145:2473], ActorId: [2:4155:3723], Start read next stream part 2025-12-23T18:56:45.675739Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd692vbr8ce4j3hpqrxbhxps", SessionId: ydb://session/3?node_id=2&id=YmQwZGI0MDItZWVjMzljMWUtNDEzNjEwMDctZGVkZWZlMzI=, Slow query, duration: 47.852833s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:56:45.677062Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 18446744073709551615] shutting down 2025-12-23T18:56:45.677273Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4145:2473], 
ActorId: [2:4466:3899], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:56:45.679878Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4145:2473], ActorId: [2:4155:3723], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:56:45.679926Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4145:2473], ActorId: [2:4155:3723], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZThkMjJhYWItNGNkN2FlODgtYTZhMWIzMGQtNGQyN2UxZmM=, TxId: 2025-12-23T18:56:45.680404Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4145:2473], ActorId: [2:4466:3899], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table` 2025-12-23T18:56:45.680482Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4145:2473], ActorId: [2:4466:3899], Start read next stream part 2025-12-23T18:56:45.749976Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4491:3920]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:45.750145Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:45.750189Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4491:3920], StatRequests.size() = 1 2025-12-23T18:57:27.745718Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4145:2473], ActorId: [2:4466:3899], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:57:27.745842Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4145:2473], ActorId: [2:4466:3899], Start read next stream part 2025-12-23T18:57:27.746303Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd694a3g0n4r7d3dqg382bh6", SessionId: ydb://session/3?node_id=2&id=MzNiNzZjZDMtYmIwNTVjMDAtYmU5YmFjODUtM2UzODFlNWE=, Slow query, duration: 42.062756s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:57:27.748453Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33190, txId: 18446744073709551615] shutting down 2025-12-23T18:57:27.748668Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4645:4003], ActorId: [2:4646:4004], Starting query actor #1 [2:4647:4005] 2025-12-23T18:57:27.748736Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4646:4004], ActorId: [2:4647:4005], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:57:27.751648Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4145:2473], ActorId: [2:4466:3899], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:57:27.751704Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4145:2473], ActorId: [2:4466:3899], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2YwM2MxMjItZjI3NjU3YTYtNTNkZDQxNDUtNDc3OGY0YmU=, TxId: 2025-12-23T18:57:27.752355Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4646:4004], ActorId: [2:4647:4005], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODI5NDNhODAtMjI0MzU2NGYtNzVkNzIzMmEtY2Q4YTE4ZTc=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:57:27.812997Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4664:4019]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:57:27.813235Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:57:27.813271Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4664:4019], StatRequests.size() = 1 2025-12-23T18:57:27.944132Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4646:4004], ActorId: [2:4647:4005], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODI5NDNhODAtMjI0MzU2NGYtNzVkNzIzMmEtY2Q4YTE4ZTc=, TxId: 2025-12-23T18:57:27.944217Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4646:4004], ActorId: [2:4647:4005], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODI5NDNhODAtMjI0MzU2NGYtNzVkNzIzMmEtY2Q4YTE4ZTc=, TxId: 2025-12-23T18:57:27.944522Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4645:4003], ActorId: [2:4646:4004], Got response [2:4647:4005] SUCCESS 2025-12-23T18:57:27.944770Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:57:27.982457Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:57:27.982526Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3192:3445] |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> KqpWorkload::KV >> KqpQueryPerf::UpdateOn-QueryService-UseSink >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink >> KqpQueryPerf::UpdateOn-QueryService+UseSink >> KqpQueryPerf::Update+QueryService+UseSink >> KqpQueryPerf::Insert+QueryService-UseSink >> KqpQueryPerf::IndexReplace-QueryService-UseSink >> KqpWorkload::STOCK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2025-12-23T18:57:08.648551Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:08.682569Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:08.682868Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:08.691014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:08.691286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:08.691590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:08.691729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:08.691867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:08.691996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:08.692122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:08.692236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:08.692344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:08.692465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:08.692606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:08.692709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:08.692832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:08.724284Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:08.724505Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:08.724581Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:08.724763Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:08.724931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:08.725008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:08.725056Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:08.725158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:08.725219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:08.725263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:08.725305Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:08.725524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:08.725603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:08.725668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:08.725702Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:08.725821Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:08.725884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:08.725940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:08.725977Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:08.726042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:08.726089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:08.726139Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:08.726210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:08.726259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:08.726299Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:08.726527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:08.726593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:08.726635Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:08.726839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:08.726891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:08.726923Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:08.726990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:08.727037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:08.727070Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:08.727123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:08.727161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:08.727191Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:08.727358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:08.727408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
UG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:28.354954Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:10;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:28.354999Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:57:28.355130Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=10; 2025-12-23T18:57:28.355187Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=10;batch_columns=timestamp; 2025-12-23T18:57:28.355474Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:3589:5595];bytes=80;rows=10;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-12-23T18:57:28.355616Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:28.355756Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:28.355877Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:28.356352Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:57:28.356489Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:28.356611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:28.356838Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:3596:5602] finished for tablet 9437184 2025-12-23T18:57:28.357328Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:3589:5595];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.07},{"events":["f_ack"],"t":0.072},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.074}],"full":{"a":20093692,"name":"_full_task","f":20093692,"d_finished":0,"c":0,"l":20167996,"d":74304},"events":[{"name":"bootstrap","f":20094293,"d_finished":1772,"c":1,"l":20096065,"d":1772},{"a":20167433,"name":"ack","f":20165812,"d_finished":1196,"c":1,"l":20167008,"d":1759},{"a":20167416,"name":"processing","f":20097141,"d_finished":15720,"c":14,"l":20167011,"d":16300},{"name":"ProduceResults","f":20095482,"d_finished":4297,"c":17,"l":20167741,"d":4297},{"a":20167746,"name":"Finish","f":20167746,"d_finished":0,"c":0,"l":20167996,"d":250},{"name":"task_result","f":20097164,"d_finished":14258,"c":13,"l":20164667,"d":14258}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:28.357406Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:3589:5595];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:57:28.357885Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:3589:5595];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.07},{"events":["f_ack"],"t":0.072},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.074}],"full":{"a":20093692,"name":"_full_task","f":20093692,"d_finished":0,"c":0,"l":20168553,"d":74861},"events":[{"name":"bootstrap","f":20094293,"d_finished":1772,"c":1,"l":20096065,"d":1772},{"a":20167433,"name":"ack","f":20165812,"d_finished":1196,"c":1,"l":20167008,"d":2316},{"a":20167416,"name":"processing","f":20097141,"d_finished":15720,"c":14,"l":20167011,"d":16857},{"name":"ProduceResults","f":20095482,"d_finished":4297,"c":17,"l":20167741,"d":4297},{"a":20167746,"name":"Finish","f":20167746,"d_finished":0,"c":0,"l":20168553,"d":807},{"name":"task_result","f":20097164,"d_finished":14258,"c":13,"l":20164667,"d":14258}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:57:28.357962Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:57:28.277300Z;index_granules=0;index_portions=12;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=16464;inserted_portions_bytes=14016;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=30480;selected_rows=0; 2025-12-23T18:57:28.358006Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:57:28.358134Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3596:5602];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=3a1d4af0-e03111f0-a2c8d817-21b58790; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink >> 
KqpQueryPerf::Upsert-QueryService-UseSink >> KqpQueryPerf::Replace-QueryService+UseSink >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService >> KqpQueryPerf::IndexUpsert-QueryService-UseSink >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:20.256221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:20.371311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:20.375863Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:20.376102Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:20.376175Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:20.717873Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.797480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.797616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:20.831918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:20.921073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004957/r3tmp/tmp7Lb8AW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8500, node 1 2025-12-23T18:55:21.583912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.584905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.584966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.585001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.585576Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.651021Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:14512 2025-12-23T18:55:22.159118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:24.663749Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:24.669373Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:24.673234Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:24.698326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.698412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.736935Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:24.738684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.913172Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.913692Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.914108Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.914674Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.914905Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.915018Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.915125Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.915264Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.915367Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.049723Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:25.105138Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:25.105293Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.119043Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.236033Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.266702Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:25.266825Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:25.292118Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:25.293321Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:25.293579Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:25.293633Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:25.293684Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:25.293749Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:25.293835Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:25.293900Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:25.294988Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:25.410937Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.411048Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.425516Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:55:25.426935Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:55:25.540380Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:55:25.540777Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:25.542984Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:55:25.543035Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:55:25.543126Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:25.546736Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.550815Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:25.558287Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:25.558415Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:25.571578Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:25.719100Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:26.089958Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:26.198191Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:26.198306Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:26 ... rity: 1 } 2025-12-23T18:55:56.442399Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4331:3815]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:56.442615Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:55:56.442682Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4333:3817] 2025-12-23T18:55:56.442774Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4333:3817] 2025-12-23T18:55:56.443064Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4334:3818] 2025-12-23T18:55:56.443157Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4334:3818], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:55:56.443205Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:55:56.443291Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4333:3817], server id = [2:4334:3818], tablet id = 72075186224037894, status = OK 2025-12-23T18:55:56.443359Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:55:56.443420Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4331:3815], StatRequests.size() = 1 2025-12-23T18:55:56.443590Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:56:42.582445Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4186:2468], ActorId: [2:4196:3740], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:56:42.582620Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4186:2468], ActorId: [2:4196:3740], Start read next stream part 2025-12-23T18:56:42.582791Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd692skb9d0hb7fxtzhmgjwj", SessionId: ydb://session/3?node_id=2&id=MTgyMmFmNWItYjZhN2ZjMWItMTY2OTg3NjQtM2Y3MzI3ZTc=, Slow query, duration: 46.566923s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:56:42.583927Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 18446744073709551615] shutting down 2025-12-23T18:56:42.584021Z node 2 :STATISTICS DEBUG: 
query_actor.cpp:135: [TQueryBase] OwnerId: [2:4186:2468], ActorId: [2:4507:3916], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:56:42.586108Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4186:2468], ActorId: [2:4507:3916], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table` 2025-12-23T18:56:42.586189Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4186:2468], ActorId: [2:4507:3916], Start read next stream part 2025-12-23T18:56:42.586721Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4186:2468], ActorId: [2:4196:3740], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:56:42.586812Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4186:2468], ActorId: [2:4196:3740], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmFiNWY4MTktYmRjYWM2Mi1lNWFjNDU4Yy0yYjE2ODZjYg==, TxId: 2025-12-23T18:56:42.663103Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4532:3937]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:42.663289Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:42.663321Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4532:3937], StatRequests.size() = 1 2025-12-23T18:57:25.552728Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4186:2468], ActorId: [2:4507:3916], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:57:25.552845Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4186:2468], ActorId: [2:4507:3916], Start read next stream part 2025-12-23T18:57:25.553377Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd69472ta62a94s601pqp46e", SessionId: ydb://session/3?node_id=2&id=ZGFhYWQzY2EtMzZhYWY3ZjAtOWVmZjYwYTItYThkYmM5YWE=, Slow query, duration: 42.964272s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:57:25.553871Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4686:4020], ActorId: [2:4687:4021], Starting query actor #1 [2:4688:4022] 2025-12-23T18:57:25.553924Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4687:4021], ActorId: [2:4688:4022], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:57:25.557865Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33200, txId: 18446744073709551615] shutting down 2025-12-23T18:57:25.558120Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4687:4021], ActorId: [2:4688:4022], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MjE5ZTM2MmMtNDhiNTQ4OWQtOGMzOTRiZDItZjRjZjRhOTk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:57:25.618057Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4186:2468], ActorId: [2:4507:3916], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:57:25.618172Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4186:2468], ActorId: [2:4507:3916], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTY4NTE3ODYtZmJmNWI3NmYtMjhhZjQ0Yi0zNmFmZmEyMg==, TxId: 2025-12-23T18:57:25.621859Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4705:4036]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:57:25.622070Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:57:25.622100Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4705:4036], StatRequests.size() = 1 2025-12-23T18:57:25.777125Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4687:4021], ActorId: [2:4688:4022], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjE5ZTM2MmMtNDhiNTQ4OWQtOGMzOTRiZDItZjRjZjRhOTk=, TxId: 2025-12-23T18:57:25.777251Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4687:4021], ActorId: [2:4688:4022], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjE5ZTM2MmMtNDhiNTQ4OWQtOGMzOTRiZDItZjRjZjRhOTk=, TxId: ... blocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from SQL_QUERY to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... cookie 0 ... 
waiting for TEvSaveStatisticsQueryResponse (done) 2025-12-23T18:57:25.838965Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:57:25.839071Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:57:25.871601Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4333:3817], schemeshard count = 1 2025-12-23T18:57:26.160966Z node 2 :STATISTICS ERROR: tx_analyze_deadline.cpp:26: [72075186224037894] Delete long analyze operation, OperationId=operationId 2025-12-23T18:57:26.446032Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:57:26.446336Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 1 2025-12-23T18:57:26.446533Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2025-12-23T18:57:26.457472Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2025-12-23T18:57:26.457548Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:57:26.457778Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2025-12-23T18:57:27.314884Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:57:27.314992Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:57:27.315052Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:57:27.315101Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:57:31.187899Z node 2 :STATISTICS DEBUG: tx_analyze_deadline.cpp:44: [72075186224037894] TTxAnalyzeDeadline::Complete. 
Send TEvAnalyzeResponse for deleted operation, OperationId=operationId, ActorId=[1:3232:3459] 2025-12-23T18:57:31.188015Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] >> BSCMovePDisk::PDiskMove_Mirror3dc |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] >> BSCMovePDisk::PDiskMove_Block42 |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:22.857607Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:22.944575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:22.953002Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:22.953340Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:22.953431Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:23.314510Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:23.393648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:23.393779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:23.451654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:23.542681Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004940/r3tmp/tmpgAVOWG/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18339, node 1 2025-12-23T18:55:24.213716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:24.215000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:24.215062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:24.215099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:24.215358Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:24.282883Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18927 2025-12-23T18:55:24.802974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:27.246355Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:27.252461Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:27.255054Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:27.278791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:27.278890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:27.317053Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:27.319318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:27.552186Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.552913Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.553721Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.554362Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.554638Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.554735Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.554904Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.555051Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.555155Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.666026Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:55:27.677291Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:27.677395Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:27.689632Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:27.840010Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:27.879878Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:27.879976Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:27.904595Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:27.905806Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:27.905975Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:27.906023Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column 
count# 0 2025-12-23T18:55:27.906063Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:27.906108Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:27.906146Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:27.906194Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:27.906480Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:27.946011Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:27.946117Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:27.956602Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:27.964220Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:27.965027Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:27.969154Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:55:27.993431Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:27.993494Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:55:27.993571Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:55:28.012221Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2352:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:28.021123Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:28.027945Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:28.028045Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:28.037608Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:28.137329Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:28.513475Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:28.621975Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:28.622046Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. ... orce traversal operations sent the requests. 2025-12-23T18:56:01.870514Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2025-12-23T18:56:01.870604Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.457000s, at schemeshard: 72075186224037899 2025-12-23T18:56:01.870854Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2025-12-23T18:56:01.885561Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:56:01.983892Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T18:56:01.987485Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:4889:4145], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:56:01.990768Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:4889:4145], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2025-12-23T18:56:01.990962Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:4889:4145], Start read next stream part 2025-12-23T18:56:02.245099Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4916:4159]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:02.245335Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:56:02.245579Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-12-23T18:56:02.245648Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4919:4162] 2025-12-23T18:56:02.245726Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4919:4162] 2025-12-23T18:56:02.246101Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4920:4163] 2025-12-23T18:56:02.246257Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4919:4162], server id = [2:4920:4163], tablet id = 72075186224037894, status = OK 2025-12-23T18:56:02.246312Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4920:4163], node id = 2, have schemeshards count = 0, need schemeshards count = 1 
2025-12-23T18:56:02.246381Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:56:02.246500Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:56:02.246571Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4916:4159], StatRequests.size() = 1 2025-12-23T18:56:02.246633Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:56:49.813158Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:4889:4145], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:56:49.813379Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:4889:4145], Start read next stream part 2025-12-23T18:56:49.814267Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:5095:4263], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:56:49.816398Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd692ze6775mg5e1n89g1b39", SessionId: ydb://session/3?node_id=2&id=MjU5Y2JkY2EtODNkYTE3OGYtMzViOGQ2MzItNGI3YmU4ZDI=, Slow query, duration: 47.819321s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:56:49.817768Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:4889:4145], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:56:49.817824Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:4889:4145], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDdiNWI5ZDQtZTg0MDhkMWUtZDMxMGFiNzItN2IwMWNkNDg=, TxId: 2025-12-23T18:56:49.817937Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2025-12-23T18:56:49.818016Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:5095:4263], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table` 2025-12-23T18:56:49.818089Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:5095:4263], Start read next stream part 2025-12-23T18:56:49.903450Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5115:4279]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:49.903681Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:49.903714Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5115:4279], StatRequests.size() = 1 
2025-12-23T18:57:34.988291Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:5095:4263], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:57:34.988450Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:5095:4263], Start read next stream part 2025-12-23T18:57:34.989086Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd694e4t1hyyyevpjgxb60et", SessionId: ydb://session/3?node_id=2&id=ODJjYmMzYmMtNmFkYTczZWQtMWIyY2JhZGEtODIyYWU4OGI=, Slow query, duration: 45.168141s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:57:34.991188Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5272:4364], ActorId: [2:5273:4365], Starting query actor #1 [2:5274:4366] 2025-12-23T18:57:34.991250Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5273:4365], ActorId: [2:5274:4366], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2025-12-23T18:57:34.993809Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:5095:4263], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:57:34.993856Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4879:2473], ActorId: [2:5095:4263], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmMxNjFhMDMtNzYyNWMyNmQtNTU5MmMxMDMtNDMzYTU4ODE=, TxId: 2025-12-23T18:57:34.993946Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33590, txId: 18446744073709551615] shutting down 2025-12-23T18:57:34.994528Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5273:4365], ActorId: [2:5274:4366], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YmI4YTE1ZmMtZDYwY2YwMmYtNDM1NWExYTMtNzIwYjgwMDI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:57:35.071321Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5290:4380]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:57:35.071730Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:57:35.072057Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-12-23T18:57:35.072133Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: 
[72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:57:35.072308Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:57:35.072396Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:5290:4380], StatRequests.size() = 1 2025-12-23T18:57:35.072474Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:57:35.256351Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5273:4365], ActorId: [2:5274:4366], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmI4YTE1ZmMtZDYwY2YwMmYtNDM1NWExYTMtNzIwYjgwMDI=, TxId: 2025-12-23T18:57:35.256497Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5273:4365], ActorId: [2:5274:4366], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmI4YTE1ZmMtZDYwY2YwMmYtNDM1NWExYTMtNzIwYjgwMDI=, TxId: 2025-12-23T18:57:35.257003Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5272:4364], ActorId: [2:5273:4365], Got response [2:5274:4366] SUCCESS 2025-12-23T18:57:35.257876Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:57:35.273234Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 32] 2025-12-23T18:57:35.273331Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3885:3649] |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService >> Secret::DeactivatedQueryService [GOOD] >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] >> KqpQueryPerf::Insert+QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2025-12-23T18:56:38.099968Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:38.121778Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:38.121992Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:38.128340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:38.128607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:38.128815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:38.128947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:38.129081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:38.129161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:38.129253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:38.129327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:38.129387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:38.129461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:38.129534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:38.129614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:38.129706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:38.151183Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:38.151339Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:38.151384Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:38.151510Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:38.151653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:38.151715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:38.151749Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:38.151836Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:38.151887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:38.151917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:38.151942Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:38.152078Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:38.152184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:38.152220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:38.152240Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:38.152313Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:38.152361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:38.152398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:38.152418Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:38.152457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:38.152482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-12-23T18:56:38.152502Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:38.152549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:38.152584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:38.152621Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:38.152787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:38.152833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:38.152867Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:38.152978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:38.153012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:38.153033Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:38.153068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:38.153093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:38.153112Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:38.153144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:38.153170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:38.153201Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2025-12-23T18:56:38.153323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:38.153354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... olumn_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=6544; 2025-12-23T18:57:34.396879Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=8; 2025-12-23T18:57:34.397706Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=776; 2025-12-23T18:57:34.397777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=7625; 2025-12-23T18:57:34.397816Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=7802; 2025-12-23T18:57:34.397870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2025-12-23T18:57:34.397947Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=44; 2025-12-23T18:57:34.397976Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8372; 2025-12-23T18:57:34.398108Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=87; 2025-12-23T18:57:34.398231Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=74; 2025-12-23T18:57:34.398401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=123; 2025-12-23T18:57:34.398533Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=92; 2025-12-23T18:57:34.400244Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1663; 2025-12-23T18:57:34.401899Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1584; 2025-12-23T18:57:34.401958Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-12-23T18:57:34.401993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-12-23T18:57:34.402025Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-12-23T18:57:34.402087Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=34; 2025-12-23T18:57:34.402123Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:57:34.402195Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=44; 2025-12-23T18:57:34.402227Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-12-23T18:57:34.402275Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2025-12-23T18:57:34.402364Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=55; 2025-12-23T18:57:34.402458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=48; 2025-12-23T18:57:34.402501Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=21562; 2025-12-23T18:57:34.402648Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110437896;raw_bytes=171489958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:34.402759Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:34.402808Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:34.402874Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:34.402922Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:34.403070Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:34.403122Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:34.403174Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:34.403246Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:57:34.403303Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:34.403344Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:34.403386Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:34.403469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:34.403664Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.077000s; 2025-12-23T18:57:34.405659Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:34.406544Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:34.406626Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:34.406708Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:34.406776Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:34.406842Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:57:34.406916Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:34.406991Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:34.407046Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:34.407149Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:57:34.407209Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:34.407701Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.135000s; 2025-12-23T18:57:34.407744Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17389, MsgBus: 17149 2025-12-23T18:57:31.436174Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129528383510852:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.436260Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fe0/r3tmp/tmpJdsXvI/pdisk_1.dat 2025-12-23T18:57:31.709230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:31.770471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:31.770587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:31.836189Z 
node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129528383510820:2081] 1766516251430409 != 1766516251430412 2025-12-23T18:57:31.836298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:31.843139Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17389, node 1 2025-12-23T18:57:31.929097Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:57:32.184836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.184857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.184871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.184958Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.463137Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:17149 TClient is connected to server localhost:17149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516251899 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:57:33.117589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:57:33.123651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T18:57:33.139574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:33.269381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.416230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.487451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:34.562955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129541268414581:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.563131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.563436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129541268414591:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.563489Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.990126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.019127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.047673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.075810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.105743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.140507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.176011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.248888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.336736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129545563382755:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.336820Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.337001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129545563382760:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.337335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129545563382761:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.337374Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.340564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:35.351576Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129545563382764:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:57:35.420022Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129545563382815:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:36.436534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129528383510852:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:36.436611Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:57:24.735973Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:57:24.836583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:57:24.845966Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:291:2336], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:57:25.147382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:25.147503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:25.150872Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516241793663 != 1766516241793667 2025-12-23T18:57:25.163093Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:25.211169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:25.291388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003cde/r3tmp/tmpalIMmg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14156, node 1 TClient is connected to server localhost:61210 2025-12-23T18:57:25.767014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:57:25.767799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:25.767845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:25.767876Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:25.768143Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:25.782095Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-12-23T18:57:37.498170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:882:2761], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:37.498385Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:897:2770], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:37.498481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:37.500035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:900:2773], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:37.500275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:37.505169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:37.526155Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:901:2774], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-12-23T18:57:37.573104Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:953:2807] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:37.792549Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:963:2816], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled 2025-12-23T18:57:37.795077Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=ZGMxMTU4M2EtNjJkODM5N2QtOGZiNzE5MWMtZDJjZDUyMDk=, ActorId: [1:880:2759], ActorState: ExecuteState, TraceId: 01kd695wp12p8ryqfchx1xzyfe, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 50 } message: "Executing CREATE OBJECT SECRET" end_position { row: 1 column: 50 } severity: 1 issues { message: "metadata provider service is disabled" severity: 1 } } }, remove tx with tx_id: REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 E1223 18:57:38.357393088 1592394 backup_poller.cc:113] run_poller: UNKNOWN:Timer list shutdown {created_time:"2025-12-23T18:57:38.357233057+00:00"} |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 64950, MsgBus: 20768 2025-12-23T18:57:31.446971Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129527505298689:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.447026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fdd/r3tmp/tmpepP6H4/pdisk_1.dat 2025-12-23T18:57:31.776426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:31.789256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:31.789376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:31.803780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:31.904828Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64950, node 1 2025-12-23T18:57:31.959990Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:57:32.182898Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.182921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.182999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.183081Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.456115Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:20768 TClient is connected to server localhost:20768 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516251962 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.092688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:33.099885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:57:33.111617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.249515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.400518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:57:33.475831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:34.663638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129540390202341:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.663822Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.664131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129540390202351:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.664202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.990054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.019297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.045912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.080310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.111584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.145819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.179250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.241793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.319012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129544685170518:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.319087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.319101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129544685170523:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.319367Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129544685170525:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.319416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.324025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:35.337041Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129544685170526:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:57:35.392684Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129544685170578:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:36.447158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129527505298689:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:36.447268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4238, MsgBus: 22534 2025-12-23T18:57:31.436831Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129526136997637:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.436890Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fc9/r3tmp/tmpzsCaua/pdisk_1.dat 2025-12-23T18:57:31.779957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:31.789086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:31.789211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:31.847416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:31.905393Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4238, node 1 2025-12-23T18:57:31.912004Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129526136997601:2081] 1766516251430158 != 1766516251430161 2025-12-23T18:57:31.968761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:57:32.183523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.183546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.183563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.183640Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.447319Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22534 TClient is connected to server localhost:22534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516251934 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.053571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:33.060100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:57:33.087522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.278262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.429271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.500932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:34.928950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129539021901365:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.929127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.929620Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129539021901375:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.929687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.407915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.443398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.472738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.500419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.529928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.564026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.597170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.641290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.711099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129543316869545:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.711205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.711441Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129543316869550:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.711505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129543316869551:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.711571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.715919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:35.727248Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129543316869554:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:57:35.808619Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129543316869605:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:36.437360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129526136997637:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:36.437430Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2553, MsgBus: 24265 2025-12-23T18:57:31.446975Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129530190373775:2245];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.447497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fdf/r3tmp/tmpFGC6l0/pdisk_1.dat 2025-12-23T18:57:31.721127Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:31.771298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:31.771407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:31.839161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:31.928661Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2553, node 1 2025-12-23T18:57:31.946078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:57:32.184603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.184627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.184641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.184707Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.446841Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24265 TClient is connected to server localhost:24265 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516251934 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.052044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:33.059289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:57:33.084727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:33.225599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:57:33.370981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:33.445259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:34.464768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129543075277324:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.464932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.465353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129543075277334:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.465431Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.990141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.016977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.044576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.073451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.103903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.137875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.171748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.225845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.314818Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129547370245501:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.314934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.315139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129547370245505:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.315181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129547370245508:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.315208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.322528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:35.336604Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129547370245510:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:57:35.423851Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129547370245561:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:36.447063Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129530190373775:2245];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:36.448386Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21867, MsgBus: 12337 2025-12-23T18:57:31.450827Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129527015643645:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.453917Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fbf/r3tmp/tmpRqRt9B/pdisk_1.dat 2025-12-23T18:57:31.678915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:31.754089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:31.754220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:31.800373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:31.866151Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:31.918487Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129527015643621:2081] 1766516251448540 != 1766516251448543 TServer::EnableGrpc on GrpcPort 21867, node 1 2025-12-23T18:57:31.991747Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:57:32.182898Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.182926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.182951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.183045Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.459123Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12337 TClient is connected to server localhost:12337 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516251969 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.079625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:33.087951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:57:33.102424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.244379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.412403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.483429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:34.986920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129539900547399:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.987119Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.987544Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129539900547409:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.987602Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.373138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.403781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.436134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.469168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.501196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.539132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.569789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.613716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.681970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129544195515574:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.682044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.682136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129544195515579:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.682290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129544195515581:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.682356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.685751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:35.697367Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129544195515583:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:57:35.798440Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129544195515634:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:36.450639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129527015643645:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:36.450770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14547, MsgBus: 11113 2025-12-23T18:57:31.436162Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129528847388500:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.436246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fc8/r3tmp/tmpMrcyeA/pdisk_1.dat 2025-12-23T18:57:31.757659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:31.794970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:31.795077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:31.888657Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:31.891407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:31.934823Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 14547, node 1 2025-12-23T18:57:32.183494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.183516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.183535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.183617Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.448245Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:11113 TClient is connected to server localhost:11113 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516251948 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.080798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:33.097451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.260463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.424216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:57:33.503350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.106324Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546027259531:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.106458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.106762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546027259541:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.106854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.472086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.504115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.535280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.566213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.611846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.638778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.670092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.710010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.775536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546027260416:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.775648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.775666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546027260421:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.775876Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546027260423:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.775958Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.779220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:35.789191Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129546027260424:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:57:35.877590Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129546027260476:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:36.436207Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129528847388500:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:36.436300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> BSCMovePDisk::PDiskMove_ErasureNone >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink >> KqpQueryPerf::IndexReplace-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace-QueryService+UseSink >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] >> Secret::Deactivated [GOOD] >> BSCMovePDisk::PDiskMove_ErasureNone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] Test command err: 2025-12-23T18:57:05.387683Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:05.408539Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:05.408703Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:05.414464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:05.414688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:05.414883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:05.415003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:05.415076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:05.415143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:05.415219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:05.415278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:05.415357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:05.415433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:05.415491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:05.415590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:05.415660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:05.436438Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:05.436625Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:05.436679Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:05.436891Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:05.437081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:05.437151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:05.437194Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:05.437299Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:05.437357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:05.437405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:05.437442Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:05.437654Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:05.437731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:05.437796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:05.437831Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:05.437909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:05.437947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:05.437975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:05.437993Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:05.438033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:05.438056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:05.438081Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:05.438142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:05.438194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:05.438242Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:05.438424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:05.438464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:05.438483Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:05.438570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:05.438599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:05.438619Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:05.438648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:05.438672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:05.438694Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:05.438735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:05.438791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:05.438818Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:05.438899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:05.438945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
age_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2025-12-23T18:57:37.792114Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=877; 2025-12-23T18:57:37.792171Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=61182; 2025-12-23T18:57:37.792214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=61319; 2025-12-23T18:57:37.792271Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=10; 2025-12-23T18:57:37.792640Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=316; 2025-12-23T18:57:37.792696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=62281; 2025-12-23T18:57:37.792851Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=104; 2025-12-23T18:57:37.792966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=66; 2025-12-23T18:57:37.793365Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=346; 2025-12-23T18:57:37.793739Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=321; 2025-12-23T18:57:37.806358Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=12543; 2025-12-23T18:57:37.816509Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10056; 2025-12-23T18:57:37.816601Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2025-12-23T18:57:37.816650Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-12-23T18:57:37.816711Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:57:37.816790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-12-23T18:57:37.816846Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-12-23T18:57:37.816943Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=58; 2025-12-23T18:57:37.816987Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-12-23T18:57:37.817070Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=48; 2025-12-23T18:57:37.817160Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=45; 2025-12-23T18:57:37.817248Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=53; 2025-12-23T18:57:37.817293Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=94297; 2025-12-23T18:57:37.817423Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:37.817520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:37.817569Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:37.817638Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:37.817676Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:37.817878Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:37.817939Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:37.817976Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2025-12-23T18:57:37.818013Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:37.818074Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515927857;tx_id=18446744073709551615;;current_snapshot_ts=1766516226869; 2025-12-23T18:57:37.818120Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:37.818158Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:37.818190Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:37.818263Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:37.818394Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.059000s; 2025-12-23T18:57:37.821513Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:37.821713Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:37.821745Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:37.821807Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:37.821839Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:37.821877Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515927857;tx_id=18446744073709551615;;current_snapshot_ts=1766516226869; 2025-12-23T18:57:37.821906Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:37.821936Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:37.821961Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:37.822006Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:57:37.822035Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:37.822721Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.006000s; 2025-12-23T18:57:37.822781Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_ErasureNone [GOOD] Test command err: RandomSeed# 10659951797785409647 |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:57:27.813629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:57:27.890198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:57:27.897497Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:291:2336], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:57:28.136563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:28.136671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:28.139478Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516245783354 != 1766516245783358 2025-12-23T18:57:28.149151Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:28.194182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:28.287245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003cc3/r3tmp/tmp35X9j0/pdisk_1.dat TServer::EnableGrpc on GrpcPort 21990, node 1 TClient is connected to server localhost:11008 2025-12-23T18:57:28.740721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:57:28.741235Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:28.741272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:28.741308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:28.741550Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:28.754283Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-12-23T18:57:40.623285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:883:2761], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:40.623481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:40.623811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:893:2764], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:40.623858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 |98.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc [GOOD] >> BSCMovePDisk::PDiskMove_Block42 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] Test command err: 2025-12-23T18:57:04.557489Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:04.590540Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:04.590782Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:04.598041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:04.598329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:04.598579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:04.598708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:04.598839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:04.598964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:04.599084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:04.599215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:04.599343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:04.599460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:04.599578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:04.599704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:04.599820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:04.630725Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:04.630958Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:04.631015Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:04.631216Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:04.631429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:04.631523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:04.631573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:04.631690Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:04.631765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:04.631812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:04.631864Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:04.632065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:04.632134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:04.632177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2025-12-23T18:57:04.632209Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:04.632325Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:04.632390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:04.632440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:04.632477Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:04.632544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:04.632599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:04.632662Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:04.632730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:04.632783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:04.632823Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:04.633075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:04.633143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:04.633174Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:04.633304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:04.633348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:04.633387Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:04.633438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:04.633479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:04.633508Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:04.633555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:04.633612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:04.633649Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:04.633889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:04.633943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2025-12-23T18:57:38.823892Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=977; 2025-12-23T18:57:38.823951Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=74160; 2025-12-23T18:57:38.824007Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=74304; 2025-12-23T18:57:38.824073Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2025-12-23T18:57:38.824506Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=375; 2025-12-23T18:57:38.824553Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=75345; 2025-12-23T18:57:38.824697Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=93; 2025-12-23T18:57:38.824855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=109; 2025-12-23T18:57:38.825329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=415; 2025-12-23T18:57:38.825784Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=400; 2025-12-23T18:57:38.845570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=19702; 2025-12-23T18:57:38.863780Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=18078; 2025-12-23T18:57:38.863905Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2025-12-23T18:57:38.863989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=33; 2025-12-23T18:57:38.864062Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-12-23T18:57:38.864151Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2025-12-23T18:57:38.864198Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-12-23T18:57:38.864291Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=57; 2025-12-23T18:57:38.864332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-12-23T18:57:38.864401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=36; 2025-12-23T18:57:38.864500Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=59; 2025-12-23T18:57:38.864591Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=50; 2025-12-23T18:57:38.864633Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=125242; 2025-12-23T18:57:38.864782Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:38.864910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:38.864969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:38.865039Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:38.865085Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:38.865273Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:38.865333Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:38.865375Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2025-12-23T18:57:38.865422Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:38.865521Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515927009;tx_id=18446744073709551615;;current_snapshot_ts=1766516226033; 2025-12-23T18:57:38.865576Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:38.865627Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:38.865664Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:38.865758Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:38.866028Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.034000s; 2025-12-23T18:57:38.870614Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:38.871132Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:38.871198Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:38.871281Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:38.871335Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:38.871402Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515927009;tx_id=18446744073709551615;;current_snapshot_ts=1766516226033; 2025-12-23T18:57:38.871465Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:38.871530Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:38.871573Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:38.871658Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:57:38.871710Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:38.872797Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.003000s; 2025-12-23T18:57:38.872847Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc [GOOD] Test command err: RandomSeed# 6269642138489540394 |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] Test command err: 2025-12-23T18:56:17.790091Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:17.811274Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:17.811462Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:17.816909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:17.817097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:17.817268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:17.817363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:17.817479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:17.817573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:17.817656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:17.817731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:17.817800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:17.817873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:17.817954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:17.818021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:17.818103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:17.838293Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:17.838449Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:17.838501Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:17.838626Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:17.838766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:17.838828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:17.838861Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:17.838966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:17.839015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:17.839042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:17.839104Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:17.839242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:17.839289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:17.839320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:17.839344Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:17.839441Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:17.839485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:17.839525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:17.839546Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:17.839608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:17.839634Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:17.839652Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:17.839678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:17.839718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:17.839748Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:17.839916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:17.839956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:17.839982Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:17.840085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:17.840124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:17.840149Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:17.840182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:17.840211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:17.840230Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:17.840256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:17.840285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:17.840304Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:17.840384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:17.840407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... umn_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.209258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.209547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.209846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.210122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.210405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.210719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.211051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.211363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.211656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.211946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.212466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.212771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.213078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2025-12-23T18:57:39.213364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5208,5208;s_splitted=5312,5296;r_splitted=854,854; 2025-12-23T18:57:39.217462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 
1};entity_id=5;size=320144;limit=10240;r_count=80000;fline=column_info.h:139;sizes=10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005;s_splitted=10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10208;r_splitted=2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2516; 2025-12-23T18:57:39.283524Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=6817016;count=1255;actions=__DEFAULT,;waiting=1;; 2025-12-23T18:57:40.234565Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6265200;event=data_write_finished;writing_id=400eb570-e03111f0-9e5a3c63-d57c429; 2025-12-23T18:57:40.235996Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=86;data_size=65;sum=4128;count=95; 2025-12-23T18:57:40.236052Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=182;data_size=177;sum=8736;count=96;size_of_meta=112; 2025-12-23T18:57:40.236125Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=262;data_size=257;sum=12576;count=48;size_of_portion=192; 2025-12-23T18:57:40.248249Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:28 Blob count: 1255 2025-12-23T18:57:40.259472Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=27;operation_id=26; 2025-12-23T18:57:41.270423Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:28 Blob count: 1255 2025-12-23T18:57:41.271759Z node 1 :TX_COLUMNSHARD 
DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=110;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:41.365643Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1766516183095 at tablet 9437184, mediator 0 2025-12-23T18:57:41.365754Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[112] execute at tablet 9437184 2025-12-23T18:57:41.366532Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=128;fline=abstract.h:88;progress_tx_id=128;lock_id=1;broken=0; 2025-12-23T18:57:41.387909Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[112] complete at tablet 9437184 2025-12-23T18:57:41.388019Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:101;progress_tx_id=128;lock_id=1;broken=0; 2025-12-23T18:57:41.388238Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=128;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=27;operation_id=26; 2025-12-23T18:57:41.388284Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=128;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=26; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TResourcePoolTest::ReadOnlyMode >> TResourcePoolTest::ParallelCreateSameResourcePool >> TResourcePoolTest::CreateResourcePoolWithProperties >> TResourcePoolTest::SchemeErrors >> TResourcePoolTest::DropResourcePool >> TResourcePoolTest::DropResourcePoolTwice >> TResourcePoolTest::ParallelAlterResourcePool >> TResourcePoolTest::AlterResourcePool >> TResourcePoolTest::ParallelCreateResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Block42 [GOOD] Test command err: RandomSeed# 7707337915505191625 |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] |98.4%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |98.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] Test command err: 2025-12-23T18:56:53.073696Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:53.101854Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:53.102085Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:53.108374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:53.108581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:53.108793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:53.108938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:53.109032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:53.109122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:53.109206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:53.109292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:53.109373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:53.109434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:53.109500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:53.109570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:53.109657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:53.131112Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:53.131382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:53.131450Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:53.131588Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:53.131744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:53.131813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:53.131874Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:53.131984Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:53.132061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:53.132120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:53.132158Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:53.132354Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:53.132428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:53.132477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:53.132508Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:53.132600Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:53.132680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:53.132735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:53.132770Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:53.132839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:53.132876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:53.132905Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:53.132967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:53.133018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:53.133055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:53.133300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:53.133354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:53.133382Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:53.133519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:53.133562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:53.133590Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:53.133643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:53.133706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:53.133739Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:53.133791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:53.133843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:53.133876Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:53.134007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:53.134050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... age_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2025-12-23T18:57:40.139616Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=885; 2025-12-23T18:57:40.139668Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=71674; 2025-12-23T18:57:40.139720Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=71815; 2025-12-23T18:57:40.139789Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2025-12-23T18:57:40.140127Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=289; 2025-12-23T18:57:40.140172Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=72682; 2025-12-23T18:57:40.140326Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=97; 2025-12-23T18:57:40.140444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=67; 2025-12-23T18:57:40.140828Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=337; 2025-12-23T18:57:40.141142Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=265; 2025-12-23T18:57:40.158447Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=17234; 2025-12-23T18:57:40.175599Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=17039; 2025-12-23T18:57:40.175711Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2025-12-23T18:57:40.175773Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-12-23T18:57:40.175834Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:57:40.175922Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=43; 2025-12-23T18:57:40.175969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-12-23T18:57:40.176056Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=51; 2025-12-23T18:57:40.176096Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-12-23T18:57:40.176161Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=31; 2025-12-23T18:57:40.176242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=46; 2025-12-23T18:57:40.176333Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=44; 2025-12-23T18:57:40.176377Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=118200; 
2025-12-23T18:57:40.176521Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:40.176629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:40.176685Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:40.176751Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:40.176798Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:40.176979Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:40.177041Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:40.177082Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:40.177130Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:40.177203Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515917204;tx_id=18446744073709551615;;current_snapshot_ts=1766516214604; 2025-12-23T18:57:40.177248Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:40.177295Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:40.177331Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:40.177420Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:40.177599Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.003000s; 2025-12-23T18:57:40.180526Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:40.180765Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:40.180820Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:40.180895Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:40.180946Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:40.181014Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515917204;tx_id=18446744073709551615;;current_snapshot_ts=1766516214604; 2025-12-23T18:57:40.181063Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:40.181111Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:40.181152Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:40.181231Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2025-12-23T18:57:40.181285Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:40.182396Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.067000s; 2025-12-23T18:57:40.182447Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |98.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TResourcePoolTest::ParallelCreateResourcePool [GOOD] >> TResourcePoolTest::ParallelCreateSameResourcePool [GOOD] >> TResourcePoolTest::DropResourcePool [GOOD] >> TResourcePoolTest::DropResourcePoolTwice [GOOD] >> TResourcePoolTest::CreateResourcePoolWithProperties [GOOD] >> TResourcePoolTest::SchemeErrors [GOOD] >> TResourcePoolTest::ParallelAlterResourcePool [GOOD] >> TResourcePoolTest::AlterResourcePool [GOOD] >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29395, MsgBus: 29332 2025-12-23T18:57:31.435905Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129529738756783:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.435956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fd8/r3tmp/tmpV4VYfL/pdisk_1.dat 2025-12-23T18:57:31.754957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:31.783785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:31.783928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:31.861277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:31.873484Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129529738756747:2081] 1766516251429969 != 1766516251429972 2025-12-23T18:57:31.881421Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29395, node 1 2025-12-23T18:57:31.983242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:57:32.183276Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.183308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.183325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.183409Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.443933Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:29332 TClient is 
connected to server localhost:29332 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516251934 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.076765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:33.084467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:57:33.102116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.233730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:57:33.377046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:33.450310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:34.989282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129542623660511:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.989449Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.989841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129542623660521:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.989936Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.273154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.306905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.340896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.368920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.402545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.440351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.488761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.530001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.596869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546918628691:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.596976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.597030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546918628696:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.597207Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546918628698:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.597255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.600561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:38.629193Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:38.629427Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587129558751324922:2081] 1766516258537193 != 1766516258537196 2025-12-23T18:57:38.675035Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:38.675134Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:38.682589Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22635, node 2 2025-12-23T18:57:38.727336Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:38.727365Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:38.727373Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:38.727467Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:38.850165Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1394 TClient is connected to server localhost:1394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516258731 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:57:39.108860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:39.126641Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:39.177462Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:39.329901Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:39.391269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:39.564084Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:57:41.354062Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129571636228679:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.354160Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.354417Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129571636228688:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.354471Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.427325Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.458408Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.490810Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.517129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.548152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.583308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.619685Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.669940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.744935Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129571636229560:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.745024Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.745085Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129571636229565:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.745301Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129571636229567:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.745358Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.748974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:41.761138Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587129571636229568:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:57:41.821527Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587129571636229620:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TResourcePoolTest::ReadOnlyMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::DropResourcePoolTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:57:43.856030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:57:43.856155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:57:43.856273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:57:43.856320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:57:43.856354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:57:43.856413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:57:43.857361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:57:43.858984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:57:43.956086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:57:43.956142Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:43.967980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:57:43.968169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:57:43.968316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:57:43.979709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:57:43.979955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:57:43.980649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:57:43.980862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:57:43.985201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.985366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:57:43.986614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:43.986677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.986858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:57:43.986911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:57:43.987049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:57:43.987199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.140588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.141590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.141729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2025-12-23T18:57:44.141829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.141907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.142018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.142110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.142179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.142255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.142329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.142384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.142473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.142566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.142662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.142772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
h_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:44.748336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:57:44.748438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2025-12-23T18:57:44.748505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2025-12-23T18:57:44.748598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:44.748626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 103, path id: 39 2025-12-23T18:57:44.748655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 103, path id: 40 2025-12-23T18:57:44.748673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 103, path id: 41 FAKE_COORDINATOR: Erasing txId 103 2025-12-23T18:57:44.748871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.748905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:57:44.748992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:57:44.749032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:57:44.749104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:57:44.749137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:57:44.749163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-12-23T18:57:44.749198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:57:44.749224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:57:44.749248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:57:44.749296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:57:44.749323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2025-12-23T18:57:44.749351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 7 2025-12-23T18:57:44.749372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 40], 7 2025-12-23T18:57:44.749389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2025-12-23T18:57:44.749873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:57:44.749971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:57:44.750014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:57:44.750050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2025-12-23T18:57:44.750078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2025-12-23T18:57:44.750289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:57:44.750320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2025-12-23T18:57:44.750408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:57:44.750670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:57:44.750714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:57:44.750759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:57:44.750788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2025-12-23T18:57:44.750807Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:57:44.751355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:57:44.751420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:57:44.751441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:57:44.751462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 7 2025-12-23T18:57:44.751496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:57:44.751545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-12-23T18:57:44.754271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:57:44.754426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:57:44.754922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:57:44.755295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:57:44.755571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:57:44.755603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:57:44.755874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:57:44.755937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:57:44.755985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:784:2773] TestWaitNotification: OK eventTxId 103 2025-12-23T18:57:44.756375Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:57:44.756541Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 193us result status StatusPathDoesNotExist 2025-12-23T18:57:44.756681Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.metadata/workload_manager/pools" LastExistedPrefixPathId: 40 LastExistedPrefixDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelCreateSameResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:57:43.855968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:57:43.856067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:57:43.856166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:57:43.856201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:57:43.856238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:57:43.856342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:57:43.857287Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:57:43.858874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:57:43.930652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:57:43.930726Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:43.947212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:57:43.947491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:57:43.947741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:57:43.962973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:57:43.963370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:57:43.969281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:57:43.971305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:57:43.980209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.980375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:57:43.985729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:43.985804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.986011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:57:43.986068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:57:43.986196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:57:43.986861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.157074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.157818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.157905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.157966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.158016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.158061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.158124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.158174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.158222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.158292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.158349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.158412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.158503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.158571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.158627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
12-23T18:57:44.762856Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 240us result status StatusSuccess 2025-12-23T18:57:44.763244Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:57:44.763807Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:57:44.763966Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 173us result status StatusSuccess 2025-12-23T18:57:44.764246Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2025-12-23T18:57:44.764523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2025-12-23T18:57:44.764560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2025-12-23T18:57:44.764651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2025-12-23T18:57:44.764678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2025-12-23T18:57:44.764735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2025-12-23T18:57:44.764753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2025-12-23T18:57:44.765401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-12-23T18:57:44.765507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-12-23T18:57:44.765604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-12-23T18:57:44.765651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:762:2751] 2025-12-23T18:57:44.765811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-12-23T18:57:44.765857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:762:2751] 2025-12-23T18:57:44.765951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-12-23T18:57:44.766082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-12-23T18:57:44.766113Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:762:2751] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-12-23T18:57:44.766592Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:57:44.766772Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 190us result status StatusSuccess 2025-12-23T18:57:44.767087Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2025-12-23T18:57:44.770401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "NilNoviSubLuna" } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:57:44.770665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 128:0, path# /MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna 2025-12-23T18:57:44.770871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 
128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-12-23T18:57:44.772931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 125, at schemeshard: 72057594046678944 2025-12-23T18:57:44.773180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |98.4%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::DropResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:57:43.855989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:57:43.856075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:57:43.856167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:57:43.856271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:57:43.856300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:57:43.856374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:57:43.857309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:57:43.858879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:57:43.952103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:57:43.952164Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:43.963204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:57:43.963384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:57:43.963552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:57:43.974890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:57:43.975148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:57:43.975864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:57:43.976099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:57:43.980447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.980604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:57:43.985704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:43.985774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.985945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:57:43.985996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:57:43.986121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:57:43.986860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.131089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.131752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.131851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.131921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.131960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.132022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.132096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.132137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.132188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.132248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.132318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.132371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.132418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.132490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.132549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:44.733526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2025-12-23T18:57:44.733661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2025-12-23T18:57:44.733723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2025-12-23T18:57:44.733828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:44.733868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 103, path id: 39 2025-12-23T18:57:44.733901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 103, path id: 40 2025-12-23T18:57:44.733923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 103, path id: 41 2025-12-23T18:57:44.734054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.734106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-12-23T18:57:44.734197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:57:44.734229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:57:44.734266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2025-12-23T18:57:44.734295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:57:44.734326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-12-23T18:57:44.734382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-12-23T18:57:44.734423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2025-12-23T18:57:44.734461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 103:0 2025-12-23T18:57:44.734558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:57:44.734593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2025-12-23T18:57:44.734641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 7 2025-12-23T18:57:44.734692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 40], 7 2025-12-23T18:57:44.734710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2025-12-23T18:57:44.735189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:57:44.735252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:57:44.735279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:57:44.735328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2025-12-23T18:57:44.735369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2025-12-23T18:57:44.735855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-12-23T18:57:44.735897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2025-12-23T18:57:44.735955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:57:44.736409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:57:44.736515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:57:44.736542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:57:44.736576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2025-12-23T18:57:44.736606Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:57:44.737598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:57:44.737679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-12-23T18:57:44.737734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-12-23T18:57:44.737766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 7 2025-12-23T18:57:44.737799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:57:44.737866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-12-23T18:57:44.740039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:57:44.740310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-12-23T18:57:44.740475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-12-23T18:57:44.741337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-12-23T18:57:44.741617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-12-23T18:57:44.741677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-12-23T18:57:44.742066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-12-23T18:57:44.742143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-12-23T18:57:44.742174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:779:2768] TestWaitNotification: OK eventTxId 103 2025-12-23T18:57:44.742628Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:57:44.742854Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 244us result status StatusPathDoesNotExist 2025-12-23T18:57:44.743028Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.metadata/workload_manager/pools" LastExistedPrefixPathId: 40 LastExistedPrefixDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelCreateResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:57:43.855986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:57:43.856078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:57:43.856180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:57:43.856229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:57:43.856266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:57:43.856330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-12-23T18:57:43.857380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:57:43.858891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:57:43.937834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:57:43.937894Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:43.950888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:57:43.951170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:57:43.951391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:57:43.964284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:57:43.964576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:57:43.969309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:57:43.971313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:57:43.980643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.980826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:57:43.985729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:43.985809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.985994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:57:43.986054Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:57:43.986198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:57:43.986857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.122759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.126638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.126863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.126999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.127089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.127170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.127262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.127342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.127425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.127505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.127564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.127641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.127733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.127826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.127912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
t@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool1" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:57:44.723678Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:57:44.723863Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" took 179us result status StatusSuccess 2025-12-23T18:57:44.724167Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" PathDescription { Self { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 
MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool2" PathId { OwnerId: 72057594046678944 LocalId: 42 } Version: 1 Properties { } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:57:44.724870Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:57:44.725060Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools" took 165us result status StatusSuccess 2025-12-23T18:57:44.726859Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools" PathDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyResourcePool1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:57:44.728748Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:57:44.728934Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" took 202us result status StatusSuccess 2025-12-23T18:57:44.729239Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" PathDescription { Self { Name: "MyResourcePool1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool1" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:57:44.729686Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:57:44.729879Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" took 184us result status StatusSuccess 2025-12-23T18:57:44.730167Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" PathDescription { Self { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool2" PathId { OwnerId: 72057594046678944 LocalId: 42 } Version: 1 Properties { } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::CreateResourcePoolWithProperties [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:57:43.856037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:57:43.856134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:57:43.856236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:57:43.856273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:57:43.856316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:57:43.856407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:57:43.857383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:57:43.858989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:57:43.955208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:57:43.955269Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:43.966996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:57:43.967219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:57:43.967445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:57:43.979172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:57:43.979415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:57:43.980131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:57:43.980362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:57:43.984615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.984797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:57:43.986121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:43.986182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.986345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:57:43.986414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:57:43.986532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:57:43.986856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.153423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.154504Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.154632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.154768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.154857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.154938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.155049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.155144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.155236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.155321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.155390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.155457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.155549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.155646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.155748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d: 72057594046678944 2025-12-23T18:57:44.833626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_resource_pool.cpp:19: [72057594046678944] TCreateResourcePool TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2025-12-23T18:57:44.833767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2025-12-23T18:57:44.833933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:57:44.833991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:57:44.834847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:57:44.836840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:57:44.836934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-12-23T18:57:44.837279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:44.837329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2025-12-23T18:57:44.837471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2025-12-23T18:57:44.837551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2025-12-23T18:57:44.837666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:44.837708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 102, path id: 40 2025-12-23T18:57:44.837749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 102, path id: 41 2025-12-23T18:57:44.837775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2210], at schemeshard: 72057594046678944, txId: 102, path id: 41 2025-12-23T18:57:44.838018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.838068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-12-23T18:57:44.838191Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:57:44.838253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:57:44.838299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2025-12-23T18:57:44.838328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:57:44.838363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-12-23T18:57:44.838402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-12-23T18:57:44.838437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2025-12-23T18:57:44.838488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 102:0 2025-12-23T18:57:44.838559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2025-12-23T18:57:44.838596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-12-23T18:57:44.838631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 40], 5 2025-12-23T18:57:44.838667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 41], 2 2025-12-23T18:57:44.839748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:57:44.839846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:57:44.839882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:57:44.839934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 5 2025-12-23T18:57:44.839989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:57:44.841020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:57:44.841166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-12-23T18:57:44.841205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-12-23T18:57:44.841242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2025-12-23T18:57:44.841275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2025-12-23T18:57:44.841351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-12-23T18:57:44.843679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-12-23T18:57:44.844609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-12-23T18:57:44.844819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-12-23T18:57:44.844860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-12-23T18:57:44.845226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-12-23T18:57:44.845312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-12-23T18:57:44.845366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:750:2739] TestWaitNotification: OK eventTxId 102 2025-12-23T18:57:44.845873Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:57:44.846110Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 253us result status StatusSuccess 2025-12-23T18:57:44.847427Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { Properties { key: "concurrent_query_limit" value: "10" } Properties { key: "query_cancel_after_seconds" value: "60" } } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:57:43.856043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:57:43.856146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:57:43.856247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:57:43.856283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:57:43.856319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:57:43.856387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:57:43.857355Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:57:43.858969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:57:43.958573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:57:43.958628Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:43.970001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:57:43.970188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:57:43.970354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:57:43.982030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:57:43.982308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:57:43.983030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:57:43.983262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:57:43.987298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.987461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:57:43.988754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:43.988814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.988985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:57:43.989040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:57:43.989152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:57:43.989305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.175669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.176704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.176849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.176986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.177057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.177165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.177259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.177329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.177417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.177495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.177561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.177633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.177713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.177828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.177940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:57:44.808693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 124 2025-12-23T18:57:44.808756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 124 2025-12-23T18:57:44.808779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 124 2025-12-23T18:57:44.808817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 124, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2025-12-23T18:57:44.808847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:57:44.808922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 124, subscribers: 0 2025-12-23T18:57:44.810937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2025-12-23T18:57:44.812784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2025-12-23T18:57:44.812881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2025-12-23T18:57:44.812964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 TestModificationResult got TxId: 124, wait until txId: 124 TestWaitNotification wait txId: 124 2025-12-23T18:57:44.813174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 124: send EvNotifyTxCompletion 2025-12-23T18:57:44.813212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 124 2025-12-23T18:57:44.813567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-12-23T18:57:44.813646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-12-23T18:57:44.813678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:723:2712] TestWaitNotification: OK eventTxId 124 TestModificationResults wait txId: 125 2025-12-23T18:57:44.816494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: 
"AnotherDir/MyResourcePool" } } TxId: 125 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:57:44.816772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/AnotherDir, operationId: 125:0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.816935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: AnotherDir, child id: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2025-12-23T18:57:44.816990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 0 2025-12-23T18:57:44.817022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 125:0 type: TxMkDir target path: [OwnerId: 72057594046678944, LocalPathId: 41] source path: 2025-12-23T18:57:44.817093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 125:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:57:44.817157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 125:1, path# /MyRoot/AnotherDir/MyResourcePool 2025-12-23T18:57:44.817273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 125:2, propose status:StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, at schemeshard: 72057594046678944 2025-12-23T18:57:44.819600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:148: Abort operation: IgniteOperation fail to propose a part, opId: 125:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusSchemeError, with reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, tx message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 125 TabletId: 72057594046678944 2025-12-23T18:57:44.819799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:275: MkDir AbortPropose, opId: 125:0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.821812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 125, response: Status: StatusSchemeError Reason: "Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools" TxId: 125 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:57:44.822017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 125, database: /MyRoot, subject: , status: StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, operation: CREATE RESOURCE POOL, path: AnotherDir/MyResourcePool TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 2025-12-23T18:57:44.824752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 126 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:57:44.825077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/.metadata/workload_manager/pools/AnotherDir, operationId: 126:0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.825284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 40], parent name: pools, child name: AnotherDir, child id: [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2025-12-23T18:57:44.825334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 0 2025-12-23T18:57:44.825369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 126:0 type: TxMkDir target path: [OwnerId: 72057594046678944, LocalPathId: 42] source path: 2025-12-23T18:57:44.825440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-12-23T18:57:44.825489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 126:1, path# /MyRoot/.metadata/workload_manager/pools/AnotherDir/MyResourcePool 2025-12-23T18:57:44.825608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:2, propose status:StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, at schemeshard: 72057594046678944 2025-12-23T18:57:44.827852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:148: Abort operation: IgniteOperation fail to propose a part, opId: 126:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusSchemeError, with reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, tx message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 126 TabletId: 72057594046678944 2025-12-23T18:57:44.827998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:275: MkDir AbortPropose, opId: 126:0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.829995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:57:44.830227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, operation: CREATE RESOURCE POOL, path: AnotherDir/MyResourcePool TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-12-23T18:57:44.832948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "" } } TxId: 127 
TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:57:44.833129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 127:0, path# /MyRoot/.metadata/workload_manager/pools/ 2025-12-23T18:57:44.833264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-12-23T18:57:44.835280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/\', error: path part shouldn\'t be empty" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:57:44.835531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/', error: path part shouldn't be empty, operation: CREATE RESOURCE POOL, path: TestModificationResult got TxId: 127, wait until txId: 127 >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelAlterResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2145] Leader for TabletID 72057594046678944 is [1:138:2160] sender: [1:140:2058] recipient: [1:115:2145] 2025-12-23T18:57:43.877464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:57:43.877543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.877580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:57:43.877613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:57:43.877647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:57:43.877694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:57:43.877780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.877850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-12-23T18:57:43.878720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:57:43.879011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:57:43.954182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:57:43.954282Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:43.960735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:57:43.961012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:57:43.961179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:57:43.964755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:57:43.964932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:57:43.969286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:57:43.971381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:57:43.977739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.977955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:57:43.985564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:43.985666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.985821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:57:43.985875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:57:43.986006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:57:43.986862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.168117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.171845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.172051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.172154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.172233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.172304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.172380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.172472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.172553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.172617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.172708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.172785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.172888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.172985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.173105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
TTxNotificationSubscriber for txId 114: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.042127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2025-12-23T18:57:45.042247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2025-12-23T18:57:45.042305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.042325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.042438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2025-12-23T18:57:45.042520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2025-12-23T18:57:45.042567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.042591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.042693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.042726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.042797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-12-23T18:57:45.042898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-12-23T18:57:45.042965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.043000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.043082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-12-23T18:57:45.043215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-12-23T18:57:45.043262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.043282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.043315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.043336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.043467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 128, at 
schemeshard: 72057594046678944 2025-12-23T18:57:45.043510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.043530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.043674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-12-23T18:57:45.043743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.043778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.043816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.043835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.043882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2025-12-23T18:57:45.044021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.044045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.044142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2025-12-23T18:57:45.044184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.044203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.044279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.044311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.044365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 132, at schemeshard: 72057594046678944 2025-12-23T18:57:45.044461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.044484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.044631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.044654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.044767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.044795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- 
TTxNotificationSubscriber for txId 129: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.044895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.044919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.045050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.045073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:845:2835] 2025-12-23T18:57:45.045191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 132: got EvNotifyTxCompletionResult 2025-12-23T18:57:45.045228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 132: satisfy waiter [1:845:2835] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 132 2025-12-23T18:57:45.049180Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-12-23T18:57:45.049462Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 306us result status StatusSuccess 2025-12-23T18:57:45.049996Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 Properties { Properties { key: "concurrent_query_limit" value: "20" } Properties { key: "query_cancel_after_seconds" value: "60" } Properties { key: "query_count_limit" value: "50" } } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:57:43.857546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:57:43.857627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.857665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:57:43.857695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:57:43.857720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:57:43.857752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:57:43.857804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.857882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:57:43.858574Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-12-23T18:57:43.858864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:57:43.951760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:57:43.951816Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:43.961628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:57:43.961797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:57:43.961954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:57:43.971426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:57:43.971607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:57:43.972092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:57:43.972263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:57:43.980011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.980140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:57:43.985644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:43.985700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.985821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:57:43.985883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:57:43.985976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:57:43.986798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.119273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.121178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.121340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.121433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.121473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.121535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.121594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.121631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.121683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.121740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.121793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.121861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.121934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.122018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.122087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard: 72057594046678944 2025-12-23T18:57:45.120074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2025-12-23T18:57:45.120166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T18:57:45.120741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:57:45.120818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:57:45.120846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-12-23T18:57:45.120880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-12-23T18:57:45.120928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-12-23T18:57:45.122282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:57:45.122344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:57:45.122361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-12-23T18:57:45.122379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 2 2025-12-23T18:57:45.122416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2025-12-23T18:57:45.122480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2025-12-23T18:57:45.123822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-12-23T18:57:45.123970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000041 
FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000041 2025-12-23T18:57:45.124976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-12-23T18:57:45.125814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2025-12-23T18:57:45.125948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:57:45.126005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000041, at schemeshard: 72057594046678944 2025-12-23T18:57:45.126127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 129:0 128 -> 240 2025-12-23T18:57:45.126261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-12-23T18:57:45.126323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2025-12-23T18:57:45.126797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Erasing txId 129 2025-12-23T18:57:45.128192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:45.128221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:57:45.128344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2025-12-23T18:57:45.128443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:45.128467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:902:2859], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-12-23T18:57:45.128495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:902:2859], at schemeshard: 72057594046678944, txId: 129, path id: 43 2025-12-23T18:57:45.128660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-12-23T18:57:45.128700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 129:0 ProgressState 2025-12-23T18:57:45.128803Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2025-12-23T18:57:45.128845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-12-23T18:57:45.128882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2025-12-23T18:57:45.128913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-12-23T18:57:45.128949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-12-23T18:57:45.128979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-12-23T18:57:45.129005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2025-12-23T18:57:45.129029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 129:0 2025-12-23T18:57:45.129085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2025-12-23T18:57:45.129108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-12-23T18:57:45.129142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-12-23T18:57:45.129191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 43], 3 2025-12-23T18:57:45.129825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:57:45.129888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:57:45.129909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-12-23T18:57:45.129939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-12-23T18:57:45.129984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-12-23T18:57:45.130650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:57:45.130719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 43 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-12-23T18:57:45.130764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-12-23T18:57:45.130782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 3 2025-12-23T18:57:45.130809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2025-12-23T18:57:45.130870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-12-23T18:57:45.133572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-12-23T18:57:45.134147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 16092, MsgBus: 13484 2025-12-23T18:57:31.699717Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129528898817231:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.704683Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:57:31.754494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fbc/r3tmp/tmpqHaOGa/pdisk_1.dat 2025-12-23T18:57:32.075562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:32.075663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:32.087947Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:32.107195Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:32.118332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16092, node 1 2025-12-23T18:57:32.188893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.188913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try 
to initialize from file: (empty maybe) 2025-12-23T18:57:32.188931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.189068Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.355414Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13484 2025-12-23T18:57:32.711306Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13484 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516252165 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.049634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:33.084847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.255024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.382302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:57:33.450502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:35.361552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546078688247:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.361724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.362160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546078688257:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.362241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.669961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.699996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.733314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.764945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.796731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.864886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.898257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.945632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:36.019930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129550373656426:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.020039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.020134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129550373656431:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.020283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129550373656433:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.020320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.024068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025 ... Notification cookie mismatch for subscription [2:7587129558997718124:2081] 1766516258798227 != 1766516258798230 2025-12-23T18:57:38.916972Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:38.917052Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:38.921464Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26075, node 2 2025-12-23T18:57:38.959309Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:38.959352Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:38.959364Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:38.959444Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:39.063598Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:14857 TClient is connected to server localhost:14857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516258962 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-12-23T18:57:39.379378Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:57:39.399015Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:39.444365Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:39.552951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:39.620995Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:39.805955Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:57:41.993132Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129571882621878:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.993231Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.993462Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129571882621888:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.993508Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:42.048148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:42.072946Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:42.095501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:42.120099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:42.144684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:42.175465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:42.205551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:42.267989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:42.341866Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129576177590055:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:42.342010Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:42.342343Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129576177590060:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:42.342354Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129576177590061:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:42.342407Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:42.346005Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:42.357194Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587129576177590064:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:57:42.457655Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587129576177590115:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:43.799037Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587129558997718150:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:43.799115Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 64460, MsgBus: 30385 2025-12-23T18:57:31.512623Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129526614226246:2062];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.512686Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fd9/r3tmp/tmp23Go0O/pdisk_1.dat 2025-12-23T18:57:31.779537Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:31.783598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:31.783702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:31.906103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:31.942865Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129526614226225:2081] 1766516251511102 != 1766516251511105 2025-12-23T18:57:31.947187Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64460, node 1 2025-12-23T18:57:32.006096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:57:32.185283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.185302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.185316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-12-23T18:57:32.185401Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.520380Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:30385 TClient is connected to server localhost:30385 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516252011 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.093713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:33.098388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:57:33.104014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:33.244029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.403976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.486677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:57:34.689740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129539499129987:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.689897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.690190Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129539499129997:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.690253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.062398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.095347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.125973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.158581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.188187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.223646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.262197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.319152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.393276Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129543794098164:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.393335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.393486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129543794098169:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.393535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129543794098170:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.393655Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.396718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sc ... 169 2025-12-23T18:57:38.477859Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:38.482883Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:38.483006Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:38.497742Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11926, node 2 2025-12-23T18:57:38.554720Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:38.554762Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:38.554771Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:38.554871Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:38.706175Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16443 TClient is connected to server localhost:16443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516258563 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:38.973444Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:38.991432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:57:39.047627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:39.176754Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:39.231519Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:39.407134Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:57:41.486528Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129570577413871:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.486614Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.487060Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129570577413881:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.487115Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.546312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.574851Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.648207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.674697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.701900Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.730029Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.759526Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.799446Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:41.868518Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129570577414749:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.868608Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.868664Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129570577414754:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.868785Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129570577414756:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.868842Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:41.871908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:41.881262Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587129570577414758:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:57:41.977372Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587129570577414809:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:43.399194Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587129557692510135:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:43.399281Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:128:2152] sender: [1:129:2058] recipient: [1:113:2143] 2025-12-23T18:57:43.855978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7936: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-12-23T18:57:43.856091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7964: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7850: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-12-23T18:57:43.856171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7866: OperationsProcessing config: using default configuration 2025-12-23T18:57:43.856272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-12-23T18:57:43.856305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7872: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-12-23T18:57:43.856363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7996: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-12-23T18:57:43.856460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-12-23T18:57:43.857347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8067: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 
2025-12-23T18:57:43.858891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-12-23T18:57:43.951760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:57:43.951825Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:43.961611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-12-23T18:57:43.961750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-12-23T18:57:43.962099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-12-23T18:57:43.971610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-12-23T18:57:43.971782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-12-23T18:57:43.972419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-12-23T18:57:43.972605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-12-23T18:57:43.979567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.979707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-12-23T18:57:43.985627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-12-23T18:57:43.985684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-12-23T18:57:43.985820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-12-23T18:57:43.985879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-12-23T18:57:43.986000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-12-23T18:57:43.986830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7110: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-12-23T18:57:44.162598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.163777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { 
Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.163963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.164074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.164164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.164250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.164351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.164432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.164531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.164639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.164716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.164791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.164908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.165009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-12-23T18:57:44.165121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
is done id#101:0 progress is 3/3 2025-12-23T18:57:45.991057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-12-23T18:57:45.991083Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2025-12-23T18:57:45.991105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-12-23T18:57:45.991129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: false 2025-12-23T18:57:45.991158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-12-23T18:57:45.991208Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2025-12-23T18:57:45.991240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:0 2025-12-23T18:57:45.991307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2025-12-23T18:57:45.991345Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2025-12-23T18:57:45.991368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:1 2025-12-23T18:57:45.991398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2025-12-23T18:57:45.991419Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2025-12-23T18:57:45.991439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 101:2 2025-12-23T18:57:45.991463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2025-12-23T18:57:45.991488Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 4, subscribers: 0 2025-12-23T18:57:45.991526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-12-23T18:57:45.991556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 6 2025-12-23T18:57:45.991577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 5 2025-12-23T18:57:45.991619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2025-12-23T18:57:45.993752Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:57:45.993840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:57:45.993874Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:57:45.993914Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-12-23T18:57:45.993975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-12-23T18:57:45.994683Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:57:45.994774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:57:45.994802Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:57:45.994828Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2025-12-23T18:57:45.994856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-12-23T18:57:45.996404Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:57:45.996476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:57:45.996502Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:57:45.996528Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 5 2025-12-23T18:57:45.996563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2025-12-23T18:57:45.997164Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:57:45.997233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-12-23T18:57:45.997258Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-12-23T18:57:45.997284Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2025-12-23T18:57:45.997311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2025-12-23T18:57:45.997370Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-12-23T18:57:45.999747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:57:45.999990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:57:46.000216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-12-23T18:57:46.001073Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:57:46.001322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-12-23T18:57:46.001365Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-12-23T18:57:46.001738Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-12-23T18:57:46.001844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-12-23T18:57:46.001881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:721:2711] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-12-23T18:57:46.005156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpAlterResourcePool CreateResourcePool { Name: "MyResourcePool" Properties { Properties { key: "concurrent_query_limit" value: "20" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-12-23T18:57:46.005411Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_resource_pool.cpp:123: [72057594046678944] TAlterResourcePool Propose: opId# 102:0, path# /MyRoot/.metadata/workload_manager/pools/MyResourcePool 2025-12-23T18:57:46.005596Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/MyResourcePool', error: path hasn't been resolved, nearest resolved path: '/MyRoot/.metadata/workload_manager/pools' (id: [OwnerId: 72057594046678944, LocalPathId: 40]), at schemeshard: 72057594046678944 2025-12-23T18:57:46.007964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-12-23T18:57:46.008219Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/MyResourcePool', error: path hasn't been resolved, nearest resolved path: '/MyRoot/.metadata/workload_manager/pools' (id: [OwnerId: 72057594046678944, LocalPathId: 40]), operation: ALTER RESOURCE POOL, path: MyResourcePool TestModificationResult got TxId: 102, wait until txId: 102 |98.4%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |98.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] >> ColumnShardTiers::DSConfigsWithQueryServiceDdl |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] |98.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json 
... results_accumulator.log} |98.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} |98.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2025-12-23T18:56:59.510230Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:59.532032Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:59.532222Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:59.537926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:59.538128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:59.538281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:59.538372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:59.538449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:59.538523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:59.538609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 
2025-12-23T18:56:59.538683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:59.538782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:59.538844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:59.538901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:59.538973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:59.539071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:59.560837Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:59.561005Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:59.561039Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:59.561161Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:59.561283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:59.561334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:59.561364Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:59.561421Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:59.561459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:59.561487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:59.561508Z node 1 
:TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:59.561639Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:59.561679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:59.561704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:59.561726Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:59.561811Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:59.561890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:59.561923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:59.561940Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:59.561975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:59.562006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:56:59.562030Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:59.562094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:59.562140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:59.562180Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:59.562358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:59.562406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:59.562426Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:59.562513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:59.562544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:59.562565Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:59.562604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:59.562637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:59.562656Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:59.562684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:59.562705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:59.562726Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:56:59.562826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:59.562851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
d_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2025-12-23T18:57:44.848434Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=848; 2025-12-23T18:57:44.848480Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=49110; 2025-12-23T18:57:44.848524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=49204; 2025-12-23T18:57:44.848598Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=17; 2025-12-23T18:57:44.848888Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=247; 2025-12-23T18:57:44.848919Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=49932; 2025-12-23T18:57:44.849047Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=83; 2025-12-23T18:57:44.849149Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=63; 2025-12-23T18:57:44.849510Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=322; 2025-12-23T18:57:44.849721Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=177; 2025-12-23T18:57:44.858993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=9215; 2025-12-23T18:57:44.867823Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=8722; 2025-12-23T18:57:44.867923Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2025-12-23T18:57:44.867971Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-12-23T18:57:44.868006Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-12-23T18:57:44.868069Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=34; 2025-12-23T18:57:44.868109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-12-23T18:57:44.868178Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=42; 2025-12-23T18:57:44.868213Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-12-23T18:57:44.868268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2025-12-23T18:57:44.868364Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2025-12-23T18:57:44.868434Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=39; 2025-12-23T18:57:44.868465Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=76074; 2025-12-23T18:57:44.868578Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:44.868671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:44.868713Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:44.868768Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:44.868804Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:44.868974Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:44.869040Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:44.869074Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2025-12-23T18:57:44.869120Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:44.869179Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515923589;tx_id=18446744073709551615;;current_snapshot_ts=1766516221046; 2025-12-23T18:57:44.869214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:44.869252Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:44.869285Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:44.869358Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:44.869512Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.042000s; 2025-12-23T18:57:44.871904Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:44.872171Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:44.872213Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:44.872269Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:44.872311Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:44.872381Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515923589;tx_id=18446744073709551615;;current_snapshot_ts=1766516221046; 2025-12-23T18:57:44.872418Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:44.872454Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:44.872486Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:44.872547Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2025-12-23T18:57:44.872587Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:44.873055Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.036000s; 2025-12-23T18:57:44.873089Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> ColumnShardTiers::DSConfigsStub >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10636, MsgBus: 22927 test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fb8/r3tmp/tmpfUnD4A/pdisk_1.dat 2025-12-23T18:57:31.672801Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129528242598740:2157];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.672952Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:57:32.045792Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:32.048738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:32.048826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:32.070558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:32.151915Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:32.153769Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129528242598621:2081] 1766516251603776 != 1766516251603779 TServer::EnableGrpc on GrpcPort 10636, node 1 2025-12-23T18:57:32.206990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.207014Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.207030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.207112Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.243522Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22927 2025-12-23T18:57:32.707722Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516252165 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.077917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:33.083463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:57:33.101531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:57:33.239878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.398812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.465718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:34.662879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129541127502375:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.663087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.663499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129541127502385:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.663585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.990016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.017077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.044084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.073558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.101429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.135072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.166542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.213906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.314830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129545422470550:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.314903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.315334Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129545422470555:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.315360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129545422470556:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.315416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.322565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... figuration 2025-12-23T18:57:40.419963Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6586 TClient is connected to server localhost:6586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516260411 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:40.815542Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:40.823920Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:40.881086Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:41.000685Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:41.056904Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:57:41.245817Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:57:42.790992Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129575887306119:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:42.791084Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:42.791370Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129575887306129:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:42.791481Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:42.848887Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:42.877862Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:42.906449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:42.936750Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:42.967131Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.002006Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.039527Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.112899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.190474Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129580182274290:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.190564Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129580182274295:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.190644Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.190969Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129580182274298:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.191035Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.194468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:43.206277Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587129580182274297:2484], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:57:43.287098Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587129580182274352:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:45.039862Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:45.071111Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:45.108244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:45.239424Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587129567297369693:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:45.239496Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] Test command err: 2025-12-23T18:57:10.404980Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:10.428549Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:10.428709Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:10.434234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:10.434429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:10.434628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:10.434734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:10.434833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:10.434899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:10.434981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:10.435045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:10.435138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:10.435203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:10.435292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:10.435411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:10.435525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:10.456783Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:10.456914Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:10.456954Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:10.457095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:10.457209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:10.457261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:10.457308Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:10.457372Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:10.457410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:10.457436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:10.457456Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:10.457587Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:10.457639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:10.457667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:10.457684Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:10.457740Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:10.457769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:10.457795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:10.457811Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:10.457845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:10.457866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:10.457883Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:10.457932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:10.457968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:10.457992Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:10.458153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:10.458213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:10.458236Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:10.458321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:10.458347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:10.458365Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:10.458393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:10.458418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:10.458437Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:10.458468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:10.458488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:10.458504Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:10.458573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:10.458607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... oad_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=8; 2025-12-23T18:57:46.146790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=593; 2025-12-23T18:57:46.146837Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=37464; 2025-12-23T18:57:46.146872Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=37556; 2025-12-23T18:57:46.146915Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2025-12-23T18:57:46.147139Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=197; 2025-12-23T18:57:46.147165Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=38095; 2025-12-23T18:57:46.147257Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=59; 2025-12-23T18:57:46.147329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=44; 2025-12-23T18:57:46.147555Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=196; 2025-12-23T18:57:46.147752Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=164; 2025-12-23T18:57:46.156649Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8836; 2025-12-23T18:57:46.165705Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=8958; 2025-12-23T18:57:46.165791Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-12-23T18:57:46.165850Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-12-23T18:57:46.165880Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-12-23T18:57:46.165934Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=31; 2025-12-23T18:57:46.165961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2025-12-23T18:57:46.166029Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=39; 2025-12-23T18:57:46.166054Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-12-23T18:57:46.166097Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=23; 2025-12-23T18:57:46.166155Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=34; 2025-12-23T18:57:46.166236Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=52; 2025-12-23T18:57:46.166263Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=62523; 2025-12-23T18:57:46.166361Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:46.166446Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:46.166483Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:46.166542Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:46.166569Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:46.166678Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:46.166723Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:46.166763Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:46.166798Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:46.166852Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515932864;tx_id=18446744073709551615;;current_snapshot_ts=1766516231887; 2025-12-23T18:57:46.166881Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:46.166910Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:46.166933Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:46.167001Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:46.167127Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.021000s; 2025-12-23T18:57:46.170098Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:46.170447Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:46.170493Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:46.170542Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:46.170572Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:46.170622Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515932864;tx_id=18446744073709551615;;current_snapshot_ts=1766516231887; 2025-12-23T18:57:46.170675Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:46.170709Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:46.170734Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:46.170804Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:57:46.170836Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:46.171502Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.191000s; 2025-12-23T18:57:46.171533Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3186:5180];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] Test command err: 2025-12-23T18:57:12.417566Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:12.452290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 
2025-12-23T18:57:12.452560Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:12.460558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:12.460886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:12.461153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:12.461334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:12.461472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:12.461588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:12.461719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:12.461828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:12.461960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:12.462110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:12.462235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:12.462385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:12.462511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:12.493896Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:12.494104Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:12.494189Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:12.494403Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:12.494585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:12.494689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:12.494763Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:12.494878Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:12.494984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:12.495038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:12.495079Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:12.495304Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:12.495392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:12.495455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:12.495498Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:12.495611Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:12.495678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:12.495726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2025-12-23T18:57:12.495758Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:12.495827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:12.495879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:12.495914Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:12.496001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:12.496069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:12.496120Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:12.496383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:12.496450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:12.496485Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:12.496629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:12.496677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:12.496717Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:12.496770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:12.496818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:12.496852Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2025-12-23T18:57:12.496904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:12.496944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:12.496982Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:12.497153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:12.497213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... d_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=7; 2025-12-23T18:57:46.359859Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=666; 2025-12-23T18:57:46.359905Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=41095; 2025-12-23T18:57:46.359938Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=41201; 2025-12-23T18:57:46.359982Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2025-12-23T18:57:46.360290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=268; 2025-12-23T18:57:46.360329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=41923; 2025-12-23T18:57:46.360443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=75; 2025-12-23T18:57:46.360543Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=63; 2025-12-23T18:57:46.360814Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=232; 2025-12-23T18:57:46.361030Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=186; 2025-12-23T18:57:46.371274Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=10189; 2025-12-23T18:57:46.381389Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10020; 2025-12-23T18:57:46.381475Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-12-23T18:57:46.381523Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2025-12-23T18:57:46.381570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-12-23T18:57:46.381649Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2025-12-23T18:57:46.381697Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:57:46.381771Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=43; 2025-12-23T18:57:46.381802Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2025-12-23T18:57:46.381844Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=21; 2025-12-23T18:57:46.381938Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=48; 2025-12-23T18:57:46.381993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=31; 2025-12-23T18:57:46.382036Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=69331; 2025-12-23T18:57:46.382141Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:46.382231Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:46.382272Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:46.382320Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:46.382357Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:46.382515Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:46.382559Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:46.382585Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:46.382613Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:46.382660Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515934877;tx_id=18446744073709551615;;current_snapshot_ts=1766516233890; 2025-12-23T18:57:46.382690Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:46.382718Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:46.382760Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:46.382818Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:46.382972Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.034000s; 2025-12-23T18:57:46.386389Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:46.386591Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:46.386625Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:46.386690Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:46.386732Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:46.386804Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515934877;tx_id=18446744073709551615;;current_snapshot_ts=1766516233890; 2025-12-23T18:57:46.386852Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:46.386885Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:46.386923Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:46.386998Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:57:46.387032Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:46.387788Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.142000s; 2025-12-23T18:57:46.387848Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3209:5203];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17498, MsgBus: 32573 2025-12-23T18:57:31.435649Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129527237789477:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.435724Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fd0/r3tmp/tmp2DlpET/pdisk_1.dat 2025-12-23T18:57:31.805395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:31.814758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:31.814889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:31.873203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17498, node 1 2025-12-23T18:57:31.960055Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:32.095644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:57:32.183415Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.183439Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.183465Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.183553Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.444619Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:32573 TClient is connected to server localhost:32573 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516251976 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.107679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:57:33.113422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:57:33.123776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.261184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.412067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.498926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:34.976487Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129540122693197:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.976652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.977059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129540122693207:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.977163Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.267596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.302373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.372052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.408795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.441261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.499127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.530361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.575414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.649960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129544417661379:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.650048Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.650099Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129544417661384:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.650303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129544417661386:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.650439Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.655336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:35.666533Z node 1 :KQP_WORKLOAD ... guration 2025-12-23T18:57:41.097736Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25648 TClient is connected to server localhost:25648 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516261097 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:41.487991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:41.505187Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:41.553145Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:41.700718Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:57:41.767776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:41.951014Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:57:43.633834Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129579655519784:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.633939Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.634405Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129579655519793:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.634473Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.691260Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.719652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.776989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.804797Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.831672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.861505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.894419Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.942660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:44.019759Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129583950487959:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:44.019875Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:44.020018Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129583950487964:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:44.020084Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129583950487965:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:44.020128Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:44.023429Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:44.033530Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587129583950487968:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:57:44.090457Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587129583950488019:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:45.551303Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:45.632291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:45.663415Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:45.927142Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587129566770616060:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:45.927213Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6098, MsgBus: 2207 2025-12-23T18:57:31.436316Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129528397703330:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.436429Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fcb/r3tmp/tmplmnl90/pdisk_1.dat 2025-12-23T18:57:31.765457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:31.765979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-12-23T18:57:31.838160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:31.864546Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129528397703302:2081] 1766516251429725 != 1766516251429728 2025-12-23T18:57:31.867260Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:31.876204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6098, node 1 2025-12-23T18:57:32.075193Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:57:32.184019Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.184045Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.184058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.184191Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.445609Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:2207 TClient is connected to server localhost:2207 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516251927 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.090657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:33.109212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:57:33.269671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.435147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:33.509512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:34.652832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129541282607064:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.653015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.653353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129541282607074:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.653406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.990066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.019936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.047661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.076292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.105648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.137347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.204774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.252339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.338242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129545577575240:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.338326Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.338592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129545577575246:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.338930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129545577575245:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.338990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.342439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:35.354962Z node 1 :KQP ... figuration TClient is connected to server localhost:4858 2025-12-23T18:57:41.023042Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4858 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516260880 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:41.238645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:41.246556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:41.294012Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:41.399467Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:57:41.453691Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:57:41.739354Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:57:43.646298Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129580878628879:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.646396Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.646667Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129580878628889:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.646726Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:43.705808Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.736024Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.768483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.797481Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.829560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.863695Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.901169Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:43.954120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:44.029527Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129585173597054:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:44.029616Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:44.029627Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129585173597059:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:44.029804Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587129585173597061:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:44.029887Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:44.033491Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:44.044654Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587129585173597062:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:57:44.111025Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587129585173597114:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:45.444498Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:45.480091Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:45.518679Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:45.734005Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587129567993725159:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:45.734098Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] >> S3SettingsConversion::Basic [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage >> TKqpScanData::UnboxedValueSize >> TKqpScheduler::LeftFairShareIsDistributed [GOOD] >> TKqpScheduler::MultipleDatabasesPoolsQueries [GOOD] >> TKqpScheduler::ZeroQueries [GOOD] >> TKqpScheduler::ZeroWeightDatabasePoolQuery [GOOD] >> TKqpScheduler::WeightedPools [GOOD] >> TKqpScheduler::WeightedQueries [GOOD] >> TKqpScheduler::QueriesWithFairShareOverlimit+AllowOverlimit [GOOD] >> TKqpScheduler::QueriesWithFairShareOverlimit-AllowOverlimit [GOOD] >> TKqpScanData::ArrowToUnboxedValueConverter >> TKqpScheduler::ZeroLimits [GOOD] >> TKqpScheduler::ZeroLimitDbWithNonZeroPools [GOOD] >> TKqpScanFetcher::ScanDelayedRetry >> TKqpScheduler::DemandIsCutOffByLimit [GOOD] >> TKqpScheduler::AddUpdateQueries [GOOD] >> TKqpScheduler::DeleteQueries [GOOD] >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> TKqpScanData::EmptyColumns [GOOD] >> TKqpScheduler::SingleDatabasePoolQueryStructure [GOOD] >> TKqpScheduler::WeightedDatabase [GOOD] >> TKqpScanData::UnboxedValueSize [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> TKqpScanData::FailOnUnsupportedPgType [GOOD] >> TKqpScanFetcher::ScanDelayedRetry [GOOD] >> TKqpScheduler::AddUpdatePools [GOOD] >> TKqpScheduler::AddUpdateDeleteNonExistent [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::DeleteQueries [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::WeightedQueries [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::QueriesWithFairShareOverlimit-AllowOverlimit [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::ZeroWeightDatabasePoolQuery [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::MultipleDatabasesPoolsQueries [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> 
TKqpScheduler::ZeroLimitDbWithNonZeroPools [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::WeightedDatabase [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::AddUpdateDeleteNonExistent [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=535;kqp_scan_fetcher_actor.cpp:50 :META:Reads { ShardId: 1001001 KeyRanges { } } 2025-12-23T18:57:54.921936Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [1:7:2054]. Got EvDeliveryProblem, TabletId: 1001001, NotDelivered: 0, Starting 2025-12-23T18:57:54.922869Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [1:7:2054]. TKqpScanFetcherActor: broken pipe with tablet 1001001, restarting scan from last received key , attempt #1 (total 1) schedule after 0.000000s 2025-12-23T18:57:54.933414Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [1:7:2054]. Got EvDeliveryProblem, TabletId: 1001001, NotDelivered: 0, Starting 2025-12-23T18:57:54.933510Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [1:7:2054]. TKqpScanFetcherActor: broken pipe with tablet 1001001, restarting scan from last received key , attempt #2 (total 2) schedule after 0.250000s |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |98.5%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.5%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> YdbProxy::RemoveDirectory >> YdbProxy::CopyTable >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> YdbProxy::ReadTopic >> PartitionEndWatcher::EmptyPartition [GOOD] >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> YdbProxy::DropTable >> YdbProxy::DescribePath >> YdbProxy::ListDirectory >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout >> YdbProxy::CreateTable >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> YdbProxy::MakeDirectory >> YdbProxy::CreateTopic >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest 
[GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] >> 
TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-12-23T18:57:57.916866Z 
node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:57.925358Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:57.926505Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-12-23T18:57:57.926587Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:57.926658Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-12-23T18:57:57.928675Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:260:2253], now have 1 active actors on pipe 2025-12-23T18:57:57.928781Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:57.954826Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:57.955010Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:57.955709Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:57.955898Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:57:57.956187Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:57.956519Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:268:2224] 2025-12-23T18:57:57.958683Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:57.958754Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-12-23T18:57:57.958821Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2224] 2025-12-23T18:57:57.958871Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:57.958928Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:57.959084Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:57:57.959116Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:57:57.960382Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:57.960420Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:57.960454Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:57.960486Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2025-12-23T18:57:57.961771Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:57.962967Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:57:57.963470Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:271:2258], now have 1 active actors on pipe 2025-12-23T18:57:58.013305Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.016555Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.016820Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-12-23T18:57:58.016875Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.016924Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928138] doesn't have tx writes info 2025-12-23T18:57:58.017546Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [2:400:2355], now have 1 active actors on pipe 2025-12-23T18:57:58.017736Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:58.020011Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-12-23T18:57:58.020132Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.020836Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928138] Config applied version 2 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-12-23T18:57:58.020988Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:57:58.021291Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:58.021529Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [2:408:2326] 2025-12-23T18:57:58.023452Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:58.023504Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-12-23T18:57:58.023547Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:408:2326] 2025-12-23T18:57:58.023598Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:58.023649Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928138][Partition][1][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:58.023687Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928138][Partition][1][StateIdle] Process pending events. 
Count 0 2025-12-23T18:57:58.023716Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928138][Partition][1][StateIdle] Process user action and tx events 2025-12-23T18:57:58.023747Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928138][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.023775Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928138][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:58.023811Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928138][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.023840Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928138][Partition][1][StateIdle] Try persist 2025-12-23T18:57:58.023912Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:58.024118Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.024566Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [2:411:2360], now have 1 active actors on pipe 2025-12-23T18:57:58.041560Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.044867Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.045151Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-12-23T18:57:58.045205Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.045259Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:57:58.046008Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [2:460:2396], now have 1 active actors on pipe 2025-12-23T18:57:58.046066Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:58.048262Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:57:58.048366Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.049156Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928139] Config applied version 3 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:57:58.049291Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2025-12-23T18:57:58.049590Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:58.049815Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:468:2367] 2025-12-23T18:57:58.051835Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:58.051889Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-12-23T18:57:58.051927Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:468:2367] 2025-12-23T18:57:58.051977Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:58.052033Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:58.052079Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-12-23T18:57:58.052114Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:57:58.052169Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.052204Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:58.052239Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.052271Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:57:58.052354Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:57:58.052565Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.053036Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [2:471:2401], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2025-12-23T18:57:58.061200Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:478:2404], now have 1 active actors on pipe 2025-12-23T18:57:58.061806Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [2:481:2405], now have 1 active actors on pipe 2025-12-23T18:57:58.062063Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [2:482:2405], now have 1 active actors on pipe 2025-12-23T18:57:58.062734Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928037] server disconnected, pipe [2:478:2404] destroyed 2025-12-23T18:57:58.063343Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928138] server disconnected, pipe [2:481:2405] destroyed 2025-12-23T18:57:58.063437Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928139] server disconnected, pipe [2:482:2405] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] Test command err: 2025-12-23T18:56:42.935242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:56:42.956153Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:56:42.956319Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:56:42.961467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:56:42.961654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:56:42.961804Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:56:42.961937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:56:42.962014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:56:42.962106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:56:42.962171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:56:42.962249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:56:42.962311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:56:42.962388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:56:42.962453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:56:42.962531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:56:42.962609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:56:42.982578Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:56:42.982711Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:56:42.982762Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:56:42.982878Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:42.982988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:56:42.983034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:56:42.983063Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:56:42.983148Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:56:42.983196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:56:42.983228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:56:42.983268Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:56:42.983408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:56:42.983449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:56:42.983472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:56:42.983489Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:56:42.983561Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:56:42.983592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:56:42.983639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:56:42.983660Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:56:42.983694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:56:42.983716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-12-23T18:56:42.983736Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:56:42.983780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:56:42.983813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:56:42.983840Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:56:42.983995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:56:42.984072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:56:42.984102Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:56:42.984179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:56:42.984207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:56:42.984222Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:56:42.984252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:56:42.984273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:56:42.984289Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:56:42.984329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:56:42.984360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:56:42.984380Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2025-12-23T18:56:42.984472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:56:42.984507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... E:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=3624; 2025-12-23T18:57:55.512866Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=7; 2025-12-23T18:57:55.513599Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=694; 2025-12-23T18:57:55.513655Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=4570; 2025-12-23T18:57:55.513692Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=4680; 2025-12-23T18:57:55.513743Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2025-12-23T18:57:55.513809Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=36; 2025-12-23T18:57:55.513836Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=5189; 2025-12-23T18:57:55.513960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=72; 2025-12-23T18:57:55.514070Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=73; 2025-12-23T18:57:55.514192Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=91; 2025-12-23T18:57:55.514305Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=87; 2025-12-23T18:57:55.514690Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=350; 2025-12-23T18:57:55.515371Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=625; 2025-12-23T18:57:55.515429Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-12-23T18:57:55.515470Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2025-12-23T18:57:55.515500Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-12-23T18:57:55.515557Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=33; 2025-12-23T18:57:55.515586Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-12-23T18:57:55.515646Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=39; 2025-12-23T18:57:55.515673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-12-23T18:57:55.515720Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2025-12-23T18:57:55.515800Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=37; 2025-12-23T18:57:55.515870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=44; 2025-12-23T18:57:55.515898Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=15226; 2025-12-23T18:57:55.516011Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=126218384;raw_bytes=174224032;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:55.516103Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:55.516150Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:55.516201Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:55.516235Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:55.516328Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:55.516385Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:55.516425Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:55.516482Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:57:55.516540Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:55.516587Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:55.516619Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:55.516687Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:55.516843Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.087000s; 2025-12-23T18:57:55.518984Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:55.519216Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:55.519260Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:55.519335Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:55.519370Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:55.519410Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:57:55.519488Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:55.519539Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:55.519595Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:55.519667Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:57:55.519711Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:55.520038Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.163000s; 2025-12-23T18:57:55.520075Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2025-12-23T18:57:11.471617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:11.506196Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:11.506506Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:11.514720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:11.515034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:11.515282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:11.515417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:11.515547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:11.515675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:11.515808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:11.515945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:11.516075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:11.516192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:11.516324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:11.516443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:11.516587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:11.548623Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:11.548845Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:11.548908Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:11.549096Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:11.549259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:11.549352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:11.549407Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:11.549503Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:11.549570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:11.549624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:11.549661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:11.549886Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:11.549967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:11.550020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:11.550055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:11.550157Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:11.550224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:11.550299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:11.550335Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:11.550401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:11.550442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:11.550474Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:11.550548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:11.550600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:11.550640Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:11.550957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:11.551036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:11.551075Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:11.551225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:11.551275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:11.551306Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:11.551358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:11.551407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:11.551455Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:11.551518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:11.551558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:11.551591Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:11.551744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:11.551806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
d_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2025-12-23T18:57:55.455507Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=928; 2025-12-23T18:57:55.455553Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=42601; 2025-12-23T18:57:55.455589Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=42694; 2025-12-23T18:57:55.455643Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2025-12-23T18:57:55.455946Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=266; 2025-12-23T18:57:55.455975Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=43453; 2025-12-23T18:57:55.456100Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=85; 2025-12-23T18:57:55.456188Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=54; 2025-12-23T18:57:55.456482Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=258; 2025-12-23T18:57:55.456692Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=176; 2025-12-23T18:57:55.465682Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8932; 2025-12-23T18:57:55.474848Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9071; 2025-12-23T18:57:55.474980Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2025-12-23T18:57:55.475037Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-12-23T18:57:55.475075Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:57:55.475140Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=34; 2025-12-23T18:57:55.475179Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-12-23T18:57:55.475252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=42; 2025-12-23T18:57:55.475287Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-12-23T18:57:55.475341Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=26; 2025-12-23T18:57:55.475414Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=41; 2025-12-23T18:57:55.475487Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=42; 2025-12-23T18:57:55.475517Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=69529; 2025-12-23T18:57:55.475623Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:55.475719Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:55.475763Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:55.475819Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:55.475859Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:55.476051Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:55.476102Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:55.476131Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2025-12-23T18:57:55.476180Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:55.476234Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515935539;tx_id=18446744073709551615;;current_snapshot_ts=1766516232995; 2025-12-23T18:57:55.476266Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:55.476300Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:55.476327Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:55.476396Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:55.476549Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.188000s; 2025-12-23T18:57:55.478935Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:55.479143Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:55.479186Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:55.479240Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:55.479277Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:55.479340Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515935539;tx_id=18446744073709551615;;current_snapshot_ts=1766516232995; 2025-12-23T18:57:55.479387Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:55.479433Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:55.479466Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:55.479536Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2025-12-23T18:57:55.479575Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:55.480053Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.181000s; 2025-12-23T18:57:55.480092Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-12-23T18:57:58.022722Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.025731Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.026032Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-12-23T18:57:58.026094Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.026140Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-12-23T18:57:58.026712Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:261:2254], now have 1 active actors on pipe 2025-12-23T18:57:58.026785Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:58.041860Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:58.042038Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.042787Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:58.043035Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:57:58.043334Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:58.043736Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2225] 2025-12-23T18:57:58.045595Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:58.045638Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-12-23T18:57:58.045695Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2225] 2025-12-23T18:57:58.045736Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:58.045788Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:58.045826Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:57:58.045850Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:57:58.045884Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.045913Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:58.045945Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.045971Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2025-12-23T18:57:58.046068Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:58.046232Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.046709Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:272:2259], now have 1 active actors on pipe 2025-12-23T18:57:58.093577Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.096908Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.097268Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-12-23T18:57:58.097337Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.097403Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:57:58.098181Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [2:402:2357], now have 1 active actors on pipe 2025-12-23T18:57:58.098304Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:58.100565Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:57:58.100679Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.101575Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928139] Config applied version 2 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:57:58.101724Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2025-12-23T18:57:58.102048Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:58.102286Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:410:2328] 2025-12-23T18:57:58.104599Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:58.104681Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-12-23T18:57:58.104755Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:410:2328] 2025-12-23T18:57:58.104821Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:58.104894Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:58.104934Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-12-23T18:57:58.104976Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:57:58.105029Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.105066Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:58.105107Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.105145Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:57:58.105243Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:57:58.105456Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.105983Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [2:413:2362], now have 1 active actors on pipe 2025-12-23T18:57:58.107184Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:419:2365], now have 1 active actors on pipe 2025-12-23T18:57:58.107383Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [2:421:2366], now have 1 active actors on pipe 2025-12-23T18:57:58.107557Z node 2 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:57:58.107828Z node 2 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:57:58.108087Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928037] server disconnected, pipe [2:419:2365] destroyed 2025-12-23T18:57:58.109008Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928139] server disconnected, pipe [2:421:2366] destroyed 2025-12-23T18:57:58.611536Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.614709Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.615036Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-12-23T18:57:58.615094Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.615151Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-12 ... titionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:57:58.730187Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:57:58.741271Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [3:545:2452], now have 1 active actors on pipe 2025-12-23T18:57:58.765368Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.768876Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.770096Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.770163Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:57:58.770295Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:57:58.770633Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:58.770902Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:601:2455] 2025-12-23T18:57:58.772919Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-12-23T18:57:58.774273Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-12-23T18:57:58.774579Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-12-23T18:57:58.774695Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2025-12-23T18:57:58.775068Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-12-23T18:57:58.775147Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2025-12-23T18:57:58.775399Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-12-23T18:57:58.775470Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-12-23T18:57:58.775512Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:57:58.775547Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2025-12-23T18:57:58.775653Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2025-12-23T18:57:58.775886Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2025-12-23T18:57:58.775936Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:58.775978Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-12-23T18:57:58.776044Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:601:2455] 2025-12-23T18:57:58.776098Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:58.776160Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:58.776219Z node 3 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-12-23T18:57:58.776260Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:57:58.776295Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.776335Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:58.776372Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.776409Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:57:58.776498Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:58.776695Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.777373Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928138] server disconnected, pipe [3:536:2445] destroyed 2025-12-23T18:57:58.777452Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928037] server disconnected, pipe [3:535:2444] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 79 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 79 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 
0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 93 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 93 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-12-23T18:57:57.925571Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:57.929096Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:57.929425Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-12-23T18:57:57.929500Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:57.929562Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-12-23T18:57:57.930456Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:261:2254], now have 1 active actors on pipe 2025-12-23T18:57:57.930533Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:57.956333Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:57.956534Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:57.957551Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:57.957781Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:57:57.958182Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:57.958627Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2225] 2025-12-23T18:57:57.961384Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:57.961447Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-12-23T18:57:57.961517Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2225] 2025-12-23T18:57:57.961597Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:57.961673Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:57.961721Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:57:57.961757Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:57:57.961809Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:57.961846Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:57.961898Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:57.961940Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2025-12-23T18:57:57.962092Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:57.962991Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:57:57.963477Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:272:2259], now have 1 active actors on pipe 2025-12-23T18:57:58.016168Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.019011Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.019289Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-12-23T18:57:58.019343Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.019392Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928137] doesn't have tx writes info 2025-12-23T18:57:58.020061Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928137] server connected, pipe [2:401:2356], now have 1 active actors on pipe 2025-12-23T18:57:58.020144Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:58.022150Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:58.022244Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.022888Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928137] Config applied version 2 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:58.023018Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2025-12-23T18:57:58.023255Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:58.023432Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:409:2327] 2025-12-23T18:57:58.024878Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:58.024937Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-12-23T18:57:58.025012Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:409:2327] 2025-12-23T18:57:58.025082Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:58.025145Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:58.025185Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:57:58.025220Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:57:58.025260Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.025294Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:58.025329Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.025361Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2025-12-23T18:57:58.025452Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:58.025677Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.026033Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928137] server connected, pipe [2:412:2361], now have 1 active actors on pipe 2025-12-23T18:57:58.042062Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.045201Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.045517Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-12-23T18:57:58.045600Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.045677Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928138] doesn't have tx writes info 2025-12-23T18:57:58.046431Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [2:461:2397], now have 1 active actors on pipe 2025-12-23T18:57:58.046586Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:58.048867Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-12-23T18:57:58.049005Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.049833Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928138] Config applied version 3 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { ... ][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:58.843327Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.843362Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:57:58.843450Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:57:58.843674Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.844135Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [3:528:2440], now have 1 active actors on pipe 2025-12-23T18:57:58.845274Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [3:534:2443], now have 1 active actors on pipe 2025-12-23T18:57:58.845441Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [3:535:2444], now have 1 active actors on pipe 2025-12-23T18:57:58.845641Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:57:58.845926Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928138][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:57:58.846012Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [3:536:2444], now have 1 active actors on pipe 2025-12-23T18:57:58.846188Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-12-23T18:57:58.857228Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [3:544:2451], now have 1 active actors on pipe 2025-12-23T18:57:58.882183Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.884169Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.885327Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.885385Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:57:58.885508Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:57:58.885802Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:58.886106Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:600:2454] 2025-12-23T18:57:58.888186Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-12-23T18:57:58.889393Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-12-23T18:57:58.889685Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-12-23T18:57:58.889822Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2025-12-23T18:57:58.890108Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-12-23T18:57:58.890187Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2025-12-23T18:57:58.890386Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-12-23T18:57:58.890431Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-12-23T18:57:58.890479Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:57:58.890518Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2025-12-23T18:57:58.890619Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2025-12-23T18:57:58.890834Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2025-12-23T18:57:58.890881Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:58.890931Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-12-23T18:57:58.891010Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:600:2454] 2025-12-23T18:57:58.891067Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:58.891122Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:58.891163Z node 3 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-12-23T18:57:58.891201Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:57:58.891240Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.891276Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:58.891317Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.891354Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:57:58.891447Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:58.891624Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.892289Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928138] server disconnected, pipe [3:535:2444] destroyed 2025-12-23T18:57:58.892363Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928037] server disconnected, pipe [3:534:2443] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 
GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 78 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 78 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } } } |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-12-23T18:57:57.944552Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:57.948613Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:57.948986Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-12-23T18:57:57.949056Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:57.949141Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-12-23T18:57:57.950010Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:261:2254], now have 1 active actors on pipe 2025-12-23T18:57:57.950082Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:57.973297Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:57.973512Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:57.974458Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:57.974695Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:57:57.975165Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:57.975636Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2225] 2025-12-23T18:57:57.978329Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:57.978400Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-12-23T18:57:57.978480Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2225] 2025-12-23T18:57:57.978572Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:57.978657Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:57.978714Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:57:57.978781Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:57:57.978830Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:57.978871Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:57.978912Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:57.978970Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2025-12-23T18:57:57.979124Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:57.979421Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:57:57.979919Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:272:2259], now have 1 active actors on pipe 2025-12-23T18:57:58.037360Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.040990Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.041268Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-12-23T18:57:58.041318Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.041366Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:57:58.042015Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [2:400:2355], now have 1 active actors on pipe 2025-12-23T18:57:58.042072Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:58.044517Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:57:58.044650Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.045540Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928139] Config applied version 2 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:57:58.045701Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2025-12-23T18:57:58.046051Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:58.046318Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:408:2326] 2025-12-23T18:57:58.048725Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:58.048794Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-12-23T18:57:58.048873Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:408:2326] 2025-12-23T18:57:58.048940Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:58.049013Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:58.049058Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-12-23T18:57:58.049097Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:57:58.049137Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.049174Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:58.049220Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.049268Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:57:58.049358Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:58.049645Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.050154Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [2:411:2360], now have 1 active actors on pipe 2025-12-23T18:57:58.051702Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:417:2363], now have 1 active actors on pipe 2025-12-23T18:57:58.052204Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928037] server disconnected, pipe [2:417:2363] destroyed 2025-12-23T18:57:58.052270Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [2:419:2364], now have 1 active actors on pipe 2025-12-23T18:57:58.052846Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928139] server disconnected, pipe [2:419:2364] destroyed 2025-12-23T18:57:58.555748Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.559182Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.559505Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-12-23T18:57:58.559569Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.559634Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-12-23T18:57:58.560347Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [3:259:2253], now have 1 active actors on pipe 2025-12-23T18:57:58.560466Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:58.562920Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:58.56 ... -23T18:57:58.664394Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:58.664625Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.665244Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [3:468:2399], now have 1 active actors on pipe 2025-12-23T18:57:58.682383Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.685712Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.685989Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-12-23T18:57:58.686037Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.686083Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:57:58.686688Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [3:517:2435], now have 1 active actors on pipe 2025-12-23T18:57:58.686817Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:58.688715Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:57:58.688830Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.689515Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928139] Config applied version 6 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:57:58.689653Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:57:58.689911Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:58.690083Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:525:2406] 2025-12-23T18:57:58.691428Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:58.691475Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-12-23T18:57:58.691513Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:525:2406] 2025-12-23T18:57:58.691560Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:58.691613Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:58.691646Z node 3 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-12-23T18:57:58.691674Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:57:58.691707Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.691736Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:58.691766Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.691793Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:57:58.691868Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:58.692122Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.692498Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [3:528:2440], now have 1 active actors on pipe 2025-12-23T18:57:58.693379Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [3:534:2443], now have 1 active actors on pipe 2025-12-23T18:57:58.693451Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [3:535:2444], now have 1 active actors on pipe 2025-12-23T18:57:58.693513Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [3:536:2444], now have 1 active actors on pipe 2025-12-23T18:57:58.704317Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [3:541:2448], now have 1 active actors on pipe 2025-12-23T18:57:58.723953Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.725511Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.726467Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.726514Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:57:58.726617Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:57:58.726867Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:58.727058Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:597:2451] 2025-12-23T18:57:58.728971Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-12-23T18:57:58.730318Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-12-23T18:57:58.730568Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-12-23T18:57:58.730675Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From m0000000002 to m0000000003 2025-12-23T18:57:58.730963Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-12-23T18:57:58.731031Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2025-12-23T18:57:58.731165Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-12-23T18:57:58.731197Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-12-23T18:57:58.731249Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:57:58.731281Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2025-12-23T18:57:58.731373Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2025-12-23T18:57:58.731534Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2025-12-23T18:57:58.731566Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:58.731605Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-12-23T18:57:58.731655Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:597:2451] 2025-12-23T18:57:58.731695Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:58.731744Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:58.731776Z node 3 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-12-23T18:57:58.731806Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:57:58.731836Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.731869Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:58.731906Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.731944Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:57:58.732028Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:57:58.732168Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.732758Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928138] server disconnected, pipe [3:535:2444] destroyed 2025-12-23T18:57:58.732817Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928037] server disconnected, pipe [3:534:2443] destroyed
RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } }
Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC }
|98.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest
>> YdbProxy::MakeDirectory [GOOD]
>> YdbProxy::OAuthToken
>> YdbProxy::CreateTopic [GOOD]
>> YdbProxy::DescribeConsumer
>> YdbProxy::RemoveDirectory [GOOD]
>> YdbProxy::StaticCreds
>> YdbProxy::ListDirectory [GOOD]
>> YdbProxy::DropTopic
>> YdbProxy::CopyTable [GOOD]
>> YdbProxy::CopyTables
>> YdbProxy::CreateTable [GOOD]
>> YdbProxy::CreateCdcStream
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD]
Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-12-23T18:57:57.916667Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:57.925349Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928037] Transactions request.
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:57.926489Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-12-23T18:57:57.926570Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:57.926633Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-12-23T18:57:57.928685Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:261:2254], now have 1 active actors on pipe 2025-12-23T18:57:57.928781Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:57.946276Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:57.946435Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:57.953004Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:57.954069Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:57:57.954472Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:57.954887Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2225] 2025-12-23T18:57:57.957594Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:57.957657Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-12-23T18:57:57.957724Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2225] 2025-12-23T18:57:57.958408Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:57.958503Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:57.959099Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:57:57.959149Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:57:57.960389Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:57.960452Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:57.960491Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:57.960525Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2025-12-23T18:57:57.961793Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:57.962943Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:57:57.963432Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:272:2259], now have 1 active actors on pipe 2025-12-23T18:57:58.009700Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.013006Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.013354Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-12-23T18:57:58.013415Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.013472Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928137] doesn't have tx writes info 2025-12-23T18:57:58.014298Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928137] server connected, pipe [2:401:2356], now have 1 active actors on pipe 2025-12-23T18:57:58.014374Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:58.016683Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:58.016804Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.017583Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928137] Config applied version 2 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:57:58.017735Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2025-12-23T18:57:58.018044Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:58.018302Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:409:2327] 2025-12-23T18:57:58.020722Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:58.020790Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-12-23T18:57:58.020858Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:409:2327] 2025-12-23T18:57:58.020911Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:58.020975Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:58.021016Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:57:58.021051Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:57:58.021094Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.021131Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:58.021172Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:58.021211Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2025-12-23T18:57:58.021296Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:58.021528Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:57:58.021977Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928137] server connected, pipe [2:412:2361], now have 1 active actors on pipe 2025-12-23T18:57:58.039252Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:58.042072Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:58.042287Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-12-23T18:57:58.042357Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.042420Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928138] doesn't have tx writes info 2025-12-23T18:57:58.043011Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [2:461:2397], now have 1 active actors on pipe 2025-12-23T18:57:58.043135Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:58.045060Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-12-23T18:57:58.045168Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:58.045820Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928138] Config applied version 3 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { ... 37928138][Partition][1][StateIdle] Try persist 2025-12-23T18:57:59.471838Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:59.472071Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2025-12-23T18:57:59.472616Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [4:470:2401], now have 1 active actors on pipe 2025-12-23T18:57:59.490893Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:59.494982Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:59.495341Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-12-23T18:57:59.495414Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:59.495489Z node 4 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:57:59.496319Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [4:519:2437], now have 1 active actors on pipe 2025-12-23T18:57:59.496450Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:57:59.498982Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:57:59.499122Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:59.499762Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928139] Config applied version 12 actor [4:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:57:59.499917Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:57:59.500259Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:59.500487Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:527:2408] 2025-12-23T18:57:59.502736Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:59.502819Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-12-23T18:57:59.502870Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:527:2408] 2025-12-23T18:57:59.502934Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:59.503010Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:59.503064Z node 4 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-12-23T18:57:59.503105Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:57:59.503147Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:59.503189Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:59.503233Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:59.503275Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:57:59.503373Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:57:59.503620Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:57:59.504152Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [4:530:2442], now have 1 active actors on pipe 2025-12-23T18:57:59.505379Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [4:536:2445], now have 1 active actors on pipe 2025-12-23T18:57:59.505758Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [4:537:2446], now have 1 active actors on pipe 2025-12-23T18:57:59.505863Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [4:538:2446], now have 1 active actors on pipe 2025-12-23T18:57:59.517149Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [4:546:2453], now have 1 active actors on pipe 2025-12-23T18:57:59.543386Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:57:59.545857Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:57:59.546667Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:57:59.546720Z node 4 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:57:59.546863Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:57:59.547165Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:57:59.547366Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:602:2456] 2025-12-23T18:57:59.549146Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-12-23T18:57:59.550195Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-12-23T18:57:59.550483Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-12-23T18:57:59.550590Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From m0000000002 to m0000000003 2025-12-23T18:57:59.551074Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-12-23T18:57:59.551149Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2025-12-23T18:57:59.551326Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-12-23T18:57:59.551364Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-12-23T18:57:59.551407Z node 4 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:57:59.551446Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2025-12-23T18:57:59.551536Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2025-12-23T18:57:59.551706Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2025-12-23T18:57:59.551746Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:57:59.551782Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-12-23T18:57:59.551826Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:602:2456] 2025-12-23T18:57:59.551876Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:57:59.551931Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:57:59.551970Z node 4 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-12-23T18:57:59.552006Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:57:59.552041Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:59.552074Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:57:59.552108Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:57:59.552140Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:57:59.552225Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:57:59.552378Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:57:59.552980Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928037] server disconnected, pipe [4:536:2445] destroyed 2025-12-23T18:57:59.553145Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928138] server disconnected, pipe [4:537:2446] destroyed
RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } } }
|98.5%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest
>> YdbProxy::AlterTable [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest
>> YdbProxy::DropTable [GOOD]
>> YdbProxy::DescribeTopic
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD]
Test command err: 2025-12-23T18:57:12.714714Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:12.739653Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:12.739872Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:12.746974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:12.747190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:12.747359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:12.747437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:12.747511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:12.747582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841:
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:12.747649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:12.747727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:12.747791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:12.747865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:12.747933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:12.748005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:12.748086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:12.771021Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:12.771155Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:12.771196Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:12.771342Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:12.771451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:12.771534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:12.771566Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:12.771624Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:12.771681Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:12.771715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:12.771737Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:12.771880Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:12.771922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:12.771950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:12.771967Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:12.772024Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:12.772054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:12.772078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:12.772095Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:12.772149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:12.772175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:12.772195Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:12.772240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:12.772274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:12.772297Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:12.772440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:12.772472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:12.772490Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:12.772565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:12.772587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:12.772605Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:12.772647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:12.772681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:12.772700Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:12.772745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:12.772782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:12.772804Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:12.772892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:12.772919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
oad_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=6; 2025-12-23T18:57:56.521151Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=761; 2025-12-23T18:57:56.521194Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=60248; 2025-12-23T18:57:56.521233Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=60347; 2025-12-23T18:57:56.521280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2025-12-23T18:57:56.521556Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=239; 2025-12-23T18:57:56.521584Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=60991; 2025-12-23T18:57:56.521691Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=68; 2025-12-23T18:57:56.521774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=49; 2025-12-23T18:57:56.522065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=257; 2025-12-23T18:57:56.522269Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=173; 2025-12-23T18:57:56.531633Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=9304; 2025-12-23T18:57:56.541163Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9421; 2025-12-23T18:57:56.541263Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-12-23T18:57:56.541305Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-12-23T18:57:56.541336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-12-23T18:57:56.541394Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=31; 2025-12-23T18:57:56.541430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:57:56.541489Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=34; 2025-12-23T18:57:56.541521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-12-23T18:57:56.541566Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=21; 2025-12-23T18:57:56.541629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=36; 2025-12-23T18:57:56.541728Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=44; 2025-12-23T18:57:56.541769Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=87573; 2025-12-23T18:57:56.541877Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:56.541973Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:56.542021Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:56.542070Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:56.542145Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:56.542280Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:56.542323Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:56.542352Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2025-12-23T18:57:56.542382Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:56.542433Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515936853;tx_id=18446744073709551615;;current_snapshot_ts=1766516234243; 2025-12-23T18:57:56.542466Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:56.542501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:56.542525Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:56.542589Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:56.542768Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.140000s; 2025-12-23T18:57:56.544909Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:56.545061Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:56.545097Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:56.545154Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:56.545191Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:56.545254Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515936853;tx_id=18446744073709551615;;current_snapshot_ts=1766516234243; 2025-12-23T18:57:56.545287Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:56.545320Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:56.545346Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:56.545401Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2025-12-23T18:57:56.545436Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:56.545988Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.010000s; 2025-12-23T18:57:56.546020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5567:7198];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2025-12-23T18:57:10.607864Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:10.641654Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:10.641911Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:10.649244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:10.649507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:10.649739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:10.649877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:10.650007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:10.650128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:10.650258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:10.650391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:10.650506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:10.650626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:10.650733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:10.650872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:10.651043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:10.681702Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:10.681882Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:10.681959Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:10.682119Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:10.682303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:10.682389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:10.682438Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:10.682541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:10.682605Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:10.682645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:10.682679Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:10.682889Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:10.682973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:10.683013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:10.683043Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:10.683127Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:10.683178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:10.683260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:10.683292Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:10.683354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:10.683401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:10.683429Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:10.683490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:10.683534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:10.683570Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:10.683791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:10.683859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:10.683903Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:10.684035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:10.684079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:10.684108Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:10.684158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:10.684223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:10.684270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:10.684319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:10.684355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:10.684390Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:10.684515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:10.684555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2025-12-23T18:57:56.737948Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=696; 2025-12-23T18:57:56.737989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=47399; 2025-12-23T18:57:56.738028Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=47501; 2025-12-23T18:57:56.738082Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2025-12-23T18:57:56.738360Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=240; 2025-12-23T18:57:56.738389Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=48172; 2025-12-23T18:57:56.738521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=86; 2025-12-23T18:57:56.738614Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=54; 2025-12-23T18:57:56.738901Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=250; 2025-12-23T18:57:56.739127Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=189; 2025-12-23T18:57:56.748077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8894; 2025-12-23T18:57:56.757303Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9124; 2025-12-23T18:57:56.757401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2025-12-23T18:57:56.757450Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-12-23T18:57:56.757483Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2025-12-23T18:57:56.757545Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=32; 2025-12-23T18:57:56.757580Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-12-23T18:57:56.757649Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=42; 2025-12-23T18:57:56.757686Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-12-23T18:57:56.757739Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2025-12-23T18:57:56.757808Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=41; 2025-12-23T18:57:56.757867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=35; 2025-12-23T18:57:56.757896Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=73550; 2025-12-23T18:57:56.758016Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:57:56.758113Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:57:56.758162Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:57:56.758216Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:57:56.758250Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:57:56.758410Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:56.758456Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:57:56.758489Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2025-12-23T18:57:56.758524Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:56.758578Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515934737;tx_id=18446744073709551615;;current_snapshot_ts=1766516232137; 2025-12-23T18:57:56.758612Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:56.758644Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:56.758674Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:56.758771Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:56.758934Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.087000s; 2025-12-23T18:57:56.761222Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:57:56.761334Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:57:56.761373Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:57:56.761431Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:57:56.761471Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T18:57:56.761522Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766515934737;tx_id=18446744073709551615;;current_snapshot_ts=1766516232137; 2025-12-23T18:57:56.761558Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:57:56.761592Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:56.761623Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:57:56.761682Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2025-12-23T18:57:56.761724Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:57:56.762164Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.133000s; 2025-12-23T18:57:56.762208Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5544:7175];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> YdbProxy::DescribePath [GOOD] >> YdbProxy::DescribeTable >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic >> DSProxyStrategyTest::Restore_mirror3dc >> DSProxyStrategyTest::Restore_block42 >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |98.5%| [TM] {BAZEL_UPLOAD} 
ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD] Test command err: 2025-12-23T18:57:57.267896Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129637730528237:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:57.268219Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005059/r3tmp/tmpDUv6IE/pdisk_1.dat 2025-12-23T18:57:57.536575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:57.536722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:57.568821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:57.592251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:57.623102Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:57.755500Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61068 TServer::EnableGrpc on GrpcPort 26807, node 1 2025-12-23T18:57:57.974801Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:57.974843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:57.974851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize 
from file: (empty maybe) 2025-12-23T18:57:57.974985Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:58.283531Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:61068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516277680 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:58.529769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:59.734757Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129646320463669:2497] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-12-23T18:57:59.747726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:59.862145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:57:59.881587Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129646320463783:2576] txid# 281474976710661, issues: { message: "Can\'t drop unknown column: \'extra\'" severity: 1 } |98.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> 
TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST } |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_strategy/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TKeyValueTest::TestBasicWriteRead >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> 
TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-12-23T18:58:01.697993Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:58:01.701285Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:58:01.701528Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-12-23T18:58:01.701580Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.701628Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-12-23T18:58:01.702202Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [3:260:2254], now have 1 active actors on pipe 2025-12-23T18:58:01.702315Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:58:01.719526Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:58:01.719721Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.720624Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928037] Config applied version 1 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:58:01.720856Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:58:01.721322Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:58:01.721751Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [3:268:2225] 2025-12-23T18:58:01.724338Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:58:01.724405Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-12-23T18:58:01.724473Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:268:2225] 2025-12-23T18:58:01.724534Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:58:01.724609Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:58:01.724651Z node 3 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:58:01.724690Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:58:01.724746Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:01.724788Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:58:01.724835Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:01.724898Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2025-12-23T18:58:01.725046Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:58:01.725298Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:58:01.725805Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [3:271:2259], now have 1 active actors on pipe 2025-12-23T18:58:01.775435Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:58:01.779533Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:58:01.779900Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-12-23T18:58:01.779972Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.780039Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928137] doesn't have tx writes info 2025-12-23T18:58:01.780816Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928137] server connected, pipe [3:398:2354], now have 1 active actors on pipe 2025-12-23T18:58:01.780975Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:58:01.783407Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:58:01.783571Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.784432Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928137] Config applied version 2 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:58:01.784583Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2025-12-23T18:58:01.784928Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:58:01.785187Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [3:406:2325] 2025-12-23T18:58:01.787652Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:58:01.787717Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-12-23T18:58:01.787771Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:406:2325] 2025-12-23T18:58:01.787822Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:58:01.787887Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:58:01.787936Z node 3 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:58:01.787976Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:58:01.788019Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:01.788060Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:58:01.788122Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:01.788165Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2025-12-23T18:58:01.788258Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:58:01.788510Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:58:01.789065Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928137] server connected, pipe [3:409:2359], now have 1 active actors on pipe 2025-12-23T18:58:01.806947Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:58:01.809847Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:58:01.810172Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-12-23T18:58:01.810234Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.810323Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928138] doesn't have tx writes info 2025-12-23T18:58:01.811136Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [3:458:2395], now have 1 active actors on pipe 2025-12-23T18:58:01.811273Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:58:01.813714Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-12-23T18:58:01.813838Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.814687Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928138] Config applied version 3 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInfligh ... 2.419552Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928138][Partition][1][StateIdle] Try persist 2025-12-23T18:58:02.419643Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:58:02.419866Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2025-12-23T18:58:02.420372Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [4:468:2399], now have 1 active actors on pipe 2025-12-23T18:58:02.437634Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:58:02.440834Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:58:02.441169Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-12-23T18:58:02.441241Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:02.441315Z node 4 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:58:02.442127Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [4:517:2435], now have 1 active actors on pipe 2025-12-23T18:58:02.442248Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:58:02.444657Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:58:02.444808Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:02.445681Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928139] Config applied version 8 actor [4:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:58:02.445838Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:58:02.446221Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:58:02.446463Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:525:2406] 2025-12-23T18:58:02.448778Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:58:02.448851Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-12-23T18:58:02.448916Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:525:2406] 2025-12-23T18:58:02.448972Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:58:02.449041Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:58:02.449086Z node 4 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-12-23T18:58:02.449129Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:58:02.449171Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:02.449216Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:58:02.449264Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:02.449304Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:58:02.449409Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:58:02.449686Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:58:02.450246Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [4:528:2440], now have 1 active actors on pipe 2025-12-23T18:58:02.451675Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [4:535:2444], now have 1 active actors on pipe 2025-12-23T18:58:02.451762Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [4:534:2443], now have 1 active actors on pipe 2025-12-23T18:58:02.451855Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [4:536:2444], now have 1 active actors on pipe 2025-12-23T18:58:02.462688Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [4:541:2448], now have 1 active actors on pipe 2025-12-23T18:58:02.480356Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:58:02.482132Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:58:02.483031Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:02.483078Z node 4 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:58:02.483181Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:58:02.483424Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:58:02.483606Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:597:2451] 2025-12-23T18:58:02.484935Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-12-23T18:58:02.485962Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-12-23T18:58:02.486195Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-12-23T18:58:02.486287Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From m0000000002 to m0000000003 2025-12-23T18:58:02.486468Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-12-23T18:58:02.486523Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2025-12-23T18:58:02.486665Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-12-23T18:58:02.486702Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-12-23T18:58:02.486733Z node 4 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:58:02.486795Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2025-12-23T18:58:02.486886Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2025-12-23T18:58:02.487072Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2025-12-23T18:58:02.487119Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:58:02.487151Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-12-23T18:58:02.487187Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:597:2451] 2025-12-23T18:58:02.487229Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:58:02.487274Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:58:02.487308Z node 4 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-12-23T18:58:02.487342Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:58:02.487372Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:02.487401Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:58:02.487437Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:02.487473Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:58:02.487562Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2025-12-23T18:58:02.487687Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:58:02.488355Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928138] server disconnected, pipe [4:535:2444] destroyed 2025-12-23T18:58:02.488463Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928037] server disconnected, pipe [4:534:2443] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-12-23T18:58:01.491184Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:58:01.495041Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:58:01.495414Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-12-23T18:58:01.495498Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.495566Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-12-23T18:58:01.496455Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:261:2254], now have 1 active actors on pipe 2025-12-23T18:58:01.496529Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:58:01.517841Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:58:01.518070Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.519084Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928037] Config applied version 1 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:58:01.519325Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:58:01.519784Z node 2 
:PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:58:01.520246Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:269:2225] 2025-12-23T18:58:01.523007Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:58:01.523079Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-12-23T18:58:01.523157Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2225] 2025-12-23T18:58:01.523230Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:58:01.523314Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:58:01.523364Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:58:01.523405Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:58:01.523455Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:01.523491Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:58:01.523537Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:01.523577Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2025-12-23T18:58:01.523720Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:58:01.524029Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:58:01.524563Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:272:2259], now have 1 active actors on pipe 2025-12-23T18:58:01.583921Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:58:01.587338Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:58:01.587705Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-12-23T18:58:01.587778Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.587837Z node 2 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:58:01.588603Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [2:401:2356], now have 1 active actors on pipe 2025-12-23T18:58:01.588690Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:58:01.591131Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:58:01.591250Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.592038Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928139] Config applied version 2 actor [2:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:58:01.592180Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:58:01.592489Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:58:01.592748Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:409:2327] 2025-12-23T18:58:01.595056Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:58:01.595128Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-12-23T18:58:01.595178Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:409:2327] 2025-12-23T18:58:01.595234Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:58:01.595310Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:58:01.595351Z node 2 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-12-23T18:58:01.595392Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:58:01.595433Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:01.595471Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:58:01.595510Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:01.595551Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:58:01.595636Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:58:01.595892Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:58:01.596346Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [2:412:2361], now have 1 active actors on pipe 2025-12-23T18:58:01.597645Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [2:418:2364], now have 1 active actors on pipe 2025-12-23T18:58:01.598067Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [2:420:2365], now have 1 active actors on pipe 2025-12-23T18:58:01.598470Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928037] server disconnected, pipe [2:418:2364] destroyed 2025-12-23T18:58:01.598876Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928139] server disconnected, pipe [2:420:2365] destroyed Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> YdbProxy::DescribeConsumer [GOOD] >> TKeyValueTest::TestWriteDeleteThenReadRemaining >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TKeyValueTest::TestVacuumOnEmptyTablet >> YdbProxy::StaticCreds [GOOD] >> YdbProxy::OAuthToken [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName >> YdbProxy::DescribeTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: 2025-12-23T18:58:01.497844Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:58:01.501161Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:58:01.501503Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-12-23T18:58:01.501598Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.501659Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928037] doesn't have tx writes info 2025-12-23T18:58:01.502518Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [1:260:2254], now have 1 active actors on pipe 2025-12-23T18:58:01.502646Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:58:01.522330Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:58:01.522489Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.523251Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928037] Config applied version 1 actor [1:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:58:01.523410Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:58:01.523718Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:58:01.523979Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [1:268:2225] 2025-12-23T18:58:01.525666Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:58:01.525708Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-12-23T18:58:01.525765Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:268:2225] 2025-12-23T18:58:01.525806Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:58:01.525850Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:58:01.525879Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2025-12-23T18:58:01.525904Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:58:01.525939Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:01.525981Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:58:01.526025Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:01.526055Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2025-12-23T18:58:01.526147Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:58:01.526319Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:58:01.526654Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [1:271:2259], now have 1 active actors on pipe 2025-12-23T18:58:01.569625Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:58:01.572088Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:58:01.572303Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-12-23T18:58:01.572355Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.572403Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928137] doesn't have tx writes info 2025-12-23T18:58:01.572966Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928137] server connected, pipe [1:401:2357], now have 1 active actors on pipe 2025-12-23T18:58:01.573053Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:58:01.574610Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:58:01.574692Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.575384Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928137] Config applied version 2 actor [1:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-12-23T18:58:01.575491Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
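
The repeated "Transactions request. From tx_00000000000000000000, To tx_18446744073709551615" lines above scan the tablet's entire transaction key space, 0 through 2^64-1, rendered as fixed-width zero-padded decimal so that lexicographic key order matches numeric order. A minimal, self-contained C++ sketch of that rendering (an illustration derived from the log output, not YDB's actual key-building code; the 20-digit width and "tx_" prefix are assumptions read off the printed keys):

```cpp
#include <cstdint>
#include <iomanip>
#include <iostream>
#include <limits>
#include <sstream>
#include <string>

// Render a transaction id as a fixed-width key so that plain string
// comparison orders keys the same way as the underlying 64-bit integers.
// 2^64 - 1 = 18446744073709551615 has 20 decimal digits, hence the width.
std::string MakeTxKey(uint64_t txId) {
    std::ostringstream out;
    out << "tx_" << std::setw(20) << std::setfill('0') << txId;
    return out.str();
}

int main() {
    const uint64_t first = 0;
    const uint64_t last = std::numeric_limits<uint64_t>::max();

    // Matches the full-range scan printed by the tablet on startup:
    // From tx_00000000000000000000, To tx_18446744073709551615
    std::cout << "From " << MakeTxKey(first)
              << ", To " << MakeTxKey(last) << std::endl;
    return 0;
}
```
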
2025-12-23T18:58:01.575680Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:58:01.575829Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [1:409:2328] 2025-12-23T18:58:01.577274Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:58:01.577316Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-12-23T18:58:01.577349Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:409:2328] 2025-12-23T18:58:01.577381Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:58:01.577431Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:58:01.577468Z node 1 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2025-12-23T18:58:01.577493Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2025-12-23T18:58:01.577518Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:01.577540Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T18:58:01.577569Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:01.577593Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2025-12-23T18:58:01.577651Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:58:01.577863Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T18:58:01.578199Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928137] server connected, pipe [1:412:2362], now have 1 active actors on pipe 2025-12-23T18:58:01.590921Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:58:01.593708Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:58:01.593915Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-12-23T18:58:01.593961Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.594002Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928138] doesn't have tx writes info 2025-12-23T18:58:01.594437Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [1:461:2398], now have 1 active actors on pipe 2025-12-23T18:58:01.594595Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:58:01.596165Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-12-23T18:58:01.596251Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:01.596896Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928138] Config applied version 3 actor [1:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-12-23T18:58:01.596997Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializ ... 
0000, To tx_18446744073709551615 2025-12-23T18:58:02.858661Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-12-23T18:58:02.858703Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:02.858814Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:58:02.859377Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [3:519:2437], now have 1 active actors on pipe 2025-12-23T18:58:02.859452Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1288: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-12-23T18:58:02.860946Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:398: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:58:02.861042Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:02.861461Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1315: [PQ: 72057594037928139] Config applied version 12 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-12-23T18:58:02.861560Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:58:02.861771Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:58:02.861926Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:527:2408] 2025-12-23T18:58:02.863258Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:58:02.863297Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-12-23T18:58:02.863331Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:527:2408] 2025-12-23T18:58:02.863373Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:58:02.863420Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:58:02.863454Z node 3 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2025-12-23T18:58:02.863484Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:58:02.863511Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:02.863557Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:58:02.863589Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:02.863620Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:58:02.863686Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:58:02.863859Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:58:02.864229Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [3:530:2442], now have 1 active actors on pipe 2025-12-23T18:58:02.865327Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928037] server connected, pipe [3:537:2445], now have 1 active actors on pipe 2025-12-23T18:58:02.865707Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928137] server connected, pipe [3:539:2446], now have 1 active actors on pipe 2025-12-23T18:58:02.865819Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928138] server connected, pipe [3:540:2446], now have 1 active actors on pipe 2025-12-23T18:58:02.865868Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [3:541:2446], now have 1 active actors on pipe 2025-12-23T18:58:02.866315Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72057594037928139] server connected, pipe [3:554:2457], now have 1 active actors on pipe 2025-12-23T18:58:02.883431Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:58:02.885211Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:58:02.885865Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:58:02.885913Z node 3 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72057594037928139] doesn't have tx writes info 2025-12-23T18:58:02.886005Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-12-23T18:58:02.886246Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-12-23T18:58:02.886414Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:610:2460] 2025-12-23T18:58:02.887838Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-12-23T18:58:02.888662Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-12-23T18:58:02.888888Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-12-23T18:58:02.888990Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2025-12-23T18:58:02.889207Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-12-23T18:58:02.889263Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2025-12-23T18:58:02.889390Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-12-23T18:58:02.889422Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-12-23T18:58:02.889456Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-12-23T18:58:02.889488Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2025-12-23T18:58:02.889575Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2025-12-23T18:58:02.889696Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2025-12-23T18:58:02.889745Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2025-12-23T18:58:02.889778Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
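
The TInitInfoRangeStep, TInitDataRangeStep and TInitMessageDeduplicatorStep reads above each scan a key range whose lower bound is a single type letter ('m', 'd' or 'e' in this log) followed by the partition id zero-padded to ten digits, with the upper bound formed from the next partition id. A hypothetical C++ sketch reproducing those bounds for partition 2, derived only from the ranges printed in the log rather than from YDB's key helpers (note the deduplicator lower bound in the log additionally carries a zero-padded "|<offset>" suffix, which this sketch omits):

```cpp
#include <cstdint>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
#include <utility>

// Build the [from, to) range scanned for one key type of one partition,
// as seen in the init log: <type letter><partition id padded to 10 digits>,
// with the end of the range using the next partition id.
std::pair<std::string, std::string> MakeRange(char type, uint32_t partition) {
    auto key = [type](uint32_t p) {
        std::ostringstream out;
        out << type << std::setw(10) << std::setfill('0') << p;
        return out.str();
    };
    return {key(partition), key(partition + 1)};
}

int main() {
    const uint32_t partition = 2;
    for (char type : {'m', 'd', 'e'}) {
        auto [from, to] = MakeRange(type, partition);
        // Prints: Read range request. From m0000000002 to m0000000003, etc.
        std::cout << "Read range request. From " << from
                  << " to " << to << std::endl;
    }
    return 0;
}
```
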
2025-12-23T18:58:02.889820Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:610:2460] 2025-12-23T18:58:02.889863Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-12-23T18:58:02.889907Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2025-12-23T18:58:02.889940Z node 3 :PERSQUEUE DEBUG: partition.cpp:4479: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2025-12-23T18:58:02.889969Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2025-12-23T18:58:02.890000Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:02.890029Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2025-12-23T18:58:02.890073Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T18:58:02.890105Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2025-12-23T18:58:02.890198Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2025-12-23T18:58:02.890370Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2025-12-23T18:58:02.890986Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928037] server disconnected, pipe [3:537:2445] destroyed 2025-12-23T18:58:02.891070Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928138] server disconnected, pipe [3:540:2446] destroyed 2025-12-23T18:58:02.891106Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72057594037928137] server disconnected, pipe [3:539:2446] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } } } >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic >> YdbProxy::CreateCdcStream [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TKeyValueTest::TestCopyRangeWorks >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition >> 
TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks >> YdbProxy::DropTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2025-12-23T18:57:57.372590Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129638967814497:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:57.372704Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005055/r3tmp/tmplnODgG/pdisk_1.dat 2025-12-23T18:57:57.627209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:57.627316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:57.682111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:57.732995Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:57.733635Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TClient is connected to server localhost:23475 TServer::EnableGrpc on GrpcPort 14622, node 1 2025-12-23T18:57:57.972935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:57.972972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:57.972979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:57.973056Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:58.035212Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23475 2025-12-23T18:57:58.382424Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516277785 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:58.466926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:58.520145Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129643262782604:2494] txid# 281474976710658, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } 2025-12-23T18:58:00.215122Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587129651468398746:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:00.215197Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005055/r3tmp/tmpROvZdQ/pdisk_1.dat 2025-12-23T18:58:00.226122Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:00.282408Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:00.284110Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587129651468398708:2081] 1766516280214274 != 1766516280214277 2025-12-23T18:58:00.328234Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:00.328318Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:00.330692Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:00.463268Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21613 TServer::EnableGrpc on GrpcPort 23326, node 2 2025-12-23T18:58:00.543396Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:58:00.543427Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:58:00.543435Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:58:00.543530Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516280361 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:00.783722Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
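
The txid# 281474976710658 failure above rejects a topic whose requested retention period (31536000s, i.e. one year) falls outside the allowed window of 1s to 2678400s (31 days). A minimal sketch of that kind of bounds check, assuming the limits shown in the error message; this is an illustration only, not the schemeshard's actual validation code:

```cpp
#include <chrono>
#include <iostream>
#include <optional>
#include <sstream>
#include <string>

using namespace std::chrono;

// Limits as printed in the error message above; in a real deployment they
// would come from configuration rather than hard-coded constants.
constexpr seconds kMinRetention{1};
constexpr seconds kMaxRetention{2678400};  // 31 days

// Returns an error text in the same spirit as the log message, or
// std::nullopt when the requested retention is acceptable.
std::optional<std::string> ValidateRetention(seconds requested) {
    if (requested < kMinRetention || requested > kMaxRetention) {
        std::ostringstream out;
        out << "Invalid retention period: specified: " << requested.count()
            << "s, min: " << kMinRetention.count()
            << "s, max: " << kMaxRetention.count() << "s";
        return out.str();
    }
    return std::nullopt;
}

int main() {
    // The test above asks for one year of retention, which is rejected.
    if (auto err = ValidateRetention(seconds{31536000})) {
        std::cout << *err << std::endl;
    }
    // 24 hours sits inside the window and passes.
    std::cout << (ValidateRetention(hours(24)) ? "rejected" : "accepted")
              << std::endl;
    return 0;
}
```
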
|98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2025-12-23T18:57:57.267488Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129640284856326:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:57.267652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00504d/r3tmp/tmpqPo43E/pdisk_1.dat 2025-12-23T18:57:57.514052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:57.552569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:57.552743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:57.612109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:57.627087Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:57.796102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5911 TServer::EnableGrpc on GrpcPort 9731, node 1 2025-12-23T18:57:57.972758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:57.972820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:57.972825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:57.972920Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:58.276937Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5911 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516277673 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:58.506537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:58.514706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:57:58.554784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-12-23T18:57:58.569376Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129644579824462:2522] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-12-23T18:58:00.331165Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587129651617310226:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:00.331237Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:58:00.341914Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00504d/r3tmp/tmpMa86SP/pdisk_1.dat 2025-12-23T18:58:00.419631Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:00.458437Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:00.458502Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:00.460228Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:00.610323Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19943 TServer::EnableGrpc on GrpcPort 10393, node 2 2025-12-23T18:58:00.691730Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:58:00.691747Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:58:00.691751Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:58:00.691844Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19943 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516280508 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:00.966244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:58:01.011577Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516281012 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516280508 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated:... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1766516281012 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516280508 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricate... (TRUNCATED) |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] Test command err: 2025-12-23T18:57:57.270984Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129639819057821:2147];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:57.271074Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005053/r3tmp/tmphatUIX/pdisk_1.dat 2025-12-23T18:57:57.493333Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:57.578499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:57.578584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:57.606107Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:57.621190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:57.746994Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12766 TServer::EnableGrpc on GrpcPort 1484, node 1 2025-12-23T18:57:57.975474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:57.975501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:57.975507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:57.975586Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:58.275881Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
TClient is connected to server localhost:12766 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516277673 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:58.457867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:59.805099Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129648408993176:2500] txid# 281474976710658, issues: { message: "Column key has wrong key type Float" severity: 1 } 2025-12-23T18:57:59.816428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:59.927765Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129648408993262:2560] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:00.438057Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587129652143584356:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:00.438123Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:58:00.449152Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005053/r3tmp/tmpvrUbwZ/pdisk_1.dat 2025-12-23T18:58:00.530064Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:00.531626Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:00.531784Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [2:7587129652143584319:2081] 1766516280436869 != 1766516280436872 2025-12-23T18:58:00.571823Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:00.571931Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:00.578543Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:00.729062Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29535 TServer::EnableGrpc on GrpcPort 16857, node 2 2025-12-23T18:58:00.760331Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:58:00.760355Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:58:00.760362Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:58:00.760432Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516280627 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:01.004900Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:58:01.444668Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:02.999413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:03.134838Z node 2 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][2:7587129665028487267:2331] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:40:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-12-23T18:58:03.158162Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587129665028487318:2638] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table/updates\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeCdcStream, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2025-12-23T18:57:57.268762Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129638086794964:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:57.268831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00504c/r3tmp/tmpOMg5TU/pdisk_1.dat 2025-12-23T18:57:57.519014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:57.562348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:57.562461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:57.598069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:57.614974Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:57.736310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:31788 TServer::EnableGrpc on GrpcPort 22600, node 1 2025-12-23T18:57:57.974793Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:57.974823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:57.974828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2025-12-23T18:57:57.974907Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:58.276175Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:31788 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516277666 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:58.488070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:59.897737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:00.020762Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-12-23T18:58:00.024126Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129650971697821:2593] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-12-23T18:58:00.723608Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587129654468249316:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:00.723738Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00504c/r3tmp/tmp2QXOhK/pdisk_1.dat 2025-12-23T18:58:00.758526Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:00.828688Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:00.829771Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587129654468249281:2081] 
1766516280722429 != 1766516280722432 2025-12-23T18:58:00.842894Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:00.843029Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:00.873175Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:01.022800Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16126 TServer::EnableGrpc on GrpcPort 13157, node 2 2025-12-23T18:58:01.076646Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:58:01.076669Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:58:01.076680Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:58:01.076755Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516280928 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:01.330502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2025-12-23T18:57:57.338851Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129641038924484:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:57.341748Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00504e/r3tmp/tmpdsf5le/pdisk_1.dat 2025-12-23T18:57:57.574810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:57.586631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:57.586764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:57.646707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:57.667727Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:57.834501Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16992 TServer::EnableGrpc on GrpcPort 8128, node 1 2025-12-23T18:57:57.973597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:57.973625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:57.973631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:57.973701Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:58.345566Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:16992 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516277722 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:58.481180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:58.487016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:58:00.183757Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587129651690116489:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:00.183829Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00504e/r3tmp/tmpI6oWMO/pdisk_1.dat 2025-12-23T18:58:00.195173Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:00.234095Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587129651690116446:2081] 1766516280182628 != 1766516280182631 2025-12-23T18:58:00.278792Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:00.314596Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:00.314662Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:00.316321Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:00.431882Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6904 TServer::EnableGrpc on GrpcPort 7349, node 2 2025-12-23T18:58:00.486604Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:58:00.486623Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:58:00.486628Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:58:00.486685Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:6904 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516280361 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:00.738685Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> KqpKnn::BitVectorKnnPushdown |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2025-12-23T18:57:57.267582Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129638855247062:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:57.267639Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005050/r3tmp/tmpcYqT9O/pdisk_1.dat 2025-12-23T18:57:57.539326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-12-23T18:57:57.539503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:57.582537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:57.593040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:57.612900Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TClient is connected to server localhost:63497 TServer::EnableGrpc on GrpcPort 22980, node 1 2025-12-23T18:57:57.846425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:57:57.975546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:57.975586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:57.975592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:57.975712Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:58.276167Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:63497 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516277694 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:58.460435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:58:00.410772Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587129652088264585:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:00.410845Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005050/r3tmp/tmphm4iGL/pdisk_1.dat 2025-12-23T18:58:00.423649Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:00.499201Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587129652088264556:2081] 1766516280409713 != 1766516280409716 2025-12-23T18:58:00.500917Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:00.527113Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:00.527204Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:00.535824Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:00.635341Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12200 TServer::EnableGrpc on GrpcPort 26900, node 2 2025-12-23T18:58:00.745464Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:58:00.745491Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:58:00.745498Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:58:00.745591Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12200 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516280585 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:58:01.000371Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:58:01.099720Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2025-12-23T18:58:01.107403Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-12-23T18:58:01.107434Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-12-23T18:58:01.118253Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587129656383232828:2585] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: 2025-12-23T18:57:57.270699Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129641566269891:2136];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:57.271508Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005054/r3tmp/tmpSOyZjq/pdisk_1.dat 2025-12-23T18:57:57.541974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:57.542067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:57.574460Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:57.599359Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129641566269787:2081] 1766516277267774 != 1766516277267777 2025-12-23T18:57:57.602825Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:57.612312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:57.757879Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28717 TServer::EnableGrpc on GrpcPort 11440, node 1 
2025-12-23T18:57:57.973851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:57.973880Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:57.973886Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:57.973996Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:58.278587Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:28717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516277666 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:58.508807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:58:00.778806Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587129653728234046:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:00.778878Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005054/r3tmp/tmpNluAY9/pdisk_1.dat 2025-12-23T18:58:00.788911Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:00.856765Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:00.858880Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587129653728234013:2081] 1766516280777741 != 1766516280777744 2025-12-23T18:58:00.890658Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:00.890756Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:00.893379Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23695 TServer::EnableGrpc on GrpcPort 14442, node 2 2025-12-23T18:58:01.071586Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:58:01.088310Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:58:01.088334Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:58:01.088342Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:58:01.088435Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516280935 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:58:01.358600Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:58:01.784625Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:03.734718Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] Test command err: 2025-12-23T18:57:00.216544Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:00.238528Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:00.238775Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:00.245064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:00.245255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:00.245458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:00.245561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:00.245659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:00.245810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:00.245904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:00.246006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:00.246113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:00.246210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:00.246279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:00.246380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:00.246465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:00.267800Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:00.267969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:00.268015Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:00.268148Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:00.268307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:00.268396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:00.268438Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:00.268517Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:00.268569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:00.268612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:00.268640Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:00.268801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:00.268847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:00.268876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:00.268917Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:00.268994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:00.269057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:00.269089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:00.269109Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:00.269148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:00.269172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:00.269192Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:00.269259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:00.269292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:00.269319Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:00.269499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:00.269548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:00.269587Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:00.269674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:00.269705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:00.269725Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:00.269757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:00.269796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:00.269815Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:00.269847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:00.269881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:00.269907Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:00.270023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:00.270061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=7503; 2025-12-23T18:58:01.422689Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2025-12-23T18:58:01.423639Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=891; 2025-12-23T18:58:01.423699Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8803; 2025-12-23T18:58:01.423743Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8954; 2025-12-23T18:58:01.423805Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2025-12-23T18:58:01.423910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=49; 2025-12-23T18:58:01.423957Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9760; 2025-12-23T18:58:01.424148Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=132; 2025-12-23T18:58:01.424294Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=98; 2025-12-23T18:58:01.424489Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=153; 2025-12-23T18:58:01.424679Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=146; 2025-12-23T18:58:01.428042Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3309; 2025-12-23T18:58:01.431042Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2924; 2025-12-23T18:58:01.431125Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-12-23T18:58:01.431172Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-12-23T18:58:01.431218Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T18:58:01.431323Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2025-12-23T18:58:01.431368Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:58:01.431476Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=73; 2025-12-23T18:58:01.431522Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-12-23T18:58:01.431592Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=38; 2025-12-23T18:58:01.431711Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=64; 2025-12-23T18:58:01.431823Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=61; 2025-12-23T18:58:01.431868Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=28059; 2025-12-23T18:58:01.432018Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110400112;raw_bytes=155849958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:58:01.432135Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:58:01.432199Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:58:01.432291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:58:01.432347Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:58:01.432503Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:58:01.432576Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:58:01.432615Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:58:01.432662Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:58:01.432731Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:58:01.432784Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:58:01.432828Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:58:01.432943Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:58:01.433168Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.085000s; 2025-12-23T18:58:01.435864Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:58:01.436124Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:58:01.436194Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:58:01.436270Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:58:01.436319Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:58:01.436376Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:58:01.436465Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:58:01.436540Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:58:01.436588Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:58:01.436670Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:58:01.436728Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:58:01.437398Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.118000s; 2025-12-23T18:58:01.437475Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2347:4327];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.6%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.6%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk >> TPipeTrackerTest::TestSimpleAdd [GOOD] >> TResourceBroker::TestAutoTaskId >> DataShardOutOfOrder::TestReadTableWriteConflict >> TResourceBroker::TestAutoTaskId [GOOD] >> BootstrapperTest::RestartUnavailableTablet >> BootstrapperTest::KeepExistingTablet |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> TTabletPipeTest::TestPipeWithVersionInfo |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> YdbProxy::AlterTopic [GOOD] >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage >> DataShardOutOfOrder::TestOutOfOrderLockLost |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries [GOOD] >> BootstrapperTest::KeepExistingTablet [GOOD] >> BootstrapperTest::DuplicateNodes |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for boot2 ... received OnTabletStop ... received OnTabletStop ... received OnTabletStop ... waiting for client shutting down notification ... 
waiting for connect2 >> BootstrapperTest::UnavailableStateStorage [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2025-12-23T18:57:57.268626Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129640477988033:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:57.268758Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00505c/r3tmp/tmpqMmGMx/pdisk_1.dat 2025-12-23T18:57:57.517621Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:57.584140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:57.584268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:57.607697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:57.658645Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:57.791399Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6440 TServer::EnableGrpc on GrpcPort 15466, node 1 2025-12-23T18:57:57.973799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:57.973826Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:57.973835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:57.973909Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:58.277843Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6440 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516277666 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:58.443200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:59.805682Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129649067923467:2500] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-12-23T18:57:59.817598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:00.478596Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587129654611905927:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:00.478650Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00505c/r3tmp/tmpAGjmen/pdisk_1.dat 2025-12-23T18:58:00.491538Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:00.564292Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:00.565512Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587129654611905900:2081] 1766516280477543 != 1766516280477546 2025-12-23T18:58:00.610708Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:00.610950Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:00.616097Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:00.751870Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3776 TServer::EnableGrpc on GrpcPort 8473, node 2 2025-12-23T18:58:00.784624Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:58:00.784647Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:58:00.784654Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:58:00.784743Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3776 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516280662 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:01.033266Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:58:01.484990Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:03.088065Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:03.129407Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:03.807794Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587129666289413775:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:03.807853Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00505c/r3tmp/tmpyrRjCC/pdisk_1.dat 2025-12-23T18:58:03.820222Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:03.895105Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:03.939813Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:03.939905Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:03.943607Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9204 TServer::EnableGrpc on GrpcPort 4798, node 3 2025-12-23T18:58:04.121723Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:58:04.145431Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:58:04.145460Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:58:04.145469Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:58:04.145551Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9204 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516283987 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:04.395589Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:58:04.501684Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2025-12-23T18:58:04.518249Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587129670584381994:2576] txid# 281474976710660, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2025-12-23T18:58:07.666551Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 0 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 1 ... 
disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 2 2025-12-23T18:58:07.667240Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2025-12-23T18:58:07.667279Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:260: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.101463s 2025-12-23T18:58:07.847540Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 0 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 1 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 2 ... waiting for multiple state storage lookup attempts (done) |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... waiting for pipe to connect 2025-12-23T18:58:08.074467Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:58:08.074535Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:58:08.075175Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-12-23T18:58:08.075215Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 12552810490399048506 2025-12-23T18:58:08.075300Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-12-23T18:58:08.075318Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-12-23T18:58:08.075915Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-12-23T18:58:08.075974Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2025-12-23T18:58:08.076262Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-12-23T18:58:08.076287Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.139961s 2025-12-23T18:58:08.265892Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-12-23T18:58:08.266597Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:220:2097] 2025-12-23T18:58:08.267208Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 
2025-12-23T18:58:08.267265Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi >> DataShardTxOrder::RandomDotRanges_DelayRS >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:451:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:454:2057] recipient: [2:453:2379] Leader for TabletID 72057594037927937 is [2:455:2380] sender: [2:456:2057] recipient: [2:453:2379] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:455:2380] Leader for TabletID 72057594037927937 is [2:455:2380] sender: [2:571:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:451:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:454:2057] recipient: [3:453:2379] Leader for TabletID 72057594037927937 is [3:455:2380] sender: [3:456:2057] recipient: [3:453:2379] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:455:2380] Leader for TabletID 72057594037927937 is [3:455:2380] sender: [3:571:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:452:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:455:2057] recipient: [4:454:2379] Leader for TabletID 72057594037927937 is [4:456:2380] sender: [4:457:2057] recipient: [4:454:2379] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:456:2380] Leader for TabletID 72057594037927937 is [4:456:2380] sender: [4:572:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> test_select.py::TestDML::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> DataShardScan::ScanFollowedByUpdate >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> DataShardOutOfOrder::TestOutOfOrderLockLost [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] Test command err: 2025-12-23T18:58:11.800556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:11.800606Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:11.801787Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:11.814074Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:11.814307Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:11.814497Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:11.846897Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:11.851979Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:11.852041Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:11.853139Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:11.853256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:11.853305Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:11.853561Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:11.853609Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:11.853654Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:58:11.920248Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:11.946077Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:11.946227Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:11.946302Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:58:11.946327Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:11.946350Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:11.946374Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:11.946506Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:11.946549Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:11.946868Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:11.946976Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:11.947088Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:11.947126Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:11.947156Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:11.947180Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:11.947204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:11.947226Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:11.947251Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:11.947307Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:11.947330Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:11.947358Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:11.952489Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:11.952565Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:11.952646Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:11.952781Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:11.952828Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:11.952872Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:58:11.952904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:11.952930Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:11.952954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:58:11.952976Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:11.953184Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:11.953216Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:58:11.953240Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:11.953272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 
2025-12-23T18:58:11.953300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:11.953315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:11.953339Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:11.953362Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:11.953390Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:11.965240Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:11.965298Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:11.965322Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:11.965347Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:11.965399Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:58:11.965772Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:11.965814Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:11.965854Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:11.965954Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:11.965988Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:11.966097Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:11.966141Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:11.966179Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:11.966205Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:11.968738Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:58:11.968795Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:11.968959Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], 
Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:11.968985Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:11.969032Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:11.969073Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:11.969097Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:11.969123Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:58:11.969164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... on::Execute at 9437185 2025-12-23T18:58:13.702008Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:58:13.702041Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437185 for ReadTableScan 2025-12-23T18:58:13.702066Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit ReadTableScan 2025-12-23T18:58:13.702092Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437185 error: , IsFatalError: 0 2025-12-23T18:58:13.702133Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is Executed 2025-12-23T18:58:13.702211Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit ReadTableScan 2025-12-23T18:58:13.702229Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437185 to execution unit CompleteOperation 2025-12-23T18:58:13.702244Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit CompleteOperation 2025-12-23T18:58:13.702371Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is DelayComplete 2025-12-23T18:58:13.702392Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit CompleteOperation 2025-12-23T18:58:13.702416Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437185 to execution unit CompletedOperations 2025-12-23T18:58:13.702447Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit CompletedOperations 2025-12-23T18:58:13.702468Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is Executed 2025-12-23T18:58:13.702489Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit CompletedOperations 2025-12-23T18:58:13.702508Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437185 has finished 2025-12-23T18:58:13.702532Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:13.702555Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 
2025-12-23T18:58:13.702581Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2025-12-23T18:58:13.702614Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2025-12-23T18:58:13.702729Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:463:2405], Recipient [1:463:2405]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:13.702772Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:13.702804Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2025-12-23T18:58:13.702821Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:58:13.702841Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437186 for ReadTableScan 2025-12-23T18:58:13.702861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit ReadTableScan 2025-12-23T18:58:13.702903Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437186 error: , IsFatalError: 0 2025-12-23T18:58:13.702936Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is Executed 2025-12-23T18:58:13.702971Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit ReadTableScan 2025-12-23T18:58:13.702997Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437186 to execution unit CompleteOperation 2025-12-23T18:58:13.703017Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit CompleteOperation 2025-12-23T18:58:13.703116Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is DelayComplete 2025-12-23T18:58:13.703163Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit CompleteOperation 2025-12-23T18:58:13.703189Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437186 to execution unit CompletedOperations 2025-12-23T18:58:13.703211Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit CompletedOperations 2025-12-23T18:58:13.703246Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is Executed 2025-12-23T18:58:13.703271Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit CompletedOperations 2025-12-23T18:58:13.703304Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437186 has finished 2025-12-23T18:58:13.703325Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:13.703338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2025-12-23T18:58:13.703352Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2025-12-23T18:58:13.703366Z node 
1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2025-12-23T18:58:13.703441Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:242:2234], Recipient [1:242:2234]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:13.703471Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:13.703510Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:13.703530Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:58:13.703558Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437184 for ReadTableScan 2025-12-23T18:58:13.703582Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit ReadTableScan 2025-12-23T18:58:13.703599Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437184 error: , IsFatalError: 0 2025-12-23T18:58:13.703629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is Executed 2025-12-23T18:58:13.703647Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit ReadTableScan 2025-12-23T18:58:13.703675Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437184 to execution unit CompleteOperation 2025-12-23T18:58:13.703706Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit CompleteOperation 2025-12-23T18:58:13.703808Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is DelayComplete 2025-12-23T18:58:13.703826Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit CompleteOperation 2025-12-23T18:58:13.703839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437184 to execution unit CompletedOperations 2025-12-23T18:58:13.703852Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit CompletedOperations 2025-12-23T18:58:13.703867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is Executed 2025-12-23T18:58:13.703891Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit CompletedOperations 2025-12-23T18:58:13.703920Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437184 has finished 2025-12-23T18:58:13.703940Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:13.703956Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:13.703971Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:13.703994Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:13.716421Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-12-23T18:58:13.716487Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-12-23T18:58:13.716517Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437185 on unit CompleteOperation 2025-12-23T18:58:13.716577Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000006 : 36] from 9437185 at tablet 9437185 send result to client [1:103:2137], exec latency: 3 ms, propose latency: 4 ms 2025-12-23T18:58:13.716625Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-12-23T18:58:13.717390Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:58:13.717427Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:58:13.717444Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437186 on unit CompleteOperation 2025-12-23T18:58:13.717481Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000006 : 36] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 3 ms, propose latency: 5 ms 2025-12-23T18:58:13.717510Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:58:13.717583Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:13.717598Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:13.717613Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437184 on unit CompleteOperation 2025-12-23T18:58:13.717634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000006 : 36] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 3 ms, propose latency: 5 ms 2025-12-23T18:58:13.717647Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_8_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 8] >> KqpKnn::BitVectorKnnPushdown [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:58:09.752157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:58:09.858040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:58:09.868777Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:09.869172Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:58:09.869256Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:10.066056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:10.066188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:10.122300Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516287629848 != 1766516287629852 2025-12-23T18:58:10.133469Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:10.175478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:10.264454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fc6/r3tmp/tmpkAJSns/pdisk_1.dat 2025-12-23T18:58:10.521843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:58:10.523751Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-12-23T18:58:10.523798Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:399:2398] Proxy marker# C1 2025-12-23T18:58:10.535219Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:10.629346Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2025-12-23T18:58:10.629449Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-12-23T18:58:10.629652Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2025-12-23T18:58:10.629955Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2025-12-23T18:58:10.630014Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:399:2398] Proxy 2025-12-23T18:58:10.631592Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE 
EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-12-23T18:58:10.631662Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-12-23T18:58:10.631690Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-12-23T18:58:10.631731Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2025-12-23T18:58:10.635604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:10.663146Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:875:2758], Recipient [1:884:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:10.663920Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:875:2758], Recipient [1:884:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:10.664123Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:884:2764] 2025-12-23T18:58:10.664291Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:10.709711Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:875:2758], Recipient [1:884:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:10.710435Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:10.710558Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:10.712237Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:58:10.712317Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:58:10.712405Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:58:10.712736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:10.712880Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:10.712960Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:899:2764] in generation 1 2025-12-23T18:58:10.713375Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:10.752321Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:58:10.752516Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:10.752633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:901:2774] 2025-12-23T18:58:10.752666Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate 
change sender: at tablet: 72075186224037888 2025-12-23T18:58:10.752700Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:58:10.752738Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:58:10.752965Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:884:2764], Recipient [1:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:10.753020Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:10.753317Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:58:10.753434Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:58:10.753827Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:58:10.753866Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:10.753905Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:58:10.753938Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:58:10.753969Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:58:10.754000Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:58:10.754050Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:58:10.754145Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:886:2765], Recipient [1:884:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:10.754181Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:10.754254Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:880:2761], serverId# [1:886:2765], sessionId# [0:0:0] 2025-12-23T18:58:10.754340Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:886:2765] 2025-12-23T18:58:10.754376Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:10.754477Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:58:10.754727Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:58:10.754799Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:58:10.754894Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 
2025-12-23T18:58:10.754937Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:58:10.755008Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:58:10.755044Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:58:10.755091Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:58:10.755383Z node 1 :TX_DATASHARD TR ... shard_impl.h:3153: StateWork, received event# 269553215, Sender [1:1625:3264], Recipient [1:1566:3229]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-12-23T18:58:14.906828Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:58:14.906910Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2025-12-23T18:58:14.907014Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:58:14.907055Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:58:14.907091Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:58:14.907125Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:58:14.907180Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2025-12-23T18:58:14.907219Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:58:14.907244Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:58:14.907265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:58:14.907288Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:58:14.907403Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-12-23T18:58:14.907629Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715683, counter# 18446744073709551612 for [OwnerId: 72057594046644480, 
LocalPathId: 38] 2025-12-23T18:58:14.907667Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4500/18446744073709551615 2025-12-23T18:58:14.907706Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[1:1625:3264], 0} after executionsCount# 1 2025-12-23T18:58:14.907740Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[1:1625:3264], 0} sends rowCount# 5, bytes# 160, quota rows left# 996, quota bytes left# 5242720, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:58:14.907796Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[1:1625:3264], 0} finished in read 2025-12-23T18:58:14.907837Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:58:14.907868Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:58:14.907885Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:58:14.907904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:58:14.907933Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:58:14.907946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:58:14.907962Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2025-12-23T18:58:14.907988Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:58:14.908051Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:58:14.908712Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:1625:3264], Recipient [1:1566:3229]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:58:14.908755Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } } 2025-12-23T18:58:15.017321Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep 2025-12-23T18:58:15.017401Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:293: Coordinator# 72057594046316545 scheduling step 5500 in 0.499900s at 5.450000s 2025-12-23T18:58:15.019514Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:1649:3281], Recipient [1:1566:3229]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: 
FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-12-23T18:58:15.019649Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:58:15.019708Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-12-23T18:58:15.019770Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T18:58:15.019801Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:58:15.019828Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:58:15.019861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:58:15.019899Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2025-12-23T18:58:15.019931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T18:58:15.019947Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:58:15.019959Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:58:15.019985Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:58:15.020063Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-12-23T18:58:15.020274Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715686, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T18:58:15.020309Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v5000/18446744073709551615 2025-12-23T18:58:15.020358Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[1:1649:3281], 0} after executionsCount# 1 2025-12-23T18:58:15.020392Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[1:1649:3281], 0} sends rowCount# 6, bytes# 192, quota rows left# 995, quota bytes left# 5242688, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:58:15.020444Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[1:1649:3281], 0} finished in read 2025-12-23T18:58:15.020499Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T18:58:15.020516Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 
72075186224037888 executing on unit ExecuteRead 2025-12-23T18:58:15.020531Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:58:15.020547Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:58:15.020576Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T18:58:15.020590Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:58:15.020607Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2025-12-23T18:58:15.020644Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:58:15.020709Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:58:15.020880Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [1:69:2116], Recipient [1:1566:3229]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715686 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-12-23T18:58:15.021364Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:1649:3281], Recipient [1:1566:3229]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:58:15.021400Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } }, { items { uint32_value: 11 } items { uint32_value: 11 } } |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 2025-12-23T18:57:11.928794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:11.955567Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:11.955756Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:11.961894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:11.962083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:11.962255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2025-12-23T18:57:11.962334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:11.962420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:11.962525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:11.962595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:11.962656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:11.962717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:11.962829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:11.962920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:11.963010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:11.963076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:11.988465Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:11.988635Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:11.988685Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:11.988876Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:11.989037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:11.989111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:11.989162Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:11.989256Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:11.989324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:11.989373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:11.989412Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:11.989646Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:11.989732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:11.989776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:11.989812Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:11.989905Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:11.989972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:11.990034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:11.990068Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:11.990122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:11.990162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:11.990195Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-12-23T18:57:11.990267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:11.990320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:11.990358Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:11.990586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:11.990640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:11.990677Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:11.990851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:11.990900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:11.990949Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:11.991000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:11.991037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:11.991069Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:11.991113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:11.991148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:11.991204Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:11.991357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2025-12-23T18:57:11.991408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... mn_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=6417; 2025-12-23T18:58:13.369021Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=8; 2025-12-23T18:58:13.369689Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=626; 2025-12-23T18:58:13.369734Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=7311; 2025-12-23T18:58:13.369769Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=7437; 2025-12-23T18:58:13.369815Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2025-12-23T18:58:13.369898Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=41; 2025-12-23T18:58:13.369938Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7993; 2025-12-23T18:58:13.370069Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=87; 2025-12-23T18:58:13.370169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=62; 2025-12-23T18:58:13.370309Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=109; 2025-12-23T18:58:13.370453Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=108; 2025-12-23T18:58:13.372142Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1645; 2025-12-23T18:58:13.374303Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2093; 2025-12-23T18:58:13.374382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-12-23T18:58:13.374431Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-12-23T18:58:13.374473Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-12-23T18:58:13.374543Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2025-12-23T18:58:13.374584Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-12-23T18:58:13.374674Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=56; 2025-12-23T18:58:13.374720Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-12-23T18:58:13.374830Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=48; 2025-12-23T18:58:13.374925Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=51; 2025-12-23T18:58:13.375041Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=79; 2025-12-23T18:58:13.375093Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=20781; 2025-12-23T18:58:13.375252Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110437896;raw_bytes=171489958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:58:13.375364Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:58:13.375428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:58:13.375498Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:58:13.375544Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2370:4350];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:58:13.375700Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:58:13.375767Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:58:13.375804Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:58:13.375850Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:58:13.375918Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:58:13.375969Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:58:13.376032Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:58:13.376150Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:58:13.376364Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.148000s; 2025-12-23T18:58:13.379049Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:58:13.379249Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:58:13.379312Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:58:13.379388Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:58:13.379438Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:58:13.379495Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:58:13.379565Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:58:13.379623Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:58:13.379677Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:58:13.379786Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:58:13.379854Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:58:13.380618Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.179000s; 2025-12-23T18:58:13.380676Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2370:4350];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::BitVectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 65283, MsgBus: 13491 ... waiting for SysViewsRoster update finished 2025-12-23T18:58:07.339154Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:58:07.413738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:58:07.421583Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:07.421846Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:58:07.421901Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:07.624161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:07.624300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:07.678195Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516285300055 != 1766516285300059 2025-12-23T18:58:07.689235Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:07.731131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:07.882213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002b2c/r3tmp/tmpBVLgtp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 65283, node 1 2025-12-23T18:58:08.217546Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:58:08.217629Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:58:08.217673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:58:08.218030Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:58:08.229579Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13491 TClient is connected to server localhost:13491 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:58:08.556447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:58:08.612023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:58:08.905472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:58:09.206092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:58:09.480859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:58:10.143349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1901:3510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:10.143659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:10.144276Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3529], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:10.144372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:10.167206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:10.376422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:10.599558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:10.882886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:11.108251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:11.359169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:11.609588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:11.906685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:12.248711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4170], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:12.248864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:12.249331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:12.249425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4177], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:12.249480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:12.255295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:12.432803Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2796:4179], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:58:12.480463Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2856:4220] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:13.622121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:58:08.133427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:58:08.208053Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:58:08.215370Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:08.215602Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:58:08.215650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:08.425923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:08.426054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:08.485378Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516286252555 != 1766516286252559 2025-12-23T18:58:08.498206Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:08.542700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:08.643384Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fca/r3tmp/tmpZ9Hsaf/pdisk_1.dat 2025-12-23T18:58:08.932187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:58:08.947039Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:09.046353Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T18:58:09.046415Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T18:58:09.046522Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T18:58:09.174346Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T18:58:09.174435Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T18:58:09.175015Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T18:58:09.175107Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T18:58:09.175385Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T18:58:09.175500Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T18:58:09.175628Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T18:58:09.175860Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T18:58:09.177339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:09.178293Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T18:58:09.178352Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T18:58:09.206281Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:891:2767]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:09.207300Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:891:2767]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:09.207542Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2767] 2025-12-23T18:58:09.207758Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:09.215395Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:893:2769]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:09.240344Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:891:2767]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:09.240650Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:893:2769]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:09.240874Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:893:2769] 2025-12-23T18:58:09.241037Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:09.247431Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient 
[1:893:2769]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:09.248132Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:09.248277Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:09.249646Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:58:09.249714Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:58:09.249762Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:58:09.250076Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:09.250202Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:09.250273Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:922:2767] in generation 1 2025-12-23T18:58:09.250498Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:09.250555Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:09.251595Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:58:09.251648Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:58:09.251707Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:58:09.251985Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:09.252059Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:09.252104Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:923:2769] in generation 1 2025-12-23T18:58:09.262902Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:09.300707Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:58:09.300926Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:09.301052Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:926:2788] 2025-12-23T18:58:09.301093Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:58:09.301130Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:58:09.301163Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:58:09.301462Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:891:2767], Recipient [1:891:2767]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:09.301516Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:09.301587Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:09.301618Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:58:09.301657Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:09.301694Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:927:2789] 2025-12-23T18:58:09.301711Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:58:09.301754Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot ... Restarts 2025-12-23T18:58:15.907709Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-12-23T18:58:15.907742Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-12-23T18:58:15.907775Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:58:15.907864Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:15.907894Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-12-23T18:58:15.907928Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:58:15.907972Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:58:15.908025Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:58:15.908047Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:58:15.908081Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2025-12-23T18:58:15.918831Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-12-23T18:58:15.918912Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:58:15.918980Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-12-23T18:58:15.919075Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:58:15.921191Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:67:2114] Handle TEvProposeTransaction 2025-12-23T18:58:15.921254Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:67:2114] TxId# 281474976715661 ProcessProposeTransaction 2025-12-23T18:58:15.921347Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:272: actor# [2:67:2114] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:1058:2876] DataReq marker# P0 2025-12-23T18:58:15.921474Z node 2 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# 
[2:1058:2876] Cookie# 0 txid# 281474976715661 HANDLE TDataReq marker# P1 2025-12-23T18:58:15.921753Z node 2 :TX_PROXY DEBUG: datareq.cpp:1467: Actor# [2:1058:2876] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2025-12-23T18:58:15.921984Z node 2 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [2:1058:2876] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-12-23T18:58:15.922076Z node 2 :TX_PROXY DEBUG: datareq.cpp:1204: Actor# [2:1058:2876] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2025-12-23T18:58:15.922451Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [2:1058:2876], Recipient [2:884:2764]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1058 RawX2: 8589937468 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020&\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t\"\004\000\000\000\000\000\000\021<\013\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2025-12-23T18:58:15.922528Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:15.922630Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:58:15.922842Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:33: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:38:0] 2025-12-23T18:58:15.922934Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2025-12-23T18:58:15.923002Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-12-23T18:58:15.923064Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2025-12-23T18:58:15.923111Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:58:15.923148Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:58:15.923188Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-12-23T18:58:15.923254Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715661] at 72075186224037888 2025-12-23T18:58:15.923294Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-12-23T18:58:15.923321Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:58:15.923345Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2025-12-23T18:58:15.923366Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2025-12-23T18:58:15.923391Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-12-23T18:58:15.923413Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2025-12-23T18:58:15.923434Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2025-12-23T18:58:15.923456Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-12-23T18:58:15.923503Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:99: Requested stream clearance from [2:1058:2876] for [0:281474976715661] at 72075186224037888 2025-12-23T18:58:15.923537Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-12-23T18:58:15.923610Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T18:58:15.923702Z node 2 :TX_PROXY DEBUG: datareq.cpp:2504: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2025-12-23T18:58:15.923776Z node 2 :TX_PROXY DEBUG: datareq.cpp:2513: Collected all clerance requests, txid: 281474976715661 2025-12-23T18:58:15.923838Z node 2 :TX_PROXY DEBUG: datareq.cpp:2968: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2025-12-23T18:58:15.923957Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287942, Sender [2:1058:2876], Recipient [2:884:2764]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2025-12-23T18:58:15.924013Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-12-23T18:58:15.924127Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [2:1058:2876], Recipient [2:884:2764]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-12-23T18:58:15.924155Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-12-23T18:58:15.924221Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:884:2764], Recipient [2:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:15.924253Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:15.924313Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:58:15.924357Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:58:15.924403Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-12-23T18:58:15.924448Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 
2025-12-23T18:58:15.924516Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976715661] at 72075186224037888 2025-12-23T18:58:15.924558Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-12-23T18:58:15.924598Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2025-12-23T18:58:15.924640Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2025-12-23T18:58:15.924677Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2025-12-23T18:58:15.924875Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-12-23T18:58:15.924910Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T18:58:15.924948Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-12-23T18:58:15.924984Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:58:15.925020Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:58:15.925085Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:58:15.925570Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1064:2881], Recipient [2:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-12-23T18:58:15.925611Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_4_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 4] |98.6%| [TA] $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} |98.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} |98.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS_Reboot >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:58:09.374180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:58:09.450483Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:58:09.459685Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:09.459906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:58:09.459968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:09.657322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:09.657427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:09.705682Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516287506124 != 1766516287506128 2025-12-23T18:58:09.716726Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:09.756612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:09.856360Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fc4/r3tmp/tmpDHsq8P/pdisk_1.dat 2025-12-23T18:58:10.118129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:58:10.131347Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:10.232498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:10.267219Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:10.268198Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:10.268459Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:58:10.268709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:10.314846Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:10.315580Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:10.315705Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:10.317305Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:58:10.317375Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:58:10.317494Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:58:10.317856Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:10.317991Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:10.318084Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:58:10.328814Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:10.347170Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:58:10.347338Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:10.347435Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:58:10.347495Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:58:10.347522Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:58:10.347560Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:58:10.347750Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:10.347791Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:10.348007Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:58:10.348142Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:58:10.348453Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:58:10.348479Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:10.348510Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:58:10.348550Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:58:10.348608Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:58:10.348653Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:58:10.348693Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:58:10.348773Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:10.348806Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:10.348850Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:58:10.348912Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:58:10.348935Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:10.349001Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:58:10.349173Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:58:10.349207Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:58:10.349285Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:58:10.349318Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:58:10.349363Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:58:10.349388Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:58:10.349415Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:58:10.349656Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:10.349690Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:58:10.349713Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:58:10.349733Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:58:10.349772Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:58:10.349801Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:58:10.349830Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:58:10.349860Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:58:10.349895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:10.350998Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T18:58:10.351076Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... t/table-1' scheme changed., code: 2028 }. 2025-12-23T18:58:17.879949Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715662, task: 1. pass away 2025-12-23T18:58:17.880038Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:58:17.881623Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:58:17.881862Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1088:2906];scan_id=1;tx_id=281474976715662;fline=kqp_scan_fetcher_actor.cpp:106;event=TEvTerminateFromCompute;sender=[2:1085:2903];info={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:58:17.881919Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1088:2906];scan_id=1;tx_id=281474976715662;fline=kqp_scan_compute_manager.h:321;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:58:17.882188Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1080:2877] TxId: 281474976715662. Ctx: { TraceId: 01kd697312eq0ktfrz2xk438an, Database: , SessionId: ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [2:1085:2903], task: 1, state: COMPUTE_STATE_FAILURE, stats: { CpuTimeUs: 160005 Tasks { TaskId: 1 CpuTimeUs: 157036 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 12 BuildCpuTimeUs: 157024 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-ou4bsyjxme" NodeId: 2 CreateTimeMs: 1766516297231 CurrentWaitInputTimeUs: 129194 UpdateTimeMs: 1766516297879 } MaxMemoryUsage: 1048576 } 2025-12-23T18:58:17.882253Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715662. Ctx: { TraceId: 01kd697312eq0ktfrz2xk438an, Database: , SessionId: ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1085:2903] 2025-12-23T18:58:17.882324Z node 2 :KQP_EXECUTER INFO: kqp_executer_impl.h:1143: ActorId: [2:1080:2877] TxId: 281474976715662. Ctx: { TraceId: 01kd697312eq0ktfrz2xk438an, Database: , SessionId: ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. task: 1, does not have the CA id yet or is already complete 2025-12-23T18:58:17.882375Z node 2 :KQP_EXECUTER INFO: kqp_executer_impl.h:1138: ActorId: [2:1080:2877] TxId: 281474976715662. Ctx: { TraceId: 01kd697312eq0ktfrz2xk438an, Database: , SessionId: ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:1086:2904], task: 2 2025-12-23T18:58:17.882420Z node 2 :KQP_EXECUTER INFO: kqp_executer_impl.h:1138: ActorId: [2:1080:2877] TxId: 281474976715662. Ctx: { TraceId: 01kd697312eq0ktfrz2xk438an, Database: , SessionId: ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:1087:2905], task: 3 2025-12-23T18:58:17.882590Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [2:1080:2877] TxId: 281474976715662. Ctx: { TraceId: 01kd697312eq0ktfrz2xk438an, Database: , SessionId: ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:58:17.882704Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:04.000000Z, after 0.550000s 2025-12-23T18:58:17.882758Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [2:1086:2904], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01kd697312eq0ktfrz2xk438an. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2025-12-23T18:58:17.882813Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [2:1086:2904], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01kd697312eq0ktfrz2xk438an. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1080:2877], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-12-23T18:58:17.882934Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715662, task: 2. pass away 2025-12-23T18:58:17.883035Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:58:17.884340Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:58:17.884467Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:166: SelfId: [2:1087:2905], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01kd697312eq0ktfrz2xk438an. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2025-12-23T18:58:17.884519Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [2:1087:2905], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01kd697312eq0ktfrz2xk438an. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1080:2877], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-12-23T18:58:17.884585Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715662, task: 3. pass away 2025-12-23T18:58:17.884639Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-12-23T18:58:17.887220Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-12-23T18:58:17.887591Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=, ActorId: [2:1054:2877], ActorState: ExecuteState, TraceId: 01kd697312eq0ktfrz2xk438an, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } 2025-12-23T18:58:17.887991Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [2:67:2114] Handle TEvExecuteKqpTransaction 2025-12-23T18:58:17.888043Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [2:67:2114] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-12-23T18:58:17.888548Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down 2025-12-23T18:58:17.888626Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:67:2114] Handle TEvProposeTransaction 2025-12-23T18:58:17.888664Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:67:2114] TxId# 0 ProcessProposeTransaction 2025-12-23T18:58:17.888750Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [2:67:2114] Cookie# 0 userReqId# "" txid# 0 reqId# [2:1124:2937] SnapshotReq marker# P0 2025-12-23T18:58:17.889112Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [2:1127:2937] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-12-23T18:58:17.889283Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715664. Resolved key sets: 0 2025-12-23T18:58:17.889428Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:580: TxId: 281474976715664. Ctx: { TraceId: 01kd697312eq0ktfrz2xk438an, Database: , SessionId: ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-12-23T18:58:17.889487Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2620: ActorId: [2:1123:2877] TxId: 281474976715664. Ctx: { TraceId: 01kd697312eq0ktfrz2xk438an, Database: , SessionId: ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-12-23T18:58:17.889584Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [2:1123:2877] TxId: 281474976715664. Ctx: { TraceId: 01kd697312eq0ktfrz2xk438an, Database: , SessionId: ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:58:17.889647Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [2:1123:2877] TxId: 281474976715664. Ctx: { TraceId: 01kd697312eq0ktfrz2xk438an, Database: , SessionId: ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-12-23T18:58:17.889704Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [2:1127:2937] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-12-23T18:58:17.889948Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [2:829:2724], selfId: [2:65:2112], source: [2:1054:2877] 2025-12-23T18:58:17.892015Z node 2 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [2:1124:2937] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-12-23T18:58:17.892386Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1124:2937], Recipient [2:886:2765]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2025-12-23T18:58:17.893224Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=2&id=NTU4NDlkNzUtZTMxOTRjOTItNzU3Y2Y4YjgtMTIwZDQyNGM=, workerId: [2:1054:2877], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 357 |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:58:12.247030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:58:12.322033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:58:12.329548Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:12.329777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:58:12.329843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:12.529751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:12.529851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:12.575720Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516290373602 != 1766516290373606 2025-12-23T18:58:12.587865Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:12.631142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:12.724153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fbe/r3tmp/tmpEnSSVZ/pdisk_1.dat 2025-12-23T18:58:12.980548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:58:12.993977Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:13.095875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:13.123625Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:13.124415Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:13.124626Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T18:58:13.124792Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:13.156369Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:13.156930Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:13.157019Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:13.158203Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:58:13.158255Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:58:13.158348Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:58:13.158608Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:13.158708Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:13.158797Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T18:58:13.169364Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:13.191482Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:58:13.191650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:13.191779Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T18:58:13.191810Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:58:13.191833Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:58:13.191877Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:58:13.192059Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:13.192097Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:13.192293Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:58:13.192394Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:58:13.192684Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:58:13.192712Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:13.192739Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:58:13.192769Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:58:13.192804Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:58:13.192833Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:58:13.192862Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:58:13.192922Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:13.192945Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:13.192980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T18:58:13.193036Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T18:58:13.193063Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:13.193138Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:58:13.193294Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:58:13.193324Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:58:13.193421Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T18:58:13.193454Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:58:13.193490Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:58:13.193515Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:58:13.193543Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T18:58:13.193751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:13.193782Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T18:58:13.193808Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T18:58:13.193829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T18:58:13.193860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T18:58:13.193884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T18:58:13.193920Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T18:58:13.193943Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T18:58:13.193960Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:13.195014Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T18:58:13.195048Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [2:1276:3056], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 337 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 61 FinishTimeMs: 1766516299884 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 27 BuildCpuTimeUs: 34 HostName: "ghrun-ou4bsyjxme" NodeId: 2 StartTimeMs: 1766516299883 CreateTimeMs: 1766516299878 UpdateTimeMs: 1766516299884 } MaxMemoryUsage: 1048576 } 2025-12-23T18:58:19.886104Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1276:3056] 2025-12-23T18:58:19.886152Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1279:3059], CA [2:1280:3060], CA [2:1277:3057], CA [2:1281:3061], CA [2:1278:3058], 2025-12-23T18:58:19.886191Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1279:3059], CA [2:1280:3060], CA [2:1277:3057], CA [2:1281:3061], CA [2:1278:3058], 2025-12-23T18:58:19.886643Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1277:3057], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 407 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 215 FinishTimeMs: 1766516299884 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 168 BuildCpuTimeUs: 47 HostName: "ghrun-ou4bsyjxme" NodeId: 2 StartTimeMs: 1766516299884 CreateTimeMs: 1766516299878 UpdateTimeMs: 1766516299884 } MaxMemoryUsage: 1048576 } 2025-12-23T18:58:19.886711Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1277:3057] 2025-12-23T18:58:19.886772Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1279:3059], CA [2:1280:3060], CA [2:1281:3061], CA [2:1278:3058], 2025-12-23T18:58:19.886810Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1279:3059], CA [2:1280:3060], CA [2:1281:3061], CA [2:1278:3058], 2025-12-23T18:58:19.887219Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [2:1278:3058], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 316 DurationUs: 2000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 141 FinishTimeMs: 1766516299886 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 115 BuildCpuTimeUs: 26 HostName: "ghrun-ou4bsyjxme" NodeId: 2 StartTimeMs: 1766516299884 CreateTimeMs: 1766516299878 UpdateTimeMs: 1766516299886 } MaxMemoryUsage: 1048576 } 2025-12-23T18:58:19.887284Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1278:3058] 2025-12-23T18:58:19.887326Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1279:3059], CA [2:1280:3060], CA [2:1281:3061], 2025-12-23T18:58:19.887360Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [2:1271:3037] TxId: 281474976715667. 
Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1279:3059], CA [2:1280:3060], CA [2:1281:3061], 2025-12-23T18:58:19.887600Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [2:1279:3059], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 504 DurationUs: 2000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 284 FinishTimeMs: 1766516299886 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 236 BuildCpuTimeUs: 48 HostName: "ghrun-ou4bsyjxme" NodeId: 2 StartTimeMs: 1766516299884 CreateTimeMs: 1766516299878 UpdateTimeMs: 1766516299886 } MaxMemoryUsage: 1048576 } 2025-12-23T18:58:19.887655Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1279:3059] 2025-12-23T18:58:19.887693Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1280:3060], CA [2:1281:3061], 2025-12-23T18:58:19.887724Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1280:3060], CA [2:1281:3061], 2025-12-23T18:58:19.888037Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [2:1280:3060], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 311 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 145 FinishTimeMs: 1766516299887 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 80 BuildCpuTimeUs: 65 HostName: "ghrun-ou4bsyjxme" NodeId: 2 CreateTimeMs: 1766516299878 UpdateTimeMs: 1766516299887 } MaxMemoryUsage: 1048576 } 2025-12-23T18:58:19.888096Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1280:3060] 2025-12-23T18:58:19.888138Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:843: ActorId: [2:1271:3037] TxId: 281474976715667. 
Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1281:3061], 2025-12-23T18:58:19.888167Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:164: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1281:3061], 2025-12-23T18:58:19.888271Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:534: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, got execution state from compute actor: [2:1281:3061], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 300 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 113 FinishTimeMs: 1766516299887 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 82 BuildCpuTimeUs: 31 HostName: "ghrun-ou4bsyjxme" NodeId: 2 StartTimeMs: 1766516299887 CreateTimeMs: 1766516299878 UpdateTimeMs: 1766516299887 } MaxMemoryUsage: 1048576 } 2025-12-23T18:58:19.888318Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:751: TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1281:3061] 2025-12-23T18:58:19.888541Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1386: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. terminate execution. 2025-12-23T18:58:19.888598Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1062: ActorId: [2:1271:3037] TxId: 281474976715667. Ctx: { TraceId: 01kd6975vab2myh62jr30pzws5, Database: , SessionId: ydb://session/3?node_id=2&id=YWI1NmMxNzEtYTQ2YjY0NDktODU2N2Q3LThlZjlmOWRm, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval: ComputeTime: 0.003001s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> AnalyzeColumnshard::AnalyzeRebootSa [GOOD] >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:58:15.806685Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:58:15.806845Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:58:15.883849Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:58:15.884780Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:58:15.888783Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:505:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:15.889059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:15.889191Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:58:15.890672Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:500:2171], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:15.890948Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:58:15.891105Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:16.198264Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:16.293074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:16.293177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:16.293799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:16.293863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:16.337577Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:58:16.338206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:16.338494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:16.405685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:58:16.420018Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fbb/r3tmp/tmpi0OG3Z/pdisk_1.dat 2025-12-23T18:58:17.057590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:58:17.126214Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:17.126392Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:17.462433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:17.523900Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [2:1591:2383], Recipient [2:1617:2396]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:17.528166Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [2:1591:2383], Recipient [2:1617:2396]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:17.528519Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1617:2396] 2025-12-23T18:58:17.528786Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:17.585421Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [2:1591:2383], Recipient [2:1617:2396]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:17.590840Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:17.591080Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:17.592715Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:58:17.592804Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:58:17.592865Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:58:17.593217Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:17.594013Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:17.594109Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1641:2396] in generation 1 2025-12-23T18:58:17.597068Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:17.638821Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:58:17.639018Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 
2025-12-23T18:58:17.639134Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:1645:2413] 2025-12-23T18:58:17.639174Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:58:17.639209Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:58:17.639247Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:58:17.639535Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1617:2396], Recipient [2:1617:2396]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:17.639596Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:17.639914Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:58:17.640021Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:58:17.640169Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:58:17.640209Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:17.640247Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:58:17.640284Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:58:17.640332Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:58:17.640387Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:58:17.640436Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:58:17.640651Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1614:2394], Recipient [2:1617:2396]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:17.640701Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:17.640746Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1600:3051], serverId# [2:1614:2394], sessionId# [0:0:0] 2025-12-23T18:58:17.641550Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:818:2458], Recipient [2:1614:2394] 2025-12-23T18:58:17.641608Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:17.641717Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:58:17.641980Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:58:17.642034Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction 
at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:58:17.642139Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-12-23T18:58:17.642191Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:58:17.642233Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:58:17.642276Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:58:17.642311Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710657] at 72075186224037888 on un ... iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:58:20.195791Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:58:20.196574Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:2006:2451], Recipient [2:1617:2396]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:58:20.196623Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-12-23T18:58:20.357763Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:2024:2452], Recipient [2:1617:2396]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } LockTxId: 281474976710662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-12-23T18:58:20.358004Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:58:20.358076Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-12-23T18:58:20.358156Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:58:20.358187Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:58:20.358218Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:58:20.358249Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:58:20.358292Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2025-12-23T18:58:20.358324Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:58:20.358347Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 
2025-12-23T18:58:20.358365Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:58:20.358384Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:58:20.358484Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } LockTxId: 281474976710662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-12-23T18:58:20.358760Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976710662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T18:58:20.358809Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2025-12-23T18:58:20.358847Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:2024:2452], 0} after executionsCount# 1 2025-12-23T18:58:20.358886Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:2024:2452], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:58:20.358957Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:2024:2452], 0} finished in read 2025-12-23T18:58:20.359026Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:58:20.359050Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:58:20.359072Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:58:20.359096Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:58:20.359139Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2025-12-23T18:58:20.359158Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:58:20.359179Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2025-12-23T18:58:20.359212Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:58:20.359315Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:58:20.360082Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:2024:2452], Recipient [2:1617:2396]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:58:20.360143Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } 
items { uint32_value: 1 } } 2025-12-23T18:58:20.501979Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:2048:2453], Recipient [2:1617:2396]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976710666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-12-23T18:58:20.502192Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:58:20.502267Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-12-23T18:58:20.502351Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2025-12-23T18:58:20.502381Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:58:20.502413Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:58:20.502443Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:58:20.502478Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:8] at 72075186224037888 2025-12-23T18:58:20.502513Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2025-12-23T18:58:20.502532Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:58:20.502551Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:58:20.502609Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:58:20.502705Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976710666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-12-23T18:58:20.502979Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976710666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T18:58:20.503029Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2025-12-23T18:58:20.503073Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:2048:2453], 0} after executionsCount# 1 2025-12-23T18:58:20.503113Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:2048:2453], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 
2025-12-23T18:58:20.503181Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:2048:2453], 0} finished in read 2025-12-23T18:58:20.503240Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2025-12-23T18:58:20.503262Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:58:20.503285Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:58:20.503311Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:58:20.503347Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2025-12-23T18:58:20.503367Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:58:20.503386Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:8] at 72075186224037888 has finished 2025-12-23T18:58:20.503418Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T18:58:20.503509Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:58:20.504446Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:2048:2453], Recipient [2:1617:2396]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:58:20.504496Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-12-23T18:58:20.504709Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:258:2139], Recipient [2:1617:2396]: NKikimrLongTxService.TEvLockStatus LockId: 281474976710666 LockNode: 1 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2025-12-23T18:58:15.683095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:15.683142Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:15.684240Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:15.693932Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:15.694216Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: 
TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:15.694402Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:15.724788Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:15.730944Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:15.731060Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:15.732741Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:15.732820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:15.732984Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:15.733330Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:15.733396Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:15.733476Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:58:15.818104Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:15.854645Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:15.854836Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:15.854928Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:58:15.854984Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:15.855017Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:15.855054Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:15.855242Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:15.855304Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:15.855602Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:15.855711Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:15.855852Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:15.855902Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:15.855941Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:15.855981Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:15.856011Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:15.856043Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:15.856081Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:15.856164Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:15.856195Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:15.856269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:15.862415Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:15.862473Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:15.862579Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:15.862733Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:15.862792Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:15.862852Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:58:15.862894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:15.862938Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:15.862984Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:58:15.863016Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:15.863255Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:15.863314Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:58:15.863354Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:15.863384Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:15.863420Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:15.863465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan 
for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:15.863497Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:15.863527Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:15.863549Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:15.875378Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:15.875442Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:15.875471Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:15.875508Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:15.875569Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:58:15.876004Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:15.876053Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:15.876104Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:15.876232Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:15.876263Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:15.876380Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:15.876421Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:15.876465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:15.876499Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:15.879880Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:58:15.879940Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:15.880153Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:15.880189Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 
2025-12-23T18:58:15.880251Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:15.880290Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:15.880317Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:15.880353Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:58:15.880403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 58:21.839595Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:154] at 9437184 on unit CompleteOperation 2025-12-23T18:58:21.839739Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:154] at 9437184 is DelayComplete 2025-12-23T18:58:21.839762Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:154] at 9437184 executing on unit CompleteOperation 2025-12-23T18:58:21.839789Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:154] at 9437184 to execution unit CompletedOperations 2025-12-23T18:58:21.839810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:154] at 9437184 on unit CompletedOperations 2025-12-23T18:58:21.839836Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:154] at 9437184 is Executed 2025-12-23T18:58:21.839856Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:154] at 9437184 executing on unit CompletedOperations 2025-12-23T18:58:21.839877Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:154] at 9437184 has finished 2025-12-23T18:58:21.839918Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:21.839948Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:21.839978Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:21.840014Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:21.859720Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:58:21.859781Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-12-23T18:58:21.859836Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2025-12-23T18:58:21.859907Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:58:21.859951Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:58:21.860088Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:58:21.860136Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437186 2025-12-23T18:58:21.860161Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-12-23T18:58:21.860193Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2025-12-23T18:58:21.860236Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:58:21.860259Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:58:21.860626Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:463:2405], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:58:21.860670Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:21.860701Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-12-23T18:58:21.860780Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:463:2405], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:58:21.860808Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:21.860844Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-12-23T18:58:21.861029Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:21.861059Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2025-12-23T18:58:21.861101Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 3 ms, propose latency: 5 ms 2025-12-23T18:58:21.861149Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-12-23T18:58:21.861177Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:21.861286Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:21.861324Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:150] at 9437184 on unit CompleteOperation 2025-12-23T18:58:21.861356Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 150] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 2 ms 2025-12-23T18:58:21.861381Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:21.861466Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:21.861503Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-12-23T18:58:21.861555Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 3 ms, propose latency: 5 ms 2025-12-23T18:58:21.861605Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-12-23T18:58:21.861633Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:21.861721Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:21.861742Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-12-23T18:58:21.861789Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 3 ms, propose latency: 5 ms 2025-12-23T18:58:21.861846Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-12-23T18:58:21.861872Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:21.861961Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:21.861982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2025-12-23T18:58:21.862025Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 2 ms 2025-12-23T18:58:21.862051Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:21.862138Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:21.862158Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-12-23T18:58:21.862184Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2025-12-23T18:58:21.862220Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-12-23T18:58:21.862257Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:21.862419Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-12-23T18:58:21.862459Z node 1 :TX_DATASHARD 
TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:21.862487Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-12-23T18:58:21.862586Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-12-23T18:58:21.862617Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:21.862660Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-12-23T18:58:21.862764Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-12-23T18:58:21.862790Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:21.862812Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-12-23T18:58:21.862875Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-12-23T18:58:21.862899Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:21.862924Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_2_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 2] >> DataShardTxOrder::ImmediateBetweenOnline_Init |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSa [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:19.448002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:19.533847Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:19.538799Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:19.539093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:19.539182Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:19.920570Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.026431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.026630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:20.061455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:20.152053Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004a26/r3tmp/tmp1Y2mWE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32454, node 1 2025-12-23T18:55:21.194796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.195520Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.195608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.195631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.196052Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.260276Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12382 2025-12-23T18:55:21.810496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:24.736980Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:24.743067Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:24.746672Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:24.771059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.771188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.809320Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:24.810953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.964106Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.964684Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.965190Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.965822Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.966020Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.966112Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.966272Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.966463Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.966580Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:25.135528Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:25.157821Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:25.157930Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.170951Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.342491Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.372666Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:25.372777Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:25.396671Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:25.397888Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:25.398070Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:25.398117Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:25.398165Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:25.398215Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:25.398274Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:25.398319Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:25.399155Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:25.512479Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.512572Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.524497Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:55:25.525781Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:55:25.639834Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:55:25.640184Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:25.642249Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:55:25.642304Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:55:25.642387Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:25.645792Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.650173Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:25.656464Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:25.656563Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:25.667206Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:25.818399Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:26.226211Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:26.324881Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:26.324998Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2 ... ISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:56:40.661962Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-12-23T18:56:40.662031Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-12-23T18:56:40.662130Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-12-23T18:56:40.662185Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:56:40.663048Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:56:40.663226Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:4520:3920] Owner: [2:4519:3919]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:56:40.663276Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:4520:3920] Owner: [2:4519:3919]. Column diff is empty, finishing 2025-12-23T18:56:40.680822Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4527:3925] 2025-12-23T18:56:40.681249Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:4527:3925], schemeshard id = 72075186224037897 2025-12-23T18:56:40.681376Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4486:3900], server id = [2:4528:3926], tablet id = 72075186224037894, status = OK 2025-12-23T18:56:40.681423Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4528:3926] 2025-12-23T18:56:40.681494Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4528:3926], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-12-23T18:56:40.782462Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4533:3929] 2025-12-23T18:56:40.783186Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3232:3459] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2025-12-23T18:56:40.783241Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:40: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. 
OperationId operationId , ReplyToActorId [1:3232:3459] 2025-12-23T18:56:40.783298Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2025-12-23T18:56:40.783474Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T18:56:40.784063Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4536:3931], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:56:40.787297Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4536:3931], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2025-12-23T18:56:40.787437Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4536:3931], Start read next stream part 2025-12-23T18:56:40.827299Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4552:3946]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:40.827497Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:40.827549Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4552:3946], StatRequests.size() = 1 2025-12-23T18:57:38.164218Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4536:3931], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:57:38.164449Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4536:3931], Start read next stream part 2025-12-23T18:57:38.165338Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4723:4042], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:57:38.167591Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd6945ak11hkna680wb42kfn", SessionId: ydb://session/3?node_id=2&id=YzdjZTY4OGYtZjhlM2FiZTItZTkyOWUyN2EtY2NmZWI1YWY=, Slow query, duration: 57.376645s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:57:38.168934Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4723:4042], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table` 2025-12-23T18:57:38.169030Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4723:4042], Start read next stream part 2025-12-23T18:57:38.169306Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33190, txId: 18446744073709551615] shutting down 2025-12-23T18:57:38.172202Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4536:3931], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:57:38.172263Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4536:3931], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGMxNzMyNjUtNTc4MjNlZjgtN2E1ZmU5NWQtYzk4MDhmODA=, TxId: 2025-12-23T18:57:38.264200Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4743:4058]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:57:38.264415Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:57:38.264446Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4743:4058], StatRequests.size() = 1 2025-12-23T18:58:19.948721Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4723:4042], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:58:19.948835Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4723:4042], Start read next stream part 2025-12-23T18:58:19.949429Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd695xbr7gn264sn6qnqkrp4", SessionId: ydb://session/3?node_id=2&id=ZjQyZDU1MzQtNzg3ZTUzMzQtNmM5NGY2NWUtZTc1ZWNlZjI=, Slow query, duration: 41.776444s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(256,8)) FROM 
`/Root/Database/Table`", parameters: 0b 2025-12-23T18:58:19.951644Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33250, txId: 18446744073709551615] shutting down 2025-12-23T18:58:19.951894Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4900:4143], ActorId: [2:4901:4144], Starting query actor #1 [2:4902:4145] 2025-12-23T18:58:19.951960Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4901:4144], ActorId: [2:4902:4145], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:58:19.954172Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4723:4042], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:58:19.954217Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4534:3904], ActorId: [2:4723:4042], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjQxZTNjOS00ZGZjOTcyLThkNWYzMTE2LTZlMGJhNzZi, TxId: 2025-12-23T18:58:19.955340Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4901:4144], ActorId: [2:4902:4145], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MjQ1ZjA4MGQtMzNhYzY2YzktZmY3MDgzZWYtMjYzMDE3MzI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:58:20.028549Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4918:4159]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:58:20.028885Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T18:58:20.028930Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4918:4159], StatRequests.size() = 1 2025-12-23T18:58:20.182236Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4901:4144], ActorId: [2:4902:4145], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjQ1ZjA4MGQtMzNhYzY2YzktZmY3MDgzZWYtMjYzMDE3MzI=, TxId: 2025-12-23T18:58:20.182303Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4901:4144], ActorId: [2:4902:4145], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjQ1ZjA4MGQtMzNhYzY2YzktZmY3MDgzZWYtMjYzMDE3MzI=, TxId: 2025-12-23T18:58:20.182572Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4900:4143], ActorId: [2:4901:4144], Got response [2:4902:4145] SUCCESS 2025-12-23T18:58:20.182899Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:58:20.196964Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:58:20.197027Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3232:3459] |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:78:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:81:2057] recipient: [10:80:2112] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:83:2057] recipient: [10:80:2112] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:82:2113] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:198:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:78:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:81:2057] recipient: [11:80:2112] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:83:2057] recipient: [11:80:2112] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:82:2113] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:198:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:79:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:82:2057] recipient: [12:81:2112] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:84:2057] recipient: [12:81:2112] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:83:2113] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:199:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:82:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:85:2057] recipient: [13:84:2115] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:87:2057] recipient: [13:84:2115] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:86:2116] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:202:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:82:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:85:2057] recipient: [14:84:2115] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:87:2057] recipient: [14:84:2115] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:86:2116] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:202:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:83:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:86:2057] recipient: [15:85:2115] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:88:2057] recipient: [15:85:2115] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! 
new actor is[15:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] 2025-12-23T18:58:23.199252Z node 17 :KEYVALUE ERROR: keyvalue_storage_read_request.cpp:256: {KV323@keyvalue_storage_read_request.cpp:256} Received BLOCKED EvGetResult. KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 0 ErrorReason# block race detected 2025-12-23T18:58:23.204442Z node 17 :TABLET_MAIN ERROR: tablet_sys.cpp:1006: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2025-12-23T18:58:23.204534Z node 17 :TABLET_MAIN ERROR: tablet_sys.cpp:1925: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3 Marker# TSYS31 |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> DataShardTxOrder::DelayData >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite >> DataShardTxOrder::RandomPoints_DelayData >> DataShardOutOfOrder::UncommittedReadSetAck >> DataShardTxOrder::ForceOnlineBetweenOnline >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> TKeyValueTest::TestCopyRangeWorks [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi >> KqpWorkload::STOCK [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [GOOD] Test command err: Trying to start YDB, gRPC: 22015, MsgBus: 15923 2025-12-23T18:57:31.435993Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129529244858773:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.436056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fda/r3tmp/tmp6A3NLS/pdisk_1.dat 2025-12-23T18:57:31.755348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-12-23T18:57:31.755466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:31.811987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:31.816160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:31.830839Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129529244858740:2081] 1766516251429731 != 1766516251429734 2025-12-23T18:57:31.843103Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22015, node 1 2025-12-23T18:57:31.996614Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:57:32.185043Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.185079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.185095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.185226Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.445259Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:15923 TClient is connected to server localhost:15923 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516251899 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.103411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:57:33.109486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:57:34.615979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129542129761516:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.616171Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.616658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129542129761526:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.616772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.990306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.145694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.671293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:36.084128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129550719700055:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.084234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.084361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129550719700061:2630], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.084393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129550719700060:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.084403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.088151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:36.100322Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129550719700064:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-12-23T18:57:36.195428Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129550719700115:5099] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:36.437109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129529244858773:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:36.437278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:57:46.706054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:57:46.706099Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded took: 0.527408s took: 0.535416s took: 0.539294s took: 0.540978s took: 0.545641s took: 0.557361s took: 0.558962s took: 0.559554s took: 0.559748s took: 0.580373s took: 5.325071s took: 5.335407s took: 5.351051s took: 5.350991s took: 5.352183s took: 5.354069s took: 5.359027s took: 5.358551s took: 5.362318s took: 5.363504s 2025-12-23T18:58:26.394876Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711188; 2025-12-23T18:58:26.419496Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711185; 2025-12-23T18:58:26.420502Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711189; 2025-12-23T18:58:26.421447Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711187; 2025-12-23T18:58:26.421836Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711186; 2025-12-23T18:58:26.425176Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [1:7587129765468077422:5480], Table: `/Root/stock` ([72057594046644480:38:1]), SessionActorId: [1:7587129735403304605:5480]Got LOCKS BROKEN for table `/Root/stock`. ShardID=72075186224037888, Sink=[1:7587129765468077422:5480].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-12-23T18:58:26.425723Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:7587129761173109143:5480], SessionActorId: [1:758712973540330460 ... abletStatus from node 1, TabletId: 72075186224037925 not found 2025-12-23T18:58:27.327490Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-12-23T18:58:27.327500Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2025-12-23T18:58:27.327510Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2025-12-23T18:58:27.327520Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-12-23T18:58:27.327528Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-12-23T18:58:27.327538Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-12-23T18:58:27.327546Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2025-12-23T18:58:27.327556Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-12-23T18:58:27.327573Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-12-23T18:58:27.327583Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-12-23T18:58:27.331697Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-12-23T18:58:27.331727Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-12-23T18:58:27.331742Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-12-23T18:58:27.331757Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-12-23T18:58:27.331772Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-12-23T18:58:27.331807Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2025-12-23T18:58:27.331827Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-12-23T18:58:27.339666Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-12-23T18:58:27.339708Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, 
TabletId: 72075186224037904 not found 2025-12-23T18:58:27.339733Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-12-23T18:58:27.352471Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-12-23T18:58:27.352509Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-12-23T18:58:27.352527Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-12-23T18:58:27.354651Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-12-23T18:58:27.354682Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-12-23T18:58:27.354697Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-12-23T18:58:27.354711Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-12-23T18:58:27.354729Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-12-23T18:58:27.357093Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-12-23T18:58:27.456152Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037963 not found 2025-12-23T18:58:27.456190Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037950 not found 2025-12-23T18:58:27.456205Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037965 not found 2025-12-23T18:58:27.456218Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037959 not found 2025-12-23T18:58:27.456232Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037948 not found 2025-12-23T18:58:27.456250Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2025-12-23T18:58:27.456407Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037956 not found 2025-12-23T18:58:27.468738Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037954 not found 2025-12-23T18:58:27.468776Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037962 not found 2025-12-23T18:58:27.468808Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037949 not found 2025-12-23T18:58:27.468859Z node 1 :HIVE WARN: 
hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037952 not found 2025-12-23T18:58:27.468885Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 2025-12-23T18:58:27.468899Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037958 not found 2025-12-23T18:58:27.468913Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037968 not found 2025-12-23T18:58:27.468927Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037960 not found 2025-12-23T18:58:27.468943Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037964 not found 2025-12-23T18:58:27.468958Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found 2025-12-23T18:58:27.468972Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037951 not found 2025-12-23T18:58:27.468993Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found 2025-12-23T18:58:27.499129Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2025-12-23T18:58:27.499173Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2025-12-23T18:58:27.499192Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found 2025-12-23T18:58:27.499208Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037967 not found 2025-12-23T18:58:27.499224Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found 2025-12-23T18:58:27.499239Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037961 not found 2025-12-23T18:58:27.499254Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found 2025-12-23T18:58:27.499275Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2025-12-23T18:58:27.499294Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037957 not found 2025-12-23T18:58:27.499310Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037966 not found 2025-12-23T18:58:27.499325Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2025-12-23T18:58:27.499340Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, 
TabletId: 72075186224037938 not found 2025-12-23T18:58:27.499354Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found 2025-12-23T18:58:27.499379Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found 2025-12-23T18:58:27.499398Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2025-12-23T18:58:27.499414Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037947 not found 2025-12-23T18:58:27.499437Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found 2025-12-23T18:58:27.499453Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037955 not found 2025-12-23T18:58:27.499467Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037953 not found 2025-12-23T18:58:27.499486Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037969 not found 2025-12-23T18:58:27.499507Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] Test command err: 2025-12-23T18:58:23.662334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:23.662386Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:23.663955Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:23.677904Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:23.678156Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:23.678360Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:23.713770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:23.719370Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:23.719447Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:23.720897Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:23.720964Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:23.721056Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:23.721304Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: 
TDataShard::TTxInit::Complete 2025-12-23T18:58:23.721356Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:23.721410Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:58:23.795213Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:23.832438Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:23.832618Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:23.832713Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:58:23.832749Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:23.832789Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:23.832824Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:23.832970Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:23.833027Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:23.833419Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:23.833525Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:23.833699Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:23.833757Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:23.833795Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:23.833832Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:23.833868Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:23.833900Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:23.833947Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:23.834041Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:23.834078Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:23.834131Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:23.841131Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient 
[1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:23.841213Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:23.841333Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:23.841519Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:23.841568Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:23.841634Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:58:23.841678Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:23.841717Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:23.841750Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:58:23.841785Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:23.842081Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:23.842150Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:58:23.842194Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:23.842228Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:23.842274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:23.842304Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:23.842347Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:23.842403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:23.842428Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:23.854787Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:23.854858Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:23.854897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:23.854936Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 
at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:23.855025Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:58:23.855554Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:23.855609Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:23.855667Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:23.855804Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:23.855840Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:23.855975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:23.856025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:23.856082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:23.856124Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:23.860176Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:58:23.860248Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:23.860437Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:23.860503Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:23.860578Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:23.860622Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:23.860658Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:23.860696Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:58:23.860761Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
dSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-12-23T18:58:28.328853Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:58:28.328921Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:58:28.328936Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-12-23T18:58:28.328955Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 2 ms 2025-12-23T18:58:28.328978Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:58:28.329000Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:58:28.329077Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:58:28.329103Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:58:28.329200Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:353:2320]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-12-23T18:58:28.329221Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:28.329243Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-12-23T18:58:28.329294Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:464:2406], Recipient [1:464:2406]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:28.329322Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:28.329373Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2025-12-23T18:58:28.329400Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:58:28.329426Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-12-23T18:58:28.329446Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-12-23T18:58:28.329469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2025-12-23T18:58:28.329496Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-12-23T18:58:28.329523Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2025-12-23T18:58:28.329542Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit 
BlockFailPoint 2025-12-23T18:58:28.329571Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2025-12-23T18:58:28.329595Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2025-12-23T18:58:28.329612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-12-23T18:58:28.329625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-12-23T18:58:28.330019Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-12-23T18:58:28.330055Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:58:28.330094Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-12-23T18:58:28.330146Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-12-23T18:58:28.330171Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-12-23T18:58:28.330198Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-12-23T18:58:28.330365Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-12-23T18:58:28.330399Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-12-23T18:58:28.330422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-12-23T18:58:28.330440Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-12-23T18:58:28.330476Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2025-12-23T18:58:28.330491Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-12-23T18:58:28.330507Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2025-12-23T18:58:28.330526Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:28.330550Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2025-12-23T18:58:28.330576Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2025-12-23T18:58:28.330593Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2025-12-23T18:58:28.330778Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, 
Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-12-23T18:58:28.330800Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:28.330824Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-12-23T18:58:28.330909Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-12-23T18:58:28.330930Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:28.330945Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-12-23T18:58:28.331026Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-12-23T18:58:28.331045Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:28.331059Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-12-23T18:58:28.331090Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-12-23T18:58:28.331112Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:28.331135Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-12-23T18:58:28.331198Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-12-23T18:58:28.331213Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:28.331230Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-12-23T18:58:28.331278Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:58:28.331297Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:28.331312Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-12-23T18:58:28.344240Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
9437186 2025-12-23T18:58:28.344297Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-12-23T18:58:28.344339Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2025-12-23T18:58:28.344388Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:58:28.344432Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:58:28.344632Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:464:2406], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:58:28.344675Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:28.344725Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] >> DataShardTxOrder::ZigZag_oo8_dirty >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> DataShardTxOrder::ReadWriteReorder >> TxOrderInternals::OperationOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:58:21.747323Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:58:21.824634Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:58:21.832300Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:21.832555Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:58:21.832653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:22.047524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:22.047645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:22.124774Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516299534901 != 1766516299534905 2025-12-23T18:58:22.139977Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:22.186211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:22.288289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fb5/r3tmp/tmpBrTAJE/pdisk_1.dat 2025-12-23T18:58:22.557540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:58:22.571988Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:22.676232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:23.009952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:23.228089Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1113:2914], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:23.228264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1123:2919], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:23.228367Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:23.229263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1128:2923], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:23.229350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:23.233026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:23.419800Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1127:2922], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:58:23.488715Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1186:2962] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets ... sending immediate upsert ... waiting for immediate propose ... immediate upsert is blocked 2025-12-23T18:58:24.542379Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting;tx_id=281474976715666; 2025-12-23T18:58:24.553205Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:876: SelfId: [1:1382:3015], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [1:1263:3015]Got OVERLOADED for table `/Root/table-1`. ShardID=72075186224037889, Sink=[1:1382:3015]. Ignored this error.{
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } 2025-12-23T18:58:24.553877Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:1375:3015], SessionActorId: [1:1263:3015], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 . sessionActorId=[1:1263:3015]. 2025-12-23T18:58:24.554839Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=ZDUyYjBkOWItYjRkYTJlNWItM2VmODg3MGUtNTcxYjQ1ZTM=, ActorId: [1:1263:3015], ActorState: ExecuteState, TraceId: 01kd697aj8f5r4d01mas331w7t, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:1376:3015] from: [1:1375:3015] 2025-12-23T18:58:24.555043Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:1376:3015] TxId: 281474976715665. Ctx: { TraceId: 01kd697aj8f5r4d01mas331w7t, Database: , SessionId: ydb://session/3?node_id=1&id=ZDUyYjBkOWItYjRkYTJlNWItM2VmODg3MGUtNTcxYjQ1ZTM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. OVERLOADED: {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006 subissue: {
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } } 2025-12-23T18:58:24.558248Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=ZDUyYjBkOWItYjRkYTJlNWItM2VmODg3MGUtNTcxYjQ1ZTM=, ActorId: [1:1263:3015], ActorState: ExecuteState, TraceId: 01kd697aj8f5r4d01mas331w7t, Create QueryResponse for error on request, msg: , status: OVERLOADED, issues: { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`." issue_code: 2006 severity: 1 issues { message: "Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting" issue_code: 2006 severity: 1 } } 2025-12-23T18:58:24.559345Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1284: SelfId: [1:1348:3017], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [1:1265:3017]TEvDeliveryProblem was received from tablet: 72075186224037889 2025-12-23T18:58:24.559447Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:1338:3017], SessionActorId: [1:1265:3017], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 . sessionActorId=[1:1265:3017]. 2025-12-23T18:58:24.559911Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=ODU1Y2QwNWYtZDdmYTljMDMtODRmNGM4OGYtNTIzNDZkOGM=, ActorId: [1:1265:3017], ActorState: ExecuteState, TraceId: 01kd697af02k12fp6tpkmrt6d2, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [1:1339:3017] from: [1:1338:3017] 2025-12-23T18:58:24.560457Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:1339:3017] TxId: 281474976715664. Ctx: { TraceId: 01kd697af02k12fp6tpkmrt6d2, Database: , SessionId: ydb://session/3?node_id=1&id=ODU1Y2QwNWYtZDdmYTljMDMtODRmNGM4OGYtNTIzNDZkOGM=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. UNDETERMINED: {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 } 2025-12-23T18:58:24.561170Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=ODU1Y2QwNWYtZDdmYTljMDMtODRmNGM4OGYtNTIzNDZkOGM=, ActorId: [1:1265:3017], ActorState: ExecuteState, TraceId: 01kd697af02k12fp6tpkmrt6d2, Create QueryResponse for error on request, msg: , status: UNDETERMINED, issues: { message: "State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889." issue_code: 2026 severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } } ... waiting for SysViewsRoster update finished 2025-12-23T18:58:27.831847Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:58:27.838912Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:27.839169Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:27.839250Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:58:28.068475Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:28.068580Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:28.085623Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766516305263290 != 1766516305263293 2025-12-23T18:58:28.091948Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:28.135279Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:28.214031Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fb5/r3tmp/tmpAVXsO2/pdisk_1.dat 2025-12-23T18:58:28.485925Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:58:28.500190Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:28.604053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:28.842775Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===== UPSERT initial rows 2025-12-23T18:58:29.064683Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1023:2864], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:29.064801Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1034:2869], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:29.064907Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:29.065947Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1039:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:29.066114Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:29.070965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:29.213093Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1037:2872], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T18:58:29.249582Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1095:2911] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===== Begin SELECT { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets ... captured readset ... captured readset ===== restarting tablet 2025-12-23T18:58:30.005878Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1284: SelfId: [2:1222:2952], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [2:1159:2952]TEvDeliveryProblem was received from tablet: 72075186224037888 ===== Waiting for commit response ===== Last SELECT { items { uint32_value: 3 } items { uint32_value: 2 } } |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] >> DataShardTxOrder::ReadWriteReorder [GOOD] Test command err: 2025-12-23T18:57:13.408133Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:13.432253Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:13.432469Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:13.439243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:13.439499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:13.439737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:13.439891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:13.440063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:13.440254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:13.440394Z node 
1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:13.440499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:13.440612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:13.440774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:13.440887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:13.441014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:13.441173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:13.464152Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:13.464359Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:13.464421Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:13.464610Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:13.464765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:13.464838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:13.464882Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:13.464973Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:13.465034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:13.465110Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:13.465177Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:13.465407Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:13.465488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:13.465543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:13.465577Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:13.465684Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:13.465740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:13.465779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:13.465806Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:13.465847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:13.465870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:13.465888Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:13.465941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:13.465972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:13.465995Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:13.466195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:13.466246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:13.466267Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:13.466343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:13.466372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:13.466389Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:13.466417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:13.466438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:13.466455Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:13.466495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:13.466526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:13.466545Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:13.466629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:13.466668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=4245; 2025-12-23T18:58:29.379834Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=7; 2025-12-23T18:58:29.380597Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=721; 2025-12-23T18:58:29.380663Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=5241; 2025-12-23T18:58:29.380699Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=5357; 2025-12-23T18:58:29.380754Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2025-12-23T18:58:29.380838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=47; 2025-12-23T18:58:29.380892Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=5896; 2025-12-23T18:58:29.381044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=85; 2025-12-23T18:58:29.381147Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=69; 2025-12-23T18:58:29.381285Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=101; 2025-12-23T18:58:29.381425Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=104; 2025-12-23T18:58:29.381838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=380; 2025-12-23T18:58:29.382397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=513; 2025-12-23T18:58:29.382451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-12-23T18:58:29.382498Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-12-23T18:58:29.382529Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-12-23T18:58:29.382598Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=33; 2025-12-23T18:58:29.382630Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-12-23T18:58:29.382700Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=45; 2025-12-23T18:58:29.382728Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2025-12-23T18:58:29.382817Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2025-12-23T18:58:29.382889Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=37; 2025-12-23T18:58:29.382956Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=42; 2025-12-23T18:58:29.382997Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=16533; 2025-12-23T18:58:29.383125Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=126218384;raw_bytes=174224032;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T18:58:29.383206Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T18:58:29.383257Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T18:58:29.383316Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T18:58:29.383349Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T18:58:29.383447Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:58:29.383517Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:58:29.383550Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:58:29.383589Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:58:29.383652Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:58:29.383692Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:58:29.383723Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:58:29.383795Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:58:29.383956Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.041000s; 2025-12-23T18:58:29.385194Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T18:58:29.386262Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T18:58:29.386310Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T18:58:29.386368Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T18:58:29.386405Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T18:58:29.386446Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T18:58:29.386508Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T18:58:29.386562Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T18:58:29.386613Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T18:58:29.386722Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T18:58:29.386794Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T18:58:29.387068Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.133000s; 2025-12-23T18:58:29.387102Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1423:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> BasicStatistics::StatisticsOnShardsRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2025-12-23T18:58:26.601420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:26.601471Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:26.602855Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:26.616493Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:26.616819Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:26.617096Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:26.666865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:26.673551Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:26.673639Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:26.675333Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:26.675420Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:26.675474Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:26.675811Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: 
TDataShard::TTxInit::Complete 2025-12-23T18:58:26.675879Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:26.675950Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:58:26.764447Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:26.793784Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:26.793923Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:26.793986Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:58:26.794014Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:26.794037Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:26.794064Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:26.794173Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:26.794230Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:26.794544Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:26.794656Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:26.794815Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:26.794871Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:26.794904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:26.794949Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:26.794993Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:26.795024Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:26.795064Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:26.795147Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:26.795181Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:26.795221Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:26.803967Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient 
[1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:26.804037Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:26.804157Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:26.804338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:26.804378Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:26.804445Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:58:26.804499Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:26.804532Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:26.804563Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:58:26.804592Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:26.804867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:26.804920Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:58:26.804969Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:26.805001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:26.805041Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:26.805063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:26.805102Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:26.805129Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:26.805149Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:26.817201Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:26.817279Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:26.817313Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:26.817348Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 
at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:26.817420Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:58:26.817890Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:26.817938Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:26.817989Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:26.818116Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:26.818162Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:26.818282Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:26.818321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:26.818366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:26.818408Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:26.828578Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:58:26.828646Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:26.828874Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:26.828927Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:26.828989Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:26.829032Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:26.829063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:26.829102Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:58:26.829151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
[1:103:2137], exec latency: 0 ms, propose latency: 2 ms 2025-12-23T18:58:32.253664Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-12-23T18:58:32.253688Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:32.253835Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:352:2319]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-12-23T18:58:32.253864Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:32.253889Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-12-23T18:58:32.253973Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:58:32.254000Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-12-23T18:58:32.254038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2025-12-23T18:58:32.254075Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:58:32.254114Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:58:32.254214Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2025-12-23T18:58:32.254401Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:463:2405], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-12-23T18:58:32.254441Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:32.254465Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-12-23T18:58:32.254517Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:352:2319]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-12-23T18:58:32.254537Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:32.254554Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-12-23T18:58:32.254665Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:352:2319]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-12-23T18:58:32.254681Z node 1 :TX_DATASHARD 
TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:32.254711Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-12-23T18:58:32.254790Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:242:2234], Recipient [1:352:2319]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-12-23T18:58:32.254809Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:32.254824Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-12-23T18:58:32.254877Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [1:242:2234], Recipient [1:463:2405]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-12-23T18:58:32.254896Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-12-23T18:58:32.254922Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2025-12-23T18:58:32.255003Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-12-23T18:58:32.255041Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2025-12-23T18:58:32.255101Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2025-12-23T18:58:32.255173Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:463:2405], Recipient [1:463:2405]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:32.255193Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:32.255219Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2025-12-23T18:58:32.255250Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-12-23T18:58:32.255288Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-12-23T18:58:32.255326Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-12-23T18:58:32.255352Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2025-12-23T18:58:32.255384Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-12-23T18:58:32.255410Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2025-12-23T18:58:32.255439Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit 
BlockFailPoint 2025-12-23T18:58:32.255458Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2025-12-23T18:58:32.255473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2025-12-23T18:58:32.255490Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-12-23T18:58:32.255504Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-12-23T18:58:32.255957Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-12-23T18:58:32.256001Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:58:32.256051Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-12-23T18:58:32.256070Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-12-23T18:58:32.256093Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-12-23T18:58:32.256127Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-12-23T18:58:32.256313Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-12-23T18:58:32.256342Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-12-23T18:58:32.256386Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-12-23T18:58:32.256414Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-12-23T18:58:32.256448Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2025-12-23T18:58:32.256470Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-12-23T18:58:32.256495Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2025-12-23T18:58:32.256521Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:32.256538Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2025-12-23T18:58:32.256558Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2025-12-23T18:58:32.256576Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2025-12-23T18:58:32.269627Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437186 2025-12-23T18:58:32.269686Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-12-23T18:58:32.269744Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2025-12-23T18:58:32.269794Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:58:32.269828Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-12-23T18:58:32.270032Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:463:2405], Recipient [1:242:2234]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-12-23T18:58:32.270064Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:32.270088Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD] Test command err: 2025-12-23T18:58:31.868231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:31.868275Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:31.869487Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:31.881008Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:31.881260Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:31.881501Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:31.918174Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:31.924365Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:31.924448Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:31.925858Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:31.925931Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:31.925975Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:31.926205Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:31.926251Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:31.926293Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] 
in generation 2 2025-12-23T18:58:32.000279Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:32.031593Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:32.031730Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:32.031801Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:58:32.031825Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:32.031852Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:32.031876Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:32.031976Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:32.032021Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:32.032249Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:32.032321Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:32.032439Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:32.032484Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:32.032511Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:32.032537Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:32.032560Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:32.032580Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:32.032608Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:32.032674Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:32.032699Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:32.032726Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:32.040047Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 
1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:32.040098Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:32.040180Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:32.040342Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:32.040375Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:32.040427Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:58:32.040456Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:32.040483Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:32.040505Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:58:32.040526Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:32.040742Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:32.040779Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:58:32.040803Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:32.040838Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:32.040877Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:32.040899Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:32.040921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:32.040943Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:32.040958Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:32.052573Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:32.052633Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:32.052669Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:32.052707Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:32.052771Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:58:32.053162Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:32.053198Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:32.053226Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:32.053328Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:32.053362Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:32.053447Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:32.053477Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:32.053514Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:32.053549Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:32.056964Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:58:32.057042Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:32.057219Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:32.057251Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:32.057318Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:32.057364Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:32.057396Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:32.057435Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:58:32.057480Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000001: ... 
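The datashard trace above (and the DelayData trace further down) walks each operation through a fixed chain of execution units, where a unit's reported status decides whether the pipeline advances, parks the operation, or defers work to the Complete phase. The sketch below is illustrative only and is not the actual NKikimr::NDataShard code: the unit names and status values are copied from the log, while the types, flow, and output are assumptions made for clarity.

```cpp
// Minimal sketch of the execution-unit pipeline the trace above walks through.
// NOT the real NKikimr::NDataShard implementation; only unit names and status
// values come from the log, the rest is illustrative.
#include <iostream>
#include <string>
#include <vector>

enum class EExecutionStatus {
    Executed,                     // unit done, advance to the next unit
    DelayComplete,                // advance now, finish the unit in the Complete phase
    DelayCompleteNoMoreRestarts,  // same, and the transaction may not restart
    NotReady                      // park the operation (WaitForPlan waits for TEvPlanStep)
};

struct TUnitStep {
    std::string Name;
    EExecutionStatus Status;  // stubbed result for this operation
};

int main() {
    // Unit order observed for the scheme transaction [0:1] in the trace above.
    const std::vector<TUnitStep> plan = {
        {"CheckSchemeTx", EExecutionStatus::Executed},
        {"StoreSchemeTx", EExecutionStatus::DelayCompleteNoMoreRestarts},
        {"FinishPropose", EExecutionStatus::DelayComplete},
        {"WaitForPlan",   EExecutionStatus::NotReady},
    };

    std::vector<std::string> delayed;  // units finished later, in the Complete phase
    for (const auto& step : plan) {
        std::cout << "Trying to execute [0:1] on unit " << step.Name << "\n";
        if (step.Status == EExecutionStatus::NotReady) {
            std::cout << "Operation [0:1] is not ready to execute on unit " << step.Name << "\n";
            break;  // resumes once the plan step (TEvPlanStep) arrives
        }
        if (step.Status != EExecutionStatus::Executed) {
            delayed.push_back(step.Name);
        }
        std::cout << "Advance execution plan for [0:1], executing on unit " << step.Name << "\n";
    }
    // Complete phase, as in TTxProposeTransactionBase::Complete in the log.
    for (const auto& name : delayed) {
        std::cout << "Complete execution for [0:1] on unit " << name << "\n";
    }
    return 0;
}
```

For planned data transactions the log shows a longer chain of the same shape (PlanQueue, LoadTxDetails, FinalizeDataTxPlan, BuildAndWaitDependencies, the read-set units, BlockFailPoint, ExecuteDataTx, CompleteOperation, CompletedOperations), as in the [1000005:12] trace that follows.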
tions 2025-12-23T18:58:32.818818Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:32.818986Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:353:2320], Recipient [1:353:2320]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:32.819018Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:32.819052Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2025-12-23T18:58:32.819081Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:32.819097Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2025-12-23T18:58:32.819116Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000005:12] in PlanQueue unit at 9437185 2025-12-23T18:58:32.819132Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437185 on unit PlanQueue 2025-12-23T18:58:32.819160Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437185 is Executed 2025-12-23T18:58:32.819176Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437185 executing on unit PlanQueue 2025-12-23T18:58:32.819208Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437185 to execution unit LoadTxDetails 2025-12-23T18:58:32.819226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437185 on unit LoadTxDetails 2025-12-23T18:58:32.819662Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437185 loaded tx from db 1000005:12 keys extracted: 1 2025-12-23T18:58:32.819688Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437185 is Executed 2025-12-23T18:58:32.819703Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437185 executing on unit LoadTxDetails 2025-12-23T18:58:32.819717Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437185 to execution unit FinalizeDataTxPlan 2025-12-23T18:58:32.819730Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437185 on unit FinalizeDataTxPlan 2025-12-23T18:58:32.819763Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437185 is Executed 2025-12-23T18:58:32.819779Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437185 executing on unit FinalizeDataTxPlan 2025-12-23T18:58:32.819804Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437185 to execution unit BuildAndWaitDependencies 2025-12-23T18:58:32.819826Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437185 on unit BuildAndWaitDependencies 2025-12-23T18:58:32.819866Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000005:12] is the new logically complete end at 9437185 2025-12-23T18:58:32.819895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000005:12] is the new logically incomplete end at 9437185 
2025-12-23T18:58:32.819912Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000005:12] at 9437185 2025-12-23T18:58:32.819934Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437185 is Executed 2025-12-23T18:58:32.819948Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437185 executing on unit BuildAndWaitDependencies 2025-12-23T18:58:32.819960Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437185 to execution unit BuildDataTxOutRS 2025-12-23T18:58:32.819973Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437185 on unit BuildDataTxOutRS 2025-12-23T18:58:32.820010Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437185 is Executed 2025-12-23T18:58:32.820029Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437185 executing on unit BuildDataTxOutRS 2025-12-23T18:58:32.820049Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437185 to execution unit StoreAndSendOutRS 2025-12-23T18:58:32.820068Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437185 on unit StoreAndSendOutRS 2025-12-23T18:58:32.820090Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437185 is Executed 2025-12-23T18:58:32.820121Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437185 executing on unit StoreAndSendOutRS 2025-12-23T18:58:32.820137Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437185 to execution unit PrepareDataTxInRS 2025-12-23T18:58:32.820150Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437185 on unit PrepareDataTxInRS 2025-12-23T18:58:32.820169Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437185 is Executed 2025-12-23T18:58:32.820187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437185 executing on unit PrepareDataTxInRS 2025-12-23T18:58:32.820205Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437185 to execution unit LoadAndWaitInRS 2025-12-23T18:58:32.820218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437185 on unit LoadAndWaitInRS 2025-12-23T18:58:32.820243Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437185 is Executed 2025-12-23T18:58:32.820256Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437185 executing on unit LoadAndWaitInRS 2025-12-23T18:58:32.820267Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437185 to execution unit BlockFailPoint 2025-12-23T18:58:32.820283Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437185 on unit BlockFailPoint 2025-12-23T18:58:32.820339Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437185 is Executed 2025-12-23T18:58:32.820385Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437185 executing on unit BlockFailPoint 2025-12-23T18:58:32.820399Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437185 to execution unit ExecuteDataTx 2025-12-23T18:58:32.820410Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437185 on unit ExecuteDataTx 2025-12-23T18:58:32.820591Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:12] at tablet 9437185 with status COMPLETE 2025-12-23T18:58:32.820619Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:12] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:58:32.820646Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437185 is Executed 2025-12-23T18:58:32.820659Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437185 executing on unit ExecuteDataTx 2025-12-23T18:58:32.820671Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437185 to execution unit CompleteOperation 2025-12-23T18:58:32.820683Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437185 on unit CompleteOperation 2025-12-23T18:58:32.820806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437185 is DelayComplete 2025-12-23T18:58:32.820829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437185 executing on unit CompleteOperation 2025-12-23T18:58:32.820845Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437185 to execution unit CompletedOperations 2025-12-23T18:58:32.820860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437185 on unit CompletedOperations 2025-12-23T18:58:32.820878Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437185 is Executed 2025-12-23T18:58:32.820904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437185 executing on unit CompletedOperations 2025-12-23T18:58:32.820927Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:12] at 9437185 has finished 2025-12-23T18:58:32.820954Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:32.820970Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2025-12-23T18:58:32.820987Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2025-12-23T18:58:32.821002Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2025-12-23T18:58:32.833562Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2025-12-23T18:58:32.833618Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-12-23T18:58:32.833673Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:32.833719Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [1000005:12] at 9437184 on unit CompleteOperation 2025-12-23T18:58:32.833799Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:58:32.833843Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:32.834108Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2025-12-23T18:58:32.834139Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 2025-12-23T18:58:32.834174Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-12-23T18:58:32.834199Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:12] at 9437185 on unit CompleteOperation 2025-12-23T18:58:32.834254Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:58:32.834290Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::StatisticsOnShardsRestart [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:17.665942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:17.747111Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:17.751766Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:17.752048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:17.752120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:18.060665Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:18.154208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:18.154328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:18.211888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:18.346468Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003239/r3tmp/tmp5zWkUS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7628, node 1 2025-12-23T18:52:18.989562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:18.990424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:18.990465Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:18.990490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:18.990673Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:19.057329Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3131 2025-12-23T18:52:19.595455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:22.201202Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:22.208002Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:22.211209Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:22.237601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:22.237674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:22.276112Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:22.278396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:22.515954Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.516610Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.517310Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.517969Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.518263Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.518388Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.518545Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.518703Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.518892Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:22.631901Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:52:22.643558Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:22.643684Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:22.656902Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:22.827783Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:22.867377Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:22.867496Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:22.892149Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:22.893628Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:22.893835Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:22.893889Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:52:22.893951Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:22.894007Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:22.894050Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:22.894099Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:22.894439Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:22.935361Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:22.935431Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:22.945367Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:52:22.953642Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:52:22.954840Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:52:22.959245Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:52:22.985902Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:52:22.985957Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:52:22.986017Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:52:23.011781Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:23.022765Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:23.033111Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:23.033213Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:23.042939Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:23.138890Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:23.491722Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:23.601962Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:23.602036Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:265 ... 2025-12-23T18:57:45.641554Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:57:45.641630Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:57:45.641951Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2025-12-23T18:57:45.655644Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:57:47.385151Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:57:47.385567Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 42 2025-12-23T18:57:47.385924Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 42 2025-12-23T18:57:51.379945Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:57:52.173800Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:57:52.173857Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:57:52.174056Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2025-12-23T18:57:52.187473Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:57:53.964841Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:57:53.965197Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 43 2025-12-23T18:57:53.965364Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 43 2025-12-23T18:57:57.664313Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:57:58.444493Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:57:58.444569Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 
5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:57:58.444789Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2025-12-23T18:57:58.457948Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:58:00.219479Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:58:00.219901Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 44 2025-12-23T18:58:00.220095Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 44 2025-12-23T18:58:03.938143Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:58:04.723638Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:58:04.723728Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:58:04.723962Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2025-12-23T18:58:04.738284Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:58:06.471103Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:58:06.471558Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 45 2025-12-23T18:58:06.471707Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 45 2025-12-23T18:58:10.318678Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-12-23T18:58:10.318767Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8341: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:58:10.318799Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8372: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-12-23T18:58:10.318827Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-12-23T18:58:10.526664Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:58:11.301771Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:58:11.301838Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:58:11.302050Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2025-12-23T18:58:11.315035Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] 
TTxSchemeShardStats::Complete 2025-12-23T18:58:13.029139Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:58:13.029557Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 46 2025-12-23T18:58:13.029713Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 46 2025-12-23T18:58:16.737550Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:58:17.502214Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:58:17.502289Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:58:17.502484Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2025-12-23T18:58:17.516078Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:58:19.009410Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:58:19.009824Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 47 2025-12-23T18:58:19.010025Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 47 2025-12-23T18:58:22.676318Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:58:23.499440Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:58:23.499524Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:58:23.499771Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2025-12-23T18:58:23.513532Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:58:25.289878Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:58:25.290263Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 48 2025-12-23T18:58:25.290430Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 48 2025-12-23T18:58:29.280755Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2025-12-23T18:58:30.056382Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-12-23T18:58:30.056484Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:58:30.056794Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard 
id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2025-12-23T18:58:30.071405Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:58:31.891073Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:58:31.891456Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 49 2025-12-23T18:58:31.891596Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 49 2025-12-23T18:58:31.903446Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:16635:9874]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:58:31.905885Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T18:58:31.905944Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:16635:9874], StatRequests.size() = 1 2025-12-23T18:58:31.908442Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:16651:9878]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:58:31.911328Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2025-12-23T18:58:31.911395Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:16651:9878], StatRequests.size() = 1 2025-12-23T18:58:31.914500Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [2:16667:9882]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:58:31.917815Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2025-12-23T18:58:31.917889Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 6, ReplyToActorId = [2:16667:9882], StatRequests.size() = 1 |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> DataShardTxOrder::ZigZag |98.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DataShardTxOrder::DelayData [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] >> TKeyValueTest::TestVacuumOnEmptyTablet [GOOD] >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration 
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2025-12-23T18:58:25.029096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:25.029148Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:25.030636Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:25.044404Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:25.044722Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:25.044987Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:25.088788Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:25.096016Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:25.096132Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:25.097936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:25.098098Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:25.098172Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:25.098540Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:25.098620Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:25.098695Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:58:25.179342Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:25.224469Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:25.224729Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:25.224837Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:58:25.224882Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:25.224922Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:25.224961Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:25.225115Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-12-23T18:58:25.225173Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:25.225510Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:25.225624Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:25.225789Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:25.225856Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:25.225896Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:25.225934Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:25.225967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:25.225997Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:25.226036Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:25.226120Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:25.226154Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:25.226198Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:25.233072Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\002\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:25.233149Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:25.233261Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:25.233453Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:25.233504Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:25.233566Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:58:25.233614Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:25.233648Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:25.233683Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution 
unit StoreSchemeTx 2025-12-23T18:58:25.233716Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:25.234021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:25.234074Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:58:25.234118Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:25.234160Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:25.234202Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:25.234232Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:25.234274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:25.234305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:25.234349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:25.246691Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:25.246788Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:25.246828Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:25.246867Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:25.246946Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:58:25.247489Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:25.247550Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:25.247595Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:25.247735Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:25.247770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:25.247921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:25.247968Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:25.248024Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance 
execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:25.248069Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:25.251998Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:58:25.252069Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:25.252291Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:25.252333Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:25.252407Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:25.252472Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:25.252514Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:25.252556Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:58:25.252607Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... plan for [1000005:506] at 9437184 has finished 2025-12-23T18:58:35.263717Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:35.263746Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:35.263780Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2025-12-23T18:58:35.264090Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:242:2234], Recipient [1:242:2234]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:35.264136Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:35.264197Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:35.264228Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:35.264260Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:282: Return cached ready operation [1000005:507] at 9437184 2025-12-23T18:58:35.264295Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2025-12-23T18:58:35.264338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2025-12-23T18:58:35.264364Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2025-12-23T18:58:35.264397Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 
2025-12-23T18:58:35.264420Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2025-12-23T18:58:35.264963Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2025-12-23T18:58:35.265011Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2025-12-23T18:58:35.265037Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2025-12-23T18:58:35.265060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2025-12-23T18:58:35.265083Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2025-12-23T18:58:35.265118Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2025-12-23T18:58:35.265139Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2025-12-23T18:58:35.265175Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2025-12-23T18:58:35.265200Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2025-12-23T18:58:35.265248Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000005:507] is the new logically complete end at 9437184 2025-12-23T18:58:35.265273Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000005:507] is the new logically incomplete end at 9437184 2025-12-23T18:58:35.265311Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000005:507] at 9437184 2025-12-23T18:58:35.265345Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2025-12-23T18:58:35.265370Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2025-12-23T18:58:35.265407Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2025-12-23T18:58:35.265434Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2025-12-23T18:58:35.265475Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2025-12-23T18:58:35.265495Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2025-12-23T18:58:35.265515Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2025-12-23T18:58:35.265533Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2025-12-23T18:58:35.265570Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2025-12-23T18:58:35.265593Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit 
StoreAndSendOutRS 2025-12-23T18:58:35.265627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2025-12-23T18:58:35.265650Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2025-12-23T18:58:35.265691Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2025-12-23T18:58:35.265713Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2025-12-23T18:58:35.265739Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2025-12-23T18:58:35.265761Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2025-12-23T18:58:35.265783Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2025-12-23T18:58:35.265802Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2025-12-23T18:58:35.265821Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BlockFailPoint 2025-12-23T18:58:35.265841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BlockFailPoint 2025-12-23T18:58:35.265875Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2025-12-23T18:58:35.265921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BlockFailPoint 2025-12-23T18:58:35.265944Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2025-12-23T18:58:35.265964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2025-12-23T18:58:35.266234Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2025-12-23T18:58:35.266284Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:58:35.266333Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:35.266362Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2025-12-23T18:58:35.266396Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2025-12-23T18:58:35.266430Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2025-12-23T18:58:35.266607Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is DelayComplete 2025-12-23T18:58:35.266644Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2025-12-23T18:58:35.266671Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2025-12-23T18:58:35.266696Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2025-12-23T18:58:35.266730Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2025-12-23T18:58:35.266787Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2025-12-23T18:58:35.266814Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:507] at 9437184 has finished 2025-12-23T18:58:35.266846Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:35.266870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:35.266899Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:35.266936Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:35.281501Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2025-12-23T18:58:35.281571Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-12-23T18:58:35.281628Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:35.281672Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-12-23T18:58:35.281747Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:58:35.281806Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:35.281986Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:35.282032Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-12-23T18:58:35.282096Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:58:35.282137Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:90:2057] recipient: [8:89:2119] Leader for TabletID 72057594037927937 is [8:91:2120] sender: [8:92:2057] recipient: [8:89:2119] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:91:2120] Leader for TabletID 72057594037927937 is [8:91:2120] sender: [8:207:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:91:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:94:2057] recipient: [9:93:2123] Leader for TabletID 72057594037927937 is [9:95:2124] sender: [9:96:2057] recipient: [9:93:2123] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:95:2124] Leader for TabletID 72057594037927937 is [9:95:2124] sender: [9:211:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:91:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:94:2057] recipient: [10:93:2123] Leader for TabletID 72057594037927937 is [10:95:2124] sender: [10:96:2057] recipient: [10:93:2123] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:95:2124] Leader for TabletID 72057594037927937 is [10:95:2124] sender: [10:211:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:96:2057] recipient: [11:95:2125] Leader for TabletID 72057594037927937 is [11:97:2126] sender: [11:98:2057] recipient: [11:95:2125] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:97:2126] Leader for TabletID 72057594037927937 is [11:97:2126] sender: [11:213:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:96:2057] recipient: [12:95:2125] Leader for TabletID 72057594037927937 is [12:97:2126] sender: [12:98:2057] recipient: [12:95:2125] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:97:2126] Leader for TabletID 72057594037927937 is [12:97:2126] sender: [12:213:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:93:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:96:2057] recipient: [13:95:2125] Leader for TabletID 72057594037927937 is [13:97:2126] sender: [13:98:2057] recipient: [13:95:2125] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! 
new actor is[13:97:2126] Leader for TabletID 72057594037927937 is [13:97:2126] sender: [13:213:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:98:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:101:2057] recipient: [14:100:2129] Leader for TabletID 72057594037927937 is [14:102:2130] sender: [14:103:2057] recipient: [14:100:2129] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:102:2130] Leader for TabletID 72057594037927937 is [14:102:2130] sender: [14:218:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:102:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:105:2057] recipient: [15:104:2133] Leader for TabletID 72057594037927937 is [15:106:2134] sender: [15:107:2057] recipient: [15:104:2133] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:106:2134] Leader for TabletID 72057594037927937 is [15:106:2134] sender: [15:222:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:102:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:105:2057] recipient: [16:104:2133] Leader for TabletID 72057594037927937 is [16:106:2134] sender: [16:107:2057] recipient: [16:104:2133] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! 
new actor is[16:106:2134] Leader for TabletID 72057594037927937 is [16:106:2134] sender: [16:222:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:104:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:107:2057] recipient: [17:106:2135] Leader for TabletID 72057594037927937 is [17:108:2136] sender: [17:109:2057] recipient: [17:106:2135] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:108:2136] Leader for TabletID 72057594037927937 is [17:108:2136] sender: [17:224:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:104:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:107:2057] recipient: [18:106:2135] Leader for TabletID 72057594037927937 is [18:108:2136] sender: [18:109:2057] recipient: [18:106:2135] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! 
new actor is[18:108:2136] Leader for TabletID 72057594037927937 is [18:108:2136] sender: [18:224:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2025-12-23T18:58:31.084070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:31.084139Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:31.085759Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:31.101098Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:31.101479Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:31.101779Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:31.152290Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:31.159761Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:31.159862Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:31.161740Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:31.161857Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:31.161915Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:31.162316Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:31.162400Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:31.162471Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:58:31.244012Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:31.285389Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:31.285640Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:31.285767Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: 
[1:223:2219] 2025-12-23T18:58:31.285817Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:31.285857Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:31.285907Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:31.286091Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:31.286164Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:31.286618Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:31.286777Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:31.286990Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:31.287054Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:31.287098Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:31.287169Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:31.287208Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:31.287241Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:31.287291Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:31.287396Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:31.287436Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:31.287483Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:31.295546Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:31.295636Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:31.295780Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:31.296016Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:31.296070Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:31.296148Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:58:31.296202Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:31.296244Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:31.296285Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:58:31.296322Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:31.296671Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:31.296744Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:58:31.296786Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:31.296844Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:31.296895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:31.296929Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:31.296967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:31.297018Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:31.297052Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:31.309420Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:31.309501Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:31.309537Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:31.309583Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:31.309654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:58:31.310223Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:31.310287Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:31.310338Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:31.310503Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:31.310541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:31.310691Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:31.310770Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:31.310823Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:31.310864Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:31.314764Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:58:31.314841Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:31.315103Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:31.315149Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:31.315242Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:31.315292Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:31.315329Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:31.315372Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:58:31.315437Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ions 2025-12-23T18:58:36.715007Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2025-12-23T18:58:36.715196Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:239:2231], Recipient [2:239:2231]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:36.715231Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:36.715274Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:36.715303Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:36.715340Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:36.715375Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2025-12-23T18:58:36.715401Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-12-23T18:58:36.715428Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:36.715469Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-12-23T18:58:36.715493Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-12-23T18:58:36.715528Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-12-23T18:58:36.716169Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-12-23T18:58:36.716209Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:36.716232Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-12-23T18:58:36.716255Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-12-23T18:58:36.716279Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2025-12-23T18:58:36.716313Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:36.716333Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-12-23T18:58:36.716352Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-12-23T18:58:36.716372Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-12-23T18:58:36.716432Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437184 2025-12-23T18:58:36.716471Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437184 2025-12-23T18:58:36.716516Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437184 2025-12-23T18:58:36.716554Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:36.716574Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2025-12-23T18:58:36.716595Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-12-23T18:58:36.716616Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-12-23T18:58:36.716654Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:36.716674Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-12-23T18:58:36.716694Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2025-12-23T18:58:36.716713Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-12-23T18:58:36.716732Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:36.716752Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-12-23T18:58:36.716782Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-12-23T18:58:36.716811Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-12-23T18:58:36.716852Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:36.716875Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-12-23T18:58:36.716894Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-12-23T18:58:36.716918Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2025-12-23T18:58:36.716938Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:36.716958Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-12-23T18:58:36.716990Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BlockFailPoint 2025-12-23T18:58:36.717013Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BlockFailPoint 2025-12-23T18:58:36.717034Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:36.717071Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BlockFailPoint 2025-12-23T18:58:36.717093Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-12-23T18:58:36.717113Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-12-23T18:58:36.717423Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-12-23T18:58:36.717470Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:58:36.717519Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:36.717545Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-12-23T18:58:36.717567Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-12-23T18:58:36.717588Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-12-23T18:58:36.717753Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-12-23T18:58:36.717794Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-12-23T18:58:36.717822Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-12-23T18:58:36.717848Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-12-23T18:58:36.717877Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:36.717896Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-12-23T18:58:36.717922Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437184 has finished 2025-12-23T18:58:36.717947Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:36.717971Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:36.717998Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:36.718022Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:36.731022Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-12-23T18:58:36.731106Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-12-23T18:58:36.731173Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-12-23T18:58:36.731219Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-12-23T18:58:36.731287Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:104:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:58:36.731347Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-12-23T18:58:36.732335Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-12-23T18:58:36.732375Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-12-23T18:58:36.732440Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:36.732462Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-12-23T18:58:36.732500Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:104:2137], exec latency: 0 ms, propose latency: 2 ms 2025-12-23T18:58:36.732525Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:78:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:81:2057] recipient: [10:80:2112] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:83:2057] recipient: [10:80:2112] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:82:2113] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:198:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:78:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:81:2057] recipient: [11:80:2112] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:83:2057] recipient: [11:80:2112] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:82:2113] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:198:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:79:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:82:2057] recipient: [12:81:2112] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:84:2057] recipient: [12:81:2112] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:83:2113] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:199:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:82:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:85:2057] recipient: [13:84:2115] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:87:2057] recipient: [13:84:2115] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:86:2116] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:202:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:82:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:85:2057] recipient: [14:84:2115] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:87:2057] recipient: [14:84:2115] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:86:2116] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:202:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:83:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:86:2057] recipient: [15:85:2115] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:88:2057] recipient: [15:85:2115] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:87:2116] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:203:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:86:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:89:2057] recipient: [16:88:2118] Leader for TabletID 72057594037927937 is [16:90:2119] sender: [16:91:2057] recipient: [16:88:2118] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! 
new actor is[16:90:2119] Leader for TabletID 72057594037927937 is [16:90:2119] sender: [16:206:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:86:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:89:2057] recipient: [17:88:2118] Leader for TabletID 72057594037927937 is [17:90:2119] sender: [17:91:2057] recipient: [17:88:2118] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:90:2119] Leader for TabletID 72057594037927937 is [17:90:2119] sender: [17:206:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:87:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:90:2057] recipient: [18:89:2118] Leader for TabletID 72057594037927937 is [18:91:2119] sender: [18:92:2057] recipient: [18:89:2118] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:91:2119] Leader for TabletID 72057594037927937 is [18:91:2119] sender: [18:207:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:90:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:93:2057] recipient: [19:92:2121] Leader for TabletID 72057594037927937 is [19:94:2122] sender: [19:95:2057] recipient: [19:92:2121] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! 
new actor is[19:94:2122] Leader for TabletID 72057594037927937 is [19:94:2122] sender: [19:210:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:90:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:93:2057] recipient: [20:92:2121] Leader for TabletID 72057594037927937 is [20:94:2122] sender: [20:95:2057] recipient: [20:92:2121] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:94:2122] Leader for TabletID 72057594037927937 is [20:94:2122] sender: [20:210:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:91:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:94:2057] recipient: [21:93:2121] Leader for TabletID 72057594037927937 is [21:95:2122] sender: [21:96:2057] recipient: [21:93:2121] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:58:30.233945Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:58:30.234147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:58:30.340875Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:58:30.341843Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:58:30.346107Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:505:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:30.346380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:30.346517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:58:30.348273Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:500:2171], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:30.348566Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:58:30.348679Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:30.700200Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:30.797342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:30.797475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:30.798400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:30.798510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:30.843567Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:58:30.844461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:30.844882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:30.921385Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:58:30.933894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fad/r3tmp/tmppGqJFc/pdisk_1.dat 2025-12-23T18:58:31.528607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:58:31.591346Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:31.591540Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:31.924128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:31.986517Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [2:1591:2383], Recipient [2:1617:2396]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:31.991052Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [2:1591:2383], Recipient [2:1617:2396]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:31.991502Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1617:2396] 2025-12-23T18:58:31.991798Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:32.051055Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [2:1591:2383], Recipient [2:1617:2396]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:32.056038Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:32.056217Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:32.057986Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:58:32.058071Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:58:32.058136Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:58:32.058545Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:32.059324Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:32.059434Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1641:2396] in generation 1 2025-12-23T18:58:32.062607Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:32.097409Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:58:32.097632Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 
2025-12-23T18:58:32.097783Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:1645:2413] 2025-12-23T18:58:32.097826Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:58:32.097866Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:58:32.097900Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:58:32.098197Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1617:2396], Recipient [2:1617:2396]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:32.098260Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:32.098545Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:58:32.098639Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:58:32.098798Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:58:32.098865Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:32.098933Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:58:32.098984Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:58:32.099023Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:58:32.099061Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:58:32.099109Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:58:32.099312Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1614:2394], Recipient [2:1617:2396]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:32.099350Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:32.099394Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1600:3051], serverId# [2:1614:2394], sessionId# [0:0:0] 2025-12-23T18:58:32.100202Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:818:2458], Recipient [2:1614:2394] 2025-12-23T18:58:32.100266Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:32.100384Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T18:58:32.100621Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T18:58:32.100685Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction 
at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-12-23T18:58:32.100792Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-12-23T18:58:32.100843Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976710657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T18:58:32.100910Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976710657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T18:58:32.100948Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976710657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T18:58:32.100983Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976710657] at 72075186224037888 on un ... ecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3503 UnfrozenTablets: 72075186224037888 2025-12-23T18:58:35.975351Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:58:35.975382Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3503:281474976710670] at 72075186224037888 on unit ExecuteWrite 2025-12-23T18:58:35.975406Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976710670 2025-12-23T18:58:35.975437Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 3 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976710670 2025-12-23T18:58:35.975463Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3503:281474976710670] at 72075186224037888 on unit CompleteWrite 2025-12-23T18:58:35.975505Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:58:35.975559Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-12-23T18:58:35.975589Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-12-23T18:58:35.975604Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-12-23T18:58:35.975657Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3503} 2025-12-23T18:58:35.975733Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-12-23T18:58:35.975751Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3503:281474976710670] at 72075186224037890 on unit ExecuteWrite 2025-12-23T18:58:35.975768Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037890 from 72075186224037890 to 72075186224037888 txId 281474976710670 2025-12-23T18:58:35.975785Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037890 from 72075186224037890 to 72075186224037889 txId 281474976710670 2025-12-23T18:58:35.975814Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3503:281474976710670] at 72075186224037890 on unit CompleteWrite 2025-12-23T18:58:35.975841Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 
2025-12-23T18:58:35.975878Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2025-12-23T18:58:35.975894Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2025-12-23T18:58:35.976075Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2363:2507], Recipient [2:2491:2537]: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-12-23T18:58:35.976100Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-12-23T18:58:35.976123Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976710670 2025-12-23T18:58:35.976181Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-12-23T18:58:35.976284Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3503 : 281474976710670] from 72075186224037890 at tablet 72075186224037890 send result to client [1:2587:3618] 2025-12-23T18:58:35.976671Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T18:58:35.977014Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2491:2537], Recipient [2:2363:2507]: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-12-23T18:58:35.977052Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-12-23T18:58:35.977075Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037888 source 72075186224037890 dest 72075186224037888 producer 72075186224037890 txId 281474976710670 2025-12-23T18:58:35.977122Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-12-23T18:58:35.977197Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3503 : 281474976710670] from 72075186224037888 at tablet 72075186224037888 send result to client [1:2585:3618] 2025-12-23T18:58:35.977440Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:58:35.978197Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2363:2507], Recipient [1:2475:3592] 2025-12-23T18:58:35.978228Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-12-23T18:58:35.978257Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976710670 2025-12-23T18:58:35.978294Z node 1 :TX_DATASHARD DEBUG: 
datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-12-23T18:58:35.978331Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-12-23T18:58:35.978518Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:64:2065] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3503 UnfrozenTablets: 72075186224037890 2025-12-23T18:58:35.978579Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2491:2537], Recipient [1:2475:3592] 2025-12-23T18:58:35.978600Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-12-23T18:58:35.978625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037890 dest 72075186224037889 producer 72075186224037890 txId 281474976710670 2025-12-23T18:58:35.978659Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037890 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-12-23T18:58:35.978724Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3503 : 281474976710670] from 72075186224037889 at tablet 72075186224037889 send result to client [1:2586:3618] 2025-12-23T18:58:35.979135Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:64:2065] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3503} 2025-12-23T18:58:35.979344Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:58:35.985335Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2025-12-23T18:58:35.985761Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2491:2537], Recipient [2:2363:2507]: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 3} 2025-12-23T18:58:35.985819Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:35.985867Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976710670 2025-12-23T18:58:35.985979Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-12-23T18:58:35.986389Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:2423:3551], Recipient [2:2363:2507] 2025-12-23T18:58:35.986418Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:35.986446Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976710670 2025-12-23T18:58:35.986479Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, 
received event# 269287938, Sender [1:2423:3551], Recipient [2:2491:2537] 2025-12-23T18:58:35.986494Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:35.986513Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 281474976710670 2025-12-23T18:58:35.986623Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2491:2537], Recipient [1:2423:3551] 2025-12-23T18:58:35.986642Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:35.986661Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976710670 2025-12-23T18:58:35.987381Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-12-23T18:58:35.987498Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2363:2507], Recipient [2:2491:2537]: {TEvReadSet step# 3503 txid# 281474976710670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-12-23T18:58:35.987532Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:35.987562Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976710670 2025-12-23T18:58:35.987815Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2363:2507], Recipient [1:2423:3551] 2025-12-23T18:58:35.987843Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:58:35.987867Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976710670 >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |98.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |98.7%| [TM] 
{BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |98.7%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability >> DataShardTxOrder::ZigZag [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::TwoRingGroups >> TPopulatorTest::MakeDir >> TPopulatorTestWithResets::UpdateAck >> TPopulatorQuorumTest::OneWriteOnlyRingGroup >> TPopulatorQuorumTest::OneDisconnectedRingGroup >> TPopulatorQuorumTest::OneRingGroup >> TPopulatorTest::RemoveDir >> TPopulatorTest::Boot |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2025-12-23T18:58:35.608356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:35.608411Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:35.609695Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:35.620363Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:35.620716Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:35.620912Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:35.627916Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:35.658411Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:35.658524Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:35.659980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:35.660048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:35.660092Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:35.660353Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:35.660496Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:35.660555Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting 
started state actor id [1:208:2156] in generation 2 2025-12-23T18:58:35.733982Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:35.758029Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:35.758203Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:35.758291Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:58:35.758327Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:35.758365Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:35.758399Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:35.758599Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:35.758676Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:35.758987Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:35.759086Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:35.759199Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:35.759255Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:35.759290Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:35.759331Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:35.759360Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:35.759409Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:35.759447Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:35.759539Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:35.759563Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:35.759592Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:35.765700Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 
9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:35.765759Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:35.765872Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:35.766065Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:35.766109Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:35.766165Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:58:35.766211Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:35.766242Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:35.766270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:58:35.766301Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:35.766564Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:35.766610Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:58:35.766643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:35.766686Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:35.766720Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:35.766770Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:35.766800Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:35.766831Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:35.766855Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:35.778586Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:35.778640Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:35.778664Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:35.778695Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:35.778757Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state 
WaitScheme 2025-12-23T18:58:35.779180Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:35.779234Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:35.779281Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:35.779448Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:35.779483Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:35.779607Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:35.779650Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:35.779705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:35.779741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:35.782828Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:58:35.782892Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:35.783085Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:35.783123Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:35.783182Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:35.783228Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:35.783263Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:35.783299Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:58:35.783345Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
5-12-23T18:58:41.860659Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:41.860777Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:239:2231], Recipient [2:239:2231]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:41.860798Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:41.860832Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:41.860854Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:41.860901Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:41.860925Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2025-12-23T18:58:41.860943Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-12-23T18:58:41.860963Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:41.860990Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-12-23T18:58:41.861010Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-12-23T18:58:41.861028Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-12-23T18:58:41.861679Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-12-23T18:58:41.861717Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:41.861737Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-12-23T18:58:41.861755Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-12-23T18:58:41.861773Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2025-12-23T18:58:41.861837Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:41.861854Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-12-23T18:58:41.861869Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-12-23T18:58:41.861882Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-12-23T18:58:41.861918Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437184 2025-12-23T18:58:41.861939Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437184 2025-12-23T18:58:41.861956Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437184 2025-12-23T18:58:41.861982Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:41.861995Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2025-12-23T18:58:41.862019Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-12-23T18:58:41.862036Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-12-23T18:58:41.862071Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:41.862093Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-12-23T18:58:41.862107Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2025-12-23T18:58:41.862121Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-12-23T18:58:41.862136Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:41.862149Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-12-23T18:58:41.862162Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-12-23T18:58:41.862175Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-12-23T18:58:41.862191Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:41.862206Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-12-23T18:58:41.862230Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-12-23T18:58:41.862255Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2025-12-23T18:58:41.862278Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:41.862292Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-12-23T18:58:41.862305Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BlockFailPoint 2025-12-23T18:58:41.862318Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BlockFailPoint 2025-12-23T18:58:41.862331Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:41.862343Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BlockFailPoint 2025-12-23T18:58:41.862355Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-12-23T18:58:41.862368Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-12-23T18:58:41.862624Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-12-23T18:58:41.862668Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:58:41.862703Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:41.862732Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-12-23T18:58:41.862772Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-12-23T18:58:41.862789Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-12-23T18:58:41.862907Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-12-23T18:58:41.862924Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-12-23T18:58:41.862943Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-12-23T18:58:41.862962Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-12-23T18:58:41.862993Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2025-12-23T18:58:41.863015Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-12-23T18:58:41.863037Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437184 has finished 2025-12-23T18:58:41.863058Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:41.863077Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:41.863097Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:41.863125Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:41.876482Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-12-23T18:58:41.876556Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-12-23T18:58:41.876626Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-12-23T18:58:41.876673Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-12-23T18:58:41.876762Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:104:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:58:41.876818Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-12-23T18:58:41.877148Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-12-23T18:58:41.877182Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-12-23T18:58:41.877219Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:41.877259Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-12-23T18:58:41.877304Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:104:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T18:58:41.877337Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] >> TPopulatorQuorumTest::OneWriteOnlyRingGroup [GOOD] >> TPopulatorQuorumTest::OneDisconnectedRingGroup [GOOD] >> TPopulatorQuorumTest::OneRingGroup [GOOD] >> TPopulatorQuorumTest::TwoRingGroups [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader [GOOD] >> TPopulatorTest::Boot [GOOD] >> TPopulatorTest::MakeDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::TwoRingGroups [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0], [1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:15:2062], service: [1:5497582477939:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:13:2060], service: [1:3298559222387:0] actor: [1:5:2052], service: [1:1099535966835:0] actor: [1:14:2061], service: [1:4398070850163:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2025-12-23T18:58:42.981943Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2025-12-23T18:58:42.993201Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2025-12-23T18:58:42.993325Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2025-12-23T18:58:42.996711Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:32:2079] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:13:2060] 2025-12-23T18:58:42.996787Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:32:2079] Successful handshake: replica# [1:13:2060] 2025-12-23T18:58:42.996847Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:32:2079] Start full sync: replica# [1:13:2060] 2025-12-23T18:58:42.996933Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:33:2080] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:14:2061] 2025-12-23T18:58:42.996970Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:33:2080] Successful handshake: replica# [1:14:2061] 2025-12-23T18:58:42.996992Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:33:2080] Start full sync: replica# [1:14:2061] 2025-12-23T18:58:42.997032Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:34:2081] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:15:2062] 2025-12-23T18:58:42.997055Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:34:2081] Successful handshake: replica# [1:15:2062] 2025-12-23T18:58:42.997074Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:34:2081] Start full sync: replica# [1:15:2062] 2025-12-23T18:58:42.997147Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2025-12-23T18:58:42.997212Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2025-12-23T18:58:42.997244Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2025-12-23T18:58:42.997271Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2025-12-23T18:58:42.997319Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2025-12-23T18:58:42.997350Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2025-12-23T18:58:42.997375Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2025-12-23T18:58:42.997413Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2025-12-23T18:58:42.997437Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2025-12-23T18:58:42.997456Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2025-12-23T18:58:42.997519Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:32:2079] 2025-12-23T18:58:42.997613Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false 
DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:42.997773Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2025-12-23T18:58:42.997831Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:42.997928Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:42.998015Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:33:2080] 2025-12-23T18:58:42.998081Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:13:2060], cookie# 0 2025-12-23T18:58:42.998146Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:42.998263Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2025-12-23T18:58:42.998327Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:14:2061], cookie# 0 2025-12-23T18:58:42.998397Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:42.998466Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:34:2081] 2025-12-23T18:58:42.998504Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:42.998581Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2025-12-23T18:58:42.998648Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:42.998716Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:291: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:15:2062], cookie# 0 2025-12-23T18:58:42.998796Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2025-12-23T18:58:42.998834Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:42.998917Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2025-12-23T18:58:42.998998Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2025-12-23T18:58:42.999060Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:42.999142Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2025-12-23T18:58:42.999202Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:42.999285Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2025-12-23T18:58:42.999335Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2025-12-23T18:58:42.999428Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:42.999486Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2025-12-23T18:58:42.999545Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:42.999607Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2025-12-23T18:58:42.999667Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2025-12-23T18:58:42.999720Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2025-12-23T18:58:42.999760Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:42.999826Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:42.999899Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:32:2079] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:13:2060] 2025-12-23T18:58:42.999949Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:33:2080] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:14:2061] 2025-12-23T18:58:43.000003Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 0 2025-12-23T18:58:43.000051Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:32:2079], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:43.000132Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 0 2025-12-23T18:58:43.000156Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:33:2080], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:43.000241Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2025-12-23T18:58:43.000291Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.000358Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2025-12-23T18:58:43.000418Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.000461Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:34:2081] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:15:2062] 2025-12-23T18:58:43.000501Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:34:2081], cookie# 0 2025-12-23T18:58:43.000526Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:34:2081], cookie# 0 2025-12-23T18:58:43.000569Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:43.000625Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2025-12-23T18:58:43.000656Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:43.000735Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2025-12-23T18:58:43.000782Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.000840Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2025-12-23T18:58:43.000876Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2025-12-23T18:58:43.000910Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:43.000976Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2025-12-23T18:58:43.001018Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.001061Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] 2025-12-23T18:58:43.001108Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2025-12-23T18:58:43.001138Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:33:2080], replica: [1:4398070850163:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:34:2081], replica: [1:5497582477939:0] populator: [1:31:2078], replica: [1:2199047594611:0] populator: [1:32:2079], replica: [1:3298559222387:0] 2025-12-23T18:58:43.001359Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 12345 2025-12-23T18:58:43.001411Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 12345 2025-12-23T18:58:43.001459Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2025-12-23T18:58:43.011841Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2025-12-23T18:58:43.011917Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneDisconnectedRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:5:2052], service: [1:1099535966835:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2025-12-23T18:58:43.000032Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2025-12-23T18:58:43.004694Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2025-12-23T18:58:43.004770Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2025-12-23T18:58:43.006037Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2025-12-23T18:58:43.006083Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2025-12-23T18:58:43.006106Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2025-12-23T18:58:43.006165Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2025-12-23T18:58:43.006192Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2025-12-23T18:58:43.006216Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2025-12-23T18:58:43.006262Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2025-12-23T18:58:43.006299Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2025-12-23T18:58:43.006318Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2025-12-23T18:58:43.006392Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2025-12-23T18:58:43.006493Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:43.006620Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2025-12-23T18:58:43.006664Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:43.006731Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2025-12-23T18:58:43.006827Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2025-12-23T18:58:43.006892Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:43.006954Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 
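The replica populator traces above repeat one full-sync exchange per replica: TEvHandshake ("Successful handshake" / "Start full sync"), then TEvRequestDescribe and TEvDescribeResult for each path, a TEvUpdate with its matching TEvUpdateAck, and finally TEvCommitGeneration. The sketch below only illustrates that ordering as read from this log; `FollowsFullSyncOrder` and `kExpectedOrder` are invented names for this example and are not part of populator.cpp.

```cpp
// Illustrative sketch only: checks that an observed event stream follows the
// per-replica full-sync order visible in the trace above. Assumed helper names,
// not the actual populator.cpp API.
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

// Expected relative order for one replica populator during full sync.
const std::vector<std::string> kExpectedOrder = {
    "TEvHandshake", "TEvRequestDescribe", "TEvDescribeResult",
    "TEvUpdate",    "TEvUpdateAck",       "TEvCommitGeneration",
};

// Returns true if the expected events appear in this relative order; other
// events may be interleaved, as they are in the real log.
bool FollowsFullSyncOrder(const std::vector<std::string>& observed) {
    size_t next = 0;
    for (const std::string& ev : observed) {
        if (next < kExpectedOrder.size() && ev == kExpectedOrder[next]) {
            ++next;
        }
    }
    return next == kExpectedOrder.size();
}

int main() {
    const std::vector<std::string> observed = {
        "TEvHandshake", "TEvRequestDescribe", "TEvDescribeResult",
        "TEvUpdate",    "TEvUpdateAck",       "TEvCommitGeneration",
    };
    std::cout << std::boolalpha << FollowsFullSyncOrder(observed) << "\n";  // true
}
```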
2025-12-23T18:58:43.007001Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:43.007043Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2025-12-23T18:58:43.007068Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2025-12-23T18:58:43.007100Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:43.007146Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2025-12-23T18:58:43.007177Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:43.007208Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2025-12-23T18:58:43.007242Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2025-12-23T18:58:43.007267Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.007314Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2025-12-23T18:58:43.007337Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:43.007420Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] 2025-12-23T18:58:43.007450Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2025-12-23T18:58:43.007479Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.007507Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2025-12-23T18:58:43.007524Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2025-12-23T18:58:43.007549Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:43.007617Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2025-12-23T18:58:43.007655Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.007686Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] 2025-12-23T18:58:43.007711Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2025-12-23T18:58:43.007755Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:31:2078], replica: [1:2199047594611:0] 2025-12-23T18:58:43.007863Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2025-12-23T18:58:43.018190Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2025-12-23T18:58:43.018266Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:5:2052], service: [1:1099535966835:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2025-12-23T18:58:42.981992Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:19:2066] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:10:2057] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2025-12-23T18:58:42.993202Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:19:2066] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:17:2064], cookie# 12345, event size# 36, preserialized size# 0 2025-12-23T18:58:42.993295Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:19:2066] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2025-12-23T18:58:42.995605Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:20:2067] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2025-12-23T18:58:42.995658Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:20:2067] Successful handshake: replica# [1:4:2051] 2025-12-23T18:58:42.995696Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:20:2067] Start full sync: replica# [1:4:2051] 2025-12-23T18:58:42.995762Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:21:2068] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2025-12-23T18:58:42.995781Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:21:2068] Successful handshake: replica# [1:5:2052] 2025-12-23T18:58:42.995794Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:21:2068] Start full sync: replica# [1:5:2052] 2025-12-23T18:58:42.995822Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:22:2069] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2025-12-23T18:58:42.995835Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:22:2069] Successful handshake: replica# [1:6:2053] 2025-12-23T18:58:42.995849Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:22:2069] Start full sync: replica# [1:6:2053] 2025-12-23T18:58:42.995908Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:20:2067] 2025-12-23T18:58:42.995980Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:20:2067] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2025-12-23T18:58:42.996158Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:20:2067] 2025-12-23T18:58:42.996220Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:20:2067] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2025-12-23T18:58:42.996334Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:21:2068] 2025-12-23T18:58:42.996392Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:20:2067] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2025-12-23T18:58:42.996452Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:21:2068] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2025-12-23T18:58:42.996504Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:21:2068] 
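In the quorum runs above, the populator sends "Ack update" back to the scheme shard once enough replica populators have returned TEvUpdateAck for the path (two of three in the OneRingGroup and OneDisconnectedRingGroup runs, two per group in TwoRingGroups). Below is a minimal sketch of one plausible reading of that counting rule, a strict majority per ring group; `TQuorumTracker` and `TRingGroup` are invented names for this example, not the actual populator.cpp types.

```cpp
// Illustrative sketch only: counts TEvUpdateAck-style acknowledgements per ring
// group and reports when every group has a strict majority. Assumed types.
#include <cstddef>
#include <iostream>
#include <set>
#include <vector>

// One ring group = the set of replica populator ids that serve it.
using TRingGroup = std::vector<int>;

class TQuorumTracker {
public:
    explicit TQuorumTracker(std::vector<TRingGroup> groups)
        : Groups(std::move(groups)), Acked(Groups.size()) {}

    // Record an ack from one replica populator and report whether every ring
    // group now has acks from a strict majority of its replicas.
    bool OnAck(int replicaId) {
        for (size_t i = 0; i < Groups.size(); ++i) {
            for (int id : Groups[i]) {
                if (id == replicaId) {
                    Acked[i].insert(replicaId);
                }
            }
        }
        return HasQuorum();
    }

    bool HasQuorum() const {
        for (size_t i = 0; i < Groups.size(); ++i) {
            if (Acked[i].size() * 2 <= Groups[i].size()) {
                return false;  // this ring group is still short of a majority
            }
        }
        return true;
    }

private:
    std::vector<TRingGroup> Groups;
    std::vector<std::set<int>> Acked;
};

int main() {
    // Mirrors the single-ring-group runs above: three replicas, the update can
    // be acked to the scheme shard after the second acknowledgement arrives.
    TQuorumTracker tracker({TRingGroup{20, 21, 22}});
    std::cout << std::boolalpha << tracker.OnAck(20) << "\n";  // false
    std::cout << std::boolalpha << tracker.OnAck(21) << "\n";  // true
}
```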
2025-12-23T18:58:42.996540Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:21:2068] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2025-12-23T18:58:42.996587Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:22:2069] 2025-12-23T18:58:42.996622Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:21:2068] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2025-12-23T18:58:42.996654Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:22:2069] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2025-12-23T18:58:42.996711Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:22:2069] 2025-12-23T18:58:42.996734Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:22:2069] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2025-12-23T18:58:42.996790Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:22:2069] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2025-12-23T18:58:42.996833Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:20:2067] 2025-12-23T18:58:42.996866Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:20:2067] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2025-12-23T18:58:42.996922Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:20:2067], cookie# 0 2025-12-23T18:58:42.996953Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:20:2067], cookie# 0 2025-12-23T18:58:42.996996Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:20:2067] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:42.997066Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:21:2068] 2025-12-23T18:58:42.997096Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:21:2068] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2025-12-23T18:58:42.997142Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:21:2068], cookie# 0 2025-12-23T18:58:42.997175Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:21:2068], cookie# 0 2025-12-23T18:58:42.997193Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:21:2068] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:42.997281Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:22:2069] 2025-12-23T18:58:42.997310Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:22:2069] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2025-12-23T18:58:42.997342Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:22:2069] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] 2025-12-23T18:58:42.997366Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:22:2069], cookie# 0 2025-12-23T18:58:42.997411Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:22:2069], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:21:2068], replica: [1:1099535966835:0] populator: [1:22:2069], replica: [1:2199047594611:0] populator: [1:20:2067], replica: [1:24339059:0] 2025-12-23T18:58:42.997515Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:20:2067], cookie# 12345 2025-12-23T18:58:43.007884Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:21:2068], cookie# 12345 2025-12-23T18:58:43.007965Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:19:2066] Ack update: ack to# [1:17:2064], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneWriteOnlyRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0], [1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:15:2062], service: [1:5497582477939:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:13:2060], service: [1:3298559222387:0] actor: [1:5:2052], service: [1:1099535966835:0] actor: [1:14:2061], service: [1:4398070850163:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2025-12-23T18:58:42.992894Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2025-12-23T18:58:42.997250Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2025-12-23T18:58:42.997322Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2025-12-23T18:58:42.999164Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:32:2079] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:13:2060] 2025-12-23T18:58:42.999207Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:32:2079] Successful handshake: replica# [1:13:2060] 2025-12-23T18:58:42.999237Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:32:2079] Start full sync: replica# [1:13:2060] 2025-12-23T18:58:42.999285Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:33:2080] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:14:2061] 2025-12-23T18:58:42.999306Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:33:2080] Successful handshake: replica# [1:14:2061] 2025-12-23T18:58:42.999319Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:33:2080] Start full sync: replica# [1:14:2061] 2025-12-23T18:58:42.999340Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:34:2081] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:15:2062] 2025-12-23T18:58:42.999353Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:34:2081] Successful handshake: replica# [1:15:2062] 2025-12-23T18:58:42.999367Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:34:2081] Start full sync: replica# [1:15:2062] 2025-12-23T18:58:42.999413Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2025-12-23T18:58:42.999462Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2025-12-23T18:58:42.999480Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2025-12-23T18:58:42.999501Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2025-12-23T18:58:42.999529Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2025-12-23T18:58:42.999545Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2025-12-23T18:58:42.999558Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2025-12-23T18:58:42.999576Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2025-12-23T18:58:42.999590Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2025-12-23T18:58:42.999602Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2025-12-23T18:58:42.999655Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:32:2079] 2025-12-23T18:58:42.999704Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false 
DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:42.999804Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2025-12-23T18:58:42.999862Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:42.999925Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:42.999980Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:33:2080] 2025-12-23T18:58:43.000027Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:13:2060], cookie# 0 2025-12-23T18:58:43.000073Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:43.000115Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2025-12-23T18:58:43.000139Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:14:2061], cookie# 0 2025-12-23T18:58:43.000171Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:43.000205Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:34:2081] 2025-12-23T18:58:43.000238Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:43.000280Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2025-12-23T18:58:43.000318Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:43.000363Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:291: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:15:2062], cookie# 0 2025-12-23T18:58:43.000407Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2025-12-23T18:58:43.000433Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:43.000488Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2025-12-23T18:58:43.000517Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2025-12-23T18:58:43.000564Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:43.000600Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2025-12-23T18:58:43.000645Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:43.000689Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2025-12-23T18:58:43.000717Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2025-12-23T18:58:43.000778Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2025-12-23T18:58:43.000811Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2025-12-23T18:58:43.000841Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2025-12-23T18:58:43.000877Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2025-12-23T18:58:43.000911Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2025-12-23T18:58:43.000938Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2025-12-23T18:58:43.000993Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.001029Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.001072Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:32:2079] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:13:2060] 2025-12-23T18:58:43.001100Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:33:2080] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:14:2061] 2025-12-23T18:58:43.001128Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 0 2025-12-23T18:58:43.001149Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:32:2079], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:43.001196Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 0 2025-12-23T18:58:43.001210Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:33:2080], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:43.001255Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2025-12-23T18:58:43.001287Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.001339Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2025-12-23T18:58:43.001363Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.001384Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:34:2081] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:15:2062] 2025-12-23T18:58:43.001405Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:34:2081], cookie# 0 2025-12-23T18:58:43.001419Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:34:2081], cookie# 0 2025-12-23T18:58:43.001456Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:43.001494Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2025-12-23T18:58:43.001516Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:43.001557Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2025-12-23T18:58:43.001588Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.001658Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2025-12-23T18:58:43.001679Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2025-12-23T18:58:43.001695Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2025-12-23T18:58:43.001737Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2025-12-23T18:58:43.001758Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2025-12-23T18:58:43.001781Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] 2025-12-23T18:58:43.001822Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2025-12-23T18:58:43.001841Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:33:2080], replica: [1:4398070850163:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:34:2081], replica: [1:5497582477939:0] populator: [1:31:2078], replica: [1:2199047594611:0] populator: [1:32:2079], replica: [1:3298559222387:0] 2025-12-23T18:58:43.001966Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2025-12-23T18:58:43.012252Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2025-12-23T18:58:43.012327Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 >> TPopulatorTestWithResets::UpdateAck [GOOD] >> TPopulatorTest::RemoveDir [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2025-12-23T18:58:43.148049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:43.148107Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2025-12-23T18:58:43.147181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:43.147249Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... ck to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2025-12-23T18:58:43.516888Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-12-23T18:58:43.516921Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-12-23T18:58:43.516954Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-12-23T18:58:43.517211Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:99:2127], cookie# 100 2025-12-23T18:58:43.517313Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:97:2125], cookie# 100 2025-12-23T18:58:43.517583Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:98:2126], cookie# 100 2025-12-23T18:58:43.517604Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 2 FAKE_COORDINATOR: Add transaction: 100 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000037 2025-12-23T18:58:43.517844Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:99:2127], cookie# 
100 2025-12-23T18:58:43.517869Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 100 FAKE_COORDINATOR: Erasing txId 100 2025-12-23T18:58:43.518863Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 100, event size# 555, preserialized size# 119 2025-12-23T18:58:43.518902Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 7 2025-12-23T18:58:43.519334Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 100, event size# 314, preserialized size# 2 2025-12-23T18:58:43.519370Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], cookie# 100, is deletion# false, version: 3 2025-12-23T18:58:43.519437Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2025-12-23T18:58:43.519473Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2025-12-23T18:58:43.519516Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2025-12-23T18:58:43.519630Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2025-12-23T18:58:43.519663Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2025-12-23T18:58:43.519681Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2025-12-23T18:58:43.519737Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:12:2059], cookie# 100 2025-12-23T18:58:43.519774Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:15:2062], cookie# 100 2025-12-23T18:58:43.519794Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:18:2065], cookie# 100 2025-12-23T18:58:43.519845Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:97:2125], cookie# 100 2025-12-23T18:58:43.519875Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-12-23T18:58:43.519906Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 
72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-12-23T18:58:43.519924Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-12-23T18:58:43.519966Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:98:2126], cookie# 100 2025-12-23T18:58:43.519981Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 7 2025-12-23T18:58:43.520147Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:99:2127], cookie# 100 2025-12-23T18:58:43.520245Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:97:2125], cookie# 100 2025-12-23T18:58:43.520301Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:98:2126], cookie# 100 2025-12-23T18:58:43.520316Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 3 2025-12-23T18:58:43.520359Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:99:2127], cookie# 100 2025-12-23T18:58:43.520375Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2025-12-23T18:58:43.161705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:43.161759Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... BUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2025-12-23T18:58:43.546107Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 32] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2025-12-23T18:58:43.546156Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.546201Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/auth_groups PathId: [OwnerId: 72057594046678944, LocalPathId: 32] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.546323Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:24339059:0] }: sender# [1:97:2125] 2025-12-23T18:58:43.546388Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.546486Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2025-12-23T18:58:43.546551Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.546648Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 33] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2025-12-23T18:58:43.546707Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/resource_pools PathId: [OwnerId: 72057594046678944, LocalPathId: 33] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.546793Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:24339059:0] }: sender# [1:97:2125] 2025-12-23T18:58:43.546848Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.546908Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2025-12-23T18:58:43.546954Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.547084Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2025-12-23T18:58:43.547142Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.547240Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:24339059:0] }: sender# [1:97:2125] 2025-12-23T18:58:43.547292Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.547418Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2025-12-23T18:58:43.547487Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, 
LocalPathId: 35] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2025-12-23T18:58:43.547564Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.547625Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.547679Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:24339059:0] }: sender# [1:97:2125] 2025-12-23T18:58:43.547734Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:96:2124] 2025-12-23T18:58:43.547864Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2025-12-23T18:58:43.547920Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2025-12-23T18:58:43.547996Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:96:2124] 2025-12-23T18:58:43.548053Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.548144Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:24339059:0] }: sender# [1:97:2125] 2025-12-23T18:58:43.548206Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2124] 2025-12-23T18:58:43.548286Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2025-12-23T18:58:43.548345Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true 
DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2124] 2025-12-23T18:58:43.548438Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2025-12-23T18:58:43.548512Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:97:2125] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-12-23T18:58:43.548556Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:98:2126] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-12-23T18:58:43.548595Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:96:2124] 2025-12-23T18:58:43.548652Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2025-12-23T18:58:43.548731Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:96:2124] 2025-12-23T18:58:43.548889Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2025-12-23T18:58:43.548942Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2124] 2025-12-23T18:58:43.549000Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:99:2127] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] TestWaitNotification: OK eventTxId 100 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2025-12-23T18:58:43.156954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:43.157020Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... lator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2025-12-23T18:58:43.521353Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2025-12-23T18:58:43.521414Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:98:2126], cookie# 101 2025-12-23T18:58:43.521442Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 8 2025-12-23T18:58:43.521480Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:18:2065], cookie# 101 2025-12-23T18:58:43.521741Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:99:2127], cookie# 101 2025-12-23T18:58:43.521766Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:12:2059], cookie# 101 2025-12-23T18:58:43.521788Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:15:2062], cookie# 101 2025-12-23T18:58:43.522002Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:99:2127], cookie# 101 2025-12-23T18:58:43.522040Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] 
Version: 3 }: sender# [1:97:2125], cookie# 101 2025-12-23T18:58:43.522056Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 3 2025-12-23T18:58:43.522146Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:98:2126], cookie# 101 2025-12-23T18:58:43.522164Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:98:2126], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2025-12-23T18:58:43.523354Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 101, event size# 447, preserialized size# 65 2025-12-23T18:58:43.523399Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 9 2025-12-23T18:58:43.523475Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2025-12-23T18:58:43.523500Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2025-12-23T18:58:43.523520Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { 
Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2025-12-23T18:58:43.523727Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000038, drop txId: 101" Path: "/Root/DirB" PathId: 38 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 101, event size# 307, preserialized size# 0 2025-12-23T18:58:43.523749Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], cookie# 101, is deletion# true, version: 0 2025-12-23T18:58:43.523813Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:12:2059], cookie# 101 2025-12-23T18:58:43.523851Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:15:2062], cookie# 101 2025-12-23T18:58:43.523879Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:18:2065], cookie# 101 2025-12-23T18:58:43.523911Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:97:2125], cookie# 101 2025-12-23T18:58:43.523933Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2025-12-23T18:58:43.523956Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2025-12-23T18:58:43.523988Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2025-12-23T18:58:43.524121Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:98:2126], cookie# 101 2025-12-23T18:58:43.524141Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 9 2025-12-23T18:58:43.524166Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2025-12-23T18:58:43.524187Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2025-12-23T18:58:43.524212Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2025-12-23T18:58:43.524374Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:99:2127], cookie# 101 2025-12-23T18:58:43.524486Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:97:2125], cookie# 101 2025-12-23T18:58:43.524528Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:98:2126], cookie# 101 2025-12-23T18:58:43.524549Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 18446744073709551615 2025-12-23T18:58:43.524948Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:99:2127], cookie# 101 2025-12-23T18:58:43.524970Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |98.7%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! 
new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:78:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:81:2057] recipient: [13:80:2112] Leader for TabletID 72057594037927937 is [13:82:2113] sender: [13:83:2057] recipient: [13:80:2112] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:82:2113] Leader for TabletID 72057594037927937 is [13:82:2113] sender: [13:198:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:78:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:81:2057] recipient: [14:80:2112] Leader for TabletID 72057594037927937 is [14:82:2113] sender: [14:83:2057] recipient: [14:80:2112] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:82:2113] Leader for TabletID 72057594037927937 is [14:82:2113] sender: [14:198:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:79:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:82:2057] recipient: [15:81:2112] Leader for TabletID 72057594037927937 is [15:83:2113] sender: [15:84:2057] recipient: [15:81:2112] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:83:2113] Leader for TabletID 72057594037927937 is [15:83:2113] sender: [15:199:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:82:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:85:2057] recipient: [16:84:2115] Leader for TabletID 72057594037927937 is [16:86:2116] sender: [16:87:2057] recipient: [16:84:2115] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:86:2116] Leader for TabletID 72057594037927937 is [16:86:2116] sender: [16:202:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:82:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:85:2057] recipient: [17:84:2115] Leader for TabletID 72057594037927937 is [17:86:2116] sender: [17:87:2057] recipient: [17:84:2115] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:86:2116] Leader for TabletID 72057594037927937 is [17:86:2116] sender: [17:202:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:83:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:86:2057] recipient: [18:85:2115] Leader for TabletID 72057594037927937 is [18:87:2116] sender: [18:88:2057] recipient: [18:85:2115] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! 
new actor is[18:87:2116] Leader for TabletID 72057594037927937 is [18:87:2116] sender: [18:105:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:88:2057] recipient: [19:87:2117] Leader for TabletID 72057594037927937 is [19:89:2118] sender: [19:90:2057] recipient: [19:87:2117] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:89:2118] Leader for TabletID 72057594037927937 is [19:89:2118] sender: [19:205:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:88:2057] recipient: [20:87:2117] Leader for TabletID 72057594037927937 is [20:89:2118] sender: [20:90:2057] recipient: [20:87:2117] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:89:2118] Leader for TabletID 72057594037927937 is [20:89:2118] sender: [20:205:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:89:2057] recipient: [21:88:2117] Leader for TabletID 72057594037927937 is [21:90:2118] sender: [21:91:2057] recipient: [21:88:2117] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! 
new actor is[21:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:89:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:92:2057] recipient: [13:91:2119] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:94:2057] recipient: [13:91:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:93:2120] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:209:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:78:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:81:2057] recipient: [17:80:2112] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:83:2057] recipient: [17:80:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:82:2113] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:198:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:53:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:79:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:81:2112] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:84:2057] recipient: [18:81:2112] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:83:2113] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:199:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:82:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:84:2115] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:87:2057] recipient: [20:84:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:86:2116] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:202:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:83:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:85:2115] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:88:2057] recipient: [21:85:2115] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:87:2116] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:105:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:53:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:85:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:88:2057] recipient: [22:87:2117] Leader for TabletID 72057594037927937 is [22:89:2118] sender: [22:90:2057] recipient: [22:87:2117] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! new actor is[22:89:2118] Leader for TabletID 72057594037927937 is [22:89:2118] sender: [22:205:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:52:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:85:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:88:2057] recipient: [23:87:2117] Leader for TabletID 72057594037927937 is [23:89:2118] sender: [23:90:2057] recipient: [23:87:2117] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! 
new actor is[23:89:2118] Leader for TabletID 72057594037927937 is [23:89:2118] sender: [23:205:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:86:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:89:2057] recipient: [24:88:2117] Leader for TabletID 72057594037927937 is [24:90:2118] sender: [24:91:2057] recipient: [24:88:2117] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! new actor is[24:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TAuditTest::ModifyingMethodsAlwaysAuditable [GOOD] >> TUrlMatcherTest::MatchExactPathOnly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2025-12-23T18:57:00.361287Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:00.382030Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:00.382216Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:00.387651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:00.387831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:00.387983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:00.388072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:00.388151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:00.388220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:00.388284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:00.388359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:00.388443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:00.388541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:00.388638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:00.388702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:00.388764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:00.408811Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:00.408960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:00.409001Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:00.409128Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:00.409257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:00.409306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:00.409339Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:00.409431Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:00.409489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:00.409535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:00.409555Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:00.409673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:00.409716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:00.409739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:00.409763Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:00.409829Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:00.409872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:00.409914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:00.409935Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:00.409970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:00.409992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:00.410013Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:00.410040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:00.410070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:00.410098Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:00.410237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:00.410270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:00.410287Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:00.410368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:00.410411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:00.410442Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:00.410492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:00.410517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:00.410538Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:00.410565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:00.410586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:00.410610Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:00.410695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:00.410727Z node 1 :TX_COLUMNSHARD WAR ... 
:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-12-23T18:58:43.181157Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:58:43.181191Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:58:43.181591Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:58:43.181736Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:43.181767Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:58:43.181906Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2025-12-23T18:58:43.181965Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2025-12-23T18:58:43.182170Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:458:2470];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-12-23T18:58:43.182288Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:43.182400Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:43.182506Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:43.182722Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:58:43.182861Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:43.182971Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:43.183172Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [54:459:2471] finished for tablet 9437184 2025-12-23T18:58:43.183568Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[54:458:2470];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.009}],"full":{"a":103214566,"name":"_full_task","f":103214566,"d_finished":0,"c":0,"l":103223833,"d":9267},"events":[{"name":"bootstrap","f":103214806,"d_finished":1099,"c":1,"l":103215905,"d":1099},{"a":103223328,"name":"ack","f":103222195,"d_finished":949,"c":1,"l":103223144,"d":1454},{"a":103223318,"name":"processing","f":103216023,"d_finished":2545,"c":3,"l":103223146,"d":3060},{"name":"ProduceResults","f":103215566,"d_finished":1724,"c":6,"l":103223621,"d":1724},{"a":103223624,"name":"Finish","f":103223624,"d_finished":0,"c":0,"l":103223833,"d":209},{"name":"task_result","f":103216037,"d_finished":1552,"c":2,"l":103221829,"d":1552}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:43.183624Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:458:2470];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:58:43.183983Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[54:458:2470];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.009}],"full":{"a":103214566,"name":"_full_task","f":103214566,"d_finished":0,"c":0,"l":103224276,"d":9710},"events":[{"name":"bootstrap","f":103214806,"d_finished":1099,"c":1,"l":103215905,"d":1099},{"a":103223328,"name":"ack","f":103222195,"d_finished":949,"c":1,"l":103223144,"d":1897},{"a":103223318,"name":"processing","f":103216023,"d_finished":2545,"c":3,"l":103223146,"d":3503},{"name":"ProduceResults","f":103215566,"d_finished":1724,"c":6,"l":103223621,"d":1724},{"a":103223624,"name":"Finish","f":103223624,"d_finished":0,"c":0,"l":103224276,"d":652},{"name":"task_result","f":103216037,"d_finished":1552,"c":2,"l":103221829,"d":1552}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:43.184034Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:58:43.171832Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2025-12-23T18:58:43.184065Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:58:43.184180Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::ModifyingMethodsAlwaysAuditable [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TUrlMatcherTest::MatchExactPathOnly [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |98.7%| [TA] $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TStorageTenantTest::GenericCases >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> TStorageTenantTest::Boot >> TStorageTenantTest::LsLs >> TStorageTenantTest::CreateTableInsideSubDomain2 >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p >> KqpWorkload::KV [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] >> TCacheTestWithRealSystemViewPaths::SystemViews >> TCacheTest::RacyRecreateAndSync >> TCacheTest::MigrationLostMessage >> TCacheTest::Attributes >> TCacheTest::SystemViews >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] >> TCacheTest::SystemViews [FAIL] >> TCacheTest::TableSchemaVersion >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::RacyCreateAndSync ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [GOOD] Test command err: Trying to start YDB, gRPC: 16387, MsgBus: 3605 2025-12-23T18:57:31.435727Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129529018601142:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:31.435781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fe1/r3tmp/tmpxeR25Q/pdisk_1.dat 2025-12-23T18:57:31.757317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:31.777616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:31.777734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:31.851897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:31.855942Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:31.857392Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129529018601106:2081] 1766516251429679 != 1766516251429682 TServer::EnableGrpc on GrpcPort 16387, node 1 2025-12-23T18:57:31.953310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:57:32.183361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:32.183445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:32.183469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:32.183553Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:32.442564Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:3605 TClient is connected to server localhost:3605 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516251906 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:33.089583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:34.461381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129541903503883:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.461528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.461969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129541903503892:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.462038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:34.993068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.463713Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546198472753:2452], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.463819Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.464043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546198472758:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.464111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129546198472759:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.464175Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:35.467488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:35.476690Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129546198472762:2457], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T18:57:35.564790Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129546198472813:3557] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:36.435650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129529018601142:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:36.435746Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:57:46.705357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:57:46.705410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded took: 0.092968s took: 0.092829s took: 0.092771s took: 0.092855s took: 0.092770s took: 0.092842s took: 0.092971s took: 0.093882s took: 0.093943s took: 0.094599s took: 0.088752s took: 0.089827s took: 0.090317s took: 0.090320s took: 0.090333s took: 0.090523s took: 0.090569s took: 0.090768s took: 0.091895s took: 0.092034s took: 0.176481s took: 0.176440s took: 0.176569s took: 0.177549s took: 0.177294s took: 0.178979s took: 0.179939s took: 0.179926s took: 0.179613s took: 0.179939s took: 0.022102s took: 0.022234s took: 0.022759s took: 0.023378s took: 0.025364s took: 0.026143s took: 0.028541s took: 0.031259s took: 0.032796s took: 0.035831s took: 0.089139s took: 0.090802s took: 0.091673s took: 0.114549s took: 0.114600s took: 0.112991s took: 0.113201s took: 0.113796s took: 0.113830s took: 0.113903s 2025-12-23T18:58:48.455357Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-12-23T18:58:48.455407Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-12-23T18:58:48.455424Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-12-23T18:58:48.455439Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-12-23T18:58:48.455456Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-12-23T18:58:48.455473Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-12-23T18:58:48.455492Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-12-23T18:58:48.457442Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-12-23T18:58:48.457481Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-12-23T18:58:48.457498Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-12-23T18:58:48.457518Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-12-23T18:58:48.472266Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-12-23T18:58:48.472305Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-12-23T18:58:48.472322Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-12-23T18:58:48.472337Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2025-12-23T18:58:48.472353Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-12-23T18:58:48.472370Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-12-23T18:58:48.472386Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-12-23T18:58:48.473132Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-12-23T18:58:48.473168Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-12-23T18:58:48.473198Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-12-23T18:58:48.473222Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2025-12-23T18:58:48.473243Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-12-23T18:58:48.473260Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-12-23T18:58:48.473306Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-12-23T18:58:48.473329Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2025-12-23T18:58:48.473346Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2025-12-23T18:58:48.473363Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-12-23T18:58:48.473378Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-12-23T18:58:48.473394Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 
2025-12-23T18:58:48.473411Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-12-23T18:58:48.473428Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-12-23T18:58:48.473445Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-12-23T18:58:48.473466Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-12-23T18:58:48.473495Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2025-12-23T18:58:48.473512Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2025-12-23T18:58:48.473527Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-12-23T18:58:48.473542Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-12-23T18:58:48.473558Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-12-23T18:58:48.486128Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TCacheTestWithRealSystemViewPaths::SystemViews [GOOD] >> TCacheTestWithRealSystemViewPaths::CheckSystemViewAccess >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> TCacheTest::RacyCreateAndSync [GOOD] >> TCacheTestWithRealSystemViewPaths::CheckSystemViewAccess [GOOD] >> TCacheTest::CheckAccess [GOOD] >> TCacheTest::CheckSystemViewAccess >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::MigrationUndo >> TCacheTest::TableSchemaVersion [GOOD] >> TCacheTest::CheckSystemViewAccess [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithRealSystemViewPaths::CheckSystemViewAccess [GOOD] Test command err: 2025-12-23T18:58:49.956028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:49.956113Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... ate->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 
State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 
281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2025-12-23T18:58:50.931338Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-12-23T18:58:50.935842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:58:50.938786Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-12-23T18:58:50.942127Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [2:430:2410], for# user1@builtin, access# DescribeSchema 2025-12-23T18:58:50.942836Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [2:436:2416], for# user1@builtin, access# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyCreateAndSync [GOOD] Test command err: 2025-12-23T18:58:49.954052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:49.955229Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 2 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 
State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan 
to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 
FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2025-12-23T18:58:50.960564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:58:50.972319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader [GOOD] |98.7%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TCacheTest::MigrationUndo [GOOD] >> TStorageTenantTest::Boot [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit >> DataShardTxOrder::ZigZag_oo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckSystemViewAccess [GOOD] Test command err: 2025-12-23T18:58:49.972677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:49.972761Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... ATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: 
minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at 
step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2025-12-23T18:58:51.492832Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-12-23T18:58:51.497034Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-12-23T18:58:51.503784Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-12-23T18:58:51.504511Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TestModificationResult got TxId: 102, wait until txId: 102 2025-12-23T18:58:51.510074Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:441:2421], for# user1@builtin, access# DescribeSchema 2025-12-23T18:58:51.511091Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:447:2427], for# user1@builtin, access# |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TStorageTenantTest::LsLs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2025-12-23T18:58:49.977546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:49.977626Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... rUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/compile_cache_queries', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.954772Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720668 2025-12-23T18:58:51.954797Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_request_units_one_minute', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.954825Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_read_bytes_one_minute', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.955025Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720669 2025-12-23T18:58:51.955070Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720670 2025-12-23T18:58:51.955120Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pool_classifiers', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.955220Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/nodes', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.955347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720671 2025-12-23T18:58:51.955377Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_hour', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.955488Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected 
because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720672 2025-12-23T18:58:51.955511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720673 2025-12-23T18:58:51.955535Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_minute', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.955661Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720674 2025-12-23T18:58:51.955690Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_minute', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.955715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720675 2025-12-23T18:58:51.955745Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_hour', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.955859Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_request_units_one_hour', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.955943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720676 2025-12-23T18:58:51.955989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720677 2025-12-23T18:58:51.956016Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_hour', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.956084Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/pg_tables', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.956136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720678 2025-12-23T18:58:51.956170Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_duration_one_minute', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.956222Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720679 2025-12-23T18:58:51.956252Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at 
schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_group_members', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.956297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720680 2025-12-23T18:58:51.956323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720681 2025-12-23T18:58:51.956353Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_users', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.956377Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pools', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.956404Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720682 2025-12-23T18:58:51.956432Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/streaming_queries', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.956475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720683 2025-12-23T18:58:51.956495Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_permissions', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.956538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720684 2025-12-23T18:58:51.956689Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720685 2025-12-23T18:58:51.956724Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_minute', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.956770Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6048: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720686 2025-12-23T18:58:51.956813Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_effective_permissions', reason: Schema is in ReadOnly mode 2025-12-23T18:58:51.956867Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:689:2595] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_groups', reason: Schema is in ReadOnly mode TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 TestWaitNotification: OK eventTxId 105 TestModificationResults wait 
txId: 106 2025-12-23T18:58:51.958696Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__operation_upgrade_subdomain.cpp:1466: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-12-23T18:58:51.958851Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:601) 2025-12-23T18:58:51.961305Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6291: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-12-23T18:58:51.962784Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6291: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409549 2025-12-23T18:58:51.963655Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6291: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-12-23T18:58:51.967334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6291: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-12-23T18:58:51.969799Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 2025-12-23T18:58:51.974866Z node 2 :TX_DATASHARD ERROR: datashard.cpp:3603: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186233409548 Leader for TabletID 72057594046678944 is [2:408:2397] sender: [2:734:2067] recipient: [2:49:2096] Leader for TabletID 72057594046678944 is [2:408:2397] sender: [2:737:2067] recipient: [2:736:2628] Leader for TabletID 72057594046678944 is [2:738:2629] sender: [2:739:2067] recipient: [2:736:2628] 2025-12-23T18:58:52.029692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:52.029765Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2025-12-23T18:58:34.180848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:34.180888Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:34.182033Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:34.192959Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:34.193203Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:34.193397Z 
node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:34.225104Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:34.230299Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:34.230364Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:34.231522Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:34.231585Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:34.231623Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:34.231884Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:34.231940Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:34.231989Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:58:34.304348Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:34.329827Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:34.329956Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:34.330019Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:58:34.330044Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:34.330070Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:34.330097Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:34.330194Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:34.330243Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:34.330461Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:34.330533Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:34.330665Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:34.330703Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:34.330730Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:34.330775Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:34.330808Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 
2025-12-23T18:58:34.330831Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:34.330856Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:34.330911Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:34.330937Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:34.330964Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:34.336601Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:34.336684Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:34.336784Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:34.336953Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:34.337002Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:34.337059Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:58:34.337096Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:34.337132Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:34.337167Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:58:34.337197Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:34.337465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:34.337516Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:58:34.337568Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:34.337611Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:34.337649Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:34.337674Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:34.337723Z node 1 :TX_DATASHARD 
TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:34.337758Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:34.337780Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:34.349761Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:34.349824Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:34.349860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:34.349922Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:34.349987Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:58:34.350364Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:34.350400Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:34.350436Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:34.350529Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:34.350555Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:34.350641Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:34.350673Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:34.350709Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:34.350781Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:34.353316Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:58:34.353370Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:34.353523Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:34.353549Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:34.353595Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute 
at 9437184 2025-12-23T18:58:34.353629Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:34.353653Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:34.353682Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:58:34.353725Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... ions 2025-12-23T18:58:52.149542Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:52.149719Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [6:348:2315], Recipient [6:348:2315]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:52.149751Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:52.149809Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2025-12-23T18:58:52.149840Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:52.149863Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2025-12-23T18:58:52.149889Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2025-12-23T18:58:52.149912Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2025-12-23T18:58:52.149941Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2025-12-23T18:58:52.149974Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2025-12-23T18:58:52.149993Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2025-12-23T18:58:52.150013Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2025-12-23T18:58:52.150628Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2025-12-23T18:58:52.150679Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2025-12-23T18:58:52.150720Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2025-12-23T18:58:52.150772Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2025-12-23T18:58:52.150798Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2025-12-23T18:58:52.150830Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2025-12-23T18:58:52.150848Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2025-12-23T18:58:52.150865Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add 
[1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2025-12-23T18:58:52.150893Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2025-12-23T18:58:52.150939Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437185 2025-12-23T18:58:52.150962Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437185 2025-12-23T18:58:52.151001Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437185 2025-12-23T18:58:52.151043Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2025-12-23T18:58:52.151067Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2025-12-23T18:58:52.151098Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2025-12-23T18:58:52.151118Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2025-12-23T18:58:52.151158Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2025-12-23T18:58:52.151176Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2025-12-23T18:58:52.151195Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2025-12-23T18:58:52.151216Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2025-12-23T18:58:52.151234Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2025-12-23T18:58:52.151252Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2025-12-23T18:58:52.151269Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2025-12-23T18:58:52.151289Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2025-12-23T18:58:52.151311Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2025-12-23T18:58:52.151327Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2025-12-23T18:58:52.151345Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2025-12-23T18:58:52.151367Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2025-12-23T18:58:52.151399Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2025-12-23T18:58:52.151417Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-12-23T18:58:52.151435Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to 
execution unit BlockFailPoint 2025-12-23T18:58:52.151451Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BlockFailPoint 2025-12-23T18:58:52.151489Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2025-12-23T18:58:52.151517Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BlockFailPoint 2025-12-23T18:58:52.151544Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-12-23T18:58:52.151571Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-12-23T18:58:52.151969Z node 6 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-12-23T18:58:52.152026Z node 6 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-12-23T18:58:52.152084Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2025-12-23T18:58:52.152113Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-12-23T18:58:52.152178Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-12-23T18:58:52.152212Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-12-23T18:58:52.152412Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-12-23T18:58:52.152436Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-12-23T18:58:52.152458Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-12-23T18:58:52.152482Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-12-23T18:58:52.152508Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2025-12-23T18:58:52.152525Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-12-23T18:58:52.152544Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437185 has finished 2025-12-23T18:58:52.152569Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:52.152592Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2025-12-23T18:58:52.152615Z node 6 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2025-12-23T18:58:52.152643Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready 
operations at 9437185 2025-12-23T18:58:52.166711Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-12-23T18:58:52.166844Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-12-23T18:58:52.166930Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2025-12-23T18:58:52.167009Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-12-23T18:58:52.167103Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:103:2137], exec latency: 0 ms, propose latency: 2 ms 2025-12-23T18:58:52.167175Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-12-23T18:58:52.167784Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-12-23T18:58:52.167829Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-12-23T18:58:52.167874Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:52.167906Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-12-23T18:58:52.167956Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:103:2137], exec latency: 0 ms, propose latency: 2 ms 2025-12-23T18:58:52.167995Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2025-12-23T18:58:49.012318Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129865093298856:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:49.012410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:58:49.049623Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587129862676393330:2162];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:49.050266Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056d9/r3tmp/tmpRaegqa/pdisk_1.dat 2025-12-23T18:58:49.232553Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:58:49.235127Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:58:49.290614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:49.291390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:49.293208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:49.293265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:49.333757Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:49.349182Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:58:49.349426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:49.350951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:49.499527Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T18:58:49.531007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:1459 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:58:49.557172Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587129865093299079:2133] Handle TEvNavigate describe path dc-1 2025-12-23T18:58:49.557257Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587129865093299855:2742] HANDLE EvNavigateScheme dc-1 2025-12-23T18:58:49.557362Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587129865093299102:2139], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:49.557512Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587129865093299324:2289][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587129865093299102:2139], cookie# 1 2025-12-23T18:58:49.559236Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129865093299359:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129865093299356:2289], cookie# 1 2025-12-23T18:58:49.559293Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129865093299360:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129865093299357:2289], cookie# 1 2025-12-23T18:58:49.559310Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129865093299361:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129865093299358:2289], cookie# 1 2025-12-23T18:58:49.559343Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129860798331452:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129865093299359:2289], cookie# 1 2025-12-23T18:58:49.559351Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129860798331455:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129865093299360:2289], cookie# 1 2025-12-23T18:58:49.559389Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129860798331458:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129865093299361:2289], cookie# 1 2025-12-23T18:58:49.559420Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129865093299360:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129860798331455:2055], cookie# 1 2025-12-23T18:58:49.559434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129865093299359:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129860798331452:2052], cookie# 1 2025-12-23T18:58:49.559457Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129865093299361:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129860798331458:2058], cookie# 1 2025-12-23T18:58:49.559487Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129865093299324:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129865093299357:2289], cookie# 1 2025-12-23T18:58:49.559524Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587129865093299324:2289][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:58:49.559546Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129865093299324:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129865093299356:2289], cookie# 1 2025-12-23T18:58:49.559562Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587129865093299324:2289][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:58:49.559604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129865093299324:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129865093299358:2289], cookie# 1 2025-12-23T18:58:49.559647Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587129865093299324:2289][/dc-1] Sync cookie mismatch: sender# [1:7587129865093299358:2289], cookie# 1, current cookie# 0 2025-12-23T18:58:49.559678Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587129865093299102:2139], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:58:49.559787Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587129865093299102:2139], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587129865093299324:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:58:49.559884Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587129865093299102:2139], cacheItem# { Subscriber: { Subscriber: [1:7587129865093299324:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:58:49.563029Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587129865093299856:2743], recipient# [1:7587129865093299855:2742], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:58:49.563152Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587129865093299855:2742] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:58:49.603433Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587129865093299855:2742] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:58:49.606602Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587129865093299855:2742] Handle TEvDescribeSchemeResult Forward to# [1:7587129865093299854:2741] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 Path ... 87129862676393412:2108], cacheItem# { Subscriber: { Subscriber: [2:7587129866971360797:2119] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:52.054192Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7587129875561295508:2129], recipient# [2:7587129875561295507:2299], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:52.054337Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:52.272209Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7587129862676393412:2108], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:52.272366Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7587129862676393412:2108], cacheItem# { Subscriber: { Subscriber: [2:7587129871266328148:2123] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:52.272435Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7587129862676393412:2108], cacheItem# { Subscriber: { Subscriber: [2:7587129871266328149:2124] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:52.272552Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7587129875561295509:2130], recipient# [2:7587129871266328139:2296], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:52.272714Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7587129871266328139:2296], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:52.312756Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7587129871266328137:2122][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7587129871266328141:2122] 2025-12-23T18:58:52.312770Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7587129871266328149:2124][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7587129871266328150:2124] 2025-12-23T18:58:52.312830Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7587129871266328149:2124][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7587129862676393412:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:52.312830Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7587129871266328137:2122][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7587129862676393412:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:52.313120Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7587129871266328137:2122][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7587129871266328142:2122] 2025-12-23T18:58:52.313147Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7587129871266328149:2124][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7587129871266328151:2124] 2025-12-23T18:58:52.313168Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7587129871266328137:2122][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7587129862676393412:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:52.313186Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7587129871266328149:2124][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7587129862676393412:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:52.313205Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7587129871266328149:2124][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7587129871266328152:2124] 2025-12-23T18:58:52.313229Z node 2 
:SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7587129871266328149:2124][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7587129862676393412:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:52.313290Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7587129871266328137:2122][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7587129871266328143:2122] 2025-12-23T18:58:52.313326Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7587129871266328137:2122][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7587129862676393412:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:52.313674Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7587129871266328148:2123][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7587129871266328157:2123] 2025-12-23T18:58:52.313714Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7587129871266328148:2123][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7587129862676393412:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:52.313757Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7587129871266328148:2123][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7587129871266328158:2123] 2025-12-23T18:58:52.313784Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7587129871266328148:2123][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7587129862676393412:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:52.314076Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7587129871266328148:2123][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7587129871266328159:2123] 2025-12-23T18:58:52.314117Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7587129871266328148:2123][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7587129862676393412:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] >> TxUsage::WriteToTopic_Demo_41_Table >> BasicUsage::CreateTopicWithCustomName >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Table >> BasicUsage::CreateTopicWithStreamingConsumer >> TxUsage::WriteToTopic_Demo_11_Table >> TxUsage::WriteToTopic_Demo_21_RestartNo_Table >> TxUsage::Sinks_Oltp_WriteToTopic_1_Table >> TxUsage::WriteToTopic_Demo_12_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] Test command err: 2025-12-23T18:58:48.998244Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129858685158361:2156];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:48.998940Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056db/r3tmp/tmpm5wgc7/pdisk_1.dat 2025-12-23T18:58:49.214375Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:58:49.290133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:49.290240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:49.334789Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:49.343326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:49.373268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:8823 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:58:49.579917Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587129862980125782:2119] Handle TEvNavigate describe path dc-1 2025-12-23T18:58:49.579998Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587129862980126592:2760] HANDLE EvNavigateScheme dc-1 2025-12-23T18:58:49.580177Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587129862980125788:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:49.580315Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587129862980126043:2287][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587129862980125788:2121], cookie# 1 2025-12-23T18:58:49.581607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129862980126052:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129862980126049:2287], cookie# 1 2025-12-23T18:58:49.581660Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129862980126053:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129862980126050:2287], cookie# 1 2025-12-23T18:58:49.581678Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129862980126054:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129862980126051:2287], cookie# 1 2025-12-23T18:58:49.581711Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129858685158174:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129862980126053:2287], cookie# 1 2025-12-23T18:58:49.581718Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129858685158171:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129862980126052:2287], cookie# 1 2025-12-23T18:58:49.581736Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129858685158177:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129862980126054:2287], cookie# 1 2025-12-23T18:58:49.581773Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129862980126053:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129858685158174:2054], cookie# 1 2025-12-23T18:58:49.581786Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129862980126054:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129858685158177:2057], cookie# 1 2025-12-23T18:58:49.581814Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129862980126052:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129858685158171:2051], cookie# 1 2025-12-23T18:58:49.581868Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129862980126043:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129862980126050:2287], cookie# 1 2025-12-23T18:58:49.581892Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587129862980126043:2287][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:58:49.581907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129862980126043:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129862980126051:2287], cookie# 1 2025-12-23T18:58:49.581986Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587129862980126043:2287][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:58:49.582035Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129862980126043:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129862980126049:2287], cookie# 1 2025-12-23T18:58:49.582052Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587129862980126043:2287][/dc-1] Sync cookie mismatch: sender# [1:7587129862980126049:2287], cookie# 1, current cookie# 0 2025-12-23T18:58:49.582093Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587129862980125788:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:58:49.582243Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587129862980125788:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587129862980126043:2287] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:58:49.582387Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587129862980125788:2121], cacheItem# { Subscriber: { Subscriber: [1:7587129862980126043:2287] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:58:49.584655Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587129862980126593:2761], recipient# [1:7587129862980126592:2760], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:58:49.584749Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587129862980126592:2760] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:58:49.635278Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587129862980126592:2760] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:58:49.638651Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587129862980126592:2760] Handle TEvDescribeSchemeResult Forward to# [1:7587129862980126591:2759] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateS ... 
esponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587129875865029075:3182] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1766516332100 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] DomainId: [OwnerId: 72057594046644480, LocalPathId: 38] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2025-12-23T18:58:52.147568Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587129862980125788:2121], cacheItem# { Subscriber: { Subscriber: [1:7587129875865029075:3182] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1766516332100 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] DomainId: [OwnerId: 72057594046644480, LocalPathId: 38] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/SimpleTable TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-12-23T18:58:52.147773Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587129875865029086:3187], recipient# [1:7587129875865029085:3186], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:39:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 38] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 38] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:58:52.147831Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587129875865029085:3186] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:58:52.147887Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587129875865029085:3186] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2025-12-23T18:58:52.149063Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587129875865029085:3186] Handle TEvDescribeSchemeResult Forward to# [1:7587129875865029084:3185] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1766516332100 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 
NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1766516332100 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: ... (TRUNCATED) 2025-12-23T18:58:52.174924Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-12-23T18:58:52.175459Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7587129858685158171:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7587129868785131133:2102] 2025-12-23T18:58:52.175493Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7587129858685158171:2051] Unsubscribe: subscriber# [3:7587129868785131133:2102], path# /dc-1/USER_0 2025-12-23T18:58:52.175526Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7587129858685158174:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7587129868785131136:2102] 2025-12-23T18:58:52.175537Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7587129858685158174:2054] Unsubscribe: subscriber# [3:7587129868785131136:2102], path# /dc-1/USER_0 2025-12-23T18:58:52.175558Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7587129858685158177:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7587129868785131138:2102] 2025-12-23T18:58:52.175568Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7587129858685158177:2057] Unsubscribe: subscriber# [3:7587129868785131138:2102], path# /dc-1/USER_0 2025-12-23T18:58:52.176035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-12-23T18:58:52.845024Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587129868785131265:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:52.845156Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587129868785131265:2109], cacheItem# { Subscriber: { Subscriber: 
[3:7587129873080098756:2222] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:52.845282Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587129877375066277:2358], recipient# [3:7587129877375066276:2312], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2025-12-23T18:57:09.007979Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:09.030239Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:09.030471Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:57:09.036797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:09.036989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:09.037181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:09.037297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:09.037414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:09.037532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:09.037631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:09.037748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:09.037865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:09.037992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:09.038103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:09.038209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:09.038301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:09.060647Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:57:09.060828Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:09.060895Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:09.061048Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:09.061220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:09.061282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:09.061324Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:09.061412Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:09.061473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:09.061511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:09.061573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:09.061721Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:09.061765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:09.061793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:09.061811Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:09.061903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:57:09.061963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:09.062004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:09.062025Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:57:09.062070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:09.062100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:09.062126Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:57:09.062173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:09.062210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:09.062241Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:57:09.062445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:09.062483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:09.062504Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:57:09.062652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:09.062701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:09.062731Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:57:09.062814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:09.062847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:09.062869Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:57:09.062899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:09.062947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:57:09.062974Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:57:09.063101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:57:09.063143Z node 1 :TX_COLUMNSHARD WAR ... 
=1;finished=1; 2025-12-23T18:58:51.544592Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-12-23T18:58:51.544623Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2025-12-23T18:58:51.545001Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:58:51.545138Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:51.545167Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-12-23T18:58:51.545272Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2025-12-23T18:58:51.545321Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2025-12-23T18:58:51.545533Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:458:2470];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-12-23T18:58:51.545649Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:51.545790Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:51.545889Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:51.546069Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-12-23T18:58:51.546169Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:51.546297Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:51.546474Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [54:459:2471] finished for tablet 9437184 2025-12-23T18:58:51.546852Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[54:458:2470];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":102930550,"name":"_full_task","f":102930550,"d_finished":0,"c":0,"l":102939644,"d":9094},"events":[{"name":"bootstrap","f":102930798,"d_finished":1060,"c":1,"l":102931858,"d":1060},{"a":102939184,"name":"ack","f":102938114,"d_finished":920,"c":1,"l":102939034,"d":1380},{"a":102939175,"name":"processing","f":102932015,"d_finished":2430,"c":3,"l":102939036,"d":2899},{"name":"ProduceResults","f":102931496,"d_finished":1685,"c":6,"l":102939450,"d":1685},{"a":102939454,"name":"Finish","f":102939454,"d_finished":0,"c":0,"l":102939644,"d":190},{"name":"task_result","f":102932033,"d_finished":1466,"c":2,"l":102937768,"d":1466}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:51.546919Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:458:2470];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-12-23T18:58:51.547346Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[54:458:2470];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":102930550,"name":"_full_task","f":102930550,"d_finished":0,"c":0,"l":102940089,"d":9539},"events":[{"name":"bootstrap","f":102930798,"d_finished":1060,"c":1,"l":102931858,"d":1060},{"a":102939184,"name":"ack","f":102938114,"d_finished":920,"c":1,"l":102939034,"d":1825},{"a":102939175,"name":"processing","f":102932015,"d_finished":2430,"c":3,"l":102939036,"d":3344},{"name":"ProduceResults","f":102931496,"d_finished":1685,"c":6,"l":102939450,"d":1685},{"a":102939454,"name":"Finish","f":102939454,"d_finished":0,"c":0,"l":102940089,"d":635},{"name":"task_result","f":102932033,"d_finished":1466,"c":2,"l":102937768,"d":1466}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2025-12-23T18:58:51.547407Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-12-23T18:58:51.535470Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2025-12-23T18:58:51.547435Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2025-12-23T18:58:51.547540Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:459:2471];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] Test command err: 2025-12-23T18:58:48.961789Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129859079841421:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:48.961913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # 
/home/runner/.ya/build/build_root/8ptz/0056da/r3tmp/tmp33NGc2/pdisk_1.dat 2025-12-23T18:58:49.235035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:58:49.289513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:49.289734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:49.320845Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129859079841384:2082] 1766516328960255 != 1766516328960258 2025-12-23T18:58:49.333455Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:49.337934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:49.431901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:29709 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:58:49.553315Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587129863374808959:2117] Handle TEvNavigate describe path dc-1 2025-12-23T18:58:49.553409Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587129863374809735:2722] HANDLE EvNavigateScheme dc-1 2025-12-23T18:58:49.553547Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587129863374808965:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:49.553700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587129863374809212:2281][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587129863374808965:2119], cookie# 1 2025-12-23T18:58:49.555821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129863374809218:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129863374809214:2281], cookie# 1 2025-12-23T18:58:49.555881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129863374809219:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129863374809216:2281], cookie# 1 2025-12-23T18:58:49.555899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129863374809220:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129863374809217:2281], cookie# 1 2025-12-23T18:58:49.555952Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129859079841352:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129863374809218:2281], cookie# 1 2025-12-23T18:58:49.555972Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129859079841355:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: 
sender# [1:7587129863374809219:2281], cookie# 1 2025-12-23T18:58:49.556046Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129859079841358:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129863374809220:2281], cookie# 1 2025-12-23T18:58:49.556081Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129863374809218:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129859079841352:2050], cookie# 1 2025-12-23T18:58:49.556110Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129863374809219:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129859079841355:2053], cookie# 1 2025-12-23T18:58:49.556128Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129863374809220:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129859079841358:2056], cookie# 1 2025-12-23T18:58:49.556201Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129863374809212:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129863374809214:2281], cookie# 1 2025-12-23T18:58:49.556231Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587129863374809212:2281][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:58:49.556262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129863374809212:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129863374809216:2281], cookie# 1 2025-12-23T18:58:49.556292Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587129863374809212:2281][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:58:49.556340Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129863374809212:2281][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129863374809217:2281], cookie# 1 2025-12-23T18:58:49.556365Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587129863374809212:2281][/dc-1] Sync cookie mismatch: sender# [1:7587129863374809217:2281], cookie# 1, current cookie# 0 2025-12-23T18:58:49.556401Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587129863374808965:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:58:49.556523Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587129863374808965:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587129863374809212:2281] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:58:49.556648Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# 
[1:7587129863374808965:2119], cacheItem# { Subscriber: { Subscriber: [1:7587129863374809212:2281] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:58:49.559430Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587129863374809736:2723], recipient# [1:7587129863374809735:2722], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:58:49.559493Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587129863374809735:2722] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:58:49.587972Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587129863374809735:2722] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:58:49.591199Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587129863374809735:2722] Handle TEvDescribeSchemeResult Forward to# [1:7587129863374809734:2721] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: 
"!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ... nder# [1:7587129859079841352:2050] 2025-12-23T18:58:51.851102Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587129859079841355:2053] Subscribe: subscriber# [1:7587129871964745269:3425], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:58:51.851125Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7587129859079841358:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7587129871964745270:3425] 2025-12-23T18:58:51.851139Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7587129859079841358:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-12-23T18:58:51.851169Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7587129871964745254:3425][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7587129871964745265:3425] 2025-12-23T18:58:51.851171Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7587129859079841358:2056] Subscribe: subscriber# [1:7587129871964745270:3425], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-12-23T18:58:51.851204Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587129859079841352:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587129871964745268:3425] 2025-12-23T18:58:51.851212Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7587129871964745269:3425][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7587129859079841355:2053] 2025-12-23T18:58:51.851244Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7587129871964745270:3425][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7587129859079841358:2056] 2025-12-23T18:58:51.851249Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587129859079841355:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587129871964745269:3425] 2025-12-23T18:58:51.851293Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7587129859079841358:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7587129871964745270:3425] 2025-12-23T18:58:51.851295Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7587129871964745254:3425][/dc-1/.metadata/workload_manager/running_requests] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7587129871964745266:3425] 2025-12-23T18:58:51.851324Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7587129871964745254:3425][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7587129863374808965:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:51.851348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7587129871964745254:3425][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7587129871964745267:3425] 2025-12-23T18:58:51.851354Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587129863374808965:2119], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-12-23T18:58:51.851370Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7587129871964745254:3425][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7587129863374808965:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:51.851390Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587129863374808965:2119], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7587129871964745254:3425] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:58:51.851453Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587129863374808965:2119], cacheItem# { Subscriber: { Subscriber: [1:7587129871964745254:3425] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:51.851547Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587129871964745271:3427], recipient# [1:7587129871964745247:2327], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 
2025-12-23T18:58:51.965717Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587129863374808965:2119], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:51.965841Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587129863374808965:2119], cacheItem# { Subscriber: { Subscriber: [1:7587129863374810035:2945] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:51.965920Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587129871964745276:3431], recipient# [1:7587129871964745275:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:52.850950Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587129863374808965:2119], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:52.851079Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587129863374808965:2119], cacheItem# { Subscriber: { Subscriber: [1:7587129871964745246:3423] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:52.851209Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587129876259712589:3432], recipient# [1:7587129876259712588:2331], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: 
PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:52.966671Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587129863374808965:2119], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:52.966785Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587129863374808965:2119], cacheItem# { Subscriber: { Subscriber: [1:7587129863374810035:2945] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:52.966849Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587129876259712594:3436], recipient# [1:7587129876259712593:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table >> TDataShardLocksTest::Points_OneTx >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2025-12-23T18:58:48.957105Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129858766919900:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:48.957188Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056e1/r3tmp/tmpUusJ9z/pdisk_1.dat 2025-12-23T18:58:49.175343Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:58:49.289127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:49.289259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2025-12-23T18:58:49.333081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:49.333682Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:49.390364Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:12769 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-12-23T18:58:49.558630Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587129863061887437:2116] Handle TEvNavigate describe path dc-1 2025-12-23T18:58:49.558706Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587129863061888220:2728] HANDLE EvNavigateScheme dc-1 2025-12-23T18:58:49.558878Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587129863061887443:2118], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:49.559015Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587129863061887687:2277][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587129863061887443:2118], cookie# 1 2025-12-23T18:58:49.560409Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129863061887692:2277][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129863061887689:2277], cookie# 1 2025-12-23T18:58:49.560463Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129863061887693:2277][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129863061887690:2277], cookie# 1 2025-12-23T18:58:49.560487Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129863061887694:2277][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129863061887691:2277], cookie# 1 2025-12-23T18:58:49.560517Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129858766919834:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129863061887693:2277], cookie# 1 2025-12-23T18:58:49.560523Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129858766919831:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129863061887692:2277], cookie# 1 2025-12-23T18:58:49.560579Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129858766919837:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129863061887694:2277], cookie# 1 2025-12-23T18:58:49.560614Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129863061887693:2277][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129858766919834:2052], cookie# 1 2025-12-23T18:58:49.560637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129863061887692:2277][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129858766919831:2049], 
cookie# 1 2025-12-23T18:58:49.560654Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129863061887694:2277][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129858766919837:2055], cookie# 1 2025-12-23T18:58:49.560743Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129863061887687:2277][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129863061887690:2277], cookie# 1 2025-12-23T18:58:49.560804Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587129863061887687:2277][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:58:49.560833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129863061887687:2277][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129863061887689:2277], cookie# 1 2025-12-23T18:58:49.560861Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587129863061887687:2277][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:58:49.560899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129863061887687:2277][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129863061887691:2277], cookie# 1 2025-12-23T18:58:49.560940Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587129863061887687:2277][/dc-1] Sync cookie mismatch: sender# [1:7587129863061887691:2277], cookie# 1, current cookie# 0 2025-12-23T18:58:49.560950Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587129863061887443:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:58:49.561058Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587129863061887443:2118], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587129863061887687:2277] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:58:49.561190Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587129863061887443:2118], cacheItem# { Subscriber: { Subscriber: [1:7587129863061887687:2277] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:58:49.563450Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587129863061888221:2729], recipient# 
[1:7587129863061888220:2728], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-12-23T18:58:49.563520Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587129863061888220:2728] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:58:49.597435Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587129863061888220:2728] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:58:49.600802Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587129863061888220:2728] Handle TEvDescribeSchemeResult Forward to# [1:7587129863061888219:2727] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: 
EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 Create ... data/workload_manager/running_requests] Ignore empty state: owner# [4:7587129873992801295:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:56.051262Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587129886877704248:2780][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7587129886877704257:2780] 2025-12-23T18:58:56.051284Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587129886877704248:2780][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7587129873992801295:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:56.121639Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7587129873992801295:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:56.121791Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587129873992801295:2109], cacheItem# { Subscriber: { Subscriber: [4:7587129886877704247:2779] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:56.121842Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7587129873992801295:2109], cacheItem# { Subscriber: { Subscriber: [4:7587129886877704248:2780] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:56.122020Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7587129891172671595:2783], recipient# 
[4:7587129886877704243:2363], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:56.122219Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7587129886877704243:2363], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:56.211270Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587129886877704248:2780][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7587129886877704255:2780] 2025-12-23T18:58:56.211279Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587129886877704247:2779][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7587129886877704249:2779] 2025-12-23T18:58:56.211334Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587129886877704248:2780][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7587129873992801295:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:56.211334Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587129886877704247:2779][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7587129873992801295:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:56.211357Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587129886877704247:2779][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7587129886877704250:2779] 2025-12-23T18:58:56.211378Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587129886877704247:2779][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7587129873992801295:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:56.211395Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587129886877704247:2779][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7587129886877704251:2779] 2025-12-23T18:58:56.211418Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587129886877704247:2779][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7587129873992801295:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:56.211421Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587129886877704234:2777][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7587129886877704235:2777] 2025-12-23T18:58:56.211450Z node 4 
:SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587129886877704234:2777][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7587129873992801295:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:56.211470Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587129886877704234:2777][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7587129886877704236:2777] 2025-12-23T18:58:56.211491Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587129886877704234:2777][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7587129873992801295:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:56.211508Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587129886877704234:2777][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7587129886877704237:2777] 2025-12-23T18:58:56.211528Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587129886877704234:2777][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7587129873992801295:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:56.211683Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587129886877704248:2780][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7587129886877704256:2780] 2025-12-23T18:58:56.211715Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587129886877704248:2780][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7587129873992801295:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-12-23T18:58:56.211762Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7587129886877704248:2780][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7587129886877704257:2780] 2025-12-23T18:58:56.211787Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7587129886877704248:2780][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7587129873992801295:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: 
DomainId: AbandonedSchemeShards: there are 0 elements } |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2025-12-23T18:58:48.992317Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129857186638723:2154];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:48.992383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0056e0/r3tmp/tmp33zeQS/pdisk_1.dat 2025-12-23T18:58:49.248741Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T18:58:49.288746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:49.288846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:49.311536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:49.331824Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:49.496889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:63758 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-12-23T18:58:49.562875Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7587129861481606142:2118] Handle TEvNavigate describe path dc-1 2025-12-23T18:58:49.562961Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7587129861481606940:2745] HANDLE EvNavigateScheme dc-1 2025-12-23T18:58:49.563122Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7587129861481606150:2122], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:49.563263Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7587129861481606406:2290][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7587129861481606150:2122], cookie# 1 2025-12-23T18:58:49.565202Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129861481606438:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129861481606435:2290], cookie# 1 2025-12-23T18:58:49.565269Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129861481606439:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129861481606436:2290], cookie# 1 2025-12-23T18:58:49.565294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7587129861481606440:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129861481606437:2290], cookie# 1 2025-12-23T18:58:49.565326Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129857186638540:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129861481606440:2290], cookie# 1 2025-12-23T18:58:49.565326Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129857186638534:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129861481606438:2290], cookie# 1 2025-12-23T18:58:49.565365Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7587129857186638537:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7587129861481606439:2290], cookie# 1 2025-12-23T18:58:49.565381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129861481606440:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129857186638540:2057], cookie# 1 2025-12-23T18:58:49.565408Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129861481606438:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129857186638534:2051], cookie# 1 2025-12-23T18:58:49.565429Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7587129861481606439:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129857186638537:2054], cookie# 1 2025-12-23T18:58:49.565516Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129861481606406:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129861481606437:2290], cookie# 1 2025-12-23T18:58:49.565572Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7587129861481606406:2290][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2025-12-23T18:58:49.565595Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129861481606406:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129861481606435:2290], cookie# 1 2025-12-23T18:58:49.565617Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7587129861481606406:2290][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-12-23T18:58:49.565653Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7587129861481606406:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7587129861481606436:2290], cookie# 1 2025-12-23T18:58:49.565693Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7587129861481606406:2290][/dc-1] Sync cookie mismatch: sender# [1:7587129861481606436:2290], cookie# 1, current cookie# 0 2025-12-23T18:58:49.565751Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7587129861481606150:2122], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-12-23T18:58:49.565865Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7587129861481606150:2122], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7587129861481606406:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-12-23T18:58:49.566040Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7587129861481606150:2122], cacheItem# { Subscriber: { Subscriber: [1:7587129861481606406:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-12-23T18:58:49.576184Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7587129861481606941:2746], recipient# [1:7587129861481606940:2745], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-12-23T18:58:49.576293Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7587129861481606940:2745] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T18:58:49.608465Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7587129861481606940:2745] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-12-23T18:58:49.619398Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7587129861481606940:2745] Handle TEvDescribeSchemeResult Forward to# [1:7587129861481606939:2744] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 Create ... 
8384223777:2552], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:57.410730Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:58:57.581912Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587129864578760256:2236], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:57.582020Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587129864578760256:2236], cacheItem# { Subscriber: { Subscriber: [3:7587129877463662320:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:57.582078Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587129864578760256:2236], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:57.582130Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587129864578760256:2236], cacheItem# { Subscriber: { Subscriber: [3:7587129877463662320:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:57.582166Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7587129864578760256:2236], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 
DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:57.582172Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587129898938511825:6658], recipient# [3:7587129898938511822:5430], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:57.582203Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7587129864578760256:2236], cacheItem# { Subscriber: { Subscriber: [3:7587129877463662352:2309] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:57.582257Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587129898938511827:6660], recipient# [3:7587129898938511824:5432], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:57.582264Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7587129898938511826:6659], recipient# [3:7587129898938511823:5431], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:58.085804Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7587129881204354323:2239], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:58.085937Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7587129881204354323:2239], cacheItem# { Subscriber: { Subscriber: [2:7587129898384223656:2305] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist 
Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:58.086010Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7587129881204354323:2239], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:58.086066Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7587129881204354323:2239], cacheItem# { Subscriber: { Subscriber: [2:7587129898384223656:2305] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:58.086112Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7587129902679191079:2352], recipient# [2:7587129902679191078:2556], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:58.086124Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7587129902679191080:2353], recipient# [2:7587129902679191077:2555], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:58.088835Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7587129881204354323:2239], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-12-23T18:58:58.088956Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7587129881204354323:2239], cacheItem# { Subscriber: { Subscriber: [2:7587129898384223689:2311] DomainOwnerId: 72057594046644480 Type: 2 
SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-12-23T18:58:58.089040Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7587129902679191082:2354], recipient# [2:7587129902679191081:2557], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |98.7%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: 2025-12-23T18:54:14.774582Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:14.804992Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:14.808183Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:14.814709Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:14.817377Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:14.820372Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:51:2076] ControllerId# 72057594037932033 2025-12-23T18:54:14.820409Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:14.822276Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:14.822427Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:14.833097Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:14.833172Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:14.836200Z 
node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:60:2080] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836309Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:61:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836406Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:62:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836485Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:63:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836576Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:64:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836679Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:65:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836775Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:50:2075] Create Queue# [2:66:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836796Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:14.837455Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:51:2076] 2025-12-23T18:54:14.837488Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:51:2076] 2025-12-23T18:54:14.837532Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:14.837567Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:14.840558Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:14.843360Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:14.843472Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:14.843768Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:14.843991Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-12-23T18:54:14.845063Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-12-23T18:54:14.845111Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:14.845982Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:75:2077] ControllerId# 72057594037932033 
2025-12-23T18:54:14.846014Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:14.846077Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:14.846170Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:14.858113Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:14.858162Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:14.859842Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:83:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.859992Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:84:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.860115Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:85:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.860266Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:86:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.860402Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:87:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.860539Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:88:2087] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.860664Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:74:2076] Create Queue# [1:89:2088] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.860687Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:14.860739Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:75:2077] 2025-12-23T18:54:14.860784Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:75:2077] 2025-12-23T18:54:14.860829Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:14.860862Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:14.861567Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:14.861711Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:14.862808Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:51:2076] 2025-12-23T18:54:14.862926Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:14.863154Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:14.863311Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:14.871936Z node 1 :PIPE_CLIENT DEBUG: 
tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2025-12-23T18:54:14.872016Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:14.872181Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:14.873741Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:14.873779Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-12-23T18:54:14.875580Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:14.880221Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 1 2025-12-23T18:54:14.880275Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-12-23T18:54:14.880442Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:14.881813Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:75:2077] 2025-12-23T18:54:14.881856Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:14.881913Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:100:2093] 2025-12-23T18:54:14.881951Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:100:2093] 2025-12-23T18:54:14.882000Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [2:55:2064] 2025-12-23T18:54:14.882021Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [2:55:2064] 2025-12-23T18:54:14.882343Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:54:14.882391Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:54:14.882418Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvRe ... 
tablet Unknown.65557.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446495Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65556.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446510Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65555.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446529Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65554.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446546Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65553.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446563Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65552.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446581Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65551.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446599Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65550.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446616Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65549.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446633Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65548.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446651Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65547.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446667Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65546.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446684Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65545.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446703Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65544.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446721Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65543.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446771Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65542.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446790Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65541.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446809Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65540.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446828Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65539.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446846Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65538.Leader.1 is not 
ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446864Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65537.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.446882Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65536.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2025-12-23T18:56:26.447013Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} queued, type NKikimr::NHive::TTxProcessPendingOperations 2025-12-23T18:56:26.447066Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:56:26.447166Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} hope 1 -> done Change{10, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:56:26.447220Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:56:26.447394Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594037927937 Active! Generation: 3, Type: Hive started in 21msec Marker# TSYS24 2025-12-23T18:56:26.447423Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [72057594037927937] Activate 2025-12-23T18:56:26.447587Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:514:2351] 2025-12-23T18:56:26.447616Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:514:2351] 2025-12-23T18:56:26.447691Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:514:2351] 2025-12-23T18:56:26.447807Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [1:100:2093] 2025-12-23T18:56:26.447844Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [1:100:2093] 2025-12-23T18:56:26.447872Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [1:100:2093] 2025-12-23T18:56:26.447971Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal leader: [1:146:2124] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:56:26.448005Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [1:146:2124] 2025-12-23T18:56:26.448105Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [1:99:2093] EventType# 268959744 2025-12-23T18:56:26.448167Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037932033] forward result local node, try to connect [1:514:2351] 2025-12-23T18:56:26.448208Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037932033]::SendEvent [1:514:2351] 2025-12-23T18:56:26.448305Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [2:102:2089] 2025-12-23T18:56:26.448330Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: 
TClient[72057594037927937] send queued [2:102:2089] 2025-12-23T18:56:26.448346Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [2:102:2089] 2025-12-23T18:56:26.448397Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [2:102:2089] 2025-12-23T18:56:26.448485Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037932033] Accept Connect Originator# [1:514:2351] 2025-12-23T18:56:26.448573Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037932033] connected with status OK role: Leader [1:514:2351] 2025-12-23T18:56:26.448592Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037932033] send queued [1:514:2351] 2025-12-23T18:56:26.448608Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [1:514:2351] 2025-12-23T18:56:26.448631Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [1:431:2280] EventType# 268637702 2025-12-23T18:56:26.448780Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-12-23T18:56:26.448825Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:56:26.448979Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{22, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:56:26.449024Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:56:26.449090Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [2:101:2089] EventType# 268959744 2025-12-23T18:56:26.449166Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-12-23T18:56:26.449225Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:56:26.449332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:56:26.449408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:26.449466Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{10, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-12-23T18:56:26.449526Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:56:26.449746Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-12-23T18:56:26.449777Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:56:26.449831Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037927937 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:56:26.449868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:26.449901Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{11, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-12-23T18:56:26.449925Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:56:26.449991Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-12-23T18:56:26.450027Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:56:26.450082Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{12, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:56:26.450119Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} Took 53.028533 seconds |98.7%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::TableSchemaVersion [GOOD] Test command err: 2025-12-23T18:58:49.955458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:49.955523Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... _COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 
State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2025-12-23T18:58:51.024576Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-12-23T18:58:51.065463Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } BindedChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-12-23T18:58:51.226392Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure |98.7%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok >> BasicUsage::CreateTopicWithCustomName [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_MoveDeadLetterPolicy >> BasicUsage::CreateTopicWithStreamingConsumer [GOOD] >> BasicUsage::ReadWithoutConsumerWithRestarts [GOOD] >> BasicUsage::ReadWithRestarts >> TxUsage::WriteToTopic_Demo_12_Table [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok >> TxUsage::WriteToTopic_Demo_12_Query >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartNo_Table [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Table [GOOD] >> TxUsage::WriteToTopic_Demo_41_Table [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartNo_Query >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... t: [26:39:2086] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:93:2057] recipient: [26:92:2121] Leader for TabletID 72057594037927937 is [26:94:2122] sender: [26:95:2057] recipient: [26:92:2121] !Reboot 72057594037927937 (actor [26:58:2099]) rebooted! !Reboot 72057594037927937 (actor [26:58:2099]) tablet resolver refreshed! new actor is[26:94:2122] Leader for TabletID 72057594037927937 is [26:94:2122] sender: [26:210:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:53:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:59:2057] recipient: [27:53:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:76:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:91:2057] recipient: [27:39:2086] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:94:2057] recipient: [27:93:2121] Leader for TabletID 72057594037927937 is [27:95:2122] sender: [27:96:2057] recipient: [27:93:2121] !Reboot 72057594037927937 (actor [27:58:2099]) rebooted! !Reboot 72057594037927937 (actor [27:58:2099]) tablet resolver refreshed! 
new actor is[27:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:59:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:76:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:53:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:78:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:81:2057] recipient: [30:80:2112] Leader for TabletID 72057594037927937 is [30:82:2113] sender: [30:83:2057] recipient: [30:80:2112] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:82:2113] Leader for TabletID 72057594037927937 is [30:82:2113] sender: [30:198:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:78:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:81:2057] recipient: [31:80:2112] Leader for TabletID 72057594037927937 is [31:82:2113] sender: [31:83:2057] recipient: [31:80:2112] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! new actor is[31:82:2113] Leader for TabletID 72057594037927937 is [31:82:2113] sender: [31:198:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:79:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:82:2057] recipient: [32:81:2112] Leader for TabletID 72057594037927937 is [32:83:2113] sender: [32:84:2057] recipient: [32:81:2112] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:83:2113] Leader for TabletID 72057594037927937 is [32:83:2113] sender: [32:199:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:82:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:85:2057] recipient: [33:84:2115] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:87:2057] recipient: [33:84:2115] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:86:2116] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:202:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:82:2057] recipient: [34:39:2086] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:85:2057] recipient: [34:84:2115] Leader for TabletID 72057594037927937 is [34:86:2116] sender: [34:87:2057] recipient: [34:84:2115] !Reboot 72057594037927937 (actor [34:58:2099]) rebooted! !Reboot 72057594037927937 (actor [34:58:2099]) tablet resolver refreshed! new actor is[34:86:2116] Leader for TabletID 72057594037927937 is [34:86:2116] sender: [34:202:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:83:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:86:2057] recipient: [35:85:2115] Leader for TabletID 72057594037927937 is [35:87:2116] sender: [35:88:2057] recipient: [35:85:2115] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:87:2116] Leader for TabletID 72057594037927937 is [35:87:2116] sender: [35:203:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:89:2057] recipient: [36:88:2118] Leader for TabletID 72057594037927937 is [36:90:2119] sender: [36:91:2057] recipient: [36:88:2118] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:90:2119] Leader for TabletID 72057594037927937 is [36:90:2119] sender: [36:206:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:86:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:89:2057] recipient: [37:88:2118] Leader for TabletID 72057594037927937 is [37:90:2119] sender: [37:91:2057] recipient: [37:88:2118] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:90:2119] Leader for TabletID 72057594037927937 is [37:90:2119] sender: [37:206:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:87:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:90:2057] recipient: [38:89:2118] Leader for TabletID 72057594037927937 is [38:91:2119] sender: [38:92:2057] recipient: [38:89:2118] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! 
new actor is[38:91:2119]
Leader for TabletID 72057594037927937 is [38:91:2119] sender: [38:207:2057] recipient: [38:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097]
Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097]
Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061]
|98.7%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest
>> TxUsage::WriteToTopic_Demo_23_RestartNo_Table [GOOD]
>> TxUsage::WriteToTopic_Demo_42_Table
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD]
|98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test
>> TxUsage::WriteToTopic_Demo_23_RestartNo_Query
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
|98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD]
>> TDataShardLocksTest::UseLocksCache [GOOD]
>> TxUsage::Sinks_Oltp_WriteToTopic_1_Table [GOOD]
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD]
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD]
>> TxUsage::Sinks_Oltp_WriteToTopic_1_Query
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD]
Test command err:
... waiting for SysViewsRoster update finished
2025-12-23T18:58:59.135128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown
2025-12-23T18:58:59.269973Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes
2025-12-23T18:58:59.297179Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:58:59.297741Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:58:59.297874Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:58:59.699549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:59.699702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:59.771979Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516336967158 != 1766516336967162 2025-12-23T18:58:59.787268Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:59.831098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:59.926782Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0032a3/r3tmp/tmp8ynunu/pdisk_1.dat 2025-12-23T18:59:00.253476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:59:00.268622Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:00.378492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:00.434940Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:891:2767]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:59:00.436077Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:891:2767]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:59:00.440477Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:891:2767] 2025-12-23T18:59:00.440823Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:59:00.459647Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:893:2769]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:59:00.494403Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: 
StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:891:2767]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:59:00.494839Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:893:2769]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:59:00.495144Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:893:2769] 2025-12-23T18:59:00.495352Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:59:00.503865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:893:2769]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:59:00.504469Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:59:00.504602Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:59:00.507759Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T18:59:00.507861Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T18:59:00.507936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T18:59:00.509312Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:59:00.509484Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:59:00.509617Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:922:2767] in generation 1 2025-12-23T18:59:00.510100Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:59:00.510200Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:59:00.511584Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T18:59:00.511658Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T18:59:00.511742Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T18:59:00.512034Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:59:00.512136Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:59:00.512197Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:923:2769] in generation 1 2025-12-23T18:59:00.523121Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:59:00.551701Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T18:59:00.553421Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:59:00.553599Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:926:2788] 2025-12-23T18:59:00.553639Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T18:59:00.553704Z 
node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T18:59:00.553748Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:59:00.554082Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:891:2767], Recipient [1:891:2767]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:59:00.554136Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:59:00.555423Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:59:00.555480Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T18:59:00.555575Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:59:00.555677Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:927:2789] 2025-12-23T18:59:00.555710Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T18:59:00.555737Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T18:59:00.555770Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T18:59:00.556127Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:893:2769], Recipient [1:893:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:59:00.556173Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:59:00.556336Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T18:59:00.556448Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T18:59:00.556605Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T18:59:00.556644Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:59:00.556761Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T18:59:00.556804Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T18:59:00.556839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T18:59:00.556895Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T18:59:00.556948Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T18:59:00.557005Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T18:59:00.557090Z node 1 :TX_DATASHARD DEBUG: 
datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T18:59:00.557225Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:895:2770], Recipient [1:891:2767]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:59:00.557266Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:59:00.557308Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2763], serverId# [1:895:2770], sessionId# [0:0:0] 2025-12-23T18:59:00.557349Z node 1 :TX_DATASHARD DEBUG: datashard__prog ... 24037888 is Executed 2025-12-23T18:59:07.854641Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:59:07.854670Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037888 has finished 2025-12-23T18:59:07.865739Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-12-23T18:59:07.865834Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-12-23T18:59:07.865896Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T18:59:07.865944Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 1 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-12-23T18:59:07.866037Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T18:59:07.866201Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:1153:2948], Recipient [2:893:2769]: {TEvReadSet step# 3500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-12-23T18:59:07.866241Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:07.866277Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-12-23T18:59:08.026728Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1196:2974], Recipient [2:1153:2948]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-12-23T18:59:08.028129Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T18:59:08.028225Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2025-12-23T18:59:08.028342Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:59:08.028386Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution 
plan for [0:2] at 72075186224037888 executing on unit CheckRead 2025-12-23T18:59:08.028427Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T18:59:08.028480Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T18:59:08.028611Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2025-12-23T18:59:08.028669Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:59:08.028702Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T18:59:08.028728Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T18:59:08.028753Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-12-23T18:59:08.028907Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-12-23T18:59:08.029186Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2025-12-23T18:59:08.029245Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1196:2974], 0} after executionsCount# 1 2025-12-23T18:59:08.029302Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1196:2974], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T18:59:08.029404Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:1196:2974], 0} finished in read 2025-12-23T18:59:08.029474Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:59:08.029501Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T18:59:08.029541Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T18:59:08.029570Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-12-23T18:59:08.029614Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T18:59:08.029639Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T18:59:08.029672Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2025-12-23T18:59:08.029712Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# 
Executed at tablet# 72075186224037888 2025-12-23T18:59:08.029817Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T18:59:08.030686Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1196:2974], Recipient [2:1153:2948]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T18:59:08.030769Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-12-23T18:59:08.031070Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1196:2974], Recipient [2:893:2769]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-12-23T18:59:08.031228Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-12-23T18:59:08.031290Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2025-12-23T18:59:08.031364Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2025-12-23T18:59:08.031396Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2025-12-23T18:59:08.031423Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-12-23T18:59:08.031462Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2025-12-23T18:59:08.031502Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037889 2025-12-23T18:59:08.031532Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2025-12-23T18:59:08.031555Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-12-23T18:59:08.031579Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 2025-12-23T18:59:08.031604Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2025-12-23T18:59:08.031690Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-12-23T18:59:08.031868Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3500/18446744073709551615 2025-12-23T18:59:08.031905Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[2:1196:2974], 1} after executionsCount# 1 2025-12-23T18:59:08.031943Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# 
{[2:1196:2974], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0
2025-12-23T18:59:08.031999Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[2:1196:2974], 1} finished in read
2025-12-23T18:59:08.032043Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed
2025-12-23T18:59:08.032067Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead
2025-12-23T18:59:08.032105Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit CompletedOperations
2025-12-23T18:59:08.032132Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations
2025-12-23T18:59:08.032172Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed
2025-12-23T18:59:08.032195Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations
2025-12-23T18:59:08.032217Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037889 has finished
2025-12-23T18:59:08.032243Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889
2025-12-23T18:59:08.032310Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889
2025-12-23T18:59:08.032871Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1196:2974], Recipient [2:893:2769]: NKikimrTxDataShard.TEvReadCancel ReadId: 1
2025-12-23T18:59:08.032962Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 }
|98.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_locks/unittest
|98.7%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log}
|98.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log}
|98.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD]
>> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD]
>> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD]
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success
>> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test
|98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD]
Test command err: ...
waiting for SysViewsRoster update finished 2025-12-23T18:55:24.976629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:25.057256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:25.063259Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:25.063546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:25.063626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:25.380771Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.465143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:25.465251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:25.522088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.547304Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004935/r3tmp/tmp65lvnL/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5948, node 1 2025-12-23T18:55:26.222024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:26.222785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:26.222818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:26.222847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:26.223030Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:26.283964Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7293 2025-12-23T18:55:26.814096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:29.056915Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:29.062114Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:29.064624Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:29.087093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:29.087181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:29.124317Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:29.126258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:29.353372Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:29.365145Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.365670Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.366396Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.366892Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.367104Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.367190Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.367399Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.367554Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.367666Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:29.474749Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:29.474839Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:29.487789Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:29.633538Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:29.669201Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:29.669273Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:29.689002Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:29.690290Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:29.690436Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:29.690480Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:29.690523Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:29.690560Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:29.690596Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:29.690642Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:29.690954Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:29.728040Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:29.728119Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:29.737731Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:29.745779Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:29.746633Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:29.751261Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:29.776428Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:29.776493Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:55:29.776573Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:29.800515Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:29.809424Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:29.815494Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:29.815593Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:29.824681Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:29.919009Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:30.245249Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:30.348521Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:30.348598Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:265 ... >; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:57:24.179249Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4345:2473], ActorId: [2:4652:4034], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:57:24.179308Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4345:2473], ActorId: [2:4652:4034], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjkxODVlOTMtODQ1ODkzNzAtNTRlYWFkNTAtZTA2ZjMyMTU=, TxId: 2025-12-23T18:57:24.258550Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4850:4154]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:57:24.259131Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T18:57:24.259186Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4850:4154], StatRequests.size() = 1 2025-12-23T18:57:24.406716Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4832:4139], ActorId: [2:4833:4140], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGFkYTVjZDQtYjdmNDQxOTktNDFjZjJhN2QtYTU4ZDAwNTI=, TxId: 2025-12-23T18:57:24.406847Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4832:4139], ActorId: [2:4833:4140], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGFkYTVjZDQtYjdmNDQxOTktNDFjZjJhN2QtYTU4ZDAwNTI=, TxId: 2025-12-23T18:57:24.407366Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4831:4138], ActorId: [2:4832:4139], Got response [2:4833:4140] SUCCESS 2025-12-23T18:57:24.408022Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:57:24.433873Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:57:24.433952Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:49: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. 
There are pending operations, OperationId operationId , ActorId=[1:3443:3511] 2025-12-23T18:57:24.522593Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:57:24.522704Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:57:24.601775Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4482:3938], schemeshard count = 1 2025-12-23T18:57:25.637085Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T18:57:25.641428Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:4920:4191], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:57:25.644655Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:4920:4191], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table2` 2025-12-23T18:57:25.644778Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:4920:4191], Start read next stream part 2025-12-23T18:57:25.689845Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4937:4201]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:57:25.690027Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T18:57:25.690067Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4937:4201], StatRequests.size() = 1 2025-12-23T18:58:23.964212Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:4920:4191], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:58:23.964401Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:4920:4191], Start read next stream part 2025-12-23T18:58:23.965034Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd695h4c0vrhwkmrz08je1k7", SessionId: ydb://session/3?node_id=2&id=ODMyZWRmMzEtODRkN2JjNmEtZGNhMGNiOTYtODFhMjQ5NTI=, Slow query, duration: 58.316872s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table2`", parameters: 0b 2025-12-23T18:58:23.965737Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:5105:4293], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:58:23.969223Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2025-12-23T18:58:23.969460Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:4920:4191], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:58:23.969517Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:4920:4191], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTQzNDZhYmUtZmNiYmMxNWYtMzdhOWNjNmQtZDQ5ZGIyYjU=, TxId: 2025-12-23T18:58:23.970580Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:5105:4293], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table2` 2025-12-23T18:58:23.970705Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:5105:4293], Start read next stream part 2025-12-23T18:58:24.043775Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:5124:4308]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:58:24.044023Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2025-12-23T18:58:24.044057Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:5124:4308], StatRequests.size() = 1 2025-12-23T18:59:10.267521Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:5105:4293], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:59:10.267654Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:5105:4293], Start read next stream part 2025-12-23T18:59:10.268252Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd697a3265464vac8wp8ayey", SessionId: ydb://session/3?node_id=2&id=NGMwMTI3ZjMtNDMwZmI3Zi0xOGE0YmU0Mi02YTU5ZGQ2MQ==, Slow query, duration: 46.293681s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table2`", parameters: 0b 2025-12-23T18:59:10.270262Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5281:4393], ActorId: [2:5282:4394], Starting query actor #1 [2:5283:4395] 2025-12-23T18:59:10.270318Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5282:4394], ActorId: [2:5283:4395], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:59:10.270575Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:5105:4293], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:59:10.270612Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4910:2473], ActorId: [2:5105:4293], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2IyMGU4NTgtYjhhNzAwMzAtZmJkYjE3ODgtZmRiNzBhNTQ=, TxId: 2025-12-23T18:59:10.270683Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33180, txId: 18446744073709551615] shutting down 2025-12-23T18:59:10.273841Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5282:4394], ActorId: [2:5283:4395], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZWU3ZDAzLTY4ZGViZGMwLTJhZDk5NDk0LTk3ZGRmMTgz, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:59:10.332693Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5282:4394], ActorId: [2:5283:4395], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWU3ZDAzLTY4ZGViZGMwLTJhZDk5NDk0LTk3ZGRmMTgz, TxId: 2025-12-23T18:59:10.332772Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5282:4394], ActorId: [2:5283:4395], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWU3ZDAzLTY4ZGViZGMwLTJhZDk5NDk0LTk3ZGRmMTgz, TxId: 2025-12-23T18:59:10.333137Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5281:4393], ActorId: [2:5282:4394], Got response [2:5283:4395] SUCCESS 2025-12-23T18:59:10.333862Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:59:10.348877Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 35] 2025-12-23T18:59:10.348951Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3443:3511] |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:92:2057] recipient: [11:91:2120] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:94:2057] recipient: [11:91:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:93:2121] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:209:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... r refreshed! new actor is[31:83:2113] Leader for TabletID 72057594037927937 is [31:83:2113] sender: [31:199:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:82:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:85:2057] recipient: [32:84:2115] Leader for TabletID 72057594037927937 is [32:86:2116] sender: [32:87:2057] recipient: [32:84:2115] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:86:2116] Leader for TabletID 72057594037927937 is [32:86:2116] sender: [32:202:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:82:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:85:2057] recipient: [33:84:2115] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:87:2057] recipient: [33:84:2115] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:86:2116] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:202:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:53:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:83:2057] recipient: [34:39:2086] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:86:2057] recipient: [34:85:2115] Leader for TabletID 72057594037927937 is [34:87:2116] sender: [34:88:2057] recipient: [34:85:2115] !Reboot 72057594037927937 (actor [34:58:2099]) rebooted! !Reboot 72057594037927937 (actor [34:58:2099]) tablet resolver refreshed! new actor is[34:87:2116] Leader for TabletID 72057594037927937 is [34:87:2116] sender: [34:105:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:85:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:88:2057] recipient: [35:87:2117] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:90:2057] recipient: [35:87:2117] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! 
new actor is[35:89:2118] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:205:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:85:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:88:2057] recipient: [36:87:2117] Leader for TabletID 72057594037927937 is [36:89:2118] sender: [36:90:2057] recipient: [36:87:2117] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:89:2118] Leader for TabletID 72057594037927937 is [36:89:2118] sender: [36:205:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:86:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:89:2057] recipient: [37:88:2117] Leader for TabletID 72057594037927937 is [37:90:2118] sender: [37:91:2057] recipient: [37:88:2117] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:90:2118] Leader for TabletID 72057594037927937 is [37:90:2118] sender: [37:108:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:88:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:91:2057] recipient: [38:90:2119] Leader for TabletID 72057594037927937 is [38:92:2120] sender: [38:93:2057] recipient: [38:90:2119] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! 
new actor is[38:92:2120] Leader for TabletID 72057594037927937 is [38:92:2120] sender: [38:208:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:88:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:91:2057] recipient: [39:90:2119] Leader for TabletID 72057594037927937 is [39:92:2120] sender: [39:93:2057] recipient: [39:90:2119] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:92:2120] Leader for TabletID 72057594037927937 is [39:92:2120] sender: [39:208:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:89:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:92:2057] recipient: [40:91:2119] Leader for TabletID 72057594037927937 is [40:93:2120] sender: [40:94:2057] recipient: [40:91:2119] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:93:2120] Leader for TabletID 72057594037927937 is [40:93:2120] sender: [40:209:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:92:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:95:2057] recipient: [41:94:2122] Leader for TabletID 72057594037927937 is [41:96:2123] sender: [41:97:2057] recipient: [41:94:2122] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! 
new actor is[41:96:2123] Leader for TabletID 72057594037927937 is [41:96:2123] sender: [41:212:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:92:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:95:2057] recipient: [42:94:2122] Leader for TabletID 72057594037927937 is [42:96:2123] sender: [42:97:2057] recipient: [42:94:2122] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:96:2123] Leader for TabletID 72057594037927937 is [42:96:2123] sender: [42:212:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TxUsage::WriteToTopic_Demo_12_Query [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_MoveDeadLetterPolicy [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_DeleteDeadLetterPolicy >> TxUsage::WriteToTopic_Demo_13_Table >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options >> BasicUsage::ReadWithRestarts [GOOD] >> Describe::LocationWithKillTablets |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartNo_Query [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartNo_Query [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] Test command err: 2025-12-23T18:58:10.699099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:10.699161Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:10.700770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:10.716530Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:10.716880Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:10.717173Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:10.753408Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:10.759168Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:10.759248Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:10.760778Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:10.760861Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:10.760924Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:10.761274Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:10.761346Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:10.761406Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:58:10.846833Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:10.867320Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:10.867465Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:10.867539Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:58:10.867562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:10.867588Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:10.867612Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:10.867723Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:10.867760Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:10.868017Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:10.868108Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:10.868219Z node 1 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:10.868255Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:10.868283Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:10.868308Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:10.868337Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:10.868358Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:10.868383Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:10.868436Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:10.868459Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:10.868487Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:10.873323Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:10.873374Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:10.873462Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:10.873587Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:10.873616Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:10.873673Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:58:10.873704Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:10.873728Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:10.873751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:58:10.873772Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:10.873976Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:10.874015Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 
2025-12-23T18:58:10.874052Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:10.874084Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:10.874117Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:10.874133Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:10.874158Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:10.874181Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:10.874199Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:10.886015Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:10.886070Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:10.886094Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:10.886120Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:10.886188Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:58:10.888059Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:10.888098Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:10.888145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:10.888252Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:10.888275Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:10.888409Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:10.888455Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:10.888509Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:10.888551Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:10.890920Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 
2025-12-23T18:58:10.891004Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:10.891170Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:10.891199Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:10.891260Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:10.891289Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:10.891311Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:10.891344Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:58:10.891386Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2025-12-23T18:59:18.055411Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.055438Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2025-12-23T18:59:18.055514Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2025-12-23T18:59:18.055545Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.055573Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2025-12-23T18:59:18.055650Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2025-12-23T18:59:18.055679Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.055708Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2025-12-23T18:59:18.055794Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-12-23T18:59:18.055854Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.055883Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2025-12-23T18:59:18.055980Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender 
[32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-12-23T18:59:18.056013Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.056042Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2025-12-23T18:59:18.056130Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-12-23T18:59:18.056159Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.056187Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2025-12-23T18:59:18.056272Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-12-23T18:59:18.056303Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.056331Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2025-12-23T18:59:18.056433Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-12-23T18:59:18.056463Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.056490Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2025-12-23T18:59:18.056550Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-12-23T18:59:18.056579Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.056605Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2025-12-23T18:59:18.056688Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-12-23T18:59:18.056718Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.056748Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2025-12-23T18:59:18.056836Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-12-23T18:59:18.056867Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.056897Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2025-12-23T18:59:18.056978Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-12-23T18:59:18.057008Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.057033Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2025-12-23T18:59:18.057110Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-12-23T18:59:18.057139Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.057165Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2025-12-23T18:59:18.057245Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-12-23T18:59:18.057273Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.057300Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2025-12-23T18:59:18.057392Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-12-23T18:59:18.057446Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.057479Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2025-12-23T18:59:18.057576Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-12-23T18:59:18.057606Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.057633Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2025-12-23T18:59:18.057712Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, 
received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-12-23T18:59:18.057741Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.057771Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2025-12-23T18:59:18.057859Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-12-23T18:59:18.057893Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.057921Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2025-12-23T18:59:18.058002Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-12-23T18:59:18.058033Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.058054Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2025-12-23T18:59:18.058106Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-12-23T18:59:18.058133Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:18.058151Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 31 30 28 21 28 31 30 26 28 31 22 7 17 30 31 17 23 9 28 23 24 24 24 0 4 4 4 10 - - 4 - actual 31 30 28 21 28 31 30 26 28 31 22 7 17 30 31 17 23 9 28 23 24 24 24 0 4 4 4 10 - - 4 - interm 6 4 1 5 6 3 0 3 6 3 6 6 6 6 - 6 2 6 2 2 4 6 2 0 4 4 4 4 - - 4 - >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Table |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Table >> TxUsage::WriteToTopic_Demo_42_Table [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_1_Query [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Table >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] >> TxUsage::WriteToTopic_Demo_42_Query >> TxUsage::Sinks_Oltp_WriteToTopic_2_Table >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is 
[1:58:2099] sender: [1:59:2057] recipient: [1:53:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! 
new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:53:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:53:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:53:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:53:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... 85:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:88:2057] recipient: [35:87:2117] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:90:2057] recipient: [35:87:2117] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:89:2118] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:205:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:89:2057] recipient: [36:88:2117] Leader for TabletID 72057594037927937 is [36:90:2118] sender: [36:91:2057] recipient: [36:88:2117] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:90:2118] Leader for TabletID 72057594037927937 is [36:90:2118] sender: [36:206:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:78:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:80:2112] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:83:2057] recipient: [39:80:2112] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:82:2113] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:198:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:78:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:81:2057] recipient: [40:80:2112] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:83:2057] recipient: [40:80:2112] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! 
new actor is[40:82:2113] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:198:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:79:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:82:2057] recipient: [41:81:2112] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:84:2057] recipient: [41:81:2112] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:83:2113] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:199:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:82:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:85:2057] recipient: [42:84:2115] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:87:2057] recipient: [42:84:2115] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:86:2116] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:202:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:82:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:85:2057] recipient: [43:84:2115] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:87:2057] recipient: [43:84:2115] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! 
new actor is[43:86:2116] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:202:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:83:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:86:2057] recipient: [44:85:2115] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:88:2057] recipient: [44:85:2115] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! new actor is[44:87:2116] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:105:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:85:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:88:2057] recipient: [45:87:2117] Leader for TabletID 72057594037927937 is [45:89:2118] sender: [45:90:2057] recipient: [45:87:2117] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:89:2118] Leader for TabletID 72057594037927937 is [45:89:2118] sender: [45:205:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:85:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:88:2057] recipient: [46:87:2117] Leader for TabletID 72057594037927937 is [46:89:2118] sender: [46:90:2057] recipient: [46:87:2117] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! 
new actor is[46:89:2118] Leader for TabletID 72057594037927937 is [46:89:2118] sender: [46:205:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:86:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:89:2057] recipient: [47:88:2117] Leader for TabletID 72057594037927937 is [47:90:2118] sender: [47:91:2057] recipient: [47:88:2117] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! new actor is[47:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] |98.8%| [TA] $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.8%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:55:19.451304Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:19.530202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:19.538379Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:19.538654Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:19.538735Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:19.917922Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:20.027083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:20.027218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:20.061801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:20.141660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004960/r3tmp/tmpFNGf1A/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15350, node 1 2025-12-23T18:55:21.195522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:21.196179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:21.196217Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:21.196246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:21.196787Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:21.260206Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:13880 2025-12-23T18:55:21.810695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:24.435104Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:24.442606Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:24.447253Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:24.479816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.479942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.519439Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:24.521259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:24.692225Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.692908Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.693546Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.694624Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.694923Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.695092Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.695287Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.695504Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.695650Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:24.870642Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:24.893018Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:24.893138Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:24.905707Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:25.048790Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:25.115860Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:25.115936Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:25.145354Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:25.146674Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:25.146947Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:25.147000Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:25.147049Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:25.147096Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:25.147157Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:25.147206Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:25.148049Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:25.260706Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.260821Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2202:2605], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:25.272133Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2629] 2025-12-23T18:55:25.273273Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2229:2629], schemeshard id = 72075186224037897 2025-12-23T18:55:25.384898Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2348:2693] 2025-12-23T18:55:25.385348Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:25.387743Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Describe result: PathErrorUnknown 2025-12-23T18:55:25.387801Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Creating table 2025-12-23T18:55:25.387887Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:25.391814Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2367:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:25.396059Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:25.409925Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:25.410089Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:25.423927Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:25.571432Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:25.993020Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:26.092060Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2694]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:26.092145Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2350:2695] Owner: [2:2349:2 ... Statistics::TEvSaveStatisticsQueryResponse, TBasicString> const&, NKikimr::TPathId co... 2025-12-23T18:57:28.491058Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4680:4017], ActorId: [2:4681:4018], Got response [2:4682:4019] SUCCESS 2025-12-23T18:57:28.491453Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:57:28.492368Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3227:3457] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2025-12-23T18:57:28.492424Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `operationId', DatabaseName: `', Types: 1 2025-12-23T18:57:28.492481Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `operationId', PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: 2025-12-23T18:57:28.529553Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:57:28.529621Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:49: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3227:3457] 2025-12-23T18:57:28.529674Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2025-12-23T18:57:28.529885Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T18:57:28.530579Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4719:4044], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:57:28.534575Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4719:4044], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2025-12-23T18:57:28.534767Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4719:4044], Start read next stream part 2025-12-23T18:57:28.582690Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4730:4054]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:57:28.582989Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T18:57:28.583040Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4730:4054], StatRequests.size() = 1 2025-12-23T18:58:28.887185Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4719:4044], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:58:28.887438Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4719:4044], Start read next stream part 2025-12-23T18:58:28.888235Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd695kyp567mx0wk710vdax9", SessionId: ydb://session/3?node_id=2&id=ODRmM2ExNjktNzBlMjVmMjEtNTZhYjc3NTgtOGQ2MDU5NzA=, Slow query, duration: 60.348745s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:58:28.888911Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4887:4138], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:58:28.892383Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4719:4044], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:58:28.892464Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4719:4044], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDhlYjA3MjktNzM2YTQ3ODItYTk4NjcwNzgtZmEzOWZiMQ==, TxId: 2025-12-23T18:58:28.892602Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33180, txId: 18446744073709551615] shutting down 2025-12-23T18:58:28.893099Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4887:4138], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table` 2025-12-23T18:58:28.893218Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4887:4138], Start read next stream part 2025-12-23T18:58:28.949122Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4906:4153]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:58:28.949408Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2025-12-23T18:58:28.949445Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:4906:4153], StatRequests.size() = 1 2025-12-23T18:59:16.609515Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4887:4138], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:59:16.609675Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4887:4138], Start read next stream part 2025-12-23T18:59:16.610359Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd697ewx5f4j09tjze8bgav9", SessionId: ydb://session/3?node_id=2&id=NDkzYzc1OS04YzZjZGY1OC04NGJlNGUzOC1mN2ZmZjgwOQ==, Slow query, duration: 47.712393s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T18:59:16.613119Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33180, txId: 18446744073709551615] shutting down 2025-12-23T18:59:16.613403Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5061:4236], ActorId: [2:5062:4237], Starting query actor #1 [2:5063:4238] 
2025-12-23T18:59:16.613478Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5062:4237], ActorId: [2:5063:4238], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T18:59:16.616388Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4887:4138], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:59:16.616447Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4717:2468], ActorId: [2:4887:4138], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmI2MWEzNjctODEyYTJiYzgtOTEwMDk4NDktYTVlZDg4NTQ=, TxId: 2025-12-23T18:59:16.617712Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5062:4237], ActorId: [2:5063:4238], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MzllOWQzMDUtYjRkZjg3ODgtNGIxNjYwY2ItNTdlZWQ0NDI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T18:59:16.659115Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5062:4237], ActorId: [2:5063:4238], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzllOWQzMDUtYjRkZjg3ODgtNGIxNjYwY2ItNTdlZWQ0NDI=, TxId: 2025-12-23T18:59:16.659227Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5062:4237], ActorId: [2:5063:4238], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzllOWQzMDUtYjRkZjg3ODgtNGIxNjYwY2ItNTdlZWQ0NDI=, TxId: 2025-12-23T18:59:16.659704Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5061:4236], ActorId: [2:5062:4237], Got response [2:5063:4238] SUCCESS 2025-12-23T18:59:16.683102Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T18:59:16.709721Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T18:59:16.709792Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3227:3457] 2025-12-23T18:59:16.790789Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T18:59:16.790885Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T18:59:16.848364Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4325:3813], schemeshard count = 1 2025-12-23T18:59:21.187377Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-12-23T18:59:21.187727Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 1 2025-12-23T18:59:21.187947Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2025-12-23T18:59:21.299079Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2025-12-23T18:59:21.299191Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:59:21.299621Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2025-12-23T18:59:21.314551Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::WriteToTopic_Demo_13_Table [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> BasicUsage::CreateTopicWithSharedConsumer_DeleteDeadLetterPolicy [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_DisabledDeadLetterPolicy >> TxUsage::WriteToTopic_Demo_13_Query |98.8%| [TA] $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |98.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |98.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... 
results_accumulator.log} |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] >> KqpEffects::UpdateOn_Select >> KqpImmediateEffects::Insert >> KqpReattach::ReattachDeliveryProblem >> KqpWrite::Insert >> KqpEffects::InsertAbort_Params_Duplicates+UseSink >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2025-12-23T18:58:15.760163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:15.760203Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:15.761605Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:15.776162Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:15.776509Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:15.776797Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:15.825772Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:15.831471Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:15.831544Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:15.832738Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:15.832836Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:15.832903Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:15.833276Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:15.833360Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:15.833457Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:58:15.900298Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:15.926405Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:15.926546Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:15.926613Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:58:15.926642Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:15.926664Z node 1 :TX_DATASHARD 
INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:15.926688Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:15.926831Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:15.926885Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:15.927175Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:15.927261Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:15.927367Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:15.927409Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:15.927433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:15.927459Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:15.927481Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:15.927502Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:15.927526Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:15.927584Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:15.927608Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:15.927634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:15.932473Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:15.932532Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:15.932625Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:15.932766Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:15.932798Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:15.932850Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme 
transaction txId 1 at tablet 9437184 2025-12-23T18:58:15.932877Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:15.932904Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:15.932929Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:58:15.932951Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:15.933136Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:15.933165Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:58:15.933203Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:15.933228Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:15.933272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:15.933295Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:15.933344Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:15.933376Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:15.933395Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:15.945047Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:15.945100Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:15.945127Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:15.945154Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:15.945217Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:58:15.945590Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:15.945636Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:15.945676Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:15.945769Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:15.945791Z node 1 :TX_DATASHARD 
TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:15.945902Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:15.945932Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:15.945967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:15.946001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:15.948457Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:58:15.948511Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:15.948676Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:15.948724Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:15.948781Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:15.948811Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:15.948834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:15.948863Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:58:15.948900Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ent [32:347:2314]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2025-12-23T18:59:27.519341Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.519360Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2025-12-23T18:59:27.519409Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2025-12-23T18:59:27.519430Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.519450Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2025-12-23T18:59:27.519500Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2025-12-23T18:59:27.519518Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.519538Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2025-12-23T18:59:27.519583Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-12-23T18:59:27.519603Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.519621Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2025-12-23T18:59:27.519679Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-12-23T18:59:27.519709Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.519729Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2025-12-23T18:59:27.519806Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-12-23T18:59:27.519829Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.519848Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2025-12-23T18:59:27.519901Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender 
[32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-12-23T18:59:27.519920Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.519940Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2025-12-23T18:59:27.519988Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-12-23T18:59:27.520009Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.520027Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2025-12-23T18:59:27.520073Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-12-23T18:59:27.520093Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.520113Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2025-12-23T18:59:27.520159Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-12-23T18:59:27.520177Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.520197Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2025-12-23T18:59:27.520242Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-12-23T18:59:27.520261Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.520280Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2025-12-23T18:59:27.520325Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-12-23T18:59:27.520344Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.520363Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2025-12-23T18:59:27.520418Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-12-23T18:59:27.520436Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.520455Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2025-12-23T18:59:27.520491Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-12-23T18:59:27.520510Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.520531Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2025-12-23T18:59:27.520577Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-12-23T18:59:27.520597Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.520617Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2025-12-23T18:59:27.520663Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-12-23T18:59:27.520687Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.520715Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2025-12-23T18:59:27.520776Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-12-23T18:59:27.520807Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.520836Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2025-12-23T18:59:27.520888Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-12-23T18:59:27.520907Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.520926Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2025-12-23T18:59:27.520970Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, 
received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-12-23T18:59:27.520989Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.521009Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2025-12-23T18:59:27.521054Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-12-23T18:59:27.521071Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:27.521090Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 31 30 22 27 28 31 27 26 19 21 31 31 20 21 31 22 15 22 20 13 - 28 31 8 31 13 8 5 15 - - - actual 31 30 22 27 28 31 27 26 19 21 31 31 20 21 31 22 15 22 20 13 - 28 31 8 31 13 8 5 15 - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> KqpEffects::UpdateOn_Literal >> Describe::LocationWithKillTablets [GOOD] >> Describe::DescribePartitionPermissions |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2025-12-23T18:58:18.734169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:18.734224Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:18.735473Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:18.746623Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:18.747000Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:18.747267Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:18.779162Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:18.784478Z node 1 
:TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:18.784554Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T18:58:18.785831Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:18.785905Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:18.785950Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:18.786213Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:18.786264Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:18.786326Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:58:18.859132Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:18.891542Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:18.891721Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:18.891801Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:58:18.891839Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:18.891871Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:18.891902Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:18.892027Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:18.892075Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:18.892431Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:18.892570Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:18.892734Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:18.892796Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:18.892840Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:18.892878Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:18.892915Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:18.892954Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:18.893000Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:18.893103Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:18.893140Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:18.893195Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:18.902337Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:18.902429Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:18.902562Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:18.902724Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:18.902793Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:18.902861Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:58:18.902912Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:18.902954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:18.903005Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:58:18.903040Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:18.903340Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:18.903417Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:58:18.903480Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:18.903523Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:18.903569Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:18.903605Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:18.903650Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:18.903685Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:18.903712Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: 
Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:18.916338Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:18.916427Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:18.916466Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:18.916510Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:18.916585Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:58:18.917123Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:18.917180Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:18.917256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:18.917413Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:18.917448Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:18.917587Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:18.917636Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:18.917705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:18.917754Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:18.921500Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:58:18.921580Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:18.921845Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:18.921890Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:18.921978Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:18.922043Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:18.922081Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 
2025-12-23T18:58:18.922124Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-12-23T18:58:18.922180Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... t [32:347:2314]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2025-12-23T18:59:30.238326Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.238346Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2025-12-23T18:59:30.238413Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2025-12-23T18:59:30.238433Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.238453Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2025-12-23T18:59:30.238503Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2025-12-23T18:59:30.238526Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.238548Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2025-12-23T18:59:30.238600Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-12-23T18:59:30.238623Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.238642Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2025-12-23T18:59:30.238696Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-12-23T18:59:30.238719Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.238759Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2025-12-23T18:59:30.238827Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-12-23T18:59:30.238849Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.238870Z node 32 
:TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2025-12-23T18:59:30.238924Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-12-23T18:59:30.238952Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.238980Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2025-12-23T18:59:30.239045Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-12-23T18:59:30.239065Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.239084Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2025-12-23T18:59:30.239137Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-12-23T18:59:30.239157Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.239180Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2025-12-23T18:59:30.239244Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-12-23T18:59:30.239265Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.239285Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2025-12-23T18:59:30.239327Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-12-23T18:59:30.239349Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.239370Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2025-12-23T18:59:30.239423Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-12-23T18:59:30.239444Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2025-12-23T18:59:30.239464Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2025-12-23T18:59:30.239525Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-12-23T18:59:30.239550Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.239570Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2025-12-23T18:59:30.239623Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-12-23T18:59:30.239643Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.239662Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2025-12-23T18:59:30.239712Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-12-23T18:59:30.239735Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.239754Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2025-12-23T18:59:30.239817Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-12-23T18:59:30.239840Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.239859Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2025-12-23T18:59:30.239911Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-12-23T18:59:30.239933Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.239957Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2025-12-23T18:59:30.240011Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-12-23T18:59:30.240041Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.240064Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2025-12-23T18:59:30.240131Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-12-23T18:59:30.240157Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.240179Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2025-12-23T18:59:30.240234Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:804:2730], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-12-23T18:59:30.240255Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T18:59:30.240274Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 23 30 28 29 31 25 30 28 24 24 29 28 22 22 22 28 10 14 14 14 20 28 28 13 14 - 28 - - - - - actual 23 30 28 29 31 25 30 28 24 24 29 28 22 22 22 28 10 14 14 14 20 28 28 13 14 - 28 - - - - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Table [GOOD] >> KqpImmediateEffects::InsertDuplicates-UseSink >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Table [GOOD] >> KqpInplaceUpdate::SingleRowIf+UseSink >> KqpImmediateEffects::WriteThenReadWithCommit >> ColumnShardTiers::DSConfigsStub [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_2_Table [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Query >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink >> KqpEffects::UpdateOn_Select [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates+UseSink [GOOD] >> KqpFail::Immediate >> KqpEffects::InsertAbort_Params_Duplicates-UseSink >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Query >> KqpImmediateEffects::Insert [GOOD] >> KqpImmediateEffects::InsertDuplicates+UseSink >> BasicUsage::CreateTopicWithSharedConsumer_DisabledDeadLetterPolicy [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_False >> Secret::SimpleQueryService [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_2_Query >> TxUsage::WriteToTopic_Demo_11_Table [GOOD] >> KqpWrite::Insert [GOOD] >> KqpWrite::CastValuesOptional >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:57:51.644851Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:57:51.730009Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:57:51.737414Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:57:51.737644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:57:51.737690Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:57:51.997092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:51.997208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:52.053710Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516268980893 != 1766516268980897 2025-12-23T18:57:52.064382Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:52.108489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:52.194402Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005fd7/r3tmp/tmp9dO3bJ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15477, node 1 TClient is connected to server localhost:32010 2025-12-23T18:57:52.708126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:57:52.709263Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:52.709302Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:52.709335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:52.710109Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:52.722465Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:57:52.869448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2025-12-23T18:57:52.993701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:52.993981Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:52.994178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:52.994255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:52.994327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:52.994413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:52.994565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:52.994685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:52.994780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:52.994920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:52.995055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:57:52.995140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:52.995221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:53.010703Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:140;event=start_subscribing_metadata; 2025-12-23T18:57:53.014259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:53.014359Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:53.014474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:53.014510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:53.014683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:53.014730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:53.014854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:57:53.014892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:57:53.014939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:57:53.014980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:57:53.015030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:57:53.015064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:57:53.015248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:57:53.015287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:57:53.015374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:57:53.015407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:57:53.015444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:57:53.015469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:57:53.015503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:57:53.015532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSc ... 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:21.118879Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:21.119073Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:21.119107Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037888;has_config=0; 2025-12-23T18:59:21.119135Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:21.119158Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037888 2025-12-23T18:59:21.119185Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 2025-12-23T18:59:21.119219Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037888 2025-12-23T18:59:21.119264Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:21.119288Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:21.119309Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037889;has_config=0; 2025-12-23T18:59:21.119328Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:21.119343Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037889 2025-12-23T18:59:21.119362Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 2025-12-23T18:59:21.119387Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037889 2025-12-23T18:59:21.119413Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:21.119439Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:21.119462Z node 1 :TX_TIERING INFO: log.cpp:841: 
fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037890;has_config=0; 2025-12-23T18:59:21.119484Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:21.119501Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037890 2025-12-23T18:59:21.119519Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2025-12-23T18:59:21.119544Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037890 2025-12-23T18:59:21.119572Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:21.119772Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-12-23T18:59:21.119850Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2807];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-12-23T18:59:21.119906Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2814];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-12-23T18:59:32.372801Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-12-23T18:59:32.373184Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-12-23T18:59:32.373235Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-12-23T18:59:32.373272Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-12-23T18:59:32.374541Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-12-23T18:59:32.374607Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-12-23T18:59:32.374688Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:32.374729Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:32.374820Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:32.375281Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-12-23T18:59:32.375327Z node 1 
:TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037888;has_config=0; 2025-12-23T18:59:32.375366Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:32.375396Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:32.375450Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:32.375489Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-12-23T18:59:32.375518Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037889;has_config=0; 2025-12-23T18:59:32.375549Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:32.375576Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:32.375618Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:32.375770Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-12-23T18:59:32.375801Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037890;has_config=0; 2025-12-23T18:59:32.375832Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:32.375860Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:32.375902Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:32.376962Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-12-23T18:59:32.377072Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2807];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-12-23T18:59:32.377155Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2814];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 2025-12-23T18:59:33.332315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 
2025-12-23T18:59:33.332383Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TxUsage::WriteToTopic_Demo_13_Query [GOOD] >> TxUsage::WriteToTopic_Demo_11_Query |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> Secret::Simple [GOOD] >> TxUsage::WriteToTopic_Demo_14_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:57:18.825408Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:57:18.952814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:57:18.964892Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:57:18.965331Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:57:18.965420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:57:19.271409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:19.271578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:19.326975Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516235648379 != 1766516235648383 2025-12-23T18:57:19.337077Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:19.380197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:19.482911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003cf9/r3tmp/tmpoIshYE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25476, node 1 TClient is connected to server localhost:7488 2025-12-23T18:57:20.073814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:57:20.075534Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:20.075686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:20.075716Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:20.076716Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:20.089872Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-12-23T18:57:31.935849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1012:2867], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:31.935985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1023:2872], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:31.946282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:31.949235Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1028:2877], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:31.949557Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:31.958141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:31.981500Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1026:2875], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-12-23T18:57:32.032171Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1079:2909] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:32.672628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:34.164867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:57:34.618728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.461573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:36.124468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:57:36.645628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:57:38.145883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-12-23T18:57:38.581669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH 
value = `100`;RESULT=;EXPECTATION=1 2025-12-23T18:57:41.636486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:57:41.636551Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-12-23T18:58:39.654018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715736:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:40.624611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715743:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:41.910635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715754:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:42.244575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715757:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE 
SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |98.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Table [GOOD] >> KqpEffects::UpdateOn_Literal [GOOD] >> KqpEffects::UpdateOn_Params |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:57:18.324041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:57:18.464498Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:57:18.490625Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:57:18.491236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:57:18.491350Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:57:18.979138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:18.979296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:19.057205Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516235648380 != 1766516235648384 2025-12-23T18:57:19.072586Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:19.118439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:19.229856Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003cfc/r3tmp/tmpAGqJHk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4900, node 1 TClient is connected to server localhost:29612 2025-12-23T18:57:20.074872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:57:20.076386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:20.076447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:20.076484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:20.077448Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:20.090595Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-12-23T18:57:32.630200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1014:2868], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:32.630506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:32.631142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1041:2875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:32.631243Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:32.653103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:32.987977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1133:2948], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:32.988148Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:32.988645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1137:2952], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:32.988778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:32.988852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1140:2955], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:32.994732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:33.095151Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1142:2957], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:57:33.992887Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1236:3022] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:34.595688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:57:35.036450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:35.671247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:36.352311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:57:36.742893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:57:38.133707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2025-12-23T18:57:38.570827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 2025-12-23T18:57:41.659374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:57:41.659435Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT 
OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-12-23T18:58:40.343378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715736:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:41.267128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715743:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:42.643066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715754:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:42.999499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715757:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 
snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |98.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpInplaceUpdate::SingleRowArithm+UseSink >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting >> KqpImmediateEffects::DeleteAfterUpsert >> KqpOverload::OltpOverloaded+Distributed >> TxUsage::WriteToTopic_Demo_42_Query [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] >> KqpReattach::ReattachDeliveryProblem [GOOD] >> KqpWrite::CastValues |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> TxUsage::WriteToTopic_Demo_43_Table >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups >> KqpImmediateEffects::Upsert |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf-UseSink >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey+UseSink >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] >> KqpInplaceUpdate::BigRow >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Conflict-UseSink |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> KqpImmediateEffects::InsertDuplicates+UseSink [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted >> KqpWrite::CastValuesOptional [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::Interactive >> Describe::DescribePartitionPermissions [GOOD] >> DirectReadWithServer::KillPQTablet >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateExternalTablet >> KqpEffects::RandomWithIndex+UseSecondaryIndex-UseSink >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::RandomWithIndex-UseSecondaryIndex-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional [GOOD] Test command err: Trying to start YDB, gRPC: 12631, MsgBus: 25468 2025-12-23T18:59:28.427866Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130029299762591:2167];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:28.428146Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ae8/r3tmp/tmpnfdSg4/pdisk_1.dat 2025-12-23T18:59:28.614506Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:28.660015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:28.660144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:28.708887Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130029299762457:2081] 1766516368421968 != 1766516368421971 2025-12-23T18:59:28.720830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:28.722532Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12631, node 1 2025-12-23T18:59:28.839560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:59:28.935794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:28.935813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:28.935836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:28.935914Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25468 TClient is connected to server localhost:25468 2025-12-23T18:59:29.433869Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516368778 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:59:29.506334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:29.526119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:29.644623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:29.758179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:29.813160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:30.918725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130037889698918:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:30.918873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:30.919224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130037889698928:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:30.919264Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.490800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.521062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.549215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.578189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.604409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.635348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.665439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.710462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.788776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130042184667094:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.788853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.788908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130042184667099:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.789051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130042184667101:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.789103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.794387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:31.803641Z node 1 ... ons 2025-12-23T18:59:35.236156Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:35.236234Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:35.266456Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:35.276763Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31176, node 2 2025-12-23T18:59:35.358937Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:35.358955Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:35.358960Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:35.359030Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:35.424288Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16886 TClient is connected to server localhost:16886 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516375330 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:35.814174Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:59:35.824002Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:35.883695Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:36.014248Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:36.072432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:36.109629Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:38.441782Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130073043585574:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:38.441857Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:38.442081Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130073043585583:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:38.442156Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:38.507631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:38.537331Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:38.567348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:38.597385Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:38.626282Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:38.686233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:38.713527Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:38.750848Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:38.822087Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130073043586453:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:38.822166Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:38.822207Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130073043586458:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:38.822364Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130073043586460:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:38.822411Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:38.825221Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:38.833307Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130073043586461:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:59:38.898607Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130073043586513:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:40.097812Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130060158681854:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:40.097880Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> TColumnShardTestReadWrite::WriteExoticTypes |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> THiveTest::TestCreateExternalTablet [GOOD] >> KqpInplaceUpdate::SingleRowArithm+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowArithm-UseSink >> KqpImmediateEffects::DeleteAfterUpsert [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex >> KqpWrite::CastValues [GOOD] >> KqpEffects::InsertAbort_Params_Success >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink >> KqpFail::Immediate [GOOD] >> KqpFail::OnCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValues [GOOD] Test command err: Trying to start YDB, gRPC: 5615, MsgBus: 18901 ... waiting for SysViewsRoster update finished 2025-12-23T18:59:30.707547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:59:30.786115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:59:30.795394Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:59:30.795704Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:59:30.795781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:59:31.052745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:31.052853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:31.107622Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516368418993 != 1766516368418997 2025-12-23T18:59:31.117356Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:31.162585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:31.339472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005aea/r3tmp/tmpbQIKlf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5615, node 1 2025-12-23T18:59:31.667749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:31.667825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:31.667911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:31.668486Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:31.682273Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18901 TClient is connected to server localhost:18901 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:59:32.026467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:32.132867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:32.441184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:32.796483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:33.082185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:33.765457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1908:3515], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:33.765714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:33.766627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1982:3534], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:33.766719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:33.796726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:34.007881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:34.279318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:34.517554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:34.756327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:35.026238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:35.351087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:35.654529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:36.015462Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:36.015595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:36.016057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2797:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:36.016174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:36.016237Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have ... Notification cookie mismatch for subscription [2:7587130074969683515:2081] 1766516378931336 != 1766516378931339 2025-12-23T18:59:39.041674Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:39.041749Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:39.046553Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23436, node 2 2025-12-23T18:59:39.083441Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:39.083472Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:39.083484Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:39.083597Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:39.121911Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29564 TClient is connected to server localhost:29564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516379089 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:39.419397Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:39.428421Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:39.477448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:39.617973Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:39.669932Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:39.938853Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:41.520176Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130087854587264:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.520259Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.520473Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130087854587273:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.520512Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.577413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.597369Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.617534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.639389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.660047Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.684893Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.711458Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.750352Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.813920Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130087854588139:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.814012Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.814088Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130087854588144:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.814296Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130087854588146:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.814368Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.817662Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:41.827137Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130087854588148:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:59:41.897002Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130087854588199:3760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:43.933003Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130074969683552:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:43.933076Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_False [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_True >> KqpImmediateEffects::Upsert [GOOD] >> KqpImmediateEffects::UpsertAfterInsert |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_2_Query [GOOD] >> KqpImmediateEffects::InsertExistingKey+UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey-UseSink >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] >> TxUsage::WriteToTopic_Demo_14_Table [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] >> KqpInplaceUpdate::BigRow [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Query [GOOD] >> TxUsage::WriteToTopic_Demo_14_Query >> TxUsage::Sinks_Oltp_WriteToTopic_3_Table |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> TColumnShardTestReadWrite::CompactionGC >> TxUsage::WriteToTopic_Demo_24_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19873, MsgBus: 13212 2025-12-23T18:59:34.086620Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130056584988106:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:34.086690Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ae0/r3tmp/tmpNtf5aM/pdisk_1.dat 2025-12-23T18:59:34.273392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:34.273489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:34.294850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:34.305673Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:34.327232Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130056584988076:2081] 1766516374085114 != 1766516374085117 2025-12-23T18:59:34.346839Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19873, node 1 2025-12-23T18:59:34.399291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:34.399313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:34.399320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:34.399403Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:34.519371Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13212 TClient is connected to server localhost:13212 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516374392 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:34.832570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:34.847396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:34.969657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:35.110121Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:35.126895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:59:35.219427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.063838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130069469891840:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.063982Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.064389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130069469891850:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.064463Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.412171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.437569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.462551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.486987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.510093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.538979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.587035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.626086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.695936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130069469892722:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.696019Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.696108Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130069469892727:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.696274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130069469892729:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.696311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.699572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:37.709826Z node 1 ... tate: Disconnected -> Connecting 2025-12-23T18:59:40.820339Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:40.822718Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 5905, node 2 2025-12-23T18:59:40.867642Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:40.867663Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:40.867669Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:40.867735Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28834 TClient is connected to server localhost:28834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516380867 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:59:41.189600Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:59:41.206280Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:41.250287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:41.369347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:41.421976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:41.714550Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:43.163402Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130095250987759:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.163474Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.163625Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130095250987768:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.163659Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.211456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.232838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.253183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.295141Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.320318Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.345449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.371196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.408667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.461462Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130095250988635:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.461512Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130095250988640:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.461534Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.461713Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130095250988643:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.461764Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.464213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:43.472382Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130095250988642:2484], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:59:43.535512Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130095250988695:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:44.945803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.710521Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130082366084040:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:45.710603Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] |98.8%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |98.8%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink |98.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpImmediateEffects::Interactive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD] Test command err: Trying to start YDB, gRPC: 6653, MsgBus: 9744 2025-12-23T18:59:34.116718Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130055599341518:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:34.116818Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ae1/r3tmp/tmpQpRI7v/pdisk_1.dat 2025-12-23T18:59:34.314281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:34.314392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:34.352391Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:34.354875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:34.391421Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130055599341489:2081] 1766516374115283 != 1766516374115286 2025-12-23T18:59:34.398337Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6653, node 1 2025-12-23T18:59:34.448376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:34.448405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:34.448417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:34.448511Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:9744 2025-12-23T18:59:34.635063Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9744 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516374413 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:34.820244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:34.832456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:34.942630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:35.078131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:35.124330Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:35.146858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:37.288090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130068484245254:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.288218Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.288576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130068484245264:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.288644Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.608322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.636354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.663411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.691287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.720750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.754022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.784718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.825585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.887287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130068484246131:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.887379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130068484246136:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.887389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.887645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130068484246138:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.887715Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.891184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:37.901645Z node 1 :KQP ... ate: Disconnected -> Connecting 2025-12-23T18:59:41.025213Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29907, node 2 2025-12-23T18:59:41.056080Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:41.056102Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:41.056109Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:41.056189Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:41.170125Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16822 TClient is connected to server localhost:16822 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516381070 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:41.430590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:41.439106Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:41.522302Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:41.637495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:41.684124Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:41.915178Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:43.607858Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130095255290467:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.607942Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.608176Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130095255290477:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.608216Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.662452Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.690032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.717325Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.742991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.787367Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.816010Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.845102Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.893372Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.964037Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130095255291351:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.964125Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.964207Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130095255291356:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.964637Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130095255291359:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.964713Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.967663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:43.977603Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130095255291358:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:59:44.031249Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130099550258707:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:45.450671Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.910872Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130082370386759:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:45.913666Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterInsert >> ColumnShardTiers::DSConfigs [GOOD] >> KqpOverload::OltpOverloaded+Distributed [GOOD] >> KqpOverload::OltpOverloaded-Distributed >> KqpImmediateEffects::UpsertDuplicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 16944, MsgBus: 24218 2025-12-23T18:59:28.427642Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130031290995022:2162];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:28.427768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ae9/r3tmp/tmpESyLHB/pdisk_1.dat 2025-12-23T18:59:28.648767Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:28.653343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:28.653477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:28.681789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:28.738374Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130031290994894:2081] 1766516368421066 != 1766516368421069 2025-12-23T18:59:28.741608Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16944, node 1 2025-12-23T18:59:28.928308Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:59:28.935753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:28.935778Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:28.935849Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:28.935941Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24218 2025-12-23T18:59:29.432543Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:24218 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516368792 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:29.518334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:29.530655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:29.664080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:29.774398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
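(Side note, not part of the captured log: the "Scheduled retry for error: Transaction ... completed, doublechecking" message followed by the TX_PROXY issue "path exist, request accepts it" a few lines above appears to show the default workload-manager resource pool being created concurrently by racing sessions, where the creation is retried and an already-existing pool is accepted as success. Below is a minimal sketch of that tolerant create-with-retry shape; every name in it is hypothetical and it does not use any YDB internals.)

// Hypothetical sketch (illustrative names only, not taken from the YDB sources above):
// create the default pool, treat "already exists" as success, and back off on
// transient errors -- the behaviour the "Scheduled retry ... doublechecking" and
// "path exist, request accepts it" lines appear to describe.
#include <chrono>
#include <functional>
#include <thread>

enum class ECreateStatus { Ok, AlreadyExists, TransientError };

bool EnsureDefaultPool(const std::function<ECreateStatus()>& createPool,
                       int maxRetries = 5) {
    for (int attempt = 0; attempt < maxRetries; ++attempt) {
        switch (createPool()) {
            case ECreateStatus::Ok:
            case ECreateStatus::AlreadyExists:   // "path exist, request accepts it"
                return true;                     // benign: another session won the race
            case ECreateStatus::TransientError:  // "Scheduled retry for error ..."
                std::this_thread::sleep_for(std::chrono::milliseconds(50 << attempt));
                break;
        }
    }
    return false;
}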
2025-12-23T18:59:29.826486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:31.094600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130044175898653:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.094721Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.095112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130044175898663:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.095172Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.490996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.519163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.546478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.574889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.599051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.628673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.656070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.720761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.788110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130044175899533:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.788180Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.788414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130044175899539:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.788460Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130044175899538:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.788493Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.794333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:31.806127Z node 1 ... 3T18:59:41.796833Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:41.938374Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:41.994246Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:42.263218Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:44.442803Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130097800847559:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.442912Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.443320Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130097800847568:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.443379Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.521298Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.549346Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.575750Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.601846Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.629561Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.659173Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.691858Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.761596Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.826412Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130097800848439:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.826499Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130097800848444:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.826501Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.826674Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130097800848446:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.826721Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.830296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:44.840277Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130097800848447:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:59:44.928597Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130097800848499:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:46.258401Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130084915943834:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:46.258492Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:59:46.588482Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.923859Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=4; 2025-12-23T18:59:46.924058Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 4 at tablet 72075186224037927 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:59:46.924157Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 4 at tablet 72075186224037927 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:59:46.924301Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [3:7587130106390783616:2530], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7587130106390783398:2530]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7587130106390783616:2530].{
: Error: Conflict with existing key., code: 2012 } 2025-12-23T18:59:46.924383Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [3:7587130106390783608:2530], SessionActorId: [3:7587130106390783398:2530], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7587130106390783398:2530]. 2025-12-23T18:59:46.924606Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=3&id=YmI2MmY2NzktYjE5OWIwMjgtZjk0ODVlZjEtNjE3YTFkYTk=, ActorId: [3:7587130106390783398:2530], ActorState: ExecuteState, TraceId: 01kd699ty0csyhbttjcz0xtnkc, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7587130106390783609:2530] from: [3:7587130106390783608:2530] 2025-12-23T18:59:46.924710Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [3:7587130106390783609:2530] TxId: 281474976715675. Ctx: { TraceId: 01kd699ty0csyhbttjcz0xtnkc, Database: /Root, SessionId: ydb://session/3?node_id=3&id=YmI2MmY2NzktYjE5OWIwMjgtZjk0ODVlZjEtNjE3YTFkYTk=, PoolId: default, IsStreamingQuery: 0}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-12-23T18:59:46.925026Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=YmI2MmY2NzktYjE5OWIwMjgtZjk0ODVlZjEtNjE3YTFkYTk=, ActorId: [3:7587130106390783398:2530], ActorState: ExecuteState, TraceId: 01kd699ty0csyhbttjcz0xtnkc, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `/Root/TestImmediateEffects`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4187, MsgBus: 19167 2025-12-23T18:59:28.458654Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130031874329475:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:28.458814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ae4/r3tmp/tmpoDyw5i/pdisk_1.dat 2025-12-23T18:59:28.686320Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:28.692568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:28.692700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:28.759531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:28.766970Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4187, node 1 2025-12-23T18:59:28.936037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:28.936075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:28.936091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:28.936204Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:28.945586Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19167 2025-12-23T18:59:29.462718Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19167 WaitRootIsUp 'Root'... 
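(Side note, not part of the captured log: the InsertConflictTxAborted trace above ends with the datashard returning STATUS_CONSTRAINT_VIOLATION ("Conflict with existing key.", issue code 2012), which KQP surfaces to the session as PRECONDITION_FAILED. A rough client-side reproduction is sketched below; it assumes the YDB C++ SDK, and the header path, table name and schema are placeholders rather than the test's actual fixtures.)

// Hypothetical reproduction sketch using the YDB C++ SDK. The include path,
// table name and column types are assumptions; the real test works against
// /Root/TestImmediateEffects, whose schema is not shown in this log.
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>
#include <util/generic/yexception.h>

using namespace NYdb;
using namespace NYdb::NTable;

void ReproduceKeyConflict(TSession& session) {
    // Self-contained demo table so the sketch does not depend on test fixtures.
    auto created = session.ExecuteSchemeQuery(
        "CREATE TABLE ConflictDemo (Key Uint64, Value String, PRIMARY KEY (Key));")
        .GetValueSync();
    Y_ENSURE(created.IsSuccess(), created.GetIssues().ToString());

    auto tx = TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx();

    // First INSERT succeeds.
    auto first = session.ExecuteDataQuery(
        "INSERT INTO ConflictDemo (Key, Value) VALUES (1ul, \"first\");", tx).GetValueSync();
    Y_ENSURE(first.IsSuccess(), first.GetIssues().ToString());

    // Re-inserting the same key violates the primary key: the datashard reports
    // STATUS_CONSTRAINT_VIOLATION and the session sees PRECONDITION_FAILED with
    // issue code 2012 ("Conflict with existing key."), matching the trace above.
    auto dup = session.ExecuteDataQuery(
        "INSERT INTO ConflictDemo (Key, Value) VALUES (1ul, \"second\");", tx).GetValueSync();
    Y_ENSURE(dup.GetStatus() == EStatus::PRECONDITION_FAILED, dup.GetIssues().ToString());
}

Running something along these lines against a local cluster should reproduce the same status/issue pair the test asserts on.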
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516368848 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:29.523564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:29.538238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:29.662362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:29.800881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:29.853295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:30.839759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130040464265816:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:30.839863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:30.840163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130040464265825:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:30.840236Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.490452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.515439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.540910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.568548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.593429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.624207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.654678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.705826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.789261Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130044759233992:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.789331Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.789577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130044759233997:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.789620Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.789659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130044759233998:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.794375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:31.803565Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587130044759234001:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Trans ... tatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516381343 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:41.773509Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:41.786396Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:41.832936Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:41.952015Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:42.010922Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:42.179741Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:44.280094Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130100444273574:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.280177Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.280410Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130100444273583:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.280462Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.332183Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.356095Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.379274Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.404577Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.435147Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.467881Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.499367Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.537183Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.613519Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130100444274451:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.613615Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.613655Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130100444274456:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.613762Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130100444274458:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.613830Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.617386Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:44.627889Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130100444274460:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:59:44.704385Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130100444274511:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:46.173355Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130087559369846:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:46.173442Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:59:46.849810Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [3:7587130109034209404:2532], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kd699tmy04b599mh0744qdnp. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YjdkNzQ3NDItOTIwMGQ1MzMtYjk5YjY0ODYtOTZlNDNlNmY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-12-23T18:59:46.850069Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [3:7587130109034209406:2533], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01kd699tmy04b599mh0744qdnp. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=YjdkNzQ3NDItOTIwMGQ1MzMtYjk5YjY0ODYtOTZlNDNlNmY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7587130109034209401:2520], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T18:59:46.850531Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=YjdkNzQ3NDItOTIwMGQ1MzMtYjk5YjY0ODYtOTZlNDNlNmY=, ActorId: [3:7587130109034209368:2520], ActorState: ExecuteState, TraceId: 01kd699tmy04b599mh0744qdnp, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Query [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD] Test command err: 2025-12-23T18:54:14.772453Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2025-12-23T18:54:14.805299Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:14.808160Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-12-23T18:54:14.814704Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-12-23T18:54:14.818334Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-12-23T18:54:14.820851Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-12-23T18:54:14.820910Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2025-12-23T18:54:14.821941Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2025-12-23T18:54:14.821974Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2025-12-23T18:54:14.822267Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2025-12-23T18:54:14.822388Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2025-12-23T18:54:14.834164Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:182: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-12-23T18:54:14.834206Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:342: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-12-23T18:54:14.836315Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836500Z node 1 :BS_PROXY DEBUG: 
group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836646Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836791Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.836914Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.837073Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.837248Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2025-12-23T18:54:14.837268Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-12-23T18:54:14.837442Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2025-12-23T18:54:14.837473Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2025-12-23T18:54:14.837513Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:282: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-12-23T18:54:14.837574Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2025-12-23T18:54:14.840652Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-12-23T18:54:14.841861Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:14.862796Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-12-23T18:54:14.862892Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:14.873114Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:14.873757Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:14.873803Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-12-23T18:54:14.877254Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-12-23T18:54:14.881508Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-12-23T18:54:14.881598Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-12-23T18:54:14.881627Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-12-23T18:54:14.881674Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup 
ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:14.883443Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-12-23T18:54:14.883587Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2025-12-23T18:54:14.883621Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2025-12-23T18:54:14.885098Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2025-12-23T18:54:14.885139Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:52:2092] 2025-12-23T18:54:14.885188Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:14.885397Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:52:2092] 2025-12-23T18:54:14.885434Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:14.886780Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:14.886948Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-12-23T18:54:14.886994Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2025-12-23T18:54:14.887063Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2025-12-23T18:54:14.892748Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2025-12-23T18:54:14.895726Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:54:14.896038Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-12-23T18:54:14.896093Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-12-23T18:54:14.896677Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:14.897276Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2025-12-23T18:54:14.897455Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2025-12-23T18:54:14.898184Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2025-12-23T18:54:14.917159Z node 1 :STATESTORAGE DEBUG: 
statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-12-23T18:54:14.917253Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-12-23T18:54:14.917352Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-12-23T18:54:14.919355Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:54:14.919596Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-12-23T18:54:14.919728Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2025-12-23T18:54:14.925468Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-12-23T18:54:14.925514Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-12-23T18:54:14.925548Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-12-23T18:54:14.925588Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:54:14.925703Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2025-12-23T18:54:14.92 ... 
155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [155:271:2264] 2025-12-23T18:59:42.957009Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [155:271:2264] 2025-12-23T18:59:42.957078Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [155:271:2264] 2025-12-23T18:59:42.957165Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [155:271:2264] 2025-12-23T18:59:42.957223Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [155:271:2264] 2025-12-23T18:59:42.957436Z node 155 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [155:270:2263] EventType# 268697601 2025-12-23T18:59:42.957676Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} queued, type NKikimr::NHive::TTxCreateTablet 2025-12-23T18:59:42.957785Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:59:42.958557Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{4, redo 1157b alter 0b annex 0, ~{ 14, 0, 1, 2 } -{ }, 0 gb} 2025-12-23T18:59:42.958659Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:59:42.958863Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [155:310:2290] 2025-12-23T18:59:42.958901Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [155:310:2290] 2025-12-23T18:59:42.958972Z node 155 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal leader: [155:93:2123] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:59:42.959042Z node 155 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 155 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [155:93:2123] 2025-12-23T18:59:42.959157Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [155:310:2290] 2025-12-23T18:59:42.959203Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037932033] forward result local node, try to connect [155:310:2290] 2025-12-23T18:59:42.959247Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037932033]::SendEvent [155:310:2290] 2025-12-23T18:59:42.959332Z node 155 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037932033] Accept Connect Originator# [155:310:2290] 2025-12-23T18:59:42.959427Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037932033] connected with status OK role: Leader [155:310:2290] 2025-12-23T18:59:42.959462Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037932033] send queued [155:310:2290] 2025-12-23T18:59:42.959491Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [155:310:2290] 2025-12-23T18:59:42.959540Z node 155 :PIPE_SERVER DEBUG: 
tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [155:280:2269] EventType# 268637702 2025-12-23T18:59:42.959733Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-12-23T18:59:42.959847Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:59:42.960126Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-12-23T18:59:42.960232Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:59:42.960520Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-12-23T18:59:42.960613Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-12-23T18:59:42.960996Z node 155 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{137172421763168}(72075186224037888)::Execute - TryToBoot was not successfull 2025-12-23T18:59:42.961126Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-12-23T18:59:42.961208Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-12-23T18:59:42.972357Z node 155 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [8729fbeaec2f6015] bootstrap ActorId# [155:313:2293] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:698:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-12-23T18:59:42.972508Z node 155 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8729fbeaec2f6015] Id# [72057594037927937:2:4:0:0:698:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-12-23T18:59:42.972561Z node 155 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [8729fbeaec2f6015] restore Id# [72057594037927937:2:4:0:0:698:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-12-23T18:59:42.972622Z node 155 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [8729fbeaec2f6015] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG33 2025-12-23T18:59:42.972671Z node 155 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [8729fbeaec2f6015] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG32 2025-12-23T18:59:42.972834Z node 155 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [155:36:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:698:1] FDS# 698 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-12-23T18:59:42.974280Z node 155 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [8729fbeaec2f6015] received {EvVPutResult Status# OK ID# 
[72057594037927937:2:4:0:0:698:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 18 } Cost# 85496 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 19 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-12-23T18:59:42.974390Z node 155 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [8729fbeaec2f6015] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-12-23T18:59:42.974460Z node 155 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [8729fbeaec2f6015] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-12-23T18:59:42.974619Z node 155 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-12-23T18:59:42.974761Z node 155 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2025-12-23T18:59:42.975104Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:59:42.975213Z node 155 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-12-23T18:59:42.975261Z node 155 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-12-23T18:59:42.975318Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:59:42.975366Z node 155 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-12-23T18:59:42.975403Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:59:42.975453Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:59:42.975815Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [155:317:2296] 2025-12-23T18:59:42.975888Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [155:317:2296] 2025-12-23T18:59:42.976118Z node 155 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-12-23T18:59:42.976272Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-12-23T18:59:42.976444Z node 155 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-12-23T18:59:42.976533Z node 155 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-12-23T18:59:42.976572Z node 155 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-12-23T18:59:42.976643Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:59:42.976734Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:59:42.976776Z node 155 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-12-23T18:59:42.976865Z node 155 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [0:0:0] followers: 0 2025-12-23T18:59:42.976976Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [155:317:2296] 2025-12-23T18:59:42.977037Z node 155 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [155:317:2296] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 20831, MsgBus: 4972 2025-12-23T18:59:28.427652Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130030792349308:2180];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:28.427862Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ae5/r3tmp/tmpnPh6hv/pdisk_1.dat 2025-12-23T18:59:28.665832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:28.665949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:28.726178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:28.753708Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:28.753781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:28.755327Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130030792349162:2081] 1766516368421056 != 1766516368421059 TServer::EnableGrpc on GrpcPort 20831, node 1 2025-12-23T18:59:28.936027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:28.936053Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2025-12-23T18:59:28.936068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:28.936143Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:29.019851Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4972 2025-12-23T18:59:29.432692Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:4972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516368778 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:29.505793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:29.526110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:29.649074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:29.778477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:29.833966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:30.840556Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130039382285635:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:30.840670Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:30.840912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130039382285644:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:30.840964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.490271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.514693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.537912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.561938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.588431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.619191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.651544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.698646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.787603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130043677253815:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.787689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.787843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130043677253821:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.787880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130043677253823:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.787922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.794885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:31.806636Z node 1 :K ... State: Disconnected -> Connecting 2025-12-23T18:59:41.519492Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29221, node 3 2025-12-23T18:59:41.560625Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:41.560648Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:41.560653Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:41.560741Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:41.667457Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9224 TClient is connected to server localhost:9224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516381574 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:41.919980Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:41.928391Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:41.972490Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:42.079650Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:42.133628Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:42.400030Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:44.025870Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130099639757263:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.025956Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.026193Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130099639757272:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.026244Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.094164Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.122889Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.148080Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.172636Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.203874Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.237236Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.272547Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.356764Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.433303Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130099639758144:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.433468Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.433474Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130099639758149:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.433712Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130099639758151:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.433763Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:44.437317Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:44.446984Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130099639758152:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:59:44.544906Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130099639758204:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:46.394882Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130086754853556:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:46.394948Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:59:46.431115Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpEffects::InsertRevert_Literal_Success ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:57:50.736743Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:57:50.894963Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:57:50.920787Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:57:50.921367Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:57:50.921470Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:57:51.310326Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:51.310479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:51.399591Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516268083767 != 1766516268083771 2025-12-23T18:57:51.411810Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:51.455687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:51.565310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005fea/r3tmp/tmp7R7LuR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16110, node 1 TClient is connected to server localhost:2950 2025-12-23T18:57:52.401061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:57:52.402704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:52.402781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:52.402883Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:52.403950Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:52.417207Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-12-23T18:58:04.180340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:957:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.180453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:968:2829], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.180526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.181381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:973:2834], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.181552Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.188333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:04.203739Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:971:2832], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-12-23T18:58:04.252592Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1024:2866] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:04.434841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:05.421091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:05.844858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:06.637176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:07.260078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:07.568463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:08.717917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:09.028773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` 
(TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 2025-12-23T18:58:12.530136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:58:12.530214Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-12-23T18:58:23.430700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-12-23T18:58:24.664468Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:227;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2025-12-23T18:58:24.664566Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:58:24.664632Z node 1 :TX_TIERING DEBUG: ... 
ading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:35.583083Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:35.583339Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-12-23T18:59:35.583377Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-12-23T18:59:35.583406Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier1' at tablet 0 2025-12-23T18:59:35.583439Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 2025-12-23T18:59:35.583479Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier1' started at tablet 0 2025-12-23T18:59:35.583509Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:35.583549Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:35.583800Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3245:4505];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-12-23T18:59:35.583918Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3247:4507];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-12-23T18:59:35.584006Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3255:4513];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-12-23T18:59:46.802463Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:46.802572Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:46.802619Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:46.802671Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:46.802885Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:46.803092Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:46.803248Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:46.803310Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-12-23T18:59:46.803374Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:46.803424Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:46.803493Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:46.803584Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:46.803627Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2025-12-23T18:59:46.803660Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:46.803690Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:46.803739Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:46.803775Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:46.803829Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2025-12-23T18:59:46.803860Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:46.803888Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:46.803934Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:46.803972Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:46.804002Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2025-12-23T18:59:46.804031Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:46.804062Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:46.804107Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:46.804162Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:46.804190Z node 1 :TX_TIERING INFO: log.cpp:841: 
fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-12-23T18:59:46.804222Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:46.804251Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:46.804290Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:46.804342Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:46.804371Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-12-23T18:59:46.804402Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:46.804439Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:46.804789Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3245:4505];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-12-23T18:59:46.804899Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3247:4507];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-12-23T18:59:46.804985Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3255:4513];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 
:Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:57:52.295752Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:57:52.418233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:57:52.429509Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:57:52.429959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:57:52.430053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:57:52.730153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:52.730334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:52.808477Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516269860421 != 1766516269860425 2025-12-23T18:57:52.821992Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:52.869797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:52.959670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005fd3/r3tmp/tmp0zPeDI/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9687, node 1 TClient is connected to server localhost:8726 2025-12-23T18:57:53.422599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:57:53.424066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:53.424117Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:53.424164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:53.425146Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:53.438154Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-12-23T18:58:05.439038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:954:2820], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:05.439260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:05.439799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:981:2827], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:05.439873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:05.443783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:05.627236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1073:2900], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:05.627345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:05.627880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1077:2904], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:05.627987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:05.628066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1080:2907], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:05.633052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:05.746914Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1082:2909], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:58:06.046128Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1176:2974] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:06.610052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:06.928905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:07.462911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:08.033388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:08.435620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:09.585653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:09.863612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 2025-12-23T18:58:13.629822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:58:13.629895Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = 
`secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-12-23T18:58:24.184300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST ... ading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:36.078854Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:36.079005Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-12-23T18:59:36.079056Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-12-23T18:59:36.079110Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier1' at tablet 0 2025-12-23T18:59:36.079147Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 2025-12-23T18:59:36.079186Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier1' started at tablet 0 2025-12-23T18:59:36.079215Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:36.079258Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:36.079535Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3251:4506];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-12-23T18:59:36.079660Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3253:4508];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-12-23T18:59:36.079775Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3261:4514];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-12-23T18:59:47.273523Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:47.273608Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:47.273648Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:47.273772Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:47.274020Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:47.274092Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-12-23T18:59:47.274151Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:47.274196Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:47.274261Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:47.274464Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:47.274697Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-12-23T18:59:47.274838Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:47.274881Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2025-12-23T18:59:47.274917Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:47.274952Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:47.275008Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:47.275070Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:47.275100Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2025-12-23T18:59:47.275134Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:47.275162Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:47.275206Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:47.275246Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:47.275276Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2025-12-23T18:59:47.275308Z node 1 
:TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:47.275335Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:47.275379Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:47.275490Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:47.275524Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-12-23T18:59:47.275554Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:47.275583Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:47.275626Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-12-23T18:59:47.275854Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3251:4506];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-12-23T18:59:47.275969Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3253:4508];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-12-23T18:59:47.276059Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3261:4514];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-12-23T18:59:47.276144Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-12-23T18:59:47.276182Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-12-23T18:59:47.276216Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-12-23T18:59:47.276259Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Table |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Query [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink >> KqpEffects::InsertAbort_Params_Success [GOOD] >> KqpEffects::InsertAbort_Select_Conflict+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29078, MsgBus: 11028 2025-12-23T18:59:37.981418Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130070578634657:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:37.982534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005adf/r3tmp/tmp4CGaxQ/pdisk_1.dat 2025-12-23T18:59:38.179930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:38.180056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:38.197861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:38.208357Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:38.221257Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:38.222143Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130070578634626:2081] 1766516377980034 != 1766516377980037 TServer::EnableGrpc on GrpcPort 29078, node 1 2025-12-23T18:59:38.291961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:38.291985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:38.291990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:38.292079Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:38.399069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11028 TClient is connected to server localhost:11028 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516378277 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:38.714128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:59:38.719688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:59:38.724904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:38.844505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:38.984822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:38.988045Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:39.060117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:40.708387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130083463538389:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:40.708510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:40.708887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130083463538399:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:40.708951Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.019628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.043421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.069204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.091313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.113481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.142801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.193193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.225850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.289123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130087758506565:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.289195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.289294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130087758506570:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.289344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130087758506572:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.289388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.292605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... ate: Disconnected -> Connecting 2025-12-23T18:59:43.937172Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26701, node 2 2025-12-23T18:59:43.972887Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:43.972914Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:43.972922Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:43.973009Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:43.989654Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17519 TClient is connected to server localhost:17519 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516383982 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:44.310403Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:44.318776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:44.365391Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:44.475726Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:44.522124Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:44.818759Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:46.410657Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130109936358257:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.410720Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.410973Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130109936358266:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.410999Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.488505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.518179Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.547053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.573272Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.599246Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.625730Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.653462Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.692055Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.776311Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130109936359136:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.776392Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.776426Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130109936359141:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.776528Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130109936359143:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.776558Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.779815Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:46.788833Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130109936359145:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:59:46.861064Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130109936359198:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:48.379524Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.814838Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130097051454539:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:48.814895Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::RandomWithIndex+UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::RandomWithIndex-UseSecondaryIndex+UseSink >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink >> KqpInplaceUpdate::SingleRowStr+UseSink >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink >> KqpEffects::RandomWithIndex-UseSecondaryIndex-UseSink [GOOD] >> KqpImmediateEffects::UpsertAfterInsert [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink >> KqpImmediateEffects::ConflictingKeyRW1WR2 >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowSimple+UseSink >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] Test command err: 2025-12-23T18:59:43.074928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:59:43.107380Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:59:43.107602Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:59:43.114794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:59:43.115066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:59:43.115322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:59:43.115444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:59:43.115562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:59:43.115861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:59:43.115991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:59:43.116113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:59:43.116212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:59:43.116326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:59:43.116447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:59:43.116550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:59:43.116671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:59:43.147614Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:59:43.147795Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:59:43.147858Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:59:43.148027Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:59:43.148188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:59:43.148264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:59:43.148305Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:59:43.148394Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:59:43.148463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:59:43.148509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:59:43.148540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:59:43.148716Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:59:43.148779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:59:43.148830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:59:43.148856Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:59:43.148963Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:59:43.149020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:59:43.149064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:59:43.149099Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:59:43.149175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:59:43.149214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:59:43.149253Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-12-23T18:59:43.149312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:59:43.149353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:59:43.149388Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:59:43.149598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:59:43.149643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:59:43.149672Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:59:43.149832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:59:43.149887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:59:43.149919Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:59:43.149964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:59:43.150006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:59:43.150032Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:59:43.150076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:59:43.150110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:59:43.150140Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:59:43.150264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2025-12-23T18:59:43.150307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk
":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]}
,"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_
include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2025-12-23T18:59:49.224645Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> KqpImmediateEffects::MultiShardUpsertAfterRead >> ColumnShardTiers::TTLUsage [GOOD] >> DirectReadWithServer::KillPQTablet [GOOD] >> DirectReadWithServer::KillPQRBTablet [GOOD] >> LocalPartition::Restarts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::RandomWithIndex-UseSecondaryIndex-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 64128, MsgBus: 22993 2025-12-23T18:59:31.205991Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130043242938758:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:31.206062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ae3/r3tmp/tmpSr0pgv/pdisk_1.dat 2025-12-23T18:59:31.430568Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:31.434230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:31.434384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:31.471560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:31.519697Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:31.521128Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130043242938727:2081] 
1766516371204380 != 1766516371204383 TServer::EnableGrpc on GrpcPort 64128, node 1 2025-12-23T18:59:31.579506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:31.579527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:31.579542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:31.579651Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:31.689647Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22993 TClient is connected to server localhost:22993 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516371543 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:32.027351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:32.041823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:32.149628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:32.214727Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:32.267410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:32.332319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:33.923740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130051832875190:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:33.923833Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:33.924204Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130051832875200:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:33.924280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:34.154874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:34.179264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:34.205756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:34.233501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:34.263208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:34.292754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:34.324155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:34.361410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:34.425339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130056127843369:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:34.425408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:34.425434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130056127843374:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:34.425631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130056127843376:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:34.425679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:34.428875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:34.436923Z node 1 ... ty maybe) 2025-12-23T18:59:42.934823Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:42.934833Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:42.934932Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:43.061567Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:29888 TClient is connected to server localhost:29888 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516382939 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:43.318055Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:43.332401Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:43.385475Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:43.507326Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:43.609185Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:43.781630Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:45.993824Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130102770155273:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:45.993918Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:45.994251Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130102770155282:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:45.994320Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.066298Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.098812Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.132124Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.165605Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.200390Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.240372Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.271155Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.362635Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.445467Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130107065123450:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.445545Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.445812Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130107065123455:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.445838Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130107065123456:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.445922Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.449432Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:46.458252Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130107065123459:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:59:46.554448Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130107065123510:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:47.775760Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130089885251543:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:47.775837Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:59:48.422633Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.484953Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TxUsage::WriteToTopic_Demo_43_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 7461, MsgBus: 18523 2025-12-23T18:59:40.052179Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130082284648967:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:40.054940Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ada/r3tmp/tmp0Q4qO0/pdisk_1.dat 2025-12-23T18:59:40.259286Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:40.286043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:40.286149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:40.322757Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:40.323512Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130082284648929:2081] 1766516380049713 != 1766516380049716 2025-12-23T18:59:40.328646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7461, node 1 
2025-12-23T18:59:40.389781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:40.389824Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:40.389837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:40.389906Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:40.500424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18523 TClient is connected to server localhost:18523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516380384 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:40.832097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:40.840513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:40.956939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:41.058713Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:41.091716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:41.154111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:42.573181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130090874585393:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:42.573265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:42.573641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130090874585403:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:42.573729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:42.819424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:42.847001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:42.874968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:42.902708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:42.928851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:42.961946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:42.995008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.041347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:43.113018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130095169553572:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.113090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.113234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130095169553577:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.113295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130095169553579:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.113372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.117059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:43.127578Z node 1 : ... eState: Disconnected -> Connecting 2025-12-23T18:59:46.190865Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1307, node 2 2025-12-23T18:59:46.229531Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:46.229556Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:46.229563Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:46.229637Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:46.272207Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5045 TClient is connected to server localhost:5045 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516386243 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:46.592579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:46.601018Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:46.653778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:46.787771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:46.850800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:47.069376Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:48.923905Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130115546211044:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.924019Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.924318Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130115546211054:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.924390Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.996858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.035863Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.064549Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.093310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.120736Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.182447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.212591Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.258357Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.324415Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130119841179223:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.324503Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130119841179228:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.324517Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.324745Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130119841179230:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.324788Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.327638Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:49.337030Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130119841179231:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:59:49.400863Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130119841179283:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:50.874424Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:51.066830Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130106956274621:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:51.067818Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsertWithIndex >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:57:55.505365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:57:55.585163Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:57:55.594049Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:57:55.594303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:57:55.594356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:57:55.799444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:55.799584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:55.845656Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516273633912 != 1766516273633916 2025-12-23T18:57:55.853551Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:55.894336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:55.976093Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005f87/r3tmp/tmpp0djAl/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22699, node 1 TClient is connected to server localhost:18198 2025-12-23T18:57:56.471928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:57:56.473041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:56.473080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:56.473107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:56.473842Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:56.486146Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:57:56.624238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2025-12-23T18:57:56.707576Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:907:2774], Recipient [1:949:2805]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:57:56.708692Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: 
StateInit, received event# 268828673, Sender [1:907:2774], Recipient [1:949:2805]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:57:56.708989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:57:56.729944Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:57:56.730189Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037888 2025-12-23T18:57:56.736524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:57:56.736752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:57:56.736951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:57:56.737031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:57:56.737119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:57:56.737219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:57:56.737312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:57:56.737378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:57:56.737503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:57:56.737585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:57:56.737688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2025-12-23T18:57:56.737805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:57:56.737902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:57:56.753183Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:907:2774], Recipient [1:949:2805]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:57:56.754112Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:909:2775], Recipient [1:953:2807]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:57:56.755418Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 72075186224037888 2025-12-23T18:57:56.755709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:57:56.755788Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:57:56.755961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:56.756084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:57:56.756155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:57:56.756205Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:57:56.756321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:57:56.756382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:57:56.756417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:57:56.756443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:57:56.756606Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:57:56.756655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:57:56.756682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:57:56.756704Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:57:56.756793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clea ... 2811]: NActors::TEvents::TEvWakeup 2025-12-23T18:59:52.097306Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:957:2811];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-12-23T18:59:52.097386Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:962:2814]: NActors::TEvents::TEvWakeup 2025-12-23T18:59:52.097422Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2814];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP START_SLEEP column0/uint64_value: 0 TEvBlobStorage::TEvPut tId=72057594046316545;c=1;:66/0:size=1466;count=9;size=4925;count=35;size=14676;count=211;size=1509;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3741;count=21;size=6567;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72057594046316545;c=0;:66/0:size=1466;count=9;size=4925;count=35;size=14746;count=212;size=1509;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3741;count=21;size=6567;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-12-23T18:59:52.279864Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:949:2805]: NActors::TEvents::TEvWakeup 2025-12-23T18:59:52.279956Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-12-23T18:59:52.280066Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:953:2807]: NActors::TEvents::TEvWakeup 2025-12-23T18:59:52.280102Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2807];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-12-23T18:59:52.280173Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:957:2811]: NActors::TEvents::TEvWakeup 2025-12-23T18:59:52.280207Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:957:2811];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-12-23T18:59:52.280276Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:962:2814]: NActors::TEvents::TEvWakeup 2025-12-23T18:59:52.280307Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2814];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP START_SLEEP TEvBlobStorage::TEvPut 
tId=72057594037968897;c=0;:66/0:size=1466;count=9;size=5196;count=36;size=14746;count=212;size=1509;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3741;count=21;size=6567;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72057594046316545;c=1;:66/0:size=1466;count=9;size=5196;count=36;size=14746;count=212;size=1509;count=21;size=778;count=4;;1:size=15433;count=1;size=19913;count=213;size=3741;count=21;size=6567;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72057594046316545;c=0;:66/0:size=1466;count=9;size=5196;count=36;size=14816;count=213;size=1509;count=21;size=778;count=4;;1:size=15433;count=1;size=19913;count=213;size=3741;count=21;size=6567;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-12-23T18:59:52.436376Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:949:2805]: NActors::TEvents::TEvWakeup 2025-12-23T18:59:52.436448Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:949:2805];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-12-23T18:59:52.436536Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:953:2807]: NActors::TEvents::TEvWakeup 2025-12-23T18:59:52.436572Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2807];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-12-23T18:59:52.436640Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:957:2811]: NActors::TEvents::TEvWakeup 2025-12-23T18:59:52.436673Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:957:2811];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-12-23T18:59:52.436742Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:962:2814]: NActors::TEvents::TEvWakeup 2025-12-23T18:59:52.436775Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2814];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP REQUEST=SELECT COUNT(*) FROM `/Root/olapStore/olapTable`;EXPECTATION=1 |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex-UseSink >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink [GOOD] >> 
KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink >> KqpImmediateEffects::UpdateAfterInsert [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2740, MsgBus: 61482 2025-12-23T18:59:33.446690Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130052525110011:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:33.446915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ae2/r3tmp/tmpvdDH5R/pdisk_1.dat 2025-12-23T18:59:33.661323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:33.661421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:33.688187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:33.697706Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:33.731341Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130052525109966:2081] 1766516373444051 != 1766516373444054 2025-12-23T18:59:33.734049Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2740, node 1 2025-12-23T18:59:33.778596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:33.778629Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:33.778640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:33.778723Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:33.859263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61482 TClient is connected to server localhost:61482 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516373755 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:34.211269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:34.228258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:34.351748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:34.456088Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:34.485704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:34.542892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:36.655143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130065410013728:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:36.655254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:36.655548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130065410013738:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:36.655593Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:36.958756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:36.987785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.014289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.044618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.071141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.102171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.139875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.177380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:37.235193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130069704981904:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.235260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.235308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130069704981909:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.235418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130069704981911:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.235487Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:37.238817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:37.249644Z node 1 : ... GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516386544 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:46.892966Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:46.911147Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:46.965984Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:47.087168Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:47.140602Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:47.386907Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:49.794900Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130121441485959:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.795039Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.796150Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130121441485969:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.796239Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.873466Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.915397Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.949694Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.979399Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:50.009940Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:50.041474Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:50.081742Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:50.134267Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:50.240275Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130125736454134:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:50.240359Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:50.240552Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130125736454141:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:50.240568Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130125736454139:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:50.240592Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:50.245297Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:50.256213Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130125736454143:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:59:50.326104Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130125736454194:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:51.369021Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130108556582331:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:51.369096Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:59:52.154550Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.125561Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [3:7587130138621356519:2553], TxId: 281474976710678, task: 1. Ctx: { CheckpointId : . TraceId : 01kd69a0jb00sbw2jb4se4pmxk. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=NzdjYmJiZTItOWQ2OTNkYTYtMzEyOGNiZWMtNTU1M2JlNDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-12-23T18:59:53.125997Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [3:7587130138621356520:2554], TxId: 281474976710678, task: 2. Ctx: { TraceId : 01kd69a0jb00sbw2jb4se4pmxk. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NzdjYmJiZTItOWQ2OTNkYTYtMzEyOGNiZWMtNTU1M2JlNDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7587130138621356516:2520], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T18:59:53.126497Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=NzdjYmJiZTItOWQ2OTNkYTYtMzEyOGNiZWMtNTU1M2JlNDE=, ActorId: [3:7587130134326389059:2520], ActorState: ExecuteState, TraceId: 01kd69a0jb00sbw2jb4se4pmxk, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TxUsage::WriteToTopic_Demo_43_Query >> KqpImmediateEffects::Delete >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex-UseSink >> KqpImmediateEffects::UpsertDuplicates [GOOD] >> KqpImmediateEffects::UpsertExistingKey >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_True [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_MoveDeadLetterPolicy >> KqpEffects::InsertRevert_Literal_Success [GOOD] >> KqpEffects::InsertRevert_Literal_Duplicates >> TxUsage::WriteToTopic_Demo_14_Query [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict+UseSink >> KqpEffects::AlterAfterUpsertTransaction+UseSink >> TxUsage::WriteToTopic_Demo_16_Table >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink >> KqpFail::OnCommit [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowStr-UseSink >> KqpImmediateEffects::ConflictingKeyRW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink >> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] >> KqpImmediateEffects::ImmediateUpdate >> KqpInplaceUpdate::SingleRowSimple+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowSimple-UseSink >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpFail::OnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 13585, MsgBus: 24570 2025-12-23T18:59:28.427519Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130029406170393:2179];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:28.427848Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ae7/r3tmp/tmpJBnteC/pdisk_1.dat 2025-12-23T18:59:28.625815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:28.700241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:28.700343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-12-23T18:59:28.707316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:28.708426Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130029406170248:2081] 1766516368420816 != 1766516368420819 2025-12-23T18:59:28.709126Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13585, node 1 2025-12-23T18:59:28.845462Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:59:28.936003Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:28.936027Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:28.936046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:28.936158Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24570 TClient is connected to server localhost:24570 2025-12-23T18:59:29.432527Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516368764 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:29.521873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:29.540402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:29.676170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:29.817813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:29.872994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:31.144743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130042291074002:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.144898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.145303Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130042291074012:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.145376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.490353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.517917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.546478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.574422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.598721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.629031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.661635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.724426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:31.787768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130042291074881:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.787836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130042291074886:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.787842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.787992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130042291074888:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.788036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:31.794465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:31.807095Z node 1 ... ARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:49.804289Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ae7/r3tmp/tmp5kQXUO/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32155, node 3 2025-12-23T18:59:50.157093Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:50.157145Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:50.157177Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:50.157435Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:50.172226Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5645 TClient is connected to server localhost:5645 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:50.612654Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:59:50.662376Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:50.915870Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:51.321073Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:51.611199Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:52.197874Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1905:3511], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.198217Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.199267Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1978:3530], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.199365Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.228607Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.381025Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.675636Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.946187Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.213608Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.488445Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.772717Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:54.058646Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:54.424314Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2789:4171], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.424456Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.424921Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2793:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.425017Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.425119Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2796:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.431792Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:54.589420Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2798:4180], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:59:54.648773Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:2858:4221] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:57.500894Z node 3 :RPC_REQUEST WARN: rpc_execute_query.cpp:487: Client lost 2025-12-23T18:59:57.501974Z node 3 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [3:3183:4454] TxId: 281474976715674. Ctx: { TraceId: 01kd69a4ew7d7f9scyn341zw4s, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NzdiMGJiNy00NjZmZTJhMS1iNWNmNDM5LTRkNzQ2NTRi, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Client lost } 2025-12-23T18:59:57.502495Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=NzdiMGJiNy00NjZmZTJhMS1iNWNmNDM5LTRkNzQ2NTRi, ActorId: [3:3152:4454], ActorState: ExecuteState, TraceId: 01kd69a4ew7d7f9scyn341zw4s, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Client lost" severity: 1 } |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TxUsage::WriteToTopic_Demo_24_Table [GOOD] >> KqpEffects::RandomWithIndex-UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::RandomWithIndex+UseSecondaryIndex+UseSink >> KqpOverload::OltpOverloaded-Distributed [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_3_Table [GOOD] >> TxUsage::WriteToTopic_Demo_24_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12646, MsgBus: 5133 2025-12-23T18:59:45.467991Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130104476177111:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:45.469327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ad3/r3tmp/tmpOweTgD/pdisk_1.dat 2025-12-23T18:59:45.682367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:45.682461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:45.684354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:45.727486Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:45.762320Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:45.763699Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130104476177078:2081] 1766516385464384 != 1766516385464387 TServer::EnableGrpc on GrpcPort 12646, node 1 2025-12-23T18:59:45.772827Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:59:45.774240Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T18:59:45.816153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:45.816186Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:45.816219Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:45.816330Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5133 2025-12-23T18:59:46.011820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516385858 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:46.290819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:46.302695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:46.431917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:46.477575Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:46.578752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:46.648146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:48.673081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130117361080838:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.673218Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.673574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130117361080848:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.673614Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.008954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.041323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.075994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.107883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.137044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.172364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.239916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.282219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:49.354989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130121656049016:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.355083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.355252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130121656049021:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.355298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130121656049022:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.355334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:49.358998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResour ... EnableGrpc on GrpcPort 25461, node 2 2025-12-23T18:59:52.855329Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:52.855352Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:52.855359Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:52.855436Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:52.909017Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4870 TClient is connected to server localhost:4870 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516392858 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:59:53.234991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:59:53.243232Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:59:53.250159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.309941Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:53.455043Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.530089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.705665Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:55.845568Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130147293228920:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.845648Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.845911Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130147293228929:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.845945Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.916911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.981763Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.013672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.048288Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.079322Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.114422Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.148065Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.202771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.296266Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130151588197101:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.296379Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.296747Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130151588197106:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.296809Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130151588197107:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.296851Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.300663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:56.317812Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130151588197110:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:59:56.397458Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130151588197161:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:57.697676Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130134408325220:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:57.697734Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:59:58.062905Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_3_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpOverload::OltpOverloaded-Distributed [GOOD] Test command err: Trying to start YDB, gRPC: 8687, MsgBus: 18735 ... waiting for SysViewsRoster update finished 2025-12-23T18:59:40.948873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:59:41.028431Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:59:41.037731Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:59:41.038091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:59:41.038177Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:59:41.278016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:41.278162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:41.346774Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516378701795 != 1766516378701799 2025-12-23T18:59:41.361254Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:41.405746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:41.561156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005add/r3tmp/tmpLj4Et4/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8687, node 1 2025-12-23T18:59:41.885506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:41.885578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:41.885624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:41.885995Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:41.897521Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18735 TClient is connected to server localhost:18735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T18:59:42.213801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:42.256734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:42.532876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:42.874512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:43.136600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:43.898257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1901:3510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.898642Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.899597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3529], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.899684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:43.931005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.147073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.373852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.645288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:44.874060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.123121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.405218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.686223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:46.013941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4170], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.014062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.014497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.014604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:46.014662Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4177], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have ... 968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:52.605404Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:52.631235Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766516389089718 != 1766516389089721 2025-12-23T18:59:52.640075Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:52.688646Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:52.803925Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005add/r3tmp/tmprp681i/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18749, node 2 2025-12-23T18:59:53.160542Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:53.160606Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:53.160639Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:53.161191Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:53.175563Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19855 TClient is connected to server localhost:19855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:53.527775Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:59:53.563378Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.827666Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:54.258997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:54.540541Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:55.098686Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1900:3506], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.098931Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.099605Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1973:3525], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.099665Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.121325Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.335538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.599681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.880412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.150517Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.431142Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.725523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:57.041595Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:57.426465Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2785:4166], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.426609Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.427155Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2789:4170], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.427275Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.427380Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2792:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.434405Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:57.609988Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2794:4175], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:59:57.656959Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2854:4216] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=3; FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=5; >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] |98.8%| [TA] $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::Replace >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] >> KqpImmediateEffects::Delete [GOOD] >> KqpImmediateEffects::DeleteAfterInsert >> KqpImmediateEffects::UpsertExistingKey [GOOD] >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8359, MsgBus: 3090 2025-12-23T18:59:48.267663Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130118323792465:2115];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:48.269207Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ad1/r3tmp/tmpKt1K9J/pdisk_1.dat 2025-12-23T18:59:48.445115Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:48.489555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:48.489655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:48.522994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:48.525814Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:48.527306Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130118323792379:2081] 1766516388264224 != 1766516388264227 TServer::EnableGrpc on GrpcPort 8359, node 1 2025-12-23T18:59:48.611825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:48.611858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:48.611868Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed 
to initialize from file: (empty maybe) 2025-12-23T18:59:48.611941Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:48.656853Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3090 TClient is connected to server localhost:3090 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516388567 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:49.090239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:49.107138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:49.247103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:49.351336Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:49.390703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:49.447730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:51.392104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130131208696145:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:51.392230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:51.392631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130131208696155:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:51.392671Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:51.733775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:51.816759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:51.855185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:51.892934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:51.924642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:51.967344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.031218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.080085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.162316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130135503664320:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.162411Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.162730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130135503664326:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.162793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130135503664325:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.162834Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.166512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:52.178804Z node 1 :KQP ... EnableGrpc on GrpcPort 16523, node 2 2025-12-23T18:59:55.432461Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:55.432485Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:55.432495Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:55.432595Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:55.505577Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5167 TClient is connected to server localhost:5167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516395427 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:55.833712Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:55.842468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:59:55.857797Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:55.927894Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:56.073836Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:56.133975Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:56.299934Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:58.210795Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130158517295520:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.210893Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.211262Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130158517295529:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.211318Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.284627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.313619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.338105Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.366373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.393516Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.433356Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.468184Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.514687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.583219Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130158517296408:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.583291Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130158517296413:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.583302Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.583494Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130158517296415:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.583577Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.587063Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:58.601364Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130158517296416:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:59:58.665655Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130158517296468:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:00.238175Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130145632391790:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:00.238263Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:00.290182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Success >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink [GOOD] >> KqpFail::OnPrepare >> KqpEffects::InsertAbort_Literal_Conflict+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertTransaction+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink >> KqpEffects::InsertRevert_Literal_Conflict >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex+UseSink >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Table [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_4_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 4] [GOOD] >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] >> KqpEffects::AlterAfterUpsertTransaction-UseSink >> KqpImmediateEffects::ReplaceExistingKey >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] >> ColumnStatistics::SimpleColumnStatistics [GOOD] Test command err: Trying to start YDB, gRPC: 62037, MsgBus: 18880 2025-12-23T18:59:38.317347Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130073591667892:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:38.317410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ade/r3tmp/tmpmetiL8/pdisk_1.dat 2025-12-23T18:59:38.534175Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:38.534303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:38.599551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:38.620319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:38.663161Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:38.665895Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130073591667860:2081] 1766516378316151 != 1766516378316154 TServer::EnableGrpc on GrpcPort 62037, node 1 2025-12-23T18:59:38.719082Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:38.719119Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:38.719127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:38.719262Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:38.844269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18880 TClient is connected to server localhost:18880 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516378669 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:39.147531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:59:39.163919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:39.276887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:39.390101Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:39.434501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:39.488765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:41.012383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130086476571618:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.012528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.012853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130086476571628:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.012960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.297519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.321898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.345462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.370339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.396098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.424971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.476559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.513839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:41.575072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130086476572498:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.575123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.575132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130086476572503:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.575274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130086476572505:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.575335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:41.577732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:41.586173Z node 1 ... : SchemeBoardDelete /Root Strong=0 2025-12-23T18:59:54.881507Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:54.881532Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:54.881538Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:54.881600Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:54.898201Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:12069 TClient is connected to server localhost:12069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516394916 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:55.306645Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:55.313176Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:59:55.327746Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:55.387766Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:55.558756Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:55.626655Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:55.762789Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:58.447972Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130157452065045:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.448061Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.448480Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130157452065055:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.448528Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.547140Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.576809Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.608422Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.640296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.673155Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.711892Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.744857Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.791258Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.867933Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130157452065924:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.868030Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130157452065929:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.868040Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.868232Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130157452065931:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.868285Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.910115Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:58.919218Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130157452065932:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T18:59:58.996422Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130157452065984:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:59:59.700787Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130140272194044:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:59.700851Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:00.537119Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 >> KqpImmediateEffects::ManyFlushes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] Test command err: Trying to start YDB, gRPC: 16734, MsgBus: 15270 2025-12-23T18:59:48.901584Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130117511236215:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:48.901652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ad0/r3tmp/tmpbgHrqH/pdisk_1.dat 2025-12-23T18:59:49.149306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:49.149471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:49.173985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:49.200234Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:49.246528Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:49.247984Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130117511236185:2081] 1766516388900626 != 1766516388900629 TServer::EnableGrpc on GrpcPort 16734, node 1 2025-12-23T18:59:49.308527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:49.308549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:49.308576Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:49.308696Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:49.473920Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15270 TClient is connected to server localhost:15270 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516389274 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:49.778133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:49.783819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:59:49.788434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:49.910446Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:59:49.916113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:50.085822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:59:50.150989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.136499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130134691107245:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.136608Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.136983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130134691107255:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.137058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.438651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.473752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.504808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.533191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.561735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.602850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.638518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.718393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.797267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130134691108131:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.797362Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.797766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130134691108137:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.797781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130134691108136:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.797806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.801309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:59:56.404923Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:56.469557Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:56.598513Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:56.649958Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:56.721144Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:58.992510Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130159380638708:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.992589Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.992791Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130159380638717:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.992831Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.119823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.151144Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.183886Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.224601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.265249Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.305425Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.341608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.386801Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.458230Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130163675606886:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.458321Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.458612Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130163675606891:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.458674Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130163675606892:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.458760Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.462317Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:59.472574Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130163675606895:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T18:59:59.531611Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130163675606946:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:00.711099Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130146495734997:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:00.711183Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:01.015092Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.474985Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715677; 2025-12-23T19:00:01.485929Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:948: SelfId: [2:7587130172265542108:2558], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [2:7587130172265542052:2558]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[2:7587130172265542108:2558].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-12-23T19:00:01.486500Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [2:7587130172265542101:2558], SessionActorId: [2:7587130172265542052:2558], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7587130172265542052:2558]. 2025-12-23T19:00:01.486802Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=2&id=ZTRkYjVjNzMtZjg0MTA4ZDktZjQxZmM4Y2UtMmExNWNlMTY=, ActorId: [2:7587130172265542052:2558], ActorState: ExecuteState, TraceId: 01kd69a99zf25f6jtsb70nvny5, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7587130172265542102:2558] from: [2:7587130172265542101:2558] 2025-12-23T19:00:01.486907Z node 2 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [2:7587130172265542102:2558] TxId: 281474976715677. Ctx: { TraceId: 01kd69a99zf25f6jtsb70nvny5, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZTRkYjVjNzMtZjg0MTA4ZDktZjQxZmM4Y2UtMmExNWNlMTY=, PoolId: default, IsStreamingQuery: 0}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-12-23T19:00:01.487313Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=2&id=ZTRkYjVjNzMtZjg0MTA4ZDktZjQxZmM4Y2UtMmExNWNlMTY=, ActorId: [2:7587130172265542052:2558], ActorState: ExecuteState, TraceId: 01kd69a99zf25f6jtsb70nvny5, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. Table: `/Root/TestImmediateEffects`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertExistingKey [GOOD] Test command err: Trying to start YDB, gRPC: 10917, MsgBus: 13072 2025-12-23T18:59:49.459137Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130122691838173:2146];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:49.459696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005acf/r3tmp/tmpLm3Jwy/pdisk_1.dat 2025-12-23T18:59:49.730720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:49.730852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:49.732276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:49.744363Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:49.782605Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:49.788675Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130122691838055:2081] 1766516389454845 != 1766516389454848 TServer::EnableGrpc on GrpcPort 10917, node 1 2025-12-23T18:59:49.850590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:49.850627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:49.850637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:49.850729Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:49.913259Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13072 TClient is connected to server localhost:13072 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516389806 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:50.305959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:50.315202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:59:50.323459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:50.465860Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:50.499126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:50.657026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:50.725274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:52.684619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130135576741815:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.684748Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.685107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130135576741825:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:52.685146Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:53.111530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.150213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.188899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.223982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.259257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.329085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.369496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.421220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.507505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130139871709992:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:53.507604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:53.507822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130139871709997:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:53.507846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130139871709998:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:53.507891Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:53.511299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sc ... ate: Disconnected -> Connecting 2025-12-23T18:59:56.409365Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28403, node 2 2025-12-23T18:59:56.512215Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:56.512239Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:56.512246Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:56.512326Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:56.593612Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:16703 TClient is connected to server localhost:16703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516396456 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:56.877838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:56.892996Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:56.959573Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:57.108009Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:57.164290Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:57.335838Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:59.741744Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130161572939959:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.741835Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.742132Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130161572939969:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.742176Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.807116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.840431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.872489Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.904207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.940507Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.975802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:00.010162Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:00.064361Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:00.151213Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130165867908139:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:00.151330Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:00.151739Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130165867908144:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:00.151800Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130165867908145:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:00.152093Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:00.156223Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:00.166980Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130165867908148:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T19:00:00.229439Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130165867908199:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:01.287109Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130148688036254:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:01.287170Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:01.608498Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpImmediateEffects::ImmediateUpdate [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_MoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_DisableDeadLetterPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61026, MsgBus: 5663 2025-12-23T18:59:44.749242Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130097947047414:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:44.749307Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ad6/r3tmp/tmpzT27oY/pdisk_1.dat 2025-12-23T18:59:44.950212Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:44.978861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:44.978970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:45.013363Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:45.016044Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130097947047387:2081] 1766516384747870 != 1766516384747873 2025-12-23T18:59:45.026480Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61026, node 1 2025-12-23T18:59:45.107819Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:45.107854Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:45.107861Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:45.107959Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:45.114724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:5663 TClient is connected to server localhost:5663 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516385074 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:45.588746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:45.607696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:45.727698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:45.819184Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:45.864864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:45.927253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:47.460165Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130110831951150:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:47.460273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:47.460580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130110831951160:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:47.460626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:47.792854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:47.822481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:47.852172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:47.877394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:47.903962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:47.939860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.004290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.050930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.124248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130115126919327:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.124333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130115126919332:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.124332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.124528Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130115126919334:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.124579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.128339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:48.139291Z node 1 :K ... GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516398479 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:58.887480Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:58.896032Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:58.945073Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:59.128258Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:59.193595Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:59.342348Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:01.427514Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130171809391593:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.427596Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.427842Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130171809391603:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.427898Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.501690Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.533786Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.568994Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.598778Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.628399Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.660007Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.695746Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.749389Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.829277Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130171809392471:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.829351Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.829387Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130171809392476:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.829563Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130171809392478:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.829626Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.833389Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:01.844429Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130171809392480:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T19:00:01.898945Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130171809392531:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:03.318833Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130158924487968:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:03.318907Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:03.763852Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:04.266021Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [3:7587130184694294830:2547], TxId: 281474976715677, task: 1. Ctx: { CheckpointId : . TraceId : 01kd69abpw7gxec35rmpe4pdr8. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZjhhNTdmODMtMmJmMTU4NTctMjQyNzU4MDEtZGIzZGFkNGE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-12-23T19:00:04.266247Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [3:7587130184694294832:2548], TxId: 281474976715677, task: 2. Ctx: { CheckpointId : . TraceId : 01kd69abpw7gxec35rmpe4pdr8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZjhhNTdmODMtMmJmMTU4NTctMjQyNzU4MDEtZGIzZGFkNGE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7587130184694294827:2520], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T19:00:04.266588Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=ZjhhNTdmODMtMmJmMTU4NTctMjQyNzU4MDEtZGIzZGFkNGE=, ActorId: [3:7587130180399327386:2520], ActorState: ExecuteState, TraceId: 01kd69abpw7gxec35rmpe4pdr8, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> YdbProxy::ReadTopic [GOOD] >> YdbProxy::ReadNonExistentTopic >> ColumnStatistics::CountMinSketchStatistics [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table >> KqpImmediateEffects::UpdateOn >> KqpImmediateEffects::Replace [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_2_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 2] [GOOD] >> KqpImmediateEffects::DeleteAfterInsert [GOOD] >> KqpImmediateEffects::ReplaceDuplicates >> KqpImmediateEffects::ConflictingKeyW1WRR2 >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Table [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_8_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 8] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4405, MsgBus: 17290 2025-12-23T18:59:52.677268Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130132341247917:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:52.677359Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ac7/r3tmp/tmp26EGhm/pdisk_1.dat 2025-12-23T18:59:52.910651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:52.942925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:52.943066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:53.023772Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:53.029100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:53.030842Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130132341247889:2081] 1766516392676083 != 1766516392676086 TServer::EnableGrpc on GrpcPort 4405, node 1 2025-12-23T18:59:53.129519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:53.129547Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:53.129555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:53.129656Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:53.132340Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17290 TClient is connected to server localhost:17290 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516393082 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:53.639340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:53.646436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T18:59:53.671384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.684929Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:53.812686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.984216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:54.049294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:55.889426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130145226151644:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.889561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.889973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130145226151654:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.890060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.217992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.248561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.277716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.312710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.343014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.381177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.418575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.479647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.556798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130149521119822:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.556878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.557029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130149521119827:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.557075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130149521119828:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.557110Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.560602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at sche ... eState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 9416, node 2 2025-12-23T18:59:59.665316Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:59.731377Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:59.731403Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:59.731413Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:59.731500Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:59.807529Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3242 TClient is connected to server localhost:3242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516399704 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T19:00:00.164874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:00:00.183706Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:00.267840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:00.450816Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:00.515134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:00.595106Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:02.986135Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130175611024316:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.986246Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.986520Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130175611024325:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.986573Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:03.057621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.091514Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.126055Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.190590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.223576Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.263432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.296943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.352739Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.431720Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130179905992491:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:03.431842Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:03.431949Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130179905992496:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:03.432374Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130179905992498:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:03.432483Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:03.435681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:03.448547Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130179905992499:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T19:00:03.537288Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130179905992551:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:04.538840Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130162726120595:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:04.538919Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:05.292450Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::SimpleColumnStatistics [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:15.688122Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:15.766226Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:15.771385Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:15.771855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:15.772010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:16.103998Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:16.201585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:16.201706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:16.261299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:16.299133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00324a/r3tmp/tmpiGZhfa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20447, node 1 2025-12-23T18:52:17.022160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:17.022999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:17.023036Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:17.023090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:17.023273Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:17.086904Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19322 2025-12-23T18:52:17.641098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:20.674958Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:20.680231Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:20.683081Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:20.711251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:20.711376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:20.750114Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:20.752396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:20.991710Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:20.992356Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:20.993322Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:20.993957Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:20.994265Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:20.994388Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:20.994564Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:20.994734Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:20.994892Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:21.139427Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:52:21.151247Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:21.151374Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:21.164870Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:21.338128Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:21.392657Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:21.392761Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:21.419570Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:21.420891Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:21.421066Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:21.421114Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:52:21.421170Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:21.421219Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:21.421262Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:21.421305Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:21.421671Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:21.461904Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:21.461992Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:21.472562Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:52:21.480394Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:52:21.481503Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:52:21.486511Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:52:21.513690Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:52:21.513742Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:52:21.513810Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:52:21.532720Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:21.541509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:21.548741Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:21.548852Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:21.558898Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:21.658318Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:22.032871Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:22.135069Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:22.135158Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2 ... 397Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:52:46.918527Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:52:46.918584Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4654:4119], StatRequests.size() = 1 2025-12-23T18:52:46.918689Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:56:00.057300Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4517:2473], ActorId: [2:4527:4042], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:56:00.057616Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4517:2473], ActorId: [2:4527:4042], Start read next stream part 2025-12-23T18:56:00.057796Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd68x0d41pb2f6p5zctbstnm", SessionId: ydb://session/3?node_id=2&id=NGYwOGVmNDctOTQ3YTgyMjQtYTU2MGEwN2QtYjA2YmM0MDI=, Slow query, duration: 193.679148s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1`", parameters: 0b 2025-12-23T18:56:00.060267Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2025-12-23T18:56:00.060480Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4517:2473], ActorId: [2:4854:4242], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:56:00.064194Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4517:2473], ActorId: [2:4854:4242], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(256,8)),AGGREGATE_BY(LowCardinalityInt,$f0(256,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0,9)),AGGREGATE_BY(Float,$f0(256,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(256,8)),AGGREGATE_BY(Date,$f1(32,10000,10099)),AGGREGATE_BY(NearNumericLimits,$f0(256,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808,9223372036854775807)) FROM `/Root/Database/Table1` 2025-12-23T18:56:00.064349Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4517:2473], ActorId: [2:4854:4242], Start read next stream part 2025-12-23T18:56:00.065296Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4517:2473], ActorId: [2:4527:4042], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:56:00.065384Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4517:2473], ActorId: [2:4527:4042], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDE4NDlmNzEtZWI3Yjk1N2EtZjI4MzM3ZTktNTFmYTIyYmU=, TxId: 2025-12-23T18:56:00.268444Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4880:4264]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:56:00.268667Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:56:00.268708Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4880:4264], StatRequests.size() = 1 2025-12-23T19:00:04.216027Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4517:2473], ActorId: [2:4854:4242], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T19:00:04.216243Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4517:2473], ActorId: [2:4854:4242], Start read next stream part 2025-12-23T19:00:04.217064Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd692xj04dbqd3rgz101mrps", SessionId: ydb://session/3?node_id=2&id=NTg0YmI4NDctN2YwOGI2NzAtOTQxODE5ZjQtYjI4Y2RiMTU=, Slow query, duration: 244.148762s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f1 = ($p0,$p1,$p2) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) },\n 
StatisticsInternal::EWHMerge,\n StatisticsInternal::EWHFinalize,\n StatisticsInternal::EWHSerialize,\n StatisticsInternal::EWHDeserialize,\n)\n};\n$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(LowCardinalityString,$f0(256,8)),AGGREGATE_BY(LowCardinalityInt,$f0(256,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0,9)),AGGREGATE_BY(Float,$f0(256,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(256,8)),AGGREGATE_BY(Date,$f1(32,10000,10099)),AGGREGATE_BY(NearNumericLimits,$f0(256,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808,9223372036854775807)) FROM `/Root/Database/Table1`", parameters: 0b 2025-12-23T19:00:04.219954Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5057:4366], ActorId: [2:5058:4367], Starting query actor #1 [2:5059:4368] 2025-12-23T19:00:04.220018Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5058:4367], ActorId: [2:5059:4368], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T19:00:04.222556Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32400, txId: 18446744073709551615] shutting down 2025-12-23T19:00:04.223339Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5058:4367], ActorId: [2:5059:4368], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MTU5MTY0ODctYjZhMGJmYWQtZmU2OWQ5NDgtNzNjNGY5MjA=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T19:00:04.225459Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4517:2473], ActorId: [2:4854:4242], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T19:00:04.225509Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4517:2473], ActorId: [2:4854:4242], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTk5YzkyZmEtZTdhNDYzNTUtYjVmNWNlNDItNmEzMzlmMTg=, TxId: 2025-12-23T19:00:04.288979Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5076:4382]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T19:00:04.289226Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T19:00:04.289262Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:5076:4382], StatRequests.size() = 1 2025-12-23T19:00:04.451379Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5058:4367], ActorId: [2:5059:4368], DataQuery #1 finished SUCCESS, Issues: , SessionId: 
ydb://session/3?node_id=2&id=MTU5MTY0ODctYjZhMGJmYWQtZmU2OWQ5NDgtNzNjNGY5MjA=, TxId: 2025-12-23T19:00:04.451503Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5058:4367], ActorId: [2:5059:4368], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTU5MTY0ODctYjZhMGJmYWQtZmU2OWQ5NDgtNzNjNGY5MjA=, TxId: 2025-12-23T19:00:04.452011Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5057:4366], ActorId: [2:5058:4367], Got response [2:5059:4368] SUCCESS 2025-12-23T19:00:04.452868Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T19:00:04.503856Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T19:00:04.503950Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3374:3464] 2025-12-23T19:00:04.505026Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:5095:4392]], StatType[ 1 ], StatRequestsCount[ 2 ] 2025-12-23T19:00:04.505552Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T19:00:04.505624Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T19:00:04.505995Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T19:00:04.506074Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T19:00:04.506150Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 1 ], ColumnTag[ 1 ] 2025-12-23T19:00:04.506528Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 1 ], ColumnTag[ 2 ] 2025-12-23T19:00:04.514766Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2025-12-23T19:00:04.515706Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28624, MsgBus: 19413 2025-12-23T18:59:44.948329Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130097362068905:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:44.948436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ad4/r3tmp/tmpn4hMEe/pdisk_1.dat 2025-12-23T18:59:45.132970Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:45.192250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:45.192366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:45.193752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:45.231359Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130097362068862:2081] 1766516384946712 != 1766516384946715 2025-12-23T18:59:45.241118Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28624, node 1 2025-12-23T18:59:45.285763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:45.285799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:45.285810Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:45.285902Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:45.390321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19413 TClient is connected to server localhost:19413 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516385256 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:45.752185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:45.771549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:45.860933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:45.957563Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:45.994695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:46.051345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:48.204702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130114541939920:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.204808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.205170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130114541939930:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.205290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.562720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.597058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.628206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.660902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.693172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.751651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.780245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.827476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:48.899232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130114541940798:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.899354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.899418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130114541940803:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.899599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130114541940805:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.899639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:48.903285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:48.913772Z node 1 ... ation TClient is connected to server localhost:19436 TClient is connected to server localhost:19436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516399256 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:59.705108Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:59.710356Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:59:59.717417Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:59.780110Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:59.923116Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:00.033259Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:00.165759Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:02.851014Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130177013299791:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.851138Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.851579Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130177013299800:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.851650Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.937731Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.969899Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.000126Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.028016Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.058395Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.097585Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.140842Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.189146Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:03.269374Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130181308267968:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:03.269471Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:03.269557Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130181308267973:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:03.269852Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130181308267975:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:03.269937Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:03.273283Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:03.283851Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130181308267976:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:03.372253Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130181308268028:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:04.099499Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130164128396074:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:04.099590Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:05.254338Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:05.468939Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T19:00:05.664862Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=M2I5NDM3YWMtNjM2ZTZhMWItZDM5Y2ZkNWMtYzM2Y2I3ZA==, ActorId: [3:7587130189898202910:2522], ActorState: ExecuteState, TraceId: 01kd69ad7fe467785r9qsarf8w, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. 
Tables: `/Root/TestTable`" issue_code: 2001 severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24336, MsgBus: 24046 2025-12-23T18:59:52.041483Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130132312871319:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:52.042649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005aca/r3tmp/tmpFbKxMY/pdisk_1.dat 2025-12-23T18:59:52.311255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:52.311386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:52.380752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:52.429837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:52.431298Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130132312871279:2081] 1766516392039782 != 1766516392039785 2025-12-23T18:59:52.437063Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24336, node 1 2025-12-23T18:59:52.503404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:52.503425Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:52.503444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:52.503533Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24046 2025-12-23T18:59:52.722873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24046 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516392445 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:53.031097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:53.054535Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:53.061825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.210260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.358552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.425613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:55.379033Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130145197775045:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.379160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.380540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130145197775055:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.380622Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.792464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.823932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.857524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.892021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.928790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.994181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.026569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.074719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.167014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130149492743223:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.167134Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.167719Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130149492743228:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.167757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130149492743229:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.167785Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.171952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:56.185271Z node 1 ... State: Disconnected -> Connecting 2025-12-23T18:59:59.357279Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18629, node 2 2025-12-23T18:59:59.409867Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:59.409887Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:59.409895Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:59.409968Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:59.475840Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4881 TClient is connected to server localhost:4881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516399410 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T18:59:59.794178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:59:59.812812Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T18:59:59.870956Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:00.026824Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:00.103988Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:00.246090Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:02.490387Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130176403761545:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.490488Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.490822Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130176403761554:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.490878Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.591763Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.624653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.655956Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.684592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.716525Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.751338Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.785993Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.833979Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.909156Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130176403762423:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.909248Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.909286Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130176403762428:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.909423Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130176403762430:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.909493Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.912675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:02.921630Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130176403762432:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:02.994990Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130176403762483:3760] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:04.195738Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130163518857837:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:04.195792Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:04.803760Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |98.8%| [TA] {RESULT} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:23.368896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:23.446224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:23.450814Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:23.451126Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:23.451223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:23.779124Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:23.864649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:23.864748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:23.922596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:24.030452Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0031f3/r3tmp/tmpSAxC7W/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30358, node 1 2025-12-23T18:52:24.652824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:24.653864Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:24.653931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:24.653965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:24.654193Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:24.718481Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21336 2025-12-23T18:52:25.245019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:27.797269Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:27.802860Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:52:27.805478Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:27.829500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:27.829591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:27.867003Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:52:27.868707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:28.097180Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:28.097763Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:28.098473Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:28.099038Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:28.099316Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:28.099425Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:28.099584Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:28.099707Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:28.099802Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:28.212569Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:52:28.223932Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:28.224029Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:28.236663Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:28.365389Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:28.405312Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:28.405400Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:28.430475Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:28.431717Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:28.431919Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:28.431983Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:52:28.432036Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:28.432088Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:28.432134Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:52:28.432176Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:28.432493Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:28.473219Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:28.473316Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:28.486268Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:52:28.495527Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:52:28.496683Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:52:28.502211Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:52:28.527575Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:52:28.527641Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:52:28.527708Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:52:28.544676Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:28.553145Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:28.559543Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:28.559664Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:52:28.568618Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:52:28.669064Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:28.993082Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:52:29.095488Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:52:29.095579Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2 ... 5-12-23T18:52:53.519187Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:52:53.519266Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4649:4117], StatRequests.size() = 1 2025-12-23T18:52:53.519347Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T18:55:58.256252Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4514:2473], ActorId: [2:4524:4041], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T18:55:58.256547Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4514:2473], ActorId: [2:4524:4041], Start read next stream part 2025-12-23T18:55:58.256699Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd68x6w3bfbem4vg4hrc7wcd", SessionId: ydb://session/3?node_id=2&id=NTgzYTNjNzQtYTEyY2RiMzgtMmY4MWYzMzEtNzg5MWRjZjU=, Slow query, duration: 185.255683s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1`", parameters: 0b 2025-12-23T18:55:58.258149Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2025-12-23T18:55:58.258285Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4514:2473], ActorId: [2:4849:4240], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:55:58.260770Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4514:2473], ActorId: [2:4524:4041], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T18:55:58.260818Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4514:2473], ActorId: [2:4524:4041], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NmFiOTMzZmYtZjgxY2VlNzgtNjk1MTM2NGItOTgwZGE4YmM=, TxId: 2025-12-23T18:55:58.261318Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4514:2473], ActorId: [2:4849:4240], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(256,8)),AGGREGATE_BY(LowCardinalityInt,$f0(256,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0,9)),AGGREGATE_BY(Float,$f0(256,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(256,8)),AGGREGATE_BY(Date,$f1(32,10000,10099)),AGGREGATE_BY(NearNumericLimits,$f0(256,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808,9223372036854775807)) FROM `/Root/Database/Table1` 2025-12-23T18:55:58.261408Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4514:2473], ActorId: [2:4849:4240], Start read next stream part 2025-12-23T18:55:58.476830Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4875:4262]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:55:58.477022Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T18:55:58.477057Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4875:4262], StatRequests.size() = 1 2025-12-23T19:00:06.102566Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4514:2473], ActorId: [2:4849:4240], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T19:00:06.102858Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4514:2473], ActorId: [2:4849:4240], Start read next stream part 2025-12-23T19:00:06.103850Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd692vsnc8va39a93k3hbmt0", SessionId: ydb://session/3?node_id=2&id=NDJiMTc1Y2QtM2QwNTBjYTUtZjM2NjYwMjItZGE0MGE1Y2I=, Slow query, duration: 247.839578s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f1 = ($p0,$p1,$p2) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) },\n 
StatisticsInternal::EWHMerge,\n StatisticsInternal::EWHFinalize,\n StatisticsInternal::EWHSerialize,\n StatisticsInternal::EWHDeserialize,\n)\n};\n$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(LowCardinalityString,$f0(256,8)),AGGREGATE_BY(LowCardinalityInt,$f0(256,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0,9)),AGGREGATE_BY(Float,$f0(256,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(256,8)),AGGREGATE_BY(Date,$f1(32,10000,10099)),AGGREGATE_BY(NearNumericLimits,$f0(256,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808,9223372036854775807)) FROM `/Root/Database/Table1`", parameters: 0b 2025-12-23T19:00:06.107167Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32360, txId: 18446744073709551615] shutting down 2025-12-23T19:00:06.107485Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5052:4364], ActorId: [2:5053:4365], Starting query actor #1 [2:5054:4366] 2025-12-23T19:00:06.107556Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5053:4365], ActorId: [2:5054:4366], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T19:00:06.110511Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5053:4365], ActorId: [2:5054:4366], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YjIxNTBjZTUtYWVhM2VhZS05YzU5YjgzNC1mMjU5NWFlNA==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T19:00:06.113108Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4514:2473], ActorId: [2:4849:4240], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T19:00:06.113196Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4514:2473], ActorId: [2:4849:4240], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Mzk5ZmNmYWItY2JiOGY3ZGEtNjAyMGRiZTYtNmJiNGQ5NmE=, TxId: 2025-12-23T19:00:06.188426Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5071:4380]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T19:00:06.188879Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T19:00:06.188943Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:5071:4380], StatRequests.size() = 1 2025-12-23T19:00:06.349147Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5053:4365], ActorId: [2:5054:4366], DataQuery #1 finished SUCCESS, Issues: , SessionId: 
ydb://session/3?node_id=2&id=YjIxNTBjZTUtYWVhM2VhZS05YzU5YjgzNC1mMjU5NWFlNA==, TxId: 2025-12-23T19:00:06.349255Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5053:4365], ActorId: [2:5054:4366], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjIxNTBjZTUtYWVhM2VhZS05YzU5YjgzNC1mMjU5NWFlNA==, TxId: 2025-12-23T19:00:06.349697Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5052:4364], ActorId: [2:5053:4365], Got response [2:5054:4366] SUCCESS 2025-12-23T19:00:06.350170Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T19:00:06.405716Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T19:00:06.405824Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3374:3464] 2025-12-23T19:00:06.407000Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:5090:4390]], StatType[ 2 ], StatRequestsCount[ 2 ] 2025-12-23T19:00:06.407543Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T19:00:06.407622Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2025-12-23T19:00:06.408058Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T19:00:06.408136Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-12-23T19:00:06.408215Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-12-23T19:00:06.408569Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-12-23T19:00:06.415731Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-12-23T19:00:06.416782Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2025-12-23T19:00:06.417090Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29880, MsgBus: 3960 2025-12-23T18:59:51.873126Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130129622126218:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:51.873192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005acb/r3tmp/tmpTGSA3F/pdisk_1.dat 
2025-12-23T18:59:52.101786Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:52.123248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:52.123358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:52.205235Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:52.212490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29880, node 1 2025-12-23T18:59:52.314691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:52.314724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:52.314736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:52.314863Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:52.327199Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3960 TClient is connected to server localhost:3960 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516392263 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:52.830601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T18:59:52.839817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:59:52.854162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:52.898452Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:53.022932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:59:53.187219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.252636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.084768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130146801997240:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.084874Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.085142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130146801997250:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.085184Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.486093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.513572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.541168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.566558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.594700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.651188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.681806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.724315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.804019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130146801998124:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.804115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.804354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130146801998129:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.804360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130146801998130:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.804410Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.808400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:55.819552Z node 1 :KQP_WORKLOAD_SE ... State: Disconnected -> Connecting 2025-12-23T18:59:59.451604Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10912, node 2 2025-12-23T18:59:59.522581Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:59.522598Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:59.522606Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:59.522664Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:59.630501Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8973 TClient is connected to server localhost:8973 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516399494 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:59.939997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:59:59.960529Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:00.018176Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:00.161503Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:00.255097Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:00.383180Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:02.540064Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130176543967699:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.540167Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.540394Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130176543967709:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.540481Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.612244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.642077Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.669106Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.695097Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.722098Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.752542Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.790683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.841089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:02.926694Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130176543968577:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.926786Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130176543968582:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.926801Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.926990Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130176543968585:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.927069Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:02.930478Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:02.941321Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130176543968584:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T19:00:03.019402Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130180838935933:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:04.302261Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130163659064168:2255];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:04.302332Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:04.629189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Success [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates+UseSink >> TxUsage::WriteToTopic_Demo_43_Query [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Query >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::EffectWithSelect+UseSink >> KqpEffects::AlterAfterUpsertTransaction-UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] >> YdbProxy::ReadNonExistentTopic [GOOD] |98.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpEffects::RandomWithIndex+UseSecondaryIndex+UseSink [GOOD] >> TxUsage::WriteToTopic_Demo_16_Table [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::DeleteWithIndex-UseSecondaryIndex-UseSink >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish >> Secret::ValidationQueryService [GOOD] >> TxUsage::WriteToTopic_Demo_16_Query >> TGRpcStreamingTest::SimpleEcho >> TxUsage::WriteToTopic_Demo_45_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21168, MsgBus: 24117 2025-12-23T18:59:57.386347Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130154265735474:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:57.386640Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ac0/r3tmp/tmpHJIsr2/pdisk_1.dat 2025-12-23T18:59:57.605572Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:57.610164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:57.610291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:57.652644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:57.699657Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21168, node 1 2025-12-23T18:59:57.755271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:57.755298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:57.755305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:57.755402Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:57.860566Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24117 TClient is connected to server localhost:24117 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516397730 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:58.235141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:58.250561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:58.392415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:58.396955Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:58.531133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:58.591518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:00.635581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130167150639186:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:00.635695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:00.636071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130167150639196:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:00.636124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:00.885762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:00.922455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:00.956641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:00.983998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.011129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.040990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.095476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.130719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.193053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130171445607363:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.193154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.193286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130171445607368:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.193329Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130171445607369:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.193378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.196869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:01.206869Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587130171445607372:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Tra ... tate: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 4040, node 2 2025-12-23T19:00:04.100980Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:04.145453Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:04.145475Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:04.145481Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:04.145539Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:04.268972Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:62978 TClient is connected to server localhost:62978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516404149 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:04.559511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:04.577633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:04.652186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:04.785755Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:04.850160Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:04.995078Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:07.055801Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130195992317612:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.055897Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.056331Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130195992317621:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.056378Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.143578Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.182349Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.244317Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.287269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.328703Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.368694Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.421007Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.471769Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.577106Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130195992318488:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.577202Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.577490Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130195992318493:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.577533Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130195992318494:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.577626Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.584786Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:07.601514Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130195992318497:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:07.675364Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130195992318548:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:08.970782Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130178812446600:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:08.970852Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:09.621426Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2025-12-23T18:57:57.268079Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129638689592997:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:57:57.268872Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005057/r3tmp/tmpzFo1Zf/pdisk_1.dat 2025-12-23T18:57:57.517129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:57:57.542574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:57.542698Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:57.605789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:57.693619Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:57.737565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:15778 TServer::EnableGrpc on GrpcPort 8591, node 1 2025-12-23T18:57:57.972977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:57.973007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:57.973017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2025-12-23T18:57:57.973132Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:58.278808Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:15778 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516277701 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:57:58.477776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:57:58.484285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:57:58.684273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:59.986654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129647279528682:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:59.986679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129647279528671:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:59.986762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129647279528683:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:59.986836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:59.987301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129647279528692:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:59.987364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:59.992026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:59.995427Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129647279528694:2639] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T18:57:59.999751Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129647279528691:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:57:59.999751Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129647279528690:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-12-23T18:58:00.078178Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129651574496037:2670] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:00.088214Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129651574496055:2678] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:01.427845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:01.715839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:02.119369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:58:02.351907Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129638689592997:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:02.354967Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T18:58:02.440850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T18:58:02.786805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:12.484690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:58:12.484714Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:07.248836Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587130197836947525:2144];send_to=[0:7307199536658146131:7762515]; 
2025-12-23T19:00:07.248967Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005057/r3tmp/tmpcFf55S/pdisk_1.dat 2025-12-23T19:00:07.275245Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:07.410553Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:07.415662Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587130197836947410:2081] 1766516407239181 != 1766516407239184 2025-12-23T19:00:07.431985Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:07.432091Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:07.464534Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:07.475618Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:63470 TServer::EnableGrpc on GrpcPort 4003, node 2 2025-12-23T19:00:07.822449Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:07.822473Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:07.822482Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:07.822594Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:63470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516407516 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T19:00:08.120185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 11596, MsgBus: 22510 2025-12-23T18:59:50.178645Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130126724447903:2248];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:50.179368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ace/r3tmp/tmp6XGEtN/pdisk_1.dat 2025-12-23T18:59:50.446544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:50.446656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:50.500497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:50.501706Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:50.538133Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:50.547003Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130126724447693:2081] 1766516390169470 != 1766516390169473 TServer::EnableGrpc on GrpcPort 11596, node 1 2025-12-23T18:59:50.607368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:50.607388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:50.607400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:50.607466Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:50.689318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22510 TClient is connected to server localhost:22510 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516390562 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:51.108491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:51.134157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:51.179147Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:51.274237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:51.438299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:51.496096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.458097Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130139609351452:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:53.458198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:53.458590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130139609351462:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:53.458638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:53.812213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.847739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.880468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.913720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.946728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.984560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:54.042132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:54.087946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:54.166328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130143904319628:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.166407Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.166677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130143904319633:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.166719Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130143904319634:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.166722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.171104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:54.181799Z node 1 ... ons 2025-12-23T19:00:04.137069Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:04.174297Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:04.174385Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:04.180459Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23936, node 3 2025-12-23T19:00:04.219294Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:04.219343Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:04.219352Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:04.219447Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:04.248586Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26137 TClient is connected to server localhost:26137 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516404226 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:04.591455Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T19:00:04.606658Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:04.658229Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:04.782907Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:04.872490Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:05.055689Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:07.403312Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130199206441267:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.403402Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.403857Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130199206441276:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.403914Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.486135Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.529814Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.568317Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.607068Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.645517Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.715988Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.749383Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.794297Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.876512Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130199206442146:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.876641Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.876936Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130199206442151:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.877006Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130199206442152:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.877044Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.881416Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:07.893544Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130199206442155:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:07.981813Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130199206442206:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:09.047747Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130186321537540:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:09.047824Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest >> KqpImmediateEffects::ReplaceExistingKey [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::RandomWithIndex+UseSecondaryIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19610, MsgBus: 23051 2025-12-23T18:59:42.550495Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130089579883357:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:42.551919Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ad8/r3tmp/tmphGCec4/pdisk_1.dat 2025-12-23T18:59:42.739601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:42.739747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:42.757199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:42.780385Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:42.825041Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130089579883315:2081] 1766516382548897 != 1766516382548900 2025-12-23T18:59:42.825671Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19610, node 1 2025-12-23T18:59:42.884146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:42.884168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:42.884176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:42.884270Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23051 2025-12-23T18:59:43.081170Z node 
1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23051 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516382841 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:43.358029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:43.367770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:43.484397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:43.584708Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:43.618129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:43.675688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:45.143644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130102464787074:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:45.143779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:45.144135Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130102464787084:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:45.144183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:45.432976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.461430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.491934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.522552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.554319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.587784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.619748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.660029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:45.731372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130102464787958:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:45.731504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:45.731551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130102464787963:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:45.731705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130102464787965:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:45.731765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:45.735390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:45.744745Z node 1 ... ty maybe) 2025-12-23T19:00:00.450072Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:00.450079Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:00.450165Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:00.578894Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:23954 TClient is connected to server localhost:23954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516400432 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:00.902272Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:00.910290Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:00.964129Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:01.098447Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:01.165753Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:01.179953Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:04.458275Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130184717954935:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:04.458372Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:04.458626Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130184717954944:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:04.458677Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:04.534339Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:04.574525Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:04.606572Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:04.638236Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:04.668875Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:04.717926Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:04.754616Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:04.818974Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:04.905397Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130184717955814:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:04.905515Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:04.905602Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130184717955819:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:04.905760Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130184717955821:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:04.905812Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:04.909031Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:04.919673Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130184717955823:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T19:00:05.009181Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130189012923170:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:05.191618Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130167538084023:2173];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:05.191692Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:07.037933Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.127371Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TxUsage::WriteToTopic_Demo_24_Query [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15851, MsgBus: 26748 2025-12-23T18:59:51.056755Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130127652814700:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:51.056850Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005acc/r3tmp/tmpyITzOY/pdisk_1.dat 2025-12-23T18:59:51.325373Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:51.351174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:51.351306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:51.418552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:51.425419Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130127652814672:2081] 1766516391054070 != 1766516391054073 
2025-12-23T18:59:51.453095Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15851, node 1 2025-12-23T18:59:51.536069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:51.536085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:51.536089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:51.536154Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:51.604374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26748 TClient is connected to server localhost:26748 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516391500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:52.038324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:59:52.060461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:52.063580Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:52.203530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:52.357837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:52.422224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:54.362396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130140537718440:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.362549Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.362995Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130140537718450:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.363082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:54.702772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:54.737254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:54.767580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:54.797439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:54.823417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:54.855407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:54.889784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:54.936569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:55.010232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130144832686619:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.010308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.010333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130144832686624:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.010536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130144832686626:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.010580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.014703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:55.025995Z node 1 ... tatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516404695 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:05.109389Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:05.118499Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:05.181463Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:05.316821Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:05.378206Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:05.519326Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:08.036666Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130204129628510:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.036771Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.037084Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130204129628520:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.037134Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.100854Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.127952Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.155782Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.181777Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.212673Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.242245Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.271620Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.313547Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.416779Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130204129629388:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.416837Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.416891Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130204129629393:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.416987Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130204129629395:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.417038Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.419884Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:08.428864Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130204129629397:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:08.495889Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130204129629448:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:09.518843Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130186949757498:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:09.518912Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:10.770539Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [3:7587130212719564366:2535], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01kd69aj28cvhgdbfkkrmfsaxv. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NmE4ZDJkOWQtODc4MTJjOGMtNzE2NTM2NTktMTBlZWM3YTY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-12-23T19:00:10.770885Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [3:7587130212719564368:2536], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01kd69aj28cvhgdbfkkrmfsaxv. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=NmE4ZDJkOWQtODc4MTJjOGMtNzE2NTM2NTktMTBlZWM3YTY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7587130212719564363:2520], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T19:00:10.771378Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=NmE4ZDJkOWQtODc4MTJjOGMtNzE2NTM2NTktMTBlZWM3YTY=, ActorId: [3:7587130212719564317:2520], ActorState: ExecuteState, TraceId: 01kd69aj28cvhgdbfkkrmfsaxv, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |98.8%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] |98.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 28859, MsgBus: 22245 2025-12-23T18:59:53.850381Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130137683952133:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:53.852004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ac4/r3tmp/tmpuIshwy/pdisk_1.dat 2025-12-23T18:59:54.101191Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:54.110765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:54.110896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:54.170939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:54.193288Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28859, node 1 2025-12-23T18:59:54.275983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:54.276006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:54.276012Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:54.276169Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:54.304230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22245 TClient is connected to server localhost:22245 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516394244 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:54.780653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:54.800853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:54.861504Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:54.915226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:55.078889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:55.146949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:57.185222Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130154863823139:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.185353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.185730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130154863823149:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.185782Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.519379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:57.552532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:57.581439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:57.607490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:57.633522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:57.663375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:57.725167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:57.765685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:57.834086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130154863824018:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.834183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.834502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130154863824024:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.834513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130154863824023:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.834583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:57.838661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:57.854222Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587130154863824027:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Tra ... ate: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29018, node 2 2025-12-23T19:00:05.074312Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:05.110811Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:05.110836Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:05.110843Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:05.110915Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:05.206902Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:28967 TClient is connected to server localhost:28967 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516405115 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:05.488673Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:05.497154Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:05.538768Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:05.725777Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:05.794322Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:05.951100Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:08.295730Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130202929962211:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.295812Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.296028Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130202929962221:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.296064Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.353139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.380668Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.409536Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.434139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.460646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.514918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.546849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.595597Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:08.680492Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130202929963091:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.680620Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.680678Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130202929963096:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.680860Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130202929963098:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.680906Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.683957Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:08.694847Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130202929963099:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T19:00:08.779392Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130202929963151:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:09.936380Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130185750091181:2064];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:09.936443Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:10.503235Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TxUsage::WriteToTopic_Demo_27_Table >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:57:23.352846Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:57:23.449361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:57:23.457456Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:57:23.457734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:57:23.457785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:57:23.682922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:23.683074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:23.759483Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516240695271 != 1766516240695275 2025-12-23T18:57:23.772251Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:23.815042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:23.903331Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003ce4/r3tmp/tmpoB9YIg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6889, node 1 TClient is connected to server localhost:27872 2025-12-23T18:57:24.368157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:57:24.369674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:24.369727Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:24.369761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:24.370803Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:24.383893Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-12-23T18:57:36.124822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:952:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.124942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:963:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.125040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.126164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:968:2829], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.126378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:36.131285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:36.153049Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:966:2827], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-12-23T18:57:36.204037Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1019:2861] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:36.267086Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:1029:2870], status: GENERIC_ERROR, issues:
:1:20: Error: mismatched input '-' expecting '(' 2025-12-23T18:57:36.269482Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=Nzg4OWI5ZjgtMjFkYmM2ZDgtZmJiMzhhZjEtZjcwOTFlZTc=, ActorId: [1:950:2817], ActorState: ExecuteState, TraceId: 01kd695vbnbmwdqtra1tj14bgs, ReplyQueryCompileError, status: GENERIC_ERROR, issues: { position { row: 1 column: 20 } message: "mismatched input \'-\' expecting \'(\'" end_position { row: 1 column: 20 } severity: 1 }, remove tx with tx_id: REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-12-23T18:57:46.689723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:47.450295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:57:47.923867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:48.547032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:49.469487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:57:49.944367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:57:50.600010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:51.530097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-12-23T18:57:51.796069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T18:57:51.796138Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:53.323630Z node 1 :KQP_SESSION WARN: 
kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=YWU1MDBlMC1lNjI5OGYwYy05OWY2Y2UyNy00Yzc2ZTA0MA==, ActorId: [1:1051:2884], ActorState: ExecuteState, TraceId: 01kd6965dxd4143gpaeb5r3zdc, Create QueryResponse for error on request, msg: , status: GENERIC_ERROR, issues: { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "preparation problem: secret secret1 not found for alter" severity: 1 } } REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-12-23T18:58:27.933305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but ... Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715726:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:31.189945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715731:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 2025-12-23T18:58:43.789586Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=NjU4OTA1ZTItOTVhYzRhMDItOGI0MDRkOGMtZGFiOThkOQ==, ActorId: [1:3395:4629], ActorState: ExecuteState, TraceId: 01kd697wwtfgcbvrhhfara5kry, Create QueryResponse for error on request, msg: , status: GENERIC_ERROR, issues: { message: "Executing operation with object \"SECRET_ACCESS\"" severity: 1 issues { message: "preparation problem: used in access secret secret2 not found" severity: 1 } } REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Executing operation with object "SECRET_ACCESS"
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-12-23T18:58:56.024740Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=15; 2025-12-23T18:58:56.024965Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 15 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:58:56.025127Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 15 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-12-23T18:58:56.025439Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:966: SelfId: [1:3784:4845], Table: `//Root/.metadata/secrets/access` ([72057594046644480:49:1]), SessionActorId: [1:3689:4845]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3784:4845].{
: Error: Conflict with existing key., code: 2012 } 2025-12-23T18:58:56.025846Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4643: SelfId: [1:3777:4845], SessionActorId: [1:3689:4845], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:3689:4845]. 2025-12-23T18:58:56.026185Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2237: SessionId: ydb://session/3?node_id=1&id=NzZlMTM0N2UtY2I2ZjE5MjItY2YwMzdhODEtNzU2MDNkYWI=, ActorId: [1:3689:4845], ActorState: ExecuteState, TraceId: 01kd69899k80p3abws50vhksxb, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3778:4845] from: [1:3777:4845] 2025-12-23T18:58:56.026314Z node 1 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1184: ActorId: [1:3778:4845] TxId: 281474976715757. Ctx: { TraceId: 01kd69899k80p3abws50vhksxb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NzZlMTM0N2UtY2I2ZjE5MjItY2YwMzdhODEtNzU2MDNkYWI=, PoolId: default, IsStreamingQuery: 0}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-12-23T18:58:56.026655Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=NzZlMTM0N2UtY2I2ZjE5MjItY2YwMzdhODEtNzU2MDNkYWI=, ActorId: [1:3689:4845], ActorState: ExecuteState, TraceId: 01kd69899k80p3abws50vhksxb, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } 2025-12-23T18:58:56.035317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01kd69891bcbmfpce5c1p19esh" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=NzZlMTM0N2UtY2I2ZjE5MjItY2YwMzdhODEtNzU2MDNkYWI=" tx_control { tx_id: "01kd69891bcbmfpce5c1p19esh" } query { yql_text: "--!syntax_v1\nDECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-12-23T18:59:08.152248Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=NDkzMjAwOTYtOGE4MjdiOTItNDBkNDUxZS04YTYyOWUyMg==, ActorId: [1:3980:5056], ActorState: ExecuteState, TraceId: 01kd698mjsb7sqkj8rw970393k, Create QueryResponse for error on request, msg: , status: GENERIC_ERROR, issues: { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "preparation problem: secret secret1 using in access for test@test1" severity: 1 } } REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Executing operation with object "SECRET"
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-12-23T18:59:19.685297Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4289:5287], for# root@builtin, access# DescribeSchema 2025-12-23T18:59:19.685440Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4289:5287], for# root@builtin, access# DescribeSchema 2025-12-23T18:59:19.687771Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:4286:5284], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:59:19.690844Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YjUyMzYxYzAtZmJkZTNmLTE5MWJlMDQ2LTc3OWE4MTEx, ActorId: [1:4280:5279], ActorState: ExecuteState, TraceId: 01kd6990fh3xj84rec8t1xa431, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/secrets/values]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-12-23T18:59:31.241680Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (51449FAE): Could not find correct token validator 2025-12-23T18:59:31.997226Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=Yjg2YzhiZWYtMWM4NTM0ZDEtZTliMzU4ODctZjZjOGU1MDM=, ActorId: [1:4537:5470], ActorState: ExecuteState, TraceId: 01kd699bs027b24jh927rnk71q, Create QueryResponse for error on request, msg: , status: GENERIC_ERROR, issues: { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "cannot CREATE objects: Secret already exists: secret1" severity: 1 } } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-12-23T18:59:44.207183Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=1&id=YzM2OWQyZDMtMWUwYTc5YmMtYjA1MDdiNGQtODVlNDg5MmY=, ActorId: [1:4918:5752], ActorState: ExecuteState, TraceId: 01kd699qs35r2sdv8n6qr4st8z, Create QueryResponse for error on request, msg: , status: GENERIC_ERROR, issues: { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "cannot UPSERT objects: Secret already exists: secret1" severity: 1 } } REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |98.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpImmediateEffects::ManyFlushes [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_3_Query [GOOD] |98.9%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::UpdateOn [GOOD] >> KqpImmediateEffects::UpdateAfterUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] Test command err: Trying to start YDB, gRPC: 6403, MsgBus: 13754 2025-12-23T18:59:52.412418Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130132508217322:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:52.412486Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ac8/r3tmp/tmpIQZqIg/pdisk_1.dat 2025-12-23T18:59:52.666043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:52.666185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:52.693639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:52.715551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:52.764273Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:52.766291Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130132508217286:2081] 1766516392410833 != 1766516392410836 TServer::EnableGrpc on GrpcPort 6403, node 1 2025-12-23T18:59:52.853284Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:52.853313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:52.853321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:52.853415Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2025-12-23T18:59:52.925257Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13754 TClient is connected to server localhost:13754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516392788 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:53.420433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:59:53.421915Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:59:53.437783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.573556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.751204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.813428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:55.683141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130145393121048:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.683269Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.683595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130145393121058:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.683641Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.016409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.052149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.082175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.112119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.144136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.200422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.275285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.359633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.451829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130149688089234:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.451899Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.451973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130149688089239:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.452104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130149688089241:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.452152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.455755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:56.466194Z node 1 : ... eState: Disconnected -> Connecting 2025-12-23T19:00:06.471124Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9312, node 3 2025-12-23T19:00:06.519946Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:06.519967Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:06.519975Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:06.520058Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:06.619259Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:9997 TClient is connected to server localhost:9997 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516406522 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T19:00:06.932201Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:00:06.951272Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:07.019307Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:07.235005Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:07.324732Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:07.436567Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:09.538143Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130206614224795:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.538233Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.538460Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130206614224805:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.538510Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.604327Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.636367Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.661370Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.689210Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.717084Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.747824Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.803821Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.852192Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.936720Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130206614225668:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.936827Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.937077Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130206614225673:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.937079Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130206614225674:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.937149Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.941128Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:09.951311Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130206614225677:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:10.018259Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130210909193024:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:11.343501Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130193729321086:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:11.343580Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:12.088609Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 4037, MsgBus: 17649 2025-12-23T18:59:52.673646Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130134663961255:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:52.673736Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ac6/r3tmp/tmpDXtqeb/pdisk_1.dat 2025-12-23T18:59:52.918821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:52.951122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:52.951236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:53.014152Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130134663961221:2081] 1766516392672393 != 1766516392672396 2025-12-23T18:59:53.018870Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:53.025957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4037, node 1 2025-12-23T18:59:53.157775Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:53.157809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:53.157816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-12-23T18:59:53.157902Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:53.169388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17649 TClient is connected to server localhost:17649 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516393082 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:53.641779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:53.674917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:53.681306Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:59:53.810905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.956627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:54.024229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:55.620598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130147548864986:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.620716Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.621086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130147548864996:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.621136Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.002565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.043312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.078807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.108665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.138555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.176718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.205550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.277743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.379690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130151843833164:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.379769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.380065Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130151843833169:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.380154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130151843833170:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.380241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.384067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:56.400350Z node 1 : ... erSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:06.895381Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T19:00:06.960081Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.150159Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T19:00:07.235264Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.405985Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:10.074001Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130213019851261:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.074129Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.074720Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130213019851271:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.074803Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.075149Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130213019851273:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.075209Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.126913Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.161975Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.194068Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.227243Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.262329Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.300006Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.331697Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.407366Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.502793Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130213019852148:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.502917Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.503200Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130213019852153:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.503256Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130213019852154:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.503333Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.507946Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:10.520347Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130213019852157:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:10.586976Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130213019852208:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:11.288062Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130195839980241:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:11.288124Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:12.397394Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:13.035488Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2061: SelfId: [3:7587130225904754699:2534], TxId: 281474976710679, task: 1. Ctx: { CheckpointId : . TraceId : 01kd69amgheq84vqqgv6nh4cg8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=NjA5MDcwYmQtMjFhZWJlOTMtYmJkMWNlNDktM2NjZTcwNjE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } } 2025-12-23T19:00:13.035573Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [3:7587130225904754699:2534], TxId: 281474976710679, task: 1. Ctx: { CheckpointId : . TraceId : 01kd69amgheq84vqqgv6nh4cg8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=NjA5MDcwYmQtMjFhZWJlOTMtYmJkMWNlNDktM2NjZTcwNjE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } }. 2025-12-23T19:00:13.036410Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=NjA5MDcwYmQtMjFhZWJlOTMtYmJkMWNlNDktM2NjZTcwNjE=, ActorId: [3:7587130221609787122:2534], ActorState: ExecuteState, TraceId: 01kd69amgheq84vqqgv6nh4cg8, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Read request aborted" severity: 1 issues { message: "Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready)" severity: 1 } } |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table >> KqpFail::OnPrepare [GOOD] >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> TGRpcStreamingTest::SimpleEcho [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation-UseSchemaSecrets >> KqpImmediateEffects::ReplaceDuplicates [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex >> KikimrIcGateway::TestLoadExternalTable >> TGRpcStreamingTest::ReadFinish [GOOD] >> KikimrIcGateway::TestLoadTableMetadata ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ManyFlushes [GOOD] Test command err: Trying to start YDB, gRPC: 15062, MsgBus: 18714 2025-12-23T18:59:52.948704Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130134854410792:2146];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:52.949053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ac5/r3tmp/tmpBK3f8D/pdisk_1.dat 2025-12-23T18:59:53.172913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:53.206789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:53.206942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:53.266332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:53.272717Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:53.272876Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130134854410672:2081] 1766516392897749 != 1766516392897752 TServer::EnableGrpc on GrpcPort 15062, node 1 2025-12-23T18:59:53.353791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:53.353806Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:53.353820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:53.353906Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:53.363225Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18714 TClient is connected to server localhost:18714 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516393341 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:53.859823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:53.866197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:59:53.877463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:53.954246Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:54.001717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:54.156270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:54.222879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:55.948999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130147739314430:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.949152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.949616Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130147739314440:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:55.949671Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.252378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.283783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.318401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.353762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.386602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.439894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.477983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.525717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:56.593069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130152034282608:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.593145Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.593480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130152034282613:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.593514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130152034282614:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.593759Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:56.597183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... ate: Disconnected -> Connecting 2025-12-23T19:00:06.226331Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20852, node 3 2025-12-23T19:00:06.291571Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:06.291603Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:06.291610Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:06.291696Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:06.315314Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:64275 TClient is connected to server localhost:64275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516406270 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:06.755797Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:06.768129Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:06.840449Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:06.986733Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:07.056579Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:07.106274Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:09.855204Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130207352978705:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.855321Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.855579Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130207352978714:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.855638Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.927501Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.963729Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.995426Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.045718Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.077603Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.112296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.154438Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.205227Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.289947Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130211647946882:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.290048Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.290395Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130211647946887:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.290445Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130211647946888:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.290477Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.293676Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:10.307001Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130211647946891:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:10.386693Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130211647946942:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:11.094713Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130194468074985:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:11.094810Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:12.233682Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets-UseAuthToken >> KikimrIcGateway::TestCreateExternalTable >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> KikimrIcGateway::TestDropExternalTable >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] >> KikimrIcGateway::TestListPath |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> TxUsage::WriteToTopic_Demo_11_Query [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2025-12-23T19:00:12.949437Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130218892603969:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:12.950366Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00466d/r3tmp/tmpT2xYiO/pdisk_1.dat 2025-12-23T19:00:13.265509Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2025-12-23T19:00:13.290242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:13.290346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:13.320536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:13.423294Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:13.465566Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:00:13.471268Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d0b8e3a0880] stream accepted Name# Session ok# true peer# ipv6:[::1]:56922 2025-12-23T19:00:13.471680Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d0b8e3a0880] facade attach Name# Session actor# [1:7587130223187571990:2462] peer# ipv6:[::1]:56922 2025-12-23T19:00:13.471730Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d0b8e3a0880] facade read Name# Session peer# ipv6:[::1]:56922 2025-12-23T19:00:13.471907Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d0b8e3a0880] read finished Name# Session ok# true data# peer# ipv6:[::1]:56922 2025-12-23T19:00:13.472086Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:142: Received TEvReadFinished, success = 1 2025-12-23T19:00:13.472127Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7d0b8e3a0880] facade write Name# Session data# peer# ipv6:[::1]:56922 2025-12-23T19:00:13.472381Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d0b8e3a0880] facade finish Name# Session peer# ipv6:[::1]:56922 grpc status# (0) message# 2025-12-23T19:00:13.472384Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d0b8e3a0880] write finished Name# Session ok# true peer# ipv6:[::1]:56922 2025-12-23T19:00:13.472648Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d0b8e3a0880] stream done notification Name# Session ok# true peer# ipv6:[::1]:56922 2025-12-23T19:00:13.472791Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d0b8e3a0880] stream finished Name# Session ok# true peer# ipv6:[::1]:56922 grpc status# (0) message# 2025-12-23T19:00:13.472955Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d0b8e3a0880] deregistering request Name# Session peer# ipv6:[::1]:56922 (finish done) |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2025-12-23T19:00:12.949726Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130220053231178:2074];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:12.949866Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:00:13.004295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # 
/home/runner/.ya/build/build_root/8ptz/00466f/r3tmp/tmpUAxM4E/pdisk_1.dat 2025-12-23T19:00:13.312847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:13.312929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:13.322719Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:13.347219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:13.350217Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:13.471050Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d1c608b0480] stream accepted Name# Session ok# true peer# ipv6:[::1]:41658 2025-12-23T19:00:13.471386Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d1c608b0480] facade attach Name# Session actor# [1:7587130224348199173:2461] peer# ipv6:[::1]:41658 2025-12-23T19:00:13.471431Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d1c608b0480] facade read Name# Session peer# ipv6:[::1]:41658 2025-12-23T19:00:13.471506Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d1c608b0480] facade finish Name# Session peer# ipv6:[::1]:41658 grpc status# (0) message# 2025-12-23T19:00:13.471786Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d1c608b0480] read finished Name# Session ok# false data# peer# ipv6:[::1]:41658 2025-12-23T19:00:13.471822Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:265: Received TEvReadFinished, success = 0 2025-12-23T19:00:13.471831Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d1c608b0480] stream finished Name# Session ok# true peer# ipv6:[::1]:41658 grpc status# (0) message# 2025-12-23T19:00:13.471914Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d1c608b0480] deregistering request Name# Session peer# ipv6:[::1]:41658 (finish done) 2025-12-23T19:00:13.472005Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d1c608b0480] stream done notification Name# Session ok# true peer# ipv6:[::1]:41658 2025-12-23T19:00:13.498206Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> KqpEffects::InsertAbort_Select_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates-UseSink >> TxUsage::The_TxWriteInfo_Is_Deleted_After_The_Immediate_Transaction |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest 
|98.9%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 19356, MsgBus: 10357 2025-12-23T18:59:56.032642Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130149627140577:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:56.033788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ac2/r3tmp/tmp8eLsqL/pdisk_1.dat 2025-12-23T18:59:56.281822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:56.387635Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:56.393711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:56.393828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:56.396240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19356, node 1 2025-12-23T18:59:56.500583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:56.500608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:56.500616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:56.500732Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:56.503866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10357 TClient is connected to server localhost:10357 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516396442 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:56.995320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:57.009459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:57.035424Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:57.165886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:57.309166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:57.377098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:59.063335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130162512044309:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.063508Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.063831Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130162512044319:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.063907Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.454880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.524375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.556990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.592495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.639278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.674217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.710775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.780047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.855809Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130162512045192:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.855900Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.856123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130162512045198:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.856125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130162512045197:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.856180Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.859692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:59.869975Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587130162512045201:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Tra ... true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516409616 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T19:00:10.008136Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:00:10.026688Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:10.103593Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:10.241698Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:10.311212Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:10.475859Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:12.891233Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130220718076800:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.891324Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.891599Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130220718076809:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.891655Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.951309Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:12.990635Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:13.026667Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:13.060470Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:13.100304Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:13.135313Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:13.172080Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:13.215513Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:13.309789Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130225013044979:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:13.309885Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130225013044984:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:13.309890Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:13.310072Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130225013044986:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:13.310134Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:13.313645Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:13.324873Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130225013044987:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T19:00:13.389776Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130225013045039:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:14.441368Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130207833173068:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:14.441470Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:14.861641Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:15.347017Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2061: SelfId: [3:7587130233602980212:2531], TxId: 281474976715678, task: 1. Ctx: { CheckpointId : . TraceId : 01kd69apqv8g0p2980hm9fef1x. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZjlhMWE1YzYtNjA4ODM5MjYtODFjZjEwYjAtZGU1MjgxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } } 2025-12-23T19:00:15.349736Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [3:7587130233602980212:2531], TxId: 281474976715678, task: 1. Ctx: { CheckpointId : . TraceId : 01kd69apqv8g0p2980hm9fef1x. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZjlhMWE1YzYtNjA4ODM5MjYtODFjZjEwYjAtZGU1MjgxZjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } }. 2025-12-23T19:00:15.350465Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=ZjlhMWE1YzYtNjA4ODM5MjYtODFjZjEwYjAtZGU1MjgxZjI=, ActorId: [3:7587130229308012642:2531], ActorState: ExecuteState, TraceId: 01kd69apqv8g0p2980hm9fef1x, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Read request aborted" severity: 1 issues { message: "Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready)" severity: 1 } } |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink [GOOD] >> TMonitoringTests::ValidActorId >> BasicUsage::AlterTopicWithSharedConsumer_DisableDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_SetDeleteDeadLetterPolicy >> TMonitoringTests::InvalidActorId >> KqpQueryPerf::Update-QueryService-UseSink |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] >> TMonitoringTests::InvalidActorId [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 30479, MsgBus: 27905 2025-12-23T18:59:58.112586Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130160055732685:2150];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:58.112919Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005abf/r3tmp/tmpR4lnQz/pdisk_1.dat 2025-12-23T18:59:58.330817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:58.356034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:58.356184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:58.379829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:58.432108Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30479, node 1 2025-12-23T18:59:58.532397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:58.532424Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:58.532430Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:58.532515Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:58.560212Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27905 TClient is connected to server localhost:27905 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516398472 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:58.953736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:58.978943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:59.116675Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:59.120905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:59.262206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:59.321825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:01.168436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130172940636315:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.168577Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.168935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130172940636325:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.169012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.532757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.560469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.587631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.614987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.639828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.670928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.723201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.766076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:01.837038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130172940637196:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.837110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130172940637201:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.837150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.837390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130172940637204:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.837447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:01.840786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:01.849790Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587130172940637203:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Tra ... State: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516411359 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:11.755118Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:11.763522Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:11.853550Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:11.992803Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:12.051311Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:12.220896Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:14.534662Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130229931856503:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.534799Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.535368Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130229931856513:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.535429Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.535606Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130229931856515:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.535654Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.591122Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.624250Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.653666Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.682070Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.711504Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.741609Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.774288Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.821796Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.893547Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130229931857387:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.893635Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.893645Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130229931857392:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.893848Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130229931857394:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.893893Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.897363Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:14.906846Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130229931857395:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:14.984352Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130229931857447:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:16.205739Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130217046952784:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:16.205826Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:16.740137Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:17.088579Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2025-12-23T19:00:17.308727Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=Yzc1ODcwM2UtN2I0NjlkODAtZmFmYmU5NTEtMjdlYmI2MzU=, ActorId: [3:7587130238521792302:2521], ActorState: ExecuteState, TraceId: 01kd69arrhe8z8mqndds6xh19b, Create QueryResponse for error on request, msg: , status: ABORTED, issues: { message: "Transaction locks invalidated. 
Table: `/Root/TestTable`" issue_code: 2001 severity: 1 } >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalDataSource >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable >> LocalPartition::Restarts [GOOD] >> LocalPartition::WithoutPartitionWithRestart |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> Donor::ContinueWithFaultyDonor >> Donor::SkipBadDonor |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::BridgeTimeDifference Test command err: 2025-12-23T18:53:57.694009Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587128611017374564:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:53:57.694054Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c02/r3tmp/tmpkDHuUG/pdisk_1.dat 2025-12-23T18:53:57.952176Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:53:57.983241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:53:57.983378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:53:58.025466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:53:58.039420Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:53:58.040493Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587128611017374526:2081] 1766516037689716 != 1766516037689719 TServer::EnableGrpc on GrpcPort 25127, node 1 2025-12-23T18:53:58.202865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:53:58.238780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:53:58.238871Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:53:58.238912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:53:58.238995Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:32726 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516038084 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:53:58.684768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:53:58.701500Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; issue_log { id: "0" status: GREY message: "Database does not exist" } location { id: 1 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2025-12-23T18:54:05.950058Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:05.950382Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:05.961687Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:05.963042Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:05.965219Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:496:2402], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:05.965461Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:05.965634Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:05.968365Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:489:2166], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:05.968726Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:05.968978Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:06.245293Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:06.332811Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:06.332949Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:06.333913Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:06.334005Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:06.378215Z node 2 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-12-23T18:54:06.379361Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:06.379763Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:54:06.449359Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:54:06.473726Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002c02/r3tmp/tmpmuZpDN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2677, node 2 TClient is connected to server localhost:17465 2025-12-23T18:54:07.280231Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:54:07.280300Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:54:07.280335Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:54:07.280729Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD location { id: 2 host: "::1" port: 12001 pile { name: "pile0" } } ... 
waiting for SysViewsRoster update finished 2025-12-23T18:54:13.135257Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:13.135535Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:54:13.144334Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:13.145278Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:54:13.146552Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:498:2402], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:13.146918Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:13.147054Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:13.148775Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:491:2167], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:54:13.148993Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:54:13.149258Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:54:13.429992Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:54:13.513277Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:54:13.513395Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:54:13.513781Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Discon ... incorrect path status: LookupError; 2025-12-23T19:00:13.005749Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:13.005810Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:13.005992Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:13.006043Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:13.006092Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:13.006142Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:13.006194Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:13.826330Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:13.826424Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:13.826481Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:13.826535Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:13.826705Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:13.826786Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:13.826836Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:13.826888Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:14.529860Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:14.529947Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:14.530029Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:14.530079Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:14.530260Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:14.530642Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:14.530695Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:14.530765Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:15.181089Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:15.181183Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:15.181236Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:15.181346Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:15.181621Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:15.181678Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:15.181726Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:15.181777Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:15.881487Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:15.881559Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:15.881757Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:15.881867Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:15.881914Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:15.881947Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:15.881982Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:15.882016Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; ================================================================= ==1534811==ERROR: AddressSanitizer: heap-use-after-free on address 0x7b878ee6cca8 at pc 0x000022dc0d2e bp 0x7b575d263390 sp 0x7b575d263388 READ of size 1 at 0x7b878ee6cca8 thread T510 (ydb-core-health) VERIFY failed (2025-12-23T19:00:16.561682Z): must not release when count is not zero: 1 library/cpp/messagebus/use_count_checker.cpp:11 ~TUseCountChecker(): requirement count == 0 failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+888 (0x1B3207A8) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x1B30E4DB) TUseCountChecker::~TUseCountChecker()+310 (0x22DD8DD6) NKikimr::NMsgBusProxy::TMessageBusServer::~TMessageBusServer()+556 (0x3E55F2EC) NKikimr::NMsgBusProxy::TMessageBusServer::~TMessageBusServer()+14 (0x3E55F3AE) NKikimr::Tests::TServer::TServer(TIntrusiveConstPtr>, bool)+2382 (0x3E40595E) NKikimr::Tests::TServer::TServer(NKikimr::Tests::TServerSettings const&, bool)+92 (0x3E4084EC) NKikimr::NTestSuiteTHealthCheckTest::TTestCaseBridgeTimeDifference::Execute_(NUnitTest::TTestContext&)+2294 (0x1AD87EB6) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AECF3C8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B7DC5BA) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B7AA628) NKikimr::NTestSuiteTHealthCheckTest::TCurrentTest::Execute()+1229 (0x1AECE4CD) NUnitTest::TTestFactory::Execute()+2176 (0x1B7ABDE0) NUnitTest::RunMain(int, char**)+5805 (0x1B7D641D) ??+0 (0x7F578A04ED90) __libc_start_main+128 (0x7F578A04EE40) _start+41 (0x1866C029) |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> IcbAsActorTests::TestHttpPostReaction >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> KqpEffects::DeleteWithIndex-UseSecondaryIndex-UseSink [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] |98.9%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] |98.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> test_select.py::TestDML::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TxUsage::WriteToTopic_Demo_45_Table [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> KikimrIcGateway::TestSecretsExistingValidation-UseSchemaSecrets [GOOD] >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeleteWithIndex-UseSecondaryIndex-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20747, MsgBus: 4296 2025-12-23T18:59:56.167719Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130149682474743:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:56.168599Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ac1/r3tmp/tmpu1Jt06/pdisk_1.dat 2025-12-23T18:59:56.441705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:56.441899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:56.443915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:56.508815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:56.512496Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20747, node 1 2025-12-23T18:59:56.578197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:56.578224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:56.578235Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:56.578347Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:56.760624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4296 TClient is connected to server localhost:4296 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516396617 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:57.041173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:57.061412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:57.169153Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T18:59:57.204871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:57.357165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T18:59:57.415806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:59.120657Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130162567378444:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.120762Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.121026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130162567378454:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.121052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.427050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.460557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.489556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.518219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.545948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.584794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.653238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.702850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:59.766215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130162567379323:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.766291Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.766355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130162567379328:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.766766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130162567379330:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.766835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:59.770356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:59:59.781100Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587130162567379331:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transa ... WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:12.297038Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:12.312640Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26175 TClient is connected to server localhost:26175 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516412304 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:12.699896Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:12.704100Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:00:12.716859Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:12.798188Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:12.957222Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:13.035274Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:13.223531Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:15.517714Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130231971756159:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:15.517868Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:15.518188Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130231971756169:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:15.518236Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:15.581501Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:15.608032Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:15.638062Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:15.664091Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:15.696438Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:15.728847Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:15.760233Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:15.808222Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:15.916062Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130231971757043:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:15.916178Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:15.916540Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130231971757048:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:15.916579Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130231971757049:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:15.916621Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:15.920556Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:15.933022Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130231971757052:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T19:00:16.008581Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130236266724399:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:17.122864Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130219086852448:2076];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:17.122950Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:18.256154Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:18.379877Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 >> TxUsage::WriteToTopic_Demo_45_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 7339, MsgBus: 61927 2025-12-23T19:00:08.460098Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130202097563441:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:08.460167Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ab7/r3tmp/tmpg1lyq6/pdisk_1.dat 2025-12-23T19:00:08.675742Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:08.700859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:08.700970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:08.739852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:08.775405Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:08.776308Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7587130202097563414:2081] 1766516408458732 != 1766516408458735 TServer::EnableGrpc on GrpcPort 7339, node 1 2025-12-23T19:00:08.832400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:08.832422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:08.832431Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:08.832517Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:08.950533Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61927 TClient is connected to server localhost:61927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516408797 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:09.270833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:09.290291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:09.430415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:09.494913Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:09.572213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:09.642040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:11.411757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130214982467170:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:11.411862Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:11.412147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130214982467180:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:11.412219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:11.776801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:11.804814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:11.834064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:11.869209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:11.900534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:11.964492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:11.997421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:12.044411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:12.111068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130219277435348:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.111207Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.111232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130219277435353:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.111499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130219277435355:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.111545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.114997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:12.125069Z node 1 : ... #72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:15.156225Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:15.176885Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:15.176908Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:15.176914Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:15.176992Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:15.265100Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4257 TClient is connected to server localhost:4257 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516415174 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:15.542178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:15.560272Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:15.615245Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:15.749047Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:15.812326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:16.034622Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:18.328927Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130245650596975:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:18.329007Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:18.329454Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130245650596985:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:18.329536Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:18.393706Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:18.427320Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:18.457744Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:18.489245Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:18.525921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:18.564898Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:18.599627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:18.645357Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:18.725743Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130245650597851:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:18.725831Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:18.725919Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130245650597856:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:18.726187Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587130245650597858:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:18.726246Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:18.729297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:18.739679Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587130245650597859:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:18.816029Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130245650597911:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:20.028708Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587130232765693246:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:20.028798Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:20.259512Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> KikimrIcGateway::TestDropResourcePool >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> KqpEffects::EffectWithSelect+UseSink [GOOD] >> Donor::ContinueWithFaultyDonor [GOOD] >> Donor::SkipBadDonor [GOOD] |98.9%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] Test command err: Trying to start YDB, gRPC: 27662, MsgBus: 5794 2025-12-23T19:00:16.138613Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130236390934381:2175];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:16.138724Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0044fe/r3tmp/tmpnTeLrJ/pdisk_1.dat 2025-12-23T19:00:16.427218Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:16.463448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:16.463579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:16.525081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:16.538593Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130236390934226:2081] 1766516416116941 != 1766516416116944 2025-12-23T19:00:16.556206Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27662, node 1 2025-12-23T19:00:16.619002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:00:16.819599Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:16.819633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:16.819645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:16.819768Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:17.147242Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:5794 TClient is connected to server localhost:5794 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516416609 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:17.696986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:17.707131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:17.718169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:17.894215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:18.066731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:18.139415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:19.695318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130249275837984:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.695437Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.695727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130249275837994:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.695771Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.063980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.100485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.134302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.171704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.220062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.251653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.314311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.368228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.468293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130253570806172:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.468373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.468577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130253570806175:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.468635Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.468766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130253570806179:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.473503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:20.484628Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587130253570806181:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:20.571792Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587130253570806232:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:21.138235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587130236390934381:2175];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:21.138286Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest |98.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SkipBadDonor [GOOD] Test command err: RandomSeed# 4682426281433396299 2025-12-23T19:00:22.933705Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T19:00:22.935829Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2638432900902205856] 2025-12-23T19:00:22.955570Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ContinueWithFaultyDonor [GOOD] Test command err: RandomSeed# 14470148399618985776 2025-12-23T19:00:22.936330Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-12-23T19:00:22.938357Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5624576258656081354] 2025-12-23T19:00:22.958654Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2025-12-23T18:59:45.030626Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:59:45.065461Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:59:45.065733Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:59:45.073832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:59:45.074160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:59:45.074440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:59:45.074607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:59:45.074730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:59:45.074885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:59:45.075003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:59:45.075151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:59:45.075278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:59:45.075394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:59:45.075503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:59:45.075630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:59:45.075747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:59:45.108221Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:59:45.108465Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:59:45.108528Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:59:45.108734Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:59:45.108930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:59:45.109025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:59:45.109093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:59:45.109210Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:59:45.109282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:59:45.109333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:59:45.109369Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:59:45.109583Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:59:45.109665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:59:45.109711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:59:45.109751Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:59:45.109871Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:59:45.109940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:59:45.109986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:59:45.110020Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:59:45.110081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:59:45.110122Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:59:45.110165Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:59:45.110245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:59:45.110290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:59:45.110333Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:59:45.110606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:59:45.110691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:59:45.110728Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:59:45.110898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:59:45.110944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:59:45.110980Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:59:45.111054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:59:45.111104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:59:45.111140Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:59:45.111193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:59:45.111239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:59:45.111276Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:59:45.111529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:59:45.111589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2025-12-23T19:00:20.880490Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1070; 2025-12-23T19:00:20.880547Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=71775; 2025-12-23T19:00:20.880593Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=71940; 2025-12-23T19:00:20.880666Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=15; 2025-12-23T19:00:20.881026Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=309; 2025-12-23T19:00:20.881074Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=72855; 2025-12-23T19:00:20.881247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=116; 2025-12-23T19:00:20.881373Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=76; 2025-12-23T19:00:20.881779Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=357; 2025-12-23T19:00:20.882117Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=286; 2025-12-23T19:00:20.898643Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=16442; 2025-12-23T19:00:20.914887Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16080; 2025-12-23T19:00:20.915018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=17; 2025-12-23T19:00:20.915093Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-12-23T19:00:20.915137Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-12-23T19:00:20.915240Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2025-12-23T19:00:20.915288Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-12-23T19:00:20.915378Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2025-12-23T19:00:20.915421Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-12-23T19:00:20.915488Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2025-12-23T19:00:20.915583Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=59; 2025-12-23T19:00:20.915665Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=48; 2025-12-23T19:00:20.915702Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=116876; 2025-12-23T19:00:20.915846Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T19:00:20.915964Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T19:00:20.916022Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T19:00:20.916090Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T19:00:20.916136Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T19:00:20.916369Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T19:00:20.916439Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T19:00:20.916479Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T19:00:20.916527Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T19:00:20.916607Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766516087435;tx_id=18446744073709551615;;current_snapshot_ts=1766516386500; 2025-12-23T19:00:20.916658Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T19:00:20.916703Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T19:00:20.916742Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T19:00:20.916836Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T19:00:20.917037Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.032000s; 2025-12-23T19:00:20.921520Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T19:00:20.921876Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T19:00:20.921937Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T19:00:20.922017Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T19:00:20.922070Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T19:00:20.922140Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766516087435;tx_id=18446744073709551615;;current_snapshot_ts=1766516386500; 2025-12-23T19:00:20.922185Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T19:00:20.922234Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T19:00:20.922274Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T19:00:20.922352Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2025-12-23T19:00:20.922403Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T19:00:20.923411Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.146000s; 2025-12-23T19:00:20.923462Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3097:5091];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] |98.9%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::EffectWithSelect+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63731, MsgBus: 27582 2025-12-23T18:59:55.107914Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130145133480123:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:59:55.107982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ac3/r3tmp/tmp0Avigq/pdisk_1.dat 2025-12-23T18:59:55.332233Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:59:55.362461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:59:55.362579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:59:55.366383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:59:55.412679Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:59:55.434654Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130145133480095:2081] 1766516395106637 != 1766516395106640 TServer::EnableGrpc on GrpcPort 63731, node 1 2025-12-23T18:59:55.494052Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:59:55.494088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:59:55.494100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:59:55.494186Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:59:55.610807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:27582 TClient is connected to server localhost:27582 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516395448 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:59:56.001677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:59:56.009588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T18:59:56.022728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:56.116235Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:59:56.187957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:56.336229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:56.401732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T18:59:58.315330Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130158018383859:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.315463Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.315771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130158018383869:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.315812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.699549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.729271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.756293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.782019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.807130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.840325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.871903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.915945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:59:58.988520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130158018384734:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.988634Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.988748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130158018384739:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.988784Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130158018384741:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.988821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:59:58.992356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... nableGrpc on GrpcPort 5600, node 3 2025-12-23T19:00:11.275543Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:11.275566Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:11.275573Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:11.275646Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:11.287714Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10381 TClient is connected to server localhost:10381 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516411289 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:11.650906Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:11.656187Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:00:11.673836Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:11.748471Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T19:00:11.915604Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:11.985902Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:12.152630Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:14.507298Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130228921930627:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.507429Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.507754Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130228921930637:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.507814Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.581750Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.610529Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.641927Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.675213Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.705953Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.734308Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.764787Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.810669Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:14.880726Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130228921931507:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.880832Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.880839Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130228921931512:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.880994Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130228921931514:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.881042Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:14.884261Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:14.894624Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130228921931515:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T19:00:14.970677Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130228921931567:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:16.119654Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130216037026908:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:16.119733Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:16.644355Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KikimrIcGateway::TestDropTable [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> TxUsage::WriteToTopic_Demo_16_Query [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13124, MsgBus: 14043 2025-12-23T19:00:03.732762Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130181253476029:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:03.732845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005abb/r3tmp/tmpnVMb54/pdisk_1.dat 2025-12-23T19:00:03.965605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:03.965781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:03.992368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:04.070723Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:04.073396Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:04.074726Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130181253475988:2081] 1766516403731111 != 1766516403731114 TServer::EnableGrpc on GrpcPort 13124, node 1 2025-12-23T19:00:04.134839Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:04.134874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:04.134888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:04.134999Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:04.301167Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:14043 TClient is connected to server localhost:14043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516404086 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:04.630486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:04.645079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:04.739907Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:04.747324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:04.899578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:04.966363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:06.784042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130194138379746:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:06.784183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:06.784690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130194138379756:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:06.784761Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.144892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.186977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.225406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.259536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.299843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.336494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.383432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.440156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.535284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130198433347921:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.535356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.535435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130198433347926:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.535726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130198433347928:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.535772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:07.539794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:07.565791Z node 1 ... } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516417645 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:18.161146Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:18.168781Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:18.188814Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T19:00:18.268050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:18.428222Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:18.428725Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T19:00:18.491061Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:20.852446Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130255846646775:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.852560Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.852908Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130255846646785:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.852950Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.923966Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.965720Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.999065Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:21.032532Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:21.063522Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:21.095952Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:21.133944Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:21.211173Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:21.294843Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130260141614952:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:21.294986Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:21.295306Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130260141614958:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:21.295359Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130260141614957:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:21.295483Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:21.299545Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:21.313098Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130260141614961:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:21.410802Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130260141615012:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:22.407477Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130242961743081:2085];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:22.407554Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:23.253747Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:23.879791Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:731: SelfId: [3:7587130268731550031:2550], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01kd69ayrx2r3hvya2qfpt811g. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=YWU2Nzc2MzgtMjNmZGIzODctNWQwMTNjZTMtNjFkMDMyNmY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-12-23T19:00:23.880153Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1261: SelfId: [3:7587130268731550032:2551], TxId: 281474976710677, task: 2. Ctx: { CheckpointId : . TraceId : 01kd69ayrx2r3hvya2qfpt811g. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=YWU2Nzc2MzgtMjNmZGIzODctNWQwMTNjZTMtNjFkMDMyNmY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7587130268731550028:2520], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-12-23T19:00:23.880664Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:3047: SessionId: ydb://session/3?node_id=3&id=YWU2Nzc2MzgtMjNmZGIzODctNWQwMTNjZTMtNjFkMDMyNmY=, ActorId: [3:7587130268731549868:2520], ActorState: ExecuteState, TraceId: 01kd69ayrx2r3hvya2qfpt811g, Create QueryResponse for error on request, msg: , status: PRECONDITION_FAILED, issues: { message: "Duplicated keys found." issue_code: 2012 severity: 1 } |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63611, MsgBus: 11259 2025-12-23T19:00:18.985457Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130246458177943:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:18.985530Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003fd7/r3tmp/tmpDYjkgq/pdisk_1.dat 2025-12-23T19:00:19.216884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:19.216962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:19.217548Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:19.246837Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:19.250162Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130246458177917:2081] 1766516418983984 != 1766516418983987 2025-12-23T19:00:19.262568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63611, node 1 2025-12-23T19:00:19.341156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:19.341181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:19.341192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:19.341283Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:19.498799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:11259 TClient is connected to server localhost:11259 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516419311 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:19.818374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:19.832816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:19.954080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:20.002873Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:20.105268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:20.179399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:22.064141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130263638048969:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:22.064251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:22.064594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130263638048979:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:22.064661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:22.371756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:22.401673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:22.433605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:22.463618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:22.490435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:22.523504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:22.566262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:22.611540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:22.685398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130263638049852:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:22.685454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:22.685555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130263638049857:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:22.685610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130263638049858:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:22.685664Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:22.689576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:22.699632Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587130263638049861:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:22.780943Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587130263638049912:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:23.987031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587130246458177943:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:23.987153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets-UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets-UseAuthToken >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> TxUsage::WriteToTopic_Demo_18_RestartNo_Table >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestAlterResourcePool >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> KikimrIcGateway::TestDropResourcePool [GOOD] >> KikimrIcGateway::TestCreateStreamingQuery >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Query [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types12-all_types12-index12---] >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] |98.9%| [TA] $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Table >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11336, MsgBus: 2771 2025-12-23T19:00:05.758026Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130187928520985:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:05.758125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ab8/r3tmp/tmp1xGBKo/pdisk_1.dat 2025-12-23T19:00:05.984376Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:05.987805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:05.987919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:06.032210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:06.079915Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:06.081597Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130187928520939:2081] 1766516405756298 != 1766516405756301 TServer::EnableGrpc on GrpcPort 11336, node 1 2025-12-23T19:00:06.147845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:06.147877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:06.147888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:06.147998Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:06.284247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:2771 TClient is connected to server localhost:2771 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516406102 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:06.684086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:06.695431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:06.768698Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:06.844552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T19:00:06.980393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:07.056655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:08.908466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130200813424696:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.908567Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.908853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130200813424706:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:08.908893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.293590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.327847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.356551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.383947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.412075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.458006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.489404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.529576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:09.607305Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130205108392874:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.607352Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.607405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130205108392879:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.607543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130205108392881:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.607564Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:09.610846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:09.620626Z node 1 :K ... ate: Disconnected -> Connecting 2025-12-23T19:00:20.248485Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11782, node 3 2025-12-23T19:00:20.319970Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:20.319996Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:20.320004Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:20.320085Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:20.333763Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24222 TClient is connected to server localhost:24222 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516420291 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:20.864176Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:20.880371Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:20.941973Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:21.087643Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:21.135474Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:21.164466Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:23.976190Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130267962210594:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:23.976267Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:23.976681Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130267962210604:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:23.976727Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:24.061316Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:24.101828Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:24.140532Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:24.173275Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:24.207335Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:24.276079Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:24.310257Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:24.357820Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:24.429446Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130272257178770:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:24.429511Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130272257178775:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:24.429532Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:24.429682Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130272257178778:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:24.429713Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:24.433035Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:24.442879Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130272257178777:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-12-23T19:00:24.546492Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130272257178830:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:25.123368Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130255077306869:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:25.123462Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:26.372976Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 21124, MsgBus: 61399 2025-12-23T19:00:02.269049Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130177479652459:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:02.269131Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005abc/r3tmp/tmpGbqgcB/pdisk_1.dat 2025-12-23T19:00:02.508870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:02.508957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:02.549275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:02.557671Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:02.602594Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130177479652412:2081] 1766516402265350 != 1766516402265353 2025-12-23T19:00:02.619667Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21124, node 1 2025-12-23T19:00:02.664161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:02.664206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:02.664217Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2025-12-23T19:00:02.664366Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:61399 2025-12-23T19:00:02.826243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:61399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516402623 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:03.144947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:03.151995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:03.158170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:03.277179Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:03.297379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:03.455413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:03.523708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:05.412585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130190364556180:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:05.412835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:05.413317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130190364556190:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:05.413375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:05.752901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:05.782371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:05.813822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:05.843553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:05.879258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:05.922123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:05.960354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:06.006098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:06.079703Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130194659524357:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:06.079796Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130194659524362:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:06.079859Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:06.080286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130194659524364:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:06.080340Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:06.083575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... om file: (empty maybe) 2025-12-23T19:00:16.203441Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:16.203530Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17315 TClient is connected to server localhost:17315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516416182 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:16.645164Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:16.661617Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:16.723747Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:16.894644Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:16.966873Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:17.096922Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:19.639766Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130247522069925:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.639862Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.640116Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130247522069935:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.640162Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.714698Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.751213Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.779260Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.804023Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.828111Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.859852Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.894936Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.947761Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.035103Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130251817038096:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.035206Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.035414Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130251817038101:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.035460Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130251817038102:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.035505Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.040316Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:20.053079Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130251817038105:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:20.109675Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130251817038156:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:21.012180Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130234637166226:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:21.012256Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:21.847874Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:21.886594Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:21.926056Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query >> BasicUsage::AlterTopicWithSharedConsumer_SetDeleteDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_SetMoveDeadLetterPolicy >> KikimrIcGateway::TestAlterResourcePool [GOOD] >> KikimrIcGateway::TestAlterStreamingQuery |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> DataShardTxOrder::RandomPoints_DelayData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 18140, MsgBus: 25873 ... 
waiting for SysViewsRoster update finished 2025-12-23T19:00:06.768374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:00:06.878774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:00:06.890044Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:00:06.890411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:00:06.890494Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:00:07.172282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:07.172451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:07.255532Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516404115094 != 1766516404115098 2025-12-23T19:00:07.272548Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:07.324039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:07.504813Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005aba/r3tmp/tmp66zzwi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18140, node 1 2025-12-23T19:00:07.871391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:07.871458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:07.871494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:07.872146Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:07.884804Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:25873 TClient is connected to server localhost:25873 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 500 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T19:00:08.230230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:08.252965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:08.529072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:08.889674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:09.156558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:10.036704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1901:3510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.037158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.038143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3529], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.038277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:10.073765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.301898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.551626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:10.834272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:11.075529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:11.328042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:11.594186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:11.888317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:12.239722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2792:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.239958Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.240426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.240526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:12.240593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2799:4182], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't hav ... ableGrpc on GrpcPort 10880, node 3 2025-12-23T19:00:23.510882Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:23.510902Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:23.510909Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:23.510989Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:23.545341Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:24253 TClient is connected to server localhost:24253 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516423476 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:24.056104Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:24.064029Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:24.072357Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T19:00:24.139241Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:24.281704Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:24.317029Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:24.343267Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:26.877055Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130278701178186:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:26.877152Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:26.877399Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130278701178195:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:26.877452Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:26.956917Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:26.986982Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:27.016169Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:27.046769Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:27.081008Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:27.153610Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:27.185905Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:27.231056Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:27.298175Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130282996146366:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:27.298263Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:27.298354Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130282996146371:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:27.298425Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587130282996146373:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:27.298471Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:27.301938Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:27.314644Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587130282996146375:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:27.416293Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587130282996146428:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:28.293178Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587130265816274486:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:28.293285Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:29.075834Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestDropStreamingQuery >> TxUsage::WriteToTopic_Demo_45_Query [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2025-12-23T18:58:25.690562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T18:58:25.690620Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:25.692105Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvBoot 2025-12-23T18:58:25.704320Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvRestored 2025-12-23T18:58:25.704582Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:134:2156] 2025-12-23T18:58:25.704791Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T18:58:25.742003Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:134:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T18:58:25.747794Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T18:58:25.747863Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-12-23T18:58:25.749152Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-12-23T18:58:25.749230Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2025-12-23T18:58:25.749301Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2025-12-23T18:58:25.749612Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T18:58:25.749670Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T18:58:25.749729Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:206:2156] in generation 2 2025-12-23T18:58:25.821082Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T18:58:25.852160Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2025-12-23T18:58:25.852356Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T18:58:25.852466Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:223:2219] 2025-12-23T18:58:25.852519Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2025-12-23T18:58:25.852556Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-12-23T18:58:25.852617Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:25.852787Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:25.852853Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:25.853216Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2025-12-23T18:58:25.853343Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-12-23T18:58:25.853483Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:25.853544Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-12-23T18:58:25.853581Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2025-12-23T18:58:25.853621Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2025-12-23T18:58:25.853657Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2025-12-23T18:58:25.853690Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2025-12-23T18:58:25.853734Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T18:58:25.853826Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:217:2216], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-12-23T18:58:25.853865Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:25.853909Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:215:2215], serverId# [1:217:2216], sessionId# [0:0:0] 2025-12-23T18:58:25.861095Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:134:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-12-23T18:58:25.861164Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T18:58:25.861289Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2025-12-23T18:58:25.861481Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-12-23T18:58:25.861525Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-12-23T18:58:25.861589Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2025-12-23T18:58:25.861643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-12-23T18:58:25.861683Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-12-23T18:58:25.861719Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-12-23T18:58:25.861754Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:25.862055Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-12-23T18:58:25.862119Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-12-23T18:58:25.862197Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2025-12-23T18:58:25.862238Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:25.862282Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2025-12-23T18:58:25.862313Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-12-23T18:58:25.862353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-12-23T18:58:25.862389Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:25.862415Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-12-23T18:58:25.874494Z node 1 :TX_DATASHARD DEBUG: 
datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2025-12-23T18:58:25.874571Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-12-23T18:58:25.874606Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-12-23T18:58:25.874649Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-12-23T18:58:25.874722Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2025-12-23T18:58:25.875324Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:229:2225], Recipient [1:134:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:25.875389Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T18:58:25.875464Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:228:2224], serverId# [1:229:2225], sessionId# [0:0:0] 2025-12-23T18:58:25.875619Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:134:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-12-23T18:58:25.875654Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-12-23T18:58:25.875804Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-12-23T18:58:25.875861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2025-12-23T18:58:25.875917Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-12-23T18:58:25.875965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-12-23T18:58:25.879902Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-12-23T18:58:25.879977Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T18:58:25.880199Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:134:2156], Recipient [1:134:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:25.880246Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T18:58:25.880341Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2025-12-23T18:58:25.880388Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-12-23T18:58:25.880423Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2025-12-23T18:58:25.880468Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 
9437184 2025-12-23T18:58:25.880523Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 9437185 dest 9437184 consumer 9437184 txId 521 2025-12-23T19:00:30.944147Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 522 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-12-23T19:00:30.944179Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.944206Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 522 2025-12-23T19:00:30.944337Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 523 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-12-23T19:00:30.944376Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.944403Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 523 2025-12-23T19:00:30.944528Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-12-23T19:00:30.944562Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.944590Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2025-12-23T19:00:30.944732Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-12-23T19:00:30.944764Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.944794Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2025-12-23T19:00:30.944953Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-12-23T19:00:30.944985Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.945014Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2025-12-23T19:00:30.945144Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-12-23T19:00:30.945175Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.945209Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2025-12-23T19:00:30.945342Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-12-23T19:00:30.945374Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.945402Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2025-12-23T19:00:30.945484Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-12-23T19:00:30.945514Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.945542Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2025-12-23T19:00:30.945737Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 530 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-12-23T19:00:30.945770Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.945795Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2025-12-23T19:00:30.945947Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-12-23T19:00:30.945979Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.946007Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2025-12-23T19:00:30.946108Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-12-23T19:00:30.946138Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.946166Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2025-12-23T19:00:30.946316Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-12-23T19:00:30.946349Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: 
StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.946378Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2025-12-23T19:00:30.946523Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-12-23T19:00:30.946554Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.946581Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2025-12-23T19:00:30.946726Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-12-23T19:00:30.946783Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.946811Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2025-12-23T19:00:30.946952Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-12-23T19:00:30.946984Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.947020Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2025-12-23T19:00:30.947133Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-12-23T19:00:30.947164Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.947191Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2025-12-23T19:00:30.973924Z node 10 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T19:00:30.973985Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2025-12-23T19:00:30.974043Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [10:103:2137], exec latency: 1 ms, propose latency: 2 ms 2025-12-23T19:00:30.974118Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-12-23T19:00:30.974152Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T19:00:30.974347Z node 10 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2025-12-23T19:00:30.974366Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2025-12-23T19:00:30.974403Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [10:103:2137], exec latency: 0 ms, propose latency: 1 ms 2025-12-23T19:00:30.974432Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-12-23T19:00:30.974569Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:349:2317]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-12-23T19:00:30.974610Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:00:30.974641Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 30 28 30 28 29 25 31 21 30 30 30 31 31 28 27 22 21 17 30 30 27 28 30 30 30 - 0 - - - - - actual 30 28 30 28 29 25 31 21 30 30 30 31 31 28 27 22 21 17 30 30 27 28 30 30 30 - 0 - - - - - interm 30 28 30 28 29 25 30 21 30 30 30 25 27 28 27 22 21 17 30 30 27 28 30 30 30 - 0 - - - - - |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TxUsage::WriteToTopic_Demo_46_Table >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets-UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets+UseAuthToken >> LocalPartition::WithoutPartitionWithRestart [GOOD] >> LocalPartition::WithoutPartitionUnknownEndpoint >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] >> KikimrIcGateway::TestCreateStreamingQuery [GOOD] >> TxUsage::The_TxWriteInfo_Is_Deleted_After_The_Immediate_Transaction [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table >> TxUsage::WriteToTopic_Demo_27_Table [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestCreateStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 21126, MsgBus: 22108 2025-12-23T19:00:16.143892Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130236427669014:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:16.144031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004506/r3tmp/tmprDaafg/pdisk_1.dat 2025-12-23T19:00:16.398460Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:16.422498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:16.422646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:16.501134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:16.602345Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21126, node 1 2025-12-23T19:00:16.700451Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:00:16.816598Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:16.816634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:16.816641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:16.816790Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:17.164960Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22108 TClient is connected to server localhost:22108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516416581 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:17.662873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T19:00:17.703676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-12-23T19:00:17.741461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) 2025-12-23T19:00:17.752028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24505, MsgBus: 15963 2025-12-23T19:00:20.053191Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587130251983095000:2147];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:20.053841Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004506/r3tmp/tmpv0Ypf7/pdisk_1.dat 2025-12-23T19:00:20.066556Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:20.142660Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:20.144816Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587130251983094879:2081] 1766516420048274 != 1766516420048277 2025-12-23T19:00:20.184639Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:20.184719Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:20.187076Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24505, node 2 2025-12-23T19:00:20.286837Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:00:20.287371Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:20.287381Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:20.287388Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:20.287474Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15963 TClient is connected to server localhost:15963 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516420235 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:20.723607Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:20.728669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:00:20.740057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Trying to start YDB, gRPC: 32106, MsgBus: 13531 2025-12-23T19:00:23.742601Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587130267736558117:2190];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:23.745369Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004506/r3tmp/tmpP0jvon/pdisk_1.dat 2025-12-23T19:00:23.771772Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:23.857281Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:23.857364Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:23.882236Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:23.883893Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32106, node 3 2025-12-23T19:00:23.955510Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:23.955542Z 
node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:23.955552Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:23.955628Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:24.053600Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:13531 TClient is connected to server localho ... d: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516423945 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:24.332953Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T19:00:24.338929Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:00:24.355246Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) Trying to start YDB, gRPC: 21417, MsgBus: 28755 2025-12-23T19:00:27.687133Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587130282741943187:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:27.687172Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004506/r3tmp/tmpn4ZKMe/pdisk_1.dat 2025-12-23T19:00:27.714838Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:27.811353Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:27.811445Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:27.811602Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:27.820075Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587130282741943161:2081] 1766516427685733 != 1766516427685736 2025-12-23T19:00:27.843964Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21417, node 4 2025-12-23T19:00:27.894541Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:00:27.920760Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:27.920782Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:27.920789Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:27.920871Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28755 TClient is connected to server localhost:28755 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516427900 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:28.350473Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:28.715471Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:28.723253Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:31.089290Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130299921813334:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:31.089420Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:31.089576Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130299921813346:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:31.089718Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130299921813354:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:31.089966Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:31.090865Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130299921813348:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:31.093293Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:31.096980Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587130299921813373:2575] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T19:00:31.104264Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587130299921813356:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T19:00:31.104267Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587130299921813372:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T19:00:31.193435Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587130299921813405:2603] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:31.194093Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587130299921813407:2605] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:32.018463Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T19:00:32.470638Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:32.585779Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T19:00:32.893323Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587130282741943187:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:32.893868Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] >> TxUsage::WriteToTopic_Demo_27_Query >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:57:52.458683Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:57:52.541308Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:57:52.549414Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:57:52.549720Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:57:52.549772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:57:52.765013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:52.765172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:52.842032Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516270061104 != 1766516270061108 2025-12-23T18:57:52.850902Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:52.892427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:52.988441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005fca/r3tmp/tmp562nmy/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25542, node 1 TClient is connected to server localhost:23049 2025-12-23T18:57:53.501584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:57:53.503446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:57:53.503531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:57:53.503582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:57:53.504799Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:57:53.518494Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-12-23T18:58:04.045609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:897:2763], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.045830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.046359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:924:2770], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.046422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.051187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:04.329701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1017:2844], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.329784Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.330178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1021:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.330225Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.330269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:04.333922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:05.008316Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1026:2853], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T18:58:05.091391Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1119:2917] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:05.595797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:06.071045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:06.744564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:07.626727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:08.020937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T18:58:08.521737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:09.501078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", 
AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-12-23T18:58:23.927643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715704:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="access ... ine=abstract.cpp:13;event=new_stage;stage=Written;task_id=aa355b8e-e03111f0-8b95ac13-4dd0d60c; 2025-12-23T19:00:36.225155Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=1328;raw_bytes=1089;count=1;records=1} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=129416;raw_bytes=3680236;count=3;records=3046} inactive {blob_bytes=3040;raw_bytes=2194;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 2025-12-23T19:00:36.225541Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=aa35c52e-e03111f0-bef79757-3ba8f361;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=ReadBlobs;task_id=aa35c52e-e03111f0-bef79757-3ba8f361; 2025-12-23T19:00:36.225871Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: external_task_id=aa35c52e-e03111f0-bef79757-3ba8f361;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-12-23T19:00:36.226263Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=aa35c52e-e03111f0-bef79757-3ba8f361; 2025-12-23T19:00:36.226442Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=aa35c52e-e03111f0-bef79757-3ba8f361;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=aa35c52e-e03111f0-bef79757-3ba8f361; 2025-12-23T19:00:36.234811Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=aa35c52e-e03111f0-bef79757-3ba8f361;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=aa35c52e-e03111f0-bef79757-3ba8f361;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=aa35c52e-e03111f0-bef79757-3ba8f361; 2025-12-23T19:00:36.241251Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
event=on_execution;consumer=GENERAL_COMPACTION;task_id=aa35c52e-e03111f0-bef79757-3ba8f361;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=aa35c52e-e03111f0-bef79757-3ba8f361;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037893;parent_id=[1:3076:4355];task_id=aa35c52e-e03111f0-bef79757-3ba8f361;task_class=CS::GENERAL;fline=general_compaction.cpp:138;event=blobs_created_diff;appended=0;;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:192];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:192:232];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:424:256];;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:680:192];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:872:264];;column_id:6;chunk_idx:0;blob_range:[NO_BLOB:1136:192];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:1328:192];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:1520:192];;;;switched=(portion_id:20;path_id:1000000895;records_count:1;schema_version:2;level:0;cs:plan_step=1757878164500;tx_id=18446744073709551615;;wi:10;;column_size:1328;index_size:0;meta:(()););(portion_id:19;path_id:1000000895;records_count:1;schema_version:2;level:0;;column_size:1712;index_size:0;meta:(()););; 2025-12-23T19:00:36.241362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=aa35c52e-e03111f0-bef79757-3ba8f361;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=aa35c52e-e03111f0-bef79757-3ba8f361;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037893;parent_id=[1:3076:4355];task_id=aa35c52e-e03111f0-bef79757-3ba8f361;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2025-12-23T19:00:36.241422Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=aa35c52e-e03111f0-bef79757-3ba8f361;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=aa35c52e-e03111f0-bef79757-3ba8f361;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037893;parent_id=[1:3076:4355];task_id=aa35c52e-e03111f0-bef79757-3ba8f361;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=aa35c52e-e03111f0-bef79757-3ba8f361; 2025-12-23T19:00:36.241640Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3076:4355];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-12-23T19:00:36.241750Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3076:4355];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:63;event=Limiter; 2025-12-23T19:00:36.241790Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3076:4355];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=AskDiskQuota;task_id=aa35c52e-e03111f0-bef79757-3ba8f361; 2025-12-23T19:00:36.241837Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3076:4355];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Writing;task_id=aa35c52e-e03111f0-bef79757-3ba8f361; 2025-12-23T19:00:36.242305Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 72075186224037893 2025-12-23T19:00:36.242444Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:3076:4355];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=94;data_size=70;sum=2726;count=57; 2025-12-23T19:00:36.242494Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3076:4355];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:66;memory_size=190;data_size=182;sum=5510;count=58;size_of_meta=112; 2025-12-23T19:00:36.242547Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3076:4355];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:44;memory_size=270;data_size=262;sum=7830;count=29;size_of_portion=192; 2025-12-23T19:00:36.242608Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3076:4355];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=aa35c52e-e03111f0-bef79757-3ba8f361; 2025-12-23T19:00:36.250937Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[49] (CS::GENERAL) apply at tablet 72075186224037893 2025-12-23T19:00:36.252164Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3076:4355];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=72075186224037893;external_task_id=aa35c52e-e03111f0-bef79757-3ba8f361;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=aa35c52e-e03111f0-bef79757-3ba8f361; 2025-12-23T19:00:36.252247Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 72075186224037893 Save Batch GenStep: 1:17 Blob count: 1 2025-12-23T19:00:36.252385Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=1328;raw_bytes=1089;count=1;records=1} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=129416;raw_bytes=3680236;count=3;records=3046} inactive {blob_bytes=3040;raw_bytes=2194;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 Cleaning waiting... 
Fake storage clean FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=51;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=53;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=54;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=50;stage=CLEANUP_PORTIONS; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=52;stage=CLEANUP_PORTIONS; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=aa355b8e-e03111f0-8b95ac13-4dd0d60c; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=CLEANUP_PORTIONS;event=free;usage=560;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=6240;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=CLEANUP_PORTIONS;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=5680;delta=560; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=aa35c52e-e03111f0-bef79757-3ba8f361; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=3600;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=3600;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=560;delta=3040; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=560;delta=3040; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |98.9%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::WriteToTopic_Demo_18_RestartNo_Table [GOOD] >> KikimrIcGateway::TestAlterStreamingQuery [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets >> KikimrIcGateway::TestDropStreamingQuery [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartNo_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestAlterStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 8060, MsgBus: 12008 2025-12-23T19:00:16.134327Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130237218075419:2258];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:16.134421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004503/r3tmp/tmpaE2WJO/pdisk_1.dat 2025-12-23T19:00:16.371086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:16.417433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:16.417519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:16.519684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:16.526840Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130237218075184:2081] 1766516416121343 != 1766516416121346 2025-12-23T19:00:16.530677Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8060, node 1 2025-12-23T19:00:16.680341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions 2025-12-23T19:00:16.817267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:16.817290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:16.817349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:16.817446Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:17.134199Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:12008 TClient is connected to server localhost:12008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516416602 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:17.660970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T19:00:17.668060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:17.705190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-12-23T19:00:17.744235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) Trying to start YDB, gRPC: 7550, MsgBus: 19493 2025-12-23T19:00:20.110938Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587130254418060333:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:20.114888Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004503/r3tmp/tmpX47Btv/pdisk_1.dat 2025-12-23T19:00:20.234825Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:20.262556Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:20.273700Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587130254418060311:2081] 1766516420104384 != 1766516420104387 2025-12-23T19:00:20.287478Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:20.287560Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:20.296845Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7550, node 2 2025-12-23T19:00:20.371383Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:20.371416Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:20.371424Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:20.371511Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:20.503191Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19493 TClient is connected to server localhost:19493 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516420347 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:20.790595Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:20.796647Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:20.816357Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-12-23T19:00:20.832264Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) 2025-12-23T19:00:20.844835Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587130254418061208:2536] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/f1/f2/external_table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeExternalTable, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:20.845151Z node 2 :KQP_GATEWAY ERROR: scheme.h:178: Unexpected error on scheme request, TxId: 281474976710660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeExternalTable, state: EPathStateNoChanges)
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeExternalTable, state: EPathStateNoChanges) Trying to start YDB, gRPC: 20590, MsgBus: 22357 2025-12-23T19:00:23.694999Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587130268707687617:2063];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:23.695070Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:00:23.746416Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_clien ... ted: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:28.160044Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:28.183752Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:28.203094Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp:155) Trying to start YDB, gRPC: 24705, MsgBus: 26350 2025-12-23T19:00:31.374516Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7587130299768734108:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:31.374655Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004503/r3tmp/tmpzfvILH/pdisk_1.dat 2025-12-23T19:00:31.466270Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:31.483743Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:31.551510Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:31.551596Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:31.556580Z node 5 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24705, node 5 2025-12-23T19:00:31.617524Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:31.617546Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:31.617556Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:31.617638Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:31.743230Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:26350 TClient is connected to server localhost:26350 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516431610 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:32.109891Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:32.118847Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:00:32.401815Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:32.409088Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:35.459629Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587130316948604215:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:35.459948Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:35.460752Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587130316948604255:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:35.460808Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587130316948604256:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:35.460847Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587130316948604257:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:35.461060Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:35.462965Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7587130316948604263:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:35.463023Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:35.466137Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:35.470230Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587130316948604265:2574] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T19:00:35.481148Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587130316948604262:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T19:00:35.481211Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7587130316948604261:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T19:00:35.565868Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587130316948604312:2605] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:35.588349Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7587130316948604328:2612] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:36.374958Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7587130299768734108:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:36.375032Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:36.708661Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T19:00:37.201746Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:37.333507Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 2739, MsgBus: 20852 2025-12-23T19:00:16.717746Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130235288909219:2065];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:16.717786Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0044f8/r3tmp/tmp4vKYS3/pdisk_1.dat 2025-12-23T19:00:17.013581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-12-23T19:00:17.013732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:17.072116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2739, node 1 2025-12-23T19:00:17.142345Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:17.147840Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130235288909190:2081] 1766516416716168 != 1766516416716171 2025-12-23T19:00:17.173444Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:17.280554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:17.280605Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:17.280614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:17.280711Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:17.312046Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20852 2025-12-23T19:00:17.734736Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:20852 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516417155 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-12-23T19:00:18.002764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:00:18.008067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:20.064824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130252468779310:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.064946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.065471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130252468779320:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.065515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.338503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.430282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.466805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.498881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.540032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130252468779624:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.540112Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.540342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130252468779629:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.540375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130252468779630:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.540640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.544390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:20.556609Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587130252468779633:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-12-23T19:00:20.649120Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587130252468779684:2769] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 4400, MsgBus: 26785 2025-12-23T19:00:21.729134Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587130256707018970:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:21.729200Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0044f8/r3tmp/tmpr8Mx7F/pdisk_1.dat 2025-12-23T19:00:21.744009Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:21.819105Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:21.821801Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587130256707018944:2081] 1766516421727779 != 1766516421727782 2025-12-23T19:00:21.861076Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:21.861157Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:21.863938Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4400, node 2 2025-12-23T19:00:21.927122Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:21.927152Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:21.927160Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:21.927246Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:21. ... 
ut propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2025-12-23T19:00:31.881322Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2025-12-23T19:00:31.932936Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Trying to start YDB, gRPC: 22390, MsgBus: 1497 2025-12-23T19:00:33.160125Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7587130311643693987:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:33.160417Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:00:33.194271Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0044f8/r3tmp/tmpd4IkaK/pdisk_1.dat 2025-12-23T19:00:33.296671Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7587130311643693776:2081] 1766516433132785 != 1766516433132788 2025-12-23T19:00:33.324902Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:33.324999Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:33.326477Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:33.332909Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:33.338828Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 22390, node 4 2025-12-23T19:00:33.470872Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:33.470895Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:33.470905Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:33.471013Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:33.579954Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:1497 TClient is connected to server localhost:1497 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516433409 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:33.914703Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:33.927189Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:34.160939Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:34.181117Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:37.444333Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130328823563952:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:37.444457Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:37.444972Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130328823563968:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:37.445022Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130328823563969:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:37.445059Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130328823563970:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:37.445366Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:37.450072Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:37.454381Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587130328823563977:2574] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-12-23T19:00:37.465088Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-12-23T19:00:37.465162Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-12-23T19:00:37.465307Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587130328823563974:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T19:00:37.465350Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587130328823563975:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T19:00:37.525730Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587130328823564023:2604] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:37.565772Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587130328823564041:2612] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:38.162867Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587130311643693987:2249];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:38.162948Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:38.183181Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T19:00:38.771927Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:38.923307Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> BasicUsage::AlterTopicWithSharedConsumer_SetMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_AlterMoveDeadLetterPolicy |98.9%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] Test command err: 2025-12-23T18:59:50.188488Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:59:50.222689Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:59:50.222981Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:59:50.230461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:59:50.230783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:59:50.231047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:59:50.231189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:59:50.231320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:59:50.231430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:59:50.231553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:59:50.231689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:59:50.231797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:59:50.231930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:59:50.232041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:59:50.232160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:59:50.232297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:59:50.264249Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:59:50.264491Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:59:50.264586Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:59:50.264781Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:59:50.264997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:59:50.265089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:59:50.265160Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:59:50.265274Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:59:50.265351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:59:50.265403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:59:50.265441Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:59:50.265654Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:59:50.265732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:59:50.265783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2025-12-23T18:59:50.265818Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:59:50.265942Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:59:50.266005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:59:50.266063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:59:50.266117Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:59:50.266192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:59:50.266242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:59:50.266276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-12-23T18:59:50.266353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:59:50.266404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:59:50.266451Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:59:50.266698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:59:50.266777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:59:50.266812Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:59:50.266982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:59:50.267077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:59:50.267120Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:59:50.267176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:59:50.267228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:59:50.267263Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:59:50.267343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:59:50.267389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:59:50.267427Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:59:50.267573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-12-23T18:59:50.267621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2025-12-23T19:00:42.484909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=970; 2025-12-23T19:00:42.484969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=61057; 2025-12-23T19:00:42.485020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=61188; 2025-12-23T19:00:42.485092Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=15; 2025-12-23T19:00:42.485474Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=332; 2025-12-23T19:00:42.485523Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=62140; 2025-12-23T19:00:42.485706Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=116; 2025-12-23T19:00:42.485834Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=77; 2025-12-23T19:00:42.486239Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=356; 2025-12-23T19:00:42.486590Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=296; 2025-12-23T19:00:42.502286Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15610; 2025-12-23T19:00:42.519164Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16738; 2025-12-23T19:00:42.519296Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=16; 2025-12-23T19:00:42.519358Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2025-12-23T19:00:42.519401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-12-23T19:00:42.519480Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2025-12-23T19:00:42.519525Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-12-23T19:00:42.519612Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=51; 2025-12-23T19:00:42.519652Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-12-23T19:00:42.519721Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2025-12-23T19:00:42.520186Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=420; 2025-12-23T19:00:42.520296Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=53; 2025-12-23T19:00:42.520336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=106469; 2025-12-23T19:00:42.520484Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T19:00:42.520604Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-12-23T19:00:42.520663Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-12-23T19:00:42.520733Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-12-23T19:00:42.520781Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];process=SwitchToWork;fline=column_engine_logs.cpp:540;event=OnTieringModified;new_count_tierings=0; 2025-12-23T19:00:42.520990Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T19:00:42.521059Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2025-12-23T19:00:42.521100Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2025-12-23T19:00:42.521152Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T19:00:42.521220Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766516094260;tx_id=18446744073709551615;;current_snapshot_ts=1766516391716; 2025-12-23T19:00:42.521265Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T19:00:42.521314Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T19:00:42.521353Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T19:00:42.521450Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T19:00:42.521639Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.122000s; 2025-12-23T19:00:42.524520Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-12-23T19:00:42.524714Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-12-23T19:00:42.524771Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T19:00:42.524841Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2025-12-23T19:00:42.524897Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2025-12-23T19:00:42.524971Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1766516094260;tx_id=18446744073709551615;;current_snapshot_ts=1766516391716; 2025-12-23T19:00:42.525027Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T19:00:42.525080Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T19:00:42.525121Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T19:00:42.525199Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2025-12-23T19:00:42.525253Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T19:00:42.525763Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.165000s; 2025-12-23T19:00:42.525810Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5443:7075];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Table [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query >> TxUsage::WriteToTopic_Demo_46_Table [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |99.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets+UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Table |99.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TxUsage::WriteToTopic_Demo_46_Query >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Query >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 6618, MsgBus: 4085 2025-12-23T19:00:16.115676Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130238463633543:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:16.115746Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004502/r3tmp/tmpgdtlEJ/pdisk_1.dat 2025-12-23T19:00:16.394021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:16.426120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:16.426250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:16.484018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:16.575415Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:16.576540Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130238463633496:2081] 1766516416109395 != 1766516416109398 TServer::EnableGrpc on GrpcPort 6618, node 1 2025-12-23T19:00:16.621145Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:00:16.819228Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:16.819249Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:16.819254Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:16.819327Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:17.126916Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:4085 TClient is connected to server localhost:4085 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516416588 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting...2025-12-23T19:00:17.770023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:00:17.780149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:19.433146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130251348536316:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.433320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.433645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130251348536326:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.433702Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.829234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.906922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.933698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.963420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.015945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130255643503926:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.016082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.016171Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130255643503931:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.016434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130255643503933:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.016511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.022193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:20.033186Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587130255643503935:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-12-23T19:00:20.100545Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587130255643503986:2765] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 1964, MsgBus: 27988 2025-12-23T19:00:21.471858Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587130259542383272:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:21.472822Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004502/r3tmp/tmpzSPlLd/pdisk_1.dat 2025-12-23T19:00:21.490263Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:21.553542Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:21.553603Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:21.554341Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587130259542383245:2081] 1766516421470562 != 1766516421470565 2025-12-23T19:00:21.560782Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1964, node 2 2025-12-23T19:00:21.573425Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T19:00:21.573524Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T19:00:21.583209Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:21.603034Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:21.603066Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:21.603091Z node 2 :NET ... 
, node 4 2025-12-23T19:00:41.843560Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:41.844922Z node 4 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T19:00:41.845057Z node 4 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2025-12-23T19:00:41.851098Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:41.867847Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:41.867871Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:41.867879Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:41.867962Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4760 2025-12-23T19:00:42.285193Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:4760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516441914 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T19:00:42.523373Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:00:42.545805Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:42.546940Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 
2025-12-23T19:00:42.654324Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:42.878013Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:42.964692Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:45.960925Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130363386187932:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:45.961023Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:45.961559Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130363386187941:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:45.961613Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:46.053953Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:46.100243Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:46.138928Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:46.183475Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:46.230972Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:46.285372Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:46.343376Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:46.408606Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:46.505532Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130367681156104:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:46.505715Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:46.506190Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130367681156109:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:46.506315Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130367681156110:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:46.506728Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:46.515992Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:46.523176Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587130346206316907:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:46.523883Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:46.539616Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587130367681156113:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:46.601286Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587130367681156167:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> LocalPartition::WithoutPartitionUnknownEndpoint [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation >> TTabletPipeTest::TestSendWithoutWaitOpen >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TxUsage::WriteToTopic_Demo_18_RestartNo_Query [GOOD] |99.0%| [TA] $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_simple.py::TestSimple::test[alter_table] >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Table >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 >> BasicUsage::AlterTopicWithSharedConsumer_AlterMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_DeleteDeadLetterPolicy_AlterMoveDeadLetterPolicy >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query [GOOD] >> TColumnShardTestReadWrite::CompactionGC [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_1_Table >> TxUsage::WriteToTopic_Demo_27_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken [GOOD] Test command err: Trying to start YDB, gRPC: 31480, MsgBus: 17964 2025-12-23T19:00:16.125026Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130234944148094:2199];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:16.125204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0044fd/r3tmp/tmpUK3HT2/pdisk_1.dat 2025-12-23T19:00:16.396933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-12-23T19:00:16.424405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:16.424504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:16.481363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:16.526090Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:16.529896Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130234944147926:2081] 1766516416109461 != 1766516416109464 2025-12-23T19:00:16.608780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 31480, node 1 2025-12-23T19:00:16.819474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:16.819510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:16.819517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:16.819588Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:17.125245Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:17964 TClient is connected to server localhost:17964 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516416560 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:17.627434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T19:00:17.635814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:17.657631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:17.869616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T19:00:18.046961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:18.126452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:19.313810Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130247829051681:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.313943Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.315015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130247829051690:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.315103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.828457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.860616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.892998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.926359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.953890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.985914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.015763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.060904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.159308Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130252124019854:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.159398Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.159751Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130252124019860:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.159799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130252124019859:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.159832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.164195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... usCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516449852 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T19:00:50.633449Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:00:50.640201Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:50.657238Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:50.763731Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:50.974783Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 
2025-12-23T19:00:51.093344Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:54.611026Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587130378653707551:2155];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:54.611140Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:55.512655Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130404423513071:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:55.512769Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:55.513334Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130404423513081:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:55.513405Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:55.861195Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:56.061239Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:56.198341Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:56.349827Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:56.424426Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:56.520531Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:56.629039Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:56.760301Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:57.091002Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130413013448581:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:57.091148Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:57.091525Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130413013448587:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:57.091588Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130413013448586:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:57.091627Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:57.096917Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:57.120001Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587130413013448590:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:57.222253Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587130413013448641:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:59.791255Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2025-12-23T19:00:59.806503Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2025-12-23T19:00:59.900074Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2025-12-23T19:00:59.926803Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) 2025-12-23T19:00:59.941325Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710675, at schemeshard: 72057594046644480 |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Table [GOOD] >> TxUsage::WriteToTopic_Demo_38_Table >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 3613, MsgBus: 19113 2025-12-23T19:00:16.262311Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130238294959987:2140];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:16.262382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:00:16.293284Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0044fb/r3tmp/tmpuc50ix/pdisk_1.dat 2025-12-23T19:00:16.588768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:16.588879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:16.655546Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130238294959885:2081] 1766516416254073 != 1766516416254076 2025-12-23T19:00:16.655617Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:16.658731Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:00:16.671828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3613, node 1 2025-12-23T19:00:16.816309Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:16.816408Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:16.816418Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:16.816513Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:16.904478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:19113 2025-12-23T19:00:17.271688Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19113 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516416721 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:17.620446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T19:00:17.635734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:17.663781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:17.879452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:18.050173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:18.118371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:19.639980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130251179863642:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.640094Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.640388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130251179863652:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.640433Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.967569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.998889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.025087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.054949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.085270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.120032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.154270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.228009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.317372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130255474831821:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.317485Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.317671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130255474831827:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.317724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.317729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130255474831826:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025 ... Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516452078 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:53.509343Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:53.527698Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:53.597175Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T19:00:53.783528Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:54.025341Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:54.147947Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:56.500001Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587130387402063256:2081];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:56.500101Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:57.600862Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130413171868874:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:57.600958Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:57.601471Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130413171868884:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:57.601528Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:57.769499Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:57.830066Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:57.890585Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:57.964095Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:58.059439Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:58.170276Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:58.280381Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:58.482407Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:58.659245Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130417466837057:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:58.659373Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:58.659936Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130417466837062:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:58.659994Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130417466837063:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:58.660037Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:58.664697Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:58.707217Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587130417466837066:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:58.792779Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587130417466837119:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:01:02.216660Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2025-12-23T19:01:02.234495Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2025-12-23T19:01:02.288004Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2025-12-23T19:01:02.311373Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710674, at schemeshard: 72057594046644480 2025-12-23T19:01:02.378337Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> TxUsage::WriteToTopic_Demo_46_Query [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD] Test command err: 2025-12-23T18:59:47.580757Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2025-12-23T18:59:47.615034Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2025-12-23T18:59:47.615264Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-12-23T18:59:47.623052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-12-23T18:59:47.623334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-12-23T18:59:47.623600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-12-23T18:59:47.623743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-12-23T18:59:47.623884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-12-23T18:59:47.624023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-12-23T18:59:47.624137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-12-23T18:59:47.624272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-12-23T18:59:47.624405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-12-23T18:59:47.624513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-12-23T18:59:47.624609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-12-23T18:59:47.624740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-12-23T18:59:47.624887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-12-23T18:59:47.655470Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-12-23T18:59:47.655667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-12-23T18:59:47.655720Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-12-23T18:59:47.655907Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:59:47.656078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-12-23T18:59:47.656154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-12-23T18:59:47.656206Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-12-23T18:59:47.656298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-12-23T18:59:47.656408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-12-23T18:59:47.656466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-12-23T18:59:47.656521Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-12-23T18:59:47.656734Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-12-23T18:59:47.656812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-12-23T18:59:47.656860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-12-23T18:59:47.656892Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-12-23T18:59:47.656989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-12-23T18:59:47.657045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-12-23T18:59:47.657090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-12-23T18:59:47.657123Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-12-23T18:59:47.657182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-12-23T18:59:47.657221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-12-23T18:59:47.657255Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-12-23T18:59:47.657329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-12-23T18:59:47.657386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-12-23T18:59:47.657431Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-12-23T18:59:47.657680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-12-23T18:59:47.657758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-12-23T18:59:47.657793Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-12-23T18:59:47.657937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-12-23T18:59:47.657997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-12-23T18:59:47.658028Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-12-23T18:59:47.658081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-12-23T18:59:47.658119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-12-23T18:59:47.658152Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-12-23T18:59:47.658201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-12-23T18:59:47.658241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-12-23T18:59:47.658309Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-12-23T18:59:47.658474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2025-12-23T18:59:47.658521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... t=StartCleanup;portions_count=2; 2025-12-23T19:00:42.142282Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=20;drop=0;skip=0;portions_counter=20;chunks=2240;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T19:00:42.142490Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=manager.cpp:10;event=lock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::adbe1a66-e03111f0-ab693b8c-518ef100; 2025-12-23T19:00:42.142571Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=ro_controller.cpp:41;event=CS::CLEANUP::PORTIONS;tablet_id=9437184; 2025-12-23T19:00:42.142654Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=abstract.cpp:13;event=new_stage;stage=Started;task_id=adbe1a66-e03111f0-ab693b8c-518ef100; 2025-12-23T19:00:42.143476Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessorResources;task_id=adbe1a66-e03111f0-ab693b8c-518ef100; 2025-12-23T19:00:42.143619Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessors;task_id=adbe1a66-e03111f0-ab693b8c-518ef100; 2025-12-23T19:00:42.143835Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T19:00:42.143932Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.631500s; 2025-12-23T19:00:42.144002Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T19:00:42.144882Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=adbe1a66-e03111f0-ab693b8c-518ef100; 
2025-12-23T19:00:42.145229Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_finished;consumer=CLEANUP_PORTIONS;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=adbe1a66-e03111f0-ab693b8c-518ef100; Cleanup old portions: 2 4 9 8 1 6 3 7 10 5 19 14 18 13 21 11 15 22 20 17 2025-12-23T19:00:42.145522Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2025-12-23T19:00:42.145592Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=adbe1a66-e03111f0-ab693b8c-518ef100; 2025-12-23T19:00:42.145822Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[190] (CS::CLEANUP::PORTIONS) apply at tablet 9437184 2025-12-23T19:00:42.146715Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=adbe1a66-e03111f0-ab693b8c-518ef100;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=adbe1a66-e03111f0-ab693b8c-518ef100; 2025-12-23T19:00:42.147868Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=148108480;raw_bytes=150092142;count=25;records=1800002} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12353440;raw_bytes=14738900;count=2;records=150000} inactive {blob_bytes=123422240;raw_bytes=125076680;count=20;records=1500000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-12-23T19:00:42.162496Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=adbe1a66-e03111f0-ab693b8c-518ef100; 2025-12-23T19:00:42.162609Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2025-12-23T19:00:42.163097Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::adbe1a66-e03111f0-ab693b8c-518ef100; 2025-12-23T19:00:42.163223Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2025-12-23T19:00:42.163321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2025-12-23T19:00:42.163392Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2025-12-23T19:00:42.163482Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-12-23T19:00:42.163552Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2025-12-23T19:00:42.163617Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2025-12-23T19:00:42.163723Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.630000s; 2025-12-23T19:00:42.164163Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=adbe1a66-e03111f0-ab693b8c-518ef100;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2025-12-23T19:00:42.164311Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:7:3:0:6171112:0] 2025-12-23T19:00:42.164394Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:19:3:0:6171112:0] 2025-12-23T19:00:42.164466Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:17:4:0:6171112:0] 2025-12-23T19:00:42.164513Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:22:3:0:6171112:0] 2025-12-23T19:00:42.164573Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:3:2:0:6171112:0] 2025-12-23T19:00:42.164621Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:21:2:0:6171112:0] 2025-12-23T19:00:42.164663Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:14:4:0:6171112:0] 2025-12-23T19:00:42.164736Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:8:4:0:6171112:0] 2025-12-23T19:00:42.164793Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:6171112:0] 2025-12-23T19:00:42.164839Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:9:2:0:6171112:0] 2025-12-23T19:00:42.164896Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:10:3:0:6171112:0] 2025-12-23T19:00:42.164942Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:13:3:0:6171112:0] 2025-12-23T19:00:42.164983Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:15:2:0:6171112:0] 2025-12-23T19:00:42.165022Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:11:4:0:6171112:0] 2025-12-23T19:00:42.165064Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:5:4:0:6171112:0] 2025-12-23T19:00:42.165105Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 
Delete Blob DS:0:[9437184:2:6:2:0:6171112:0] 2025-12-23T19:00:42.165146Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:4:3:0:6171112:0] 2025-12-23T19:00:42.165187Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:18:2:0:6171112:0] 2025-12-23T19:00:42.165233Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:6171112:0] 2025-12-23T19:00:42.165273Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:20:4:0:6171112:0] GC for channel 4 deletes blobs: WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 Compactions happened: 2 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TxUsage::WriteToTopic_Demo_47_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 26565, MsgBus: 10312 2025-12-23T19:00:16.119332Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587130235710358772:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:16.119404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0044ff/r3tmp/tmpjR7R7d/pdisk_1.dat 2025-12-23T19:00:16.442080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:00:16.442229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:00:16.498310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:00:16.498350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:00:16.542881Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587130235710358671:2081] 1766516416109414 != 1766516416109417 2025-12-23T19:00:16.552089Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26565, node 1 2025-12-23T19:00:16.721350Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:00:16.816828Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:00:16.816858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:00:16.816864Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:00:16.816948Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:00:17.134077Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10312 TClient is connected to server localhost:10312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516416609 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:00:17.620689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:00:17.631891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:00:17.662452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:17.862651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:00:18.029825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:18.104180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:19.569564Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130248595262433:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.569724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.570137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130248595262443:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.570217Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:19.898089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.929516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.963000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:19.995618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.024025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.054420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.084792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.153513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:20.235119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130252890230616:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.235214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.235299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130252890230621:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.235447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587130252890230623:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.235491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:20.239600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at sc ... led at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:00:45.154587Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:00:45.209974Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T19:00:45.298956Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:48.454386Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130373123986846:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:48.454472Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:48.454786Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130373123986855:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:48.454828Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:48.542215Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:48.596214Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:48.635530Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:48.676953Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:48.725981Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:48.793375Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:48.867286Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:48.959805Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:49.122494Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587130355944115926:2151];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:00:49.122581Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:00:49.134371Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130377418955028:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:49.134481Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:49.134951Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130377418955033:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:49.134996Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587130377418955034:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:49.135033Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:00:49.140698Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:00:49.167589Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587130377418955037:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:00:49.240889Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587130377418955090:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:00:52.172824Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:53.212628Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T19:00:54.463383Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:55.315789Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:56.359945Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T19:00:57.560565Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710694:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-12-23T19:00:58.517955Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:00:58.613274Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2025-12-23T19:00:59.208707Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: 
Cannot get console configs 2025-12-23T19:00:59.208757Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:01:03.638982Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710730:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest |99.0%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> EraseRowsTests::EraseRowsFromReplicatedTable >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_15_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 15] >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Query [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_compression-COMPRESSION = "zstd"] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Table [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Query |99.0%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_DeleteDeadLetterPolicy_AlterMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterDeadLetterPolicy_StreamingConsumer >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Table >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Query [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_18_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 18] >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Table >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> TxUsage::WriteToTopic_Demo_38_Table [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] |99.0%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T19:01:06.650854Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:01:07.035732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:01:07.096415Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:01:07.097058Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:01:07.097162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:01:08.493491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:01:08.493656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:01:08.727877Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516462075618 != 1766516462075622 2025-12-23T19:01:08.784435Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:01:08.884367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:01:09.045921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/006112/r3tmp/tmpQBwh5C/pdisk_1.dat 2025-12-23T19:01:09.516829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:01:09.551398Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:01:09.713698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:01:09.837701Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T19:01:09.838149Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T19:01:09.960337Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T19:01:09.960541Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T19:01:09.968977Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T19:01:09.969073Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T19:01:09.969149Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T19:01:09.975313Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T19:01:09.975526Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T19:01:09.975663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T19:01:09.987493Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T19:01:10.074671Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T19:01:10.087446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T19:01:10.087683Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T19:01:10.087745Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:01:10.087791Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T19:01:10.087840Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:10.094999Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T19:01:10.095157Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T19:01:10.095719Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:10.095827Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:01:10.095901Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T19:01:10.095961Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:10.096047Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T19:01:10.096215Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T19:01:10.096581Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T19:01:10.096706Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T19:01:10.099176Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T19:01:10.111606Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T19:01:10.111761Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T19:01:10.253185Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T19:01:10.267952Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T19:01:10.268086Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:10.269378Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:10.269452Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T19:01:10.269534Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T19:01:10.269898Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T19:01:10.270119Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T19:01:10.270568Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:10.270662Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T19:01:10.291164Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T19:01:10.295584Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:01:10.297964Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T19:01:10.298032Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:10.302900Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T19:01:10.303130Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:10.305272Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:10.305343Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:01:10.305398Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T19:01:10.305488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T19:01:10.305556Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T19:01:10.305652Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:10.328068Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T19:01:10.330415Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T19:01:10.330501Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T19:01:22.282438Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T19:01:22.282695Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T19:01:22.298453Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T19:01:22.299221Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:22.299296Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T19:01:22.299829Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T19:01:22.300263Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:01:22.301948Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T19:01:22.302001Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:22.324071Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T19:01:22.324169Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:22.325008Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:22.325059Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:01:22.325124Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T19:01:22.325187Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T19:01:22.325249Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T19:01:22.325338Z 
node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:22.328954Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T19:01:22.330771Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T19:01:22.330986Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T19:01:22.331053Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T19:01:22.349160Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:950:2808], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:01:22.349267Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:961:2813], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:01:22.349711Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:01:22.350399Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:966:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:01:22.350586Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:01:22.355416Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:01:22.363816Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T19:01:22.500510Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T19:01:22.512238Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:964:2816], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-12-23T19:01:22.549045Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1022:2855] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:01:22.927396Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1053:2872], serverId# [2:1054:2873], sessionId# [0:0:0] 2025-12-23T19:01:22.927909Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2025-12-23T19:01:22.928093Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2025-12-23T19:01:22.941455Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:22.968377Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1061:2879], serverId# [2:1062:2880], sessionId# [0:0:0] 2025-12-23T19:01:22.969294Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-12-23T19:01:22.983518Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-12-23T19:01:22.983596Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:22.983837Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T19:01:22.983882Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-12-23T19:01:22.984201Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:22.984246Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:01:22.984306Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T19:01:22.984374Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:22.984464Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1061:2879], serverId# [2:1062:2880], sessionId# [0:0:0] 2025-12-23T19:01:22.985463Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T19:01:22.985844Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T19:01:22.986060Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:22.986107Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T19:01:22.986168Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2025-12-23T19:01:22.986390Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-12-23T19:01:22.986449Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:22.995528Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-12-23T19:01:22.995820Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-12-23T19:01:22.996038Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-12-23T19:01:22.996093Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-12-23T19:01:23.066984Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-12-23T19:01:23.067055Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2025-12-23T19:01:23.067197Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:23.067236Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-12-23T19:01:23.067274Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-12-23T19:01:23.067396Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:01:23.068028Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:23.068098Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T19:01:06.690361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:01:07.038194Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:01:07.096491Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:01:07.097021Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:01:07.097132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:01:08.495721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:01:08.495867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:01:08.743577Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516462075120 != 1766516462075124 2025-12-23T19:01:08.785401Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:01:08.864269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:01:09.020368Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00611f/r3tmp/tmpLkapwr/pdisk_1.dat 2025-12-23T19:01:09.520428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:01:09.548217Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:01:09.718099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:01:09.848916Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T19:01:09.849283Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T19:01:09.968803Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T19:01:09.968978Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T19:01:09.971418Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T19:01:09.971529Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T19:01:09.971598Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T19:01:09.976763Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T19:01:09.976954Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T19:01:09.977066Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T19:01:09.991480Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T19:01:10.091349Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T19:01:10.091580Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T19:01:10.091701Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T19:01:10.091754Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:01:10.091804Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T19:01:10.091844Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:10.092556Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T19:01:10.092680Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T19:01:10.093214Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:10.093349Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:01:10.093399Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T19:01:10.093446Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:10.093555Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T19:01:10.093692Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T19:01:10.094156Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T19:01:10.099391Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T19:01:10.101533Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T19:01:10.112662Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T19:01:10.112768Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T19:01:10.249454Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:916:2782], serverId# [1:918:2784], sessionId# [0:0:0] 2025-12-23T19:01:10.282615Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T19:01:10.291666Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:10.293030Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:10.293100Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T19:01:10.293174Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T19:01:10.293478Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T19:01:10.293682Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T19:01:10.294130Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:10.294208Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T19:01:10.327390Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T19:01:10.327974Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:01:10.330184Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T19:01:10.330261Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:10.335062Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T19:01:10.335195Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:10.336969Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:10.337033Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:01:10.337084Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T19:01:10.337170Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T19:01:10.337225Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 
txId 281474976715657 state Ready TxInFly 0 2025-12-23T19:01:10.337326Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:10.359539Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T19:01:10.361705Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T19:01:10.361782Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 7207518622403 ... on=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:01:22.144988Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:01:22.607675Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:01:22.607886Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:01:22.662913Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766516474953007 != 1766516474953010 2025-12-23T19:01:22.683029Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:01:22.772441Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:01:22.847284Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00611f/r3tmp/tmpjAb4Ff/pdisk_1.dat 2025-12-23T19:01:23.300444Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:01:23.320827Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:01:23.451887Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:01:23.517978Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:884:2764] 2025-12-23T19:01:23.518241Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T19:01:23.631521Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T19:01:23.631680Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T19:01:23.635665Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T19:01:23.635788Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T19:01:23.635891Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T19:01:23.636337Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T19:01:23.636490Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T19:01:23.636591Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:900:2764] in generation 1 2025-12-23T19:01:23.650086Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T19:01:23.650173Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T19:01:23.650321Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T19:01:23.650424Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:902:2774] 2025-12-23T19:01:23.650466Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:01:23.650509Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T19:01:23.650567Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:23.651037Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T19:01:23.651180Z node 2 :TX_DATASHARD DEBUG: 
datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T19:01:23.651265Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:23.651308Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:01:23.651352Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T19:01:23.651395Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:23.651527Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:882:2762], serverId# [2:887:2765], sessionId# [0:0:0] 2025-12-23T19:01:23.651973Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T19:01:23.652199Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T19:01:23.652292Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T19:01:23.654217Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T19:01:23.664969Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T19:01:23.665120Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-12-23T19:01:23.805624Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:916:2782], serverId# [2:918:2784], sessionId# [0:0:0] 2025-12-23T19:01:23.806348Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-12-23T19:01:23.806419Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:23.808062Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:23.808140Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T19:01:23.808203Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2025-12-23T19:01:23.808530Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2025-12-23T19:01:23.808680Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-12-23T19:01:23.809192Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 
2025-12-23T19:01:23.809266Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2025-12-23T19:01:23.809739Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T19:01:23.810237Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:01:23.820288Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-12-23T19:01:23.820374Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:23.821568Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2025-12-23T19:01:23.821667Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:23.822539Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:23.822594Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:01:23.822657Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-12-23T19:01:23.822730Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-12-23T19:01:23.835951Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-12-23T19:01:23.836177Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:23.839312Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T19:01:23.848639Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2025-12-23T19:01:23.848957Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-12-23T19:01:23.849058Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-12-23T19:01:23.867751Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:952:2810], serverId# [2:953:2811], sessionId# [0:0:0] 2025-12-23T19:01:23.867900Z node 2 :TX_DATASHARD NOTICE: datashard__op_rows.cpp:209: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table 2025-12-23T19:01:23.868108Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:952:2810], serverId# [2:953:2811], sessionId# [0:0:0] |99.0%| [TA] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> TxUsage::Sinks_Oltp_WriteToTopics_1_Table [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> DSProxyStrategyTest::Restore_block42 [GOOD] >> TxUsage::WriteToTopic_Demo_47_Table [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DSProxyStrategyTest::Restore_block42 [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> TxUsage::WriteToTopic_Demo_47_Query >> TxUsage::Sinks_Oltp_WriteToTopics_1_Query >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_20_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 20] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_12_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 12] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> TxUsage::WriteToTopic_Demo_38_Query >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[lz4_compression-COMPRESSION = "lz4"] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] >> LocalPartition::WithoutPartitionWithSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T19:01:06.734891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:01:07.056024Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:01:07.100168Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:01:07.100522Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:01:07.100600Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:01:08.499772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:01:08.499936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:01:08.767944Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516462075469 != 1766516462075473 2025-12-23T19:01:08.799064Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:01:08.871860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:01:09.021906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00610e/r3tmp/tmpVFOIw4/pdisk_1.dat 2025-12-23T19:01:09.532700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:01:09.555476Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:01:09.713318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:01:09.873200Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:906:2780] 2025-12-23T19:01:09.873618Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T19:01:10.049860Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T19:01:10.050210Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T19:01:10.052185Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T19:01:10.052268Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T19:01:10.052343Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T19:01:10.052752Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T19:01:10.053103Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:910:2782] 2025-12-23T19:01:10.053335Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T19:01:10.063866Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T19:01:10.064027Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:947:2780] in generation 1 2025-12-23T19:01:10.064935Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:914:2786] 2025-12-23T19:01:10.065177Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T19:01:10.078889Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T19:01:10.079068Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T19:01:10.080643Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-12-23T19:01:10.080717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-12-23T19:01:10.080772Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-12-23T19:01:10.081110Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T19:01:10.081372Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T19:01:10.081460Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:955:2782] in generation 1 2025-12-23T19:01:10.081860Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T19:01:10.081942Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T19:01:10.083773Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-12-23T19:01:10.083854Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2025-12-23T19:01:10.083902Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2025-12-23T19:01:10.084273Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T19:01:10.084390Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T19:01:10.084464Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:956:2786] in generation 1 2025-12-23T19:01:10.095641Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T19:01:10.155727Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T19:01:10.156003Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T19:01:10.156154Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:960:2812] 2025-12-23T19:01:10.156211Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate 
change sender: at tablet: 72075186224037888 2025-12-23T19:01:10.156261Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T19:01:10.156308Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:10.156748Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T19:01:10.156794Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-12-23T19:01:10.156861Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T19:01:10.156930Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:961:2813] 2025-12-23T19:01:10.156960Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-12-23T19:01:10.156985Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-12-23T19:01:10.157011Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T19:01:10.157391Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T19:01:10.157440Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2025-12-23T19:01:10.157500Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T19:01:10.157561Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:962:2814] 2025-12-23T19:01:10.157584Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2025-12-23T19:01:10.157606Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-12-23T19:01:10.157630Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-12-23T19:01:10.163441Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T19:01:10.163632Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T19:01:10.163981Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:10.164057Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:01:10.164144Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T19:01:10.164218Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:10.164293Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-12-23T19:01:10.164377Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-12-23T19:01:10.164602Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:897:2775], serverId# [1:911:2783], sessionId# [0:0:0] 2025-12-23T19:01:10.164666Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T19:01:10.164697Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:01:10.164723Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-12-23T19:01:10.164755Z no ... d__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-12-23T19:01:37.185690Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-12-23T19:01:37.185856Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-12-23T19:01:37.185934Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-12-23T19:01:37.186003Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1292:3021] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 1 2025-12-23T19:01:37.186127Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-12-23T19:01:37.186194Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1292:3021] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037889, status# 1 2025-12-23T19:01:37.186260Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2025-12-23T19:01:37.186312Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2025-12-23T19:01:37.186378Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2025-12-23T19:01:37.186415Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2025-12-23T19:01:37.186447Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1292:3021] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 1 2025-12-23T19:01:37.186477Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:904: [DistEraser] [3:1292:3021] Register plan: txId# 281474976715662, minStep# 2502, maxStep# 32502 2025-12-23T19:01:37.200090Z node 3 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 72075186224037888 2025-12-23T19:01:37.202224Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-12-23T19:01:37.205472Z node 3 :TX_DATASHARD ERROR: datashard_distributed_erase.cpp:167: [DistEraser] [3:1292:3021] Reply: txId# 281474976715662, status# SHARD_UNKNOWN, 
error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976715662, shard# 72075186224037888 2025-12-23T19:01:37.205649Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset 2025-12-23T19:01:37.205727Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037888 from 72075186224037890 is reset 2025-12-23T19:01:37.206561Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2025-12-23T19:01:37.206622Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-12-23T19:01:37.206854Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1286:3016], serverId# [3:1287:3017], sessionId# [0:0:0] 2025-12-23T19:01:37.207317Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-12-23T19:01:37.207378Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-12-23T19:01:37.207426Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 1 2025-12-23T19:01:37.207488Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-12-23T19:01:37.264637Z node 3 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:1303:3031] 2025-12-23T19:01:37.264976Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T19:01:37.270913Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T19:01:37.287201Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T19:01:37.289625Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T19:01:37.289743Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T19:01:37.289790Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T19:01:37.290255Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T19:01:37.290632Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T19:01:37.290702Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [3:1318:3031] in generation 2 2025-12-23T19:01:37.303845Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T19:01:37.304002Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037888 2025-12-23T19:01:37.304153Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-12-23T19:01:37.304610Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [3:1320:3039] 2025-12-23T19:01:37.304689Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:01:37.304752Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change 
sender activated: at tablet: 72075186224037888 2025-12-23T19:01:37.304798Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:37.305151Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2025-12-23T19:01:37.305448Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2025-12-23T19:01:37.306836Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T19:01:37.306965Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T19:01:37.307147Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-12-23T19:01:37.307360Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 2501 2025-12-23T19:01:37.307410Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:01:37.307539Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:01:37.307591Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-12-23T19:01:37.307640Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 1 2025-12-23T19:01:37.307686Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:01:37.307792Z node 3 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2025-12-23T19:01:37.307871Z node 3 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2025-12-23T19:01:37.307926Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2025-12-23T19:01:37.308148Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715661 2025-12-23T19:01:37.308247Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-12-23T19:01:37.308305Z node 3 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 2501:281474976715661 at 72075186224037889 2025-12-23T19:01:37.308357Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-12-23T19:01:37.308441Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037889 {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-12-23T19:01:37.308525Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 
72075186224037888 coordinator 72057594046316545 last step 2500 next step 2501 2025-12-23T19:01:37.308602Z node 3 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2025-12-23T19:01:37.308630Z node 3 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2025-12-23T19:01:37.308681Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2025-12-23T19:01:37.308860Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715661 2025-12-23T19:01:37.309009Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715661 2025-12-23T19:01:37.309067Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-12-23T19:01:37.309103Z node 3 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 2501:281474976715661 at 72075186224037890 2025-12-23T19:01:37.309138Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2025-12-23T19:01:37.309179Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037890 {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-12-23T19:01:37.309257Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Query [GOOD] >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_simple.py::TestSimple::test[alter_tablestore] >> BasicUsage::AlterDeadLetterPolicy_StreamingConsumer [GOOD] >> BasicUsage::ConflictingWrites |99.0%| [TA] $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Table >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Table [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Query >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_6_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 6] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> TxUsage::WriteToTopic_Demo_47_Query [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_1_Query [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Table |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Table [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] >> TxUsage::WriteToTopic_Demo_38_Query [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> TxUsage::Sinks_Oltp_WriteToTopics_2_Table |99.0%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |99.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test[table] >> TxUsage::Write_And_Read_Small_Messages_1 >> TxUsage::WriteToTopic_Demo_40_Table |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Query |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Query [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Table [GOOD] |99.0%| [TA] $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Table >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test[tablestores] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_select.py::TestDML::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Table [GOOD] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query >> TxUsage::Write_And_Read_Small_Messages_1 [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.0%| [TM] 
{BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> LocalPartition::WithoutPartitionWithSplit [FAIL] >> TxUsage::ReadRuleGeneration >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_table] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test_multi[table] [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] >> TxUsage::Write_And_Read_Small_Messages_2 >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics 2025-12-23 19:02:14,363 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-12-23 19:02:14,852 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1506564 58.6M 58.6M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/8ptz/003236/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk15/testing_out_stuff/test_tool.arg 1506880 2.1G 2.1G 1.6G └─ ydb-core-statistics-service-ut --trace-path-append /home/runner/.ya/build/build_root/8ptz/003236/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:52:18.546950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:18.656827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:52:18.663050Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:611:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:52:18.663417Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:52:18.663579Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:52:18.998127Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:19.114013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:19.114139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:19.149114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:19.248561Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003236/r3tmp/tmpND04OQ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 65333, node 1 2025-12-23T18:52:19.915461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:52:19.916146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:52:19.916192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:52:19.916220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:52:19.916542Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:52:19.980748Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:23896 2025-12-23T18:52:20.565868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:52:23.346835Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:52:23.353320Z node 4 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 4 2025-12-23T18:52:23.380525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:23.380625Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:23.409099Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T18:52:23.411641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:23.577064Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:52:23.577188Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:52:23.578662Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.579311Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.579930Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.580719Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.581015Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.581218Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.581336Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.581578Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.581719Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:52:23.597286Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:52:23.781108Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:52:23.813622Z node 4 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:52:23.813719Z node 4 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:52:23.851618Z node 4 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:52:23.853257Z node 4 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:52:23.853489Z node 4 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:52:23.853552Z node 4 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-12-23T18:52:23.853603Z node 4 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:52:23.853672Z node 4 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:52:23.853733Z node 4 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 
2025-12-23T18:52:23.853789Z node 4 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:52:23.854958Z node 4 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:52:23.856305Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [4:2076:2454] 2025-12-23T18:52:23.859744Z node 4 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-12-23T18:52:23.863171Z node 4 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [4:2285:2583] Owner: [4:2284:2582]. Describe result: PathErrorUnknown 2025-12-23T18:52:23.863214Z node 4 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [4:2285:2583] Owner: [4:2284:2582]. Creating table 2025-12-23T18:52:23.863273Z node 4 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [4:2285:2583] Owner: [4:2284:2582]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:52:23.876708Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:23.876844Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [4:2315:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:52:23.880050Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [4:2327:2609], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:23.905873Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [4:2386:2649] 2025-12-23T18:52:23.906485Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:2386:2649], schemeshard id = 72075186224037897 2025-12-23T18:52:23.909847Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:2296:2587] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-12-23T18:52:23.912720Z node 4 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [4:2285:2583] Owner: [4:2284:2582]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-12-23T18:52:24.043913Z node 4 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:52:24.154023Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-12-23T18:52:24.325765Z node 4 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [4:2285:2583] Owner: [4:2284:2582]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2025-12-23T18:52:24.328071Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [4:2744:2884], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:52:24.331902Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:52:24.335769Z node 4 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [4:2285:2583] Owner: [4:2284:2582]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:52:24.335890Z node 4 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [4:2285:2583] Owner: [4:2 ... ActorId: [4:8194:5669], RunDataQuery with SessionId: ydb://session/3?node_id=4&id=MjRkZWY0MzktMjdkNTI2NzQtYmU5MWFkNWItNzkxOTMyZGU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T19:00:17.430324Z node 4 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [4:8210:5683]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T19:00:17.430919Z node 4 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-12-23T19:00:17.431171Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 4, schemeshard count = 1, urgent = 0 2025-12-23T19:00:17.431245Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 4, schemeshard count = 1 2025-12-23T19:00:17.431537Z node 4 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 4 cookie: 18446744073709551615 2025-12-23T19:00:17.431605Z node 4 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [4:8210:5683], StatRequests.size() = 1 2025-12-23T19:00:17.431766Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T19:00:17.662231Z node 4 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [4:8193:5668], ActorId: [4:8194:5669], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=MjRkZWY0MzktMjdkNTI2NzQtYmU5MWFkNWItNzkxOTMyZGU=, TxId: 2025-12-23T19:00:17.662344Z node 4 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [4:8193:5668], ActorId: [4:8194:5669], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=MjRkZWY0MzktMjdkNTI2NzQtYmU5MWFkNWItNzkxOTMyZGU=, TxId: 2025-12-23T19:00:17.662980Z node 4 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [4:8191:5667], 
ActorId: [4:8193:5668], Got response [4:8194:5669] SUCCESS 2025-12-23T19:00:17.663794Z node 4 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T19:00:17.701257Z node 4 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 32] 2025-12-23T19:00:17.701367Z node 4 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=opId1, ActorId=[1:6336:3996] 2025-12-23T19:00:17.709092Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [4:8232:5694] 2025-12-23T19:00:17.714303Z node 4 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:8230:4517] , Record { OperationId: "opId1" Tables { PathId { OwnerId: 72075186224037905 LocalId: 32 } } Types: TYPE_COUNT_MIN_SKETCH Database: "/Root/Serverless2" } 2025-12-23T19:00:17.714430Z node 4 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `opId1', DatabaseName: `/Root/Serverless2', Types: 1 2025-12-23T19:00:17.714496Z node 4 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `opId1', PathId: [OwnerId: 72075186224037905, LocalPathId: 32], ColumnTags: 2025-12-23T19:00:17.753344Z node 4 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2025-12-23T19:00:17.753750Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T19:00:17.753829Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:656: [72075186224037894] ScheduleNextAnalyze. Don't start analyze for table [OwnerId: 72075186224037905, LocalPathId: 32] as there is still no info from its SchemeShard 2025-12-23T19:00:17.753890Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:687: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=opId1 2025-12-23T19:00:17.753940Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:691: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-12-23T19:00:17.820235Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2025-12-23T19:00:17.820352Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-12-23T19:00:17.845859Z node 4 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [4:7789:5422], schemeshard count = 1 2025-12-23T19:00:17.857673Z node 4 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [4:7789:5422], schemeshard count = 1 2025-12-23T19:00:18.252017Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T19:00:18.252112Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:656: [72075186224037894] ScheduleNextAnalyze. Don't start analyze for table [OwnerId: 72075186224037905, LocalPathId: 32] as there is still no info from its SchemeShard 2025-12-23T19:00:18.252155Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:687: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. 
OperationId=opId1 2025-12-23T19:00:18.252188Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:691: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-12-23T19:00:18.756278Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T19:00:18.756375Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:656: [72075186224037894] ScheduleNextAnalyze. Don't start analyze for table [OwnerId: 72075186224037905, LocalPathId: 32] as there is still no info from its SchemeShard 2025-12-23T19:00:18.756416Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:687: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=opId1 2025-12-23T19:00:18.756448Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:691: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-12-23T19:00:18.768448Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T19:00:18.768543Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:656: [72075186224037894] ScheduleNextAnalyze. Don't start analyze for table [OwnerId: 72075186224037905, LocalPathId: 32] as there is still no info from its SchemeShard 2025-12-23T19:00:18.768587Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:687: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=opId1 2025-12-23T19:00:18.768620Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:691: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-12-23T19:00:19.206689Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037905 2025-12-23T19:00:19.206826Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.186000s, at schemeshard: 72075186224037905 2025-12-23T19:00:19.207168Z node 4 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 25, entries count: 1, are all stats full: 0 2025-12-23T19:00:19.223982Z node 4 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T19:00:19.359751Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T19:00:19.366551Z node 4 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [4:8313:2463], ActorId: [4:8323:5738], Bootstrap. 
Database: /Root/Serverless2, IsSystemUser: 0, run create session 2025-12-23T19:00:19.370832Z node 4 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [4:8313:2463], ActorId: [4:8323:5738], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Serverless2/Table2` 2025-12-23T19:00:19.370980Z node 4 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [4:8313:2463], ActorId: [4:8323:5738], Start read next stream part 2025-12-23T19:00:19.482454Z node 4 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [4:8349:5751]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T19:00:19.482832Z node 4 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-12-23T19:00:19.483142Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 4, schemeshard count = 1, urgent = 0 2025-12-23T19:00:19.483200Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 4, schemeshard count = 1 2025-12-23T19:00:19.483333Z node 4 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 4 cookie: 18446744073709551615 2025-12-23T19:00:19.483412Z node 4 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [4:8349:5751], StatRequests.size() = 1 2025-12-23T19:00:19.483568Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/003236/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk15/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/003236/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk15/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TxUsage::WriteToTopic_Demo_40_Table [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> BasicUsage::ConflictingWrites [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_2_Table [GOOD] >> 
TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_2_Query >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> BasicUsage::ConflictingWrites [GOOD] Test command err: 2025-12-23T18:58:54.660552Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129884153953140:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:54.660647Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:58:54.726780Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005e54/r3tmp/tmpGuZqeU/pdisk_1.dat 2025-12-23T18:58:54.925917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:54.926065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:54.951406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:54.988224Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:54.988795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:54.997415Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129884153953035:2081] 1766516334655036 != 1766516334655039 TServer::EnableGrpc on GrpcPort 15526, node 1 2025-12-23T18:58:55.199210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:58:55.233917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005e54/r3tmp/yandex7jSoRd.tmp 2025-12-23T18:58:55.233943Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005e54/r3tmp/yandex7jSoRd.tmp 2025-12-23T18:58:55.234885Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005e54/r3tmp/yandex7jSoRd.tmp 2025-12-23T18:58:55.235011Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:58:55.483140Z INFO: TTestServer started on Port 25936 GrpcPort 15526 TClient is connected to server localhost:25936 2025-12-23T18:58:55.669552Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:15526 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516335059 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:55.760920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:58:55.786450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:58:57.311147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129897038855958:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.311289Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129897038855971:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.311329Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.311651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129897038855973:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.311695Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.318253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:57.321059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129897038856007:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.321155Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.326280Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129897038855975:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-12-23T18:58:57.424467Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129897038856041:2643] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:57.724164Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587129897038856056:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:58:57.729551Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=NTQ5MTQ3ZmMtODM2ZjZhYjMtY2Y4NDRkOGItN2Q1OTFlYTc=, ActorId: [1:7587129897038855956:2328], ActorState: ExecuteState, TraceId: 01kd698amf12fz689m7tfxcfvb, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:58:57.732314Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:58:57.775054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:57.800647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:57.863290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7587129901333823639:2821] 2025-12-23T18:58:59.660849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129884153953140:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:59.660955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initia ... 
9: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:26.419962Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:26.419990Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:26.420022Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:26.420052Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:26.420087Z node 14 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T19:02:26.420147Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'test-topic' partition: 0 messageNo: 18 requestId: cookie: 9 2025-12-23T19:02:26.420193Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'test-topic' partition: 0 messageNo: 19 requestId: cookie: 10 2025-12-23T19:02:26.420281Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-12-23T19:02:26.420316Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-12-23T19:02:26.421557Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 0 2025-12-23T19:02:26.421722Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 99 written { offset: 98 } } write_statistics { persisting_time { nanos: 11000000 } min_queue_wait_time { nanos: 101000000 } max_queue_wait_time { nanos: 101000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-12-23T19:02:26.421752Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] OnAck: seqNo=99, txId=? 2025-12-23T19:02:26.421780Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] Write session: acknoledged message 99 2025-12-23T19:02:26.421969Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 0 2025-12-23T19:02:26.422075Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 100 written { offset: 99 } } write_statistics { persisting_time { nanos: 11000000 } min_queue_wait_time { nanos: 97000000 } max_queue_wait_time { nanos: 97000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-12-23T19:02:26.422099Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] OnAck: seqNo=100, txId=? 
2025-12-23T19:02:26.422119Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] Write session: acknoledged message 100 2025-12-23T19:02:26.421187Z node 14 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 98 partno 0 count 2 parts 0 suffix '63' size 289 2025-12-23T19:02:26.476982Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] Write session will now close 2025-12-23T19:02:26.477098Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] Write session: aborting 2025-12-23T19:02:26.479155Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] Write session: gracefully shut down, all writes complete 2025-12-23T19:02:26.480194Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-12-23T19:02:26.480249Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2025-12-23T19:02:26.480313Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] Write session is aborting and will not restart 2025-12-23T19:02:26.482471Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 13 sessionId: test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0 grpc read done: success: 0 data: 2025-12-23T19:02:26.482502Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 13 sessionId: test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0 grpc read failed 2025-12-23T19:02:26.482537Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 13 sessionId: test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0 grpc closed 2025-12-23T19:02:26.482552Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 13 sessionId: test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0 is DEAD 2025-12-23T19:02:26.483445Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T19:02:26.483645Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [14:7587130780976639923:2710] destroyed 2025-12-23T19:02:26.483696Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2025-12-23T19:02:26.483757Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:26.483784Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:26.483807Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:26.483841Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:26.483866Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:26.490437Z node 14 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2025-12-23T19:02:26.490716Z node 14 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path test-topic 2025-12-23T19:02:26.492888Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037892] server connected, pipe [14:7587130793861541877:2730], now have 1 active actors on pipe 2025-12-23T19:02:26.492964Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][test-topic] pipe [14:7587130793861541878:2731] connected; active server actors: 1 2025-12-23T19:02:26.493730Z node 14 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186224037892][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 3600 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-12-23T19:02:26.502063Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [14:7587130793861541877:2730] destroyed 2025-12-23T19:02:26.511952Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|17c6ddd8-1f7a49e0-ad829357-c3fc2a13_0] PartitionId [0] Generation [11] Write session: destroy 2025-12-23T19:02:26.515381Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:26.515429Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:26.515456Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:26.515484Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:26.515508Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:26.619050Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:26.619100Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:26.619130Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:26.619172Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:26.619199Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:26.720096Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:26.720145Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:26.720186Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:26.720217Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:26.720247Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:26.822917Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:26.822976Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:26.823009Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:26.823043Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:26.823072Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Query [GOOD] >> TxUsage::WriteToTopic_Demo_40_Query >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Table >> test_ttl.py::TestTTLDefaultEnv::test_case >> test_ttl.py::TestTTLAlterSettings::test_case ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query [GOOD] Test command err: 2025-12-23T18:58:54.667139Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129886481962499:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:54.667317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:58:54.727687Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005e52/r3tmp/tmpHTzR0y/pdisk_1.dat 2025-12-23T18:58:54.926872Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:54.943365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:54.943460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:54.992351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:55.049207Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63404, node 1 2025-12-23T18:58:55.140431Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:58:55.233833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005e52/r3tmp/yandex16RBTb.tmp 2025-12-23T18:58:55.233862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005e52/r3tmp/yandex16RBTb.tmp 2025-12-23T18:58:55.234843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005e52/r3tmp/yandex16RBTb.tmp 2025-12-23T18:58:55.234945Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2025-12-23T18:58:55.480450Z INFO: TTestServer started on Port 22497 GrpcPort 63404 TClient is connected to server localhost:22497 2025-12-23T18:58:55.680389Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:63404 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516335066 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:55.776831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:58:55.806848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:58:57.846848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129899366865389:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.846950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.847096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129899366865398:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.847392Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129899366865404:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.847505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.850560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:57.860070Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129899366865406:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:58:58.097393Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129899366865470:2644] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:58.127435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:58.158159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:58.227094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:58.239339Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587129903661832774:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:58:58.241337Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=NzM0ZjM0NjYtZDhjYWI5NzEtMTk3M2Y2YS1hZjk1ODM5ZQ==, ActorId: [1:7587129899366865386:2329], ActorState: ExecuteState, TraceId: 01kd698b5m9e2p9eaj79fmyw67, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:58:58.243197Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7587129903661833049:2821] 2025-12-23T18:58:59.666730Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129886481962499:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:59.666825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-12-23T18:59:04.304004Z :WriteToTopic_Demo_12_Table INFO: TTopicSdkTestSetup started 2025-12-23T18:59:04.316924Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-12-23T18:59:04.332852Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7587129929431637066:2931] connected; active server actors: 1 2025-12-23T18:59:04.333406Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2025-12-23T18:59:04.335172Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 720575940466444 ... 
OXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_14_1_12306072537326987981_v1 grpc closed 2025-12-23T19:02:30.263354Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [14:7587130805788689527:2550]: session cookie 2 consumer test-consumer session test-consumer_14_1_12306072537326987981_v1grpc read failed 2025-12-23T19:02:30.263387Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_14_1_12306072537326987981_v1 is DEAD 2025-12-23T19:02:30.263387Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [14:7587130805788689527:2550]: session cookie 2 consumer test-consumer session test-consumer_14_1_12306072537326987981_v1 grpc closed 2025-12-23T19:02:30.263408Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [14:7587130805788689527:2550]: session cookie 2 consumer test-consumer session test-consumer_14_1_12306072537326987981_v1 proxy is DEAD 2025-12-23T19:02:30.263975Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|dad3d3c5-54cb9a3a-c6d04c90-d24a9b68_0 grpc read done: success: 0 data: 2025-12-23T19:02:30.263992Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|dad3d3c5-54cb9a3a-c6d04c90-d24a9b68_0 grpc read failed 2025-12-23T19:02:30.264029Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|dad3d3c5-54cb9a3a-c6d04c90-d24a9b68_0 grpc closed 2025-12-23T19:02:30.264045Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|dad3d3c5-54cb9a3a-c6d04c90-d24a9b68_0 is DEAD 2025-12-23T19:02:30.264585Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T19:02:30.264849Z node 14 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [14:7587130805788689519:2545] disconnected. 2025-12-23T19:02:30.264880Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [14:7587130805788689519:2545] disconnected; active server actors: 1 2025-12-23T19:02:30.264903Z node 14 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [14:7587130805788689519:2545] client test-consumer disconnected session test-consumer_14_1_12306072537326987981_v1 2025-12-23T19:02:30.265033Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037894] Destroy direct read session test-consumer_14_1_12306072537326987981_v1 2025-12-23T19:02:30.265069Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037894] server disconnected, pipe [14:7587130805788689522:2548] destroyed 2025-12-23T19:02:30.265104Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037894] server disconnected, pipe [14:7587130797198754875:2527] destroyed 2025-12-23T19:02:30.265139Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2025-12-23T19:02:30.265170Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:30.265192Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.265205Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:30.265227Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.265245Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:30.265317Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_14_1_12306072537326987981_v1 2025-12-23T19:02:30.271293Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:30.271333Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.271348Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:30.271367Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.271382Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:30.315346Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:30.315399Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.315416Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:30.315442Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.315465Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:30.371554Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:30.371593Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.371610Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:30.371632Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.371650Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:30.419098Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:30.419132Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.419163Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:30.419181Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.419195Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:30.472143Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:30.472176Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.472194Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:30.472214Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.472232Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:30.522873Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:30.522917Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.522934Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:30.522955Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.522973Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:30.572250Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:30.572288Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.572314Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:30.572335Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.572353Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:30.632157Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:30.632200Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.632216Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:30.632239Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2025-12-23T19:02:30.632254Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:30.672694Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:30.672735Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.672750Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:30.672770Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.672786Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:30.732748Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:30.732789Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.732806Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:30.732827Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:30.732843Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> TxUsage::ReadRuleGeneration [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> TxUsage::Write_And_Read_Small_Messages_2 [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |99.0%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |99.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query [GOOD] Test command err: 2025-12-23T18:58:54.667466Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129885418074547:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:54.667740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:58:54.726662Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005e2f/r3tmp/tmpPhUjh6/pdisk_1.dat 2025-12-23T18:58:54.916558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:54.928435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:54.928538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:54.985470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:54.993858Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:54.998367Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129885418074520:2081] 1766516334665625 != 1766516334665628 TServer::EnableGrpc on GrpcPort 64499, node 1 2025-12-23T18:58:55.142031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:58:55.233960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005e2f/r3tmp/yandexTAyU5X.tmp 2025-12-23T18:58:55.233990Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005e2f/r3tmp/yandexTAyU5X.tmp 2025-12-23T18:58:55.234859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005e2f/r3tmp/yandexTAyU5X.tmp 2025-12-23T18:58:55.234970Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:58:55.480417Z INFO: TTestServer started on Port 6906 GrpcPort 64499 TClient is connected to server localhost:6906 2025-12-23T18:58:55.676253Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:64499 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516335066 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:55.775584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:58:55.781902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:58:55.796336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:58:57.344200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129898302977439:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.344214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129898302977453:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.344284Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.344565Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129898302977459:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.344606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.347749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:57.355968Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129898302977460:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:58:57.412175Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129898302977525:2643] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:57.723928Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587129898302977540:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:58:57.729631Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=Yjk5YzM4NjQtOTA1YjMxOGYtNTBiMzNmM2ItYzQ3OGRkMTc=, ActorId: [1:7587129898302977426:2327], ActorState: ExecuteState, TraceId: 01kd698anp565c5678ez7dsnjp, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:58:57.732219Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:58:57.775088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:57.800518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:57.864998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7587129902597945110:2820] 2025-12-23T18:58:59.667848Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129885418074547:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:59.667941Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-12-23T18:59:03.994398Z :WriteToTopic_Demo_21_RestartNo_Table INFO: TTopicSdkTestSetup started 2025-12-23T18:59:04.006350Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-12-23T18:59:04.022637Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7587129928367749121:2927] connecte ... 72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.368786Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:43.368808Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.368827Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:43.437517Z :INFO: [/Root] [/Root] [baf22940-cc413195-aea4a879-ced28614] Closing read session. Close timeout: 0.000000s 2025-12-23T19:02:43.437613Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:1:2 2025-12-23T19:02:43.437666Z :INFO: [/Root] [/Root] [baf22940-cc413195-aea4a879-ced28614] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2124 BytesRead: 14000000 MessagesRead: 2 BytesReadCompressed: 14000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T19:02:43.437777Z :NOTICE: [/Root] [/Root] [baf22940-cc413195-aea4a879-ced28614] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T19:02:43.437825Z :DEBUG: [/Root] [/Root] [baf22940-cc413195-aea4a879-ced28614] [] Abort session to cluster 2025-12-23T19:02:43.438896Z :DEBUG: [/Root] 0x00007D9E222A4190 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_1510139751092718341_v1 Close 2025-12-23T19:02:43.439400Z :DEBUG: [/Root] 0x00007D9E222A4190 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_1510139751092718341_v1 Close 2025-12-23T19:02:43.439558Z :NOTICE: [/Root] [/Root] [baf22940-cc413195-aea4a879-ced28614] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T19:02:43.439264Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:43.439295Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.439310Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:43.439328Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.439345Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:43.439430Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_13_1_1510139751092718341_v1 grpc read done: success# 0, data# { } 2025-12-23T19:02:43.439446Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_13_1_1510139751092718341_v1 grpc read failed 2025-12-23T19:02:43.439472Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_13_1_1510139751092718341_v1 grpc closed 2025-12-23T19:02:43.439508Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_13_1_1510139751092718341_v1 is DEAD 2025-12-23T19:02:43.441007Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [13:7587130859360249893:2530] disconnected. 2025-12-23T19:02:43.441038Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [13:7587130859360249893:2530] disconnected; active server actors: 1 2025-12-23T19:02:43.441045Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_1510139751092718341_v1 2025-12-23T19:02:43.441057Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [13:7587130859360249893:2530] client test-consumer disconnected session test-consumer_13_1_1510139751092718341_v1 2025-12-23T19:02:43.441081Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037894] server disconnected, pipe [13:7587130859360249896:2533] destroyed 2025-12-23T19:02:43.441146Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [13:7587130859360249901:2535]: session cookie 2 consumer test-consumer session test-consumer_13_1_1510139751092718341_v1 grpc read done: success# 0, data# { } 2025-12-23T19:02:43.441167Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:399: Direct read cache: DestroyPartitionSession, sessionId: test-consumer_13_1_1510139751092718341_v1, proxy: [13:7587130859360249901:2535] 2025-12-23T19:02:43.441173Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [13:7587130859360249901:2535]: session cookie 2 consumer test-consumer session test-consumer_13_1_1510139751092718341_v1grpc read failed 2025-12-23T19:02:43.441197Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_13_1_1510139751092718341_v1 2025-12-23T19:02:43.441202Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy 
[13:7587130859360249901:2535]: session cookie 2 consumer test-consumer session test-consumer_13_1_1510139751092718341_v1 grpc closed 2025-12-23T19:02:43.441228Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [13:7587130859360249901:2535]: session cookie 2 consumer test-consumer session test-consumer_13_1_1510139751092718341_v1 proxy is DEAD 2025-12-23T19:02:43.442636Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e27d8ab5-43a62ef0-4190ec46-d14b969c_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2025-12-23T19:02:43.442681Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e27d8ab5-43a62ef0-4190ec46-d14b969c_0] PartitionId [0] Generation [2] Write session will now close 2025-12-23T19:02:43.442732Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|e27d8ab5-43a62ef0-4190ec46-d14b969c_0] PartitionId [0] Generation [2] Write session: aborting 2025-12-23T19:02:43.443196Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|e27d8ab5-43a62ef0-4190ec46-d14b969c_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2025-12-23T19:02:43.443246Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|e27d8ab5-43a62ef0-4190ec46-d14b969c_0] PartitionId [0] Generation [2] Write session: destroy 2025-12-23T19:02:43.450577Z node 13 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|e27d8ab5-43a62ef0-4190ec46-d14b969c_0 grpc read done: success: 0 data: 2025-12-23T19:02:43.450609Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|e27d8ab5-43a62ef0-4190ec46-d14b969c_0 grpc read failed 2025-12-23T19:02:43.450646Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|e27d8ab5-43a62ef0-4190ec46-d14b969c_0 grpc closed 2025-12-23T19:02:43.450664Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|e27d8ab5-43a62ef0-4190ec46-d14b969c_0 is DEAD 2025-12-23T19:02:43.451635Z node 13 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T19:02:43.457791Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037894] server disconnected, pipe [13:7587130850770315247:2511] destroyed 2025-12-23T19:02:43.457848Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2025-12-23T19:02:43.457880Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:43.457900Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.457915Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:43.457937Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.457952Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:43.469786Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:43.469836Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.469862Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:43.469895Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.469927Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:43.539779Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:43.539813Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.539831Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:43.539853Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.539872Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:43.570239Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:43.570279Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.570296Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:43.570318Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.570336Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:43.643058Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:43.643098Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.643115Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2025-12-23T19:02:43.643137Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:43.643172Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Write_And_Read_Small_Messages_2 [GOOD] Test command err: 2025-12-23T18:58:54.660772Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129884688125361:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:54.660864Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:58:54.727032Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005e59/r3tmp/tmpin5tjQ/pdisk_1.dat 2025-12-23T18:58:54.916379Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:54.941216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:54.941313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:55.006249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:55.026173Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:55.033463Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129884688125256:2081] 1766516334654983 != 1766516334654986 TServer::EnableGrpc on GrpcPort 19107, node 1 2025-12-23T18:58:55.105150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:58:55.234190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005e59/r3tmp/yandexe459SN.tmp 2025-12-23T18:58:55.234221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005e59/r3tmp/yandexe459SN.tmp 2025-12-23T18:58:55.234888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/8ptz/005e59/r3tmp/yandexe459SN.tmp 2025-12-23T18:58:55.234999Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:58:55.480359Z INFO: TTestServer started on Port 24083 GrpcPort 19107 TClient is connected to server localhost:24083 2025-12-23T18:58:55.670964Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:19107 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516335080 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:55.776544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:58:55.797856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:58:55.923355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:58:57.991977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129897573028181:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.992142Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.992388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129897573028192:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.992536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129897573028197:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.992599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.996036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:58.004871Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129897573028199:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:58:58.067326Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129901867995559:2642] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:58.261118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:58.264614Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587129901867995574:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:58:58.265142Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=OTc0YzdkM2MtZjljMDI0MDEtMzc3YTg1NzctMjQ3ZTg0YTg=, ActorId: [1:7587129897573028164:2328], ActorState: ExecuteState, TraceId: 01kd698b9929gq01jdzmhxada4, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:58:58.267794Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:58:58.289630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:58.354844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7587129901867995846:2818] 2025-12-23T18:58:59.660685Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129884688125361:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:59.660898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-12-23T18:59:04.485219Z :WriteToTopic_Demo_41_Table INFO: TTopicSdkTestSetup started 2025-12-23T18:59:04.496992Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-12-23T18:59:04.512533Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7587129927637799866:2929] connected; activ ... imeout: 0.000000s 2025-12-23T19:02:48.499754Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:319:320 2025-12-23T19:02:48.499823Z :INFO: [/Root] [/Root] [d2b056f6-90ecbc53-386a8d01-495c9323] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2001 BytesRead: 20480000 MessagesRead: 320 BytesReadCompressed: 20480000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T19:02:48.499952Z :NOTICE: [/Root] [/Root] [d2b056f6-90ecbc53-386a8d01-495c9323] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T19:02:48.500006Z :DEBUG: [/Root] [/Root] [d2b056f6-90ecbc53-386a8d01-495c9323] [] Abort session to cluster 2025-12-23T19:02:48.500575Z :DEBUG: [/Root] 0x00007DBD9D623590 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_18357438293002860118_v1 Close 2025-12-23T19:02:48.501046Z :DEBUG: [/Root] 0x00007DBD9D623590 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_18357438293002860118_v1 Close 2025-12-23T19:02:48.501193Z :NOTICE: [/Root] [/Root] [d2b056f6-90ecbc53-386a8d01-495c9323] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T19:02:48.507549Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_13_1_18357438293002860118_v1 grpc read done: success# 0, data# { } 2025-12-23T19:02:48.507595Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_13_1_18357438293002860118_v1 grpc read failed 2025-12-23T19:02:48.507633Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_13_1_18357438293002860118_v1 grpc closed 2025-12-23T19:02:48.507674Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_13_1_18357438293002860118_v1 is DEAD 2025-12-23T19:02:48.508566Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [13:7587130879957479026:2556]: session cookie 2 consumer test-consumer session test-consumer_13_1_18357438293002860118_v1 grpc read done: success# 0, data# { } 2025-12-23T19:02:48.508589Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [13:7587130879957479026:2556]: session cookie 2 consumer test-consumer session test-consumer_13_1_18357438293002860118_v1grpc read failed 2025-12-23T19:02:48.508619Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [13:7587130879957479026:2556]: session cookie 2 consumer test-consumer session test-consumer_13_1_18357438293002860118_v1 grpc closed 2025-12-23T19:02:48.508641Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [13:7587130879957479026:2556]: session cookie 2 consumer test-consumer session test-consumer_13_1_18357438293002860118_v1 proxy is DEAD 2025-12-23T19:02:48.510636Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [13:7587130879957479011:2550] disconnected. 
2025-12-23T19:02:48.510696Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [13:7587130879957479011:2550] disconnected; active server actors: 1 2025-12-23T19:02:48.510722Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [13:7587130879957479011:2550] client test-consumer disconnected session test-consumer_13_1_18357438293002860118_v1 2025-12-23T19:02:48.512014Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_18357438293002860118_v1 2025-12-23T19:02:48.512064Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037894] server disconnected, pipe [13:7587130879957479014:2553] destroyed 2025-12-23T19:02:48.512158Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_13_1_18357438293002860118_v1 2025-12-23T19:02:48.538942Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:48.538990Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.539007Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:48.539031Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.539049Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:48.540536Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:48.540557Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.540571Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:48.540601Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.540617Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:48.639481Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:48.639556Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.639585Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:48.639632Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.639665Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:48.639757Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:48.639778Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.639792Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:48.639815Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.639832Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:48.741342Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:48.741382Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.741396Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:48.741419Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.741436Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:48.741494Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:48.741506Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.741517Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:48.741529Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.741540Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:48.841695Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:48.841739Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.841754Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:48.841774Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.841790Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:48.841853Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:48.841867Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.841877Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:48.841893Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2025-12-23T19:02:48.841903Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:48.947298Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:48.947344Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.947362Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:48.947385Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.947403Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:48.947453Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:48.947464Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.947476Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:48.947487Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:48.947495Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> test_ttl.py::TestTTLOnIndexedTable::test_case |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::ReadRuleGeneration [GOOD] Test command err: 2025-12-23T18:58:54.660075Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129882760014712:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:54.660409Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:58:54.727497Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005e5b/r3tmp/tmpQppnrt/pdisk_1.dat 2025-12-23T18:58:54.898024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:54.928768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:54.928885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:54.997359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:55.015975Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129882760014673:2081] 
1766516334655044 != 1766516334655047 2025-12-23T18:58:55.019745Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25786, node 1 2025-12-23T18:58:55.109623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:58:55.234790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005e5b/r3tmp/yandexVNYRhL.tmp 2025-12-23T18:58:55.234821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005e5b/r3tmp/yandexVNYRhL.tmp 2025-12-23T18:58:55.234963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005e5b/r3tmp/yandexVNYRhL.tmp 2025-12-23T18:58:55.235068Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:58:55.480482Z INFO: TTestServer started on Port 19376 GrpcPort 25786 TClient is connected to server localhost:19376 2025-12-23T18:58:55.668546Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:25786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516335073 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:55.760741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:58:55.787132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:58:57.326611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129895644917579:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.326867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129895644917608:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.326932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.327242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129895644917611:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.327312Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.329707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:57.338809Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129895644917610:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-12-23T18:58:57.396042Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129895644917676:2642] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:57.764427Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587129895644917691:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:58:57.767163Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=NWM1Zjc3OTktYWQyMzQyZDUtZDFhZDliOTItODE3MDBmYmE=, ActorId: [1:7587129895644917577:2327], ActorState: ExecuteState, TraceId: 01kd698amj4bsczm7wcwkbgnce, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:58:57.769596Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:58:57.775110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:57.803382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:57.865122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7587129899939885272:2819] 2025-12-23T18:58:59.660188Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129882760014712:2073];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:59.660262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-12-23T18:59:03.893927Z :CreateTopicWithStreamingConsumer INFO: TTopicSdkTestSetup started 2025-12-23T18:59:03.907151Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-12-23T18:59:03.921799Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7587129921414721995:2928] connected; active server actors: 1 2025-12-23T18:59:03.922205Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][topic_name] updating configuration. Delete ... ion][0][StateIdle] Process user action and tx events 2025-12-23T19:02:46.027023Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.027047Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:46.027081Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.027108Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:46.128013Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:46.128065Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.128089Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:46.128125Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.128152Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:46.206947Z :INFO: [/Root] [/Root] [3e578654-23b34fb0-27639113-fa8ac590] Closing read session. Close timeout: 0.000000s 2025-12-23T19:02:46.207050Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2025-12-23T19:02:46.207123Z :INFO: [/Root] [/Root] [3e578654-23b34fb0-27639113-fa8ac590] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2003 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T19:02:46.207293Z :NOTICE: [/Root] [/Root] [3e578654-23b34fb0-27639113-fa8ac590] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T19:02:46.207370Z :DEBUG: [/Root] [/Root] [3e578654-23b34fb0-27639113-fa8ac590] [] Abort session to cluster 2025-12-23T19:02:46.208035Z :DEBUG: [/Root] 0x00007D9C26AB0590 TDirectReadSessionManager ServerSessionId=consumer-1_14_3_9654093007887053630_v1 Close 2025-12-23T19:02:46.208573Z :DEBUG: [/Root] 0x00007D9C26AB0590 TDirectReadSessionManager ServerSessionId=consumer-1_14_3_9654093007887053630_v1 Close 2025-12-23T19:02:46.208762Z :NOTICE: [/Root] [/Root] [3e578654-23b34fb0-27639113-fa8ac590] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T19:02:46.211421Z node 14 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer consumer-1 session consumer-1_14_3_9654093007887053630_v1 grpc read done: success# 0, data# { } 2025-12-23T19:02:46.211471Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer consumer-1 session consumer-1_14_3_9654093007887053630_v1 grpc read failed 2025-12-23T19:02:46.211508Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 3 consumer consumer-1 session consumer-1_14_3_9654093007887053630_v1 grpc closed 2025-12-23T19:02:46.211548Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer consumer-1 session consumer-1_14_3_9654093007887053630_v1 is DEAD 2025-12-23T19:02:46.212664Z node 14 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [14:7587130872513423410:2538]: session cookie 4 consumer consumer-1 session consumer-1_14_3_9654093007887053630_v1 grpc read done: success# 0, data# { } 2025-12-23T19:02:46.212701Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [14:7587130872513423410:2538]: session cookie 4 consumer consumer-1 session consumer-1_14_3_9654093007887053630_v1grpc read failed 2025-12-23T19:02:46.212737Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [14:7587130872513423410:2538]: session cookie 4 consumer consumer-1 session consumer-1_14_3_9654093007887053630_v1 grpc closed 2025-12-23T19:02:46.212763Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [14:7587130872513423410:2538]: session cookie 4 consumer consumer-1 session consumer-1_14_3_9654093007887053630_v1 proxy is DEAD 2025-12-23T19:02:46.227111Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|448e7ab1-3de3a5f3-bc8ec02a-8734ea74_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-12-23T19:02:46.227227Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|448e7ab1-3de3a5f3-bc8ec02a-8734ea74_0] PartitionId [0] Generation [1] Write session will now close 2025-12-23T19:02:46.227310Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|448e7ab1-3de3a5f3-bc8ec02a-8734ea74_0] PartitionId [0] Generation [1] Write session: aborting 2025-12-23T19:02:46.226941Z node 14 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][test-topic] pipe [14:7587130872513423401:2533] disconnected. 
2025-12-23T19:02:46.227019Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][test-topic] pipe [14:7587130872513423401:2533] disconnected; active server actors: 1 2025-12-23T19:02:46.227062Z node 14 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][test-topic] pipe [14:7587130872513423401:2533] client consumer-1 disconnected session consumer-1_14_3_9654093007887053630_v1 2025-12-23T19:02:46.227251Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2270: [PQ: 72075186224037892] Destroy direct read session consumer-1_14_3_9654093007887053630_v1 2025-12-23T19:02:46.227307Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [14:7587130872513423404:2536] destroyed 2025-12-23T19:02:46.227394Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: consumer-1_14_3_9654093007887053630_v1 2025-12-23T19:02:46.227981Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|448e7ab1-3de3a5f3-bc8ec02a-8734ea74_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-12-23T19:02:46.228279Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|448e7ab1-3de3a5f3-bc8ec02a-8734ea74_0] PartitionId [0] Generation [1] Write session: destroy 2025-12-23T19:02:46.230883Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:46.230939Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.230962Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:46.230995Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.231020Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:46.274134Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: test-message_group_id|448e7ab1-3de3a5f3-bc8ec02a-8734ea74_0 grpc read done: success: 0 data: 2025-12-23T19:02:46.274185Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: test-message_group_id|448e7ab1-3de3a5f3-bc8ec02a-8734ea74_0 grpc read failed 2025-12-23T19:02:46.274239Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: test-message_group_id|448e7ab1-3de3a5f3-bc8ec02a-8734ea74_0 grpc closed 2025-12-23T19:02:46.274265Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: test-message_group_id|448e7ab1-3de3a5f3-bc8ec02a-8734ea74_0 is DEAD 2025-12-23T19:02:46.285501Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T19:02:46.287612Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037892] server disconnected, pipe [14:7587130859628521363:2493] destroyed 2025-12-23T19:02:46.287684Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2025-12-23T19:02:46.287734Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:46.287765Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.287787Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:46.287817Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.287838Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:46.331249Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:46.331296Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.331319Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:46.331362Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.331387Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:46.385429Z node 14 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T19:02:46.431261Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:46.431304Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.431325Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:46.431369Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.431394Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:46.531983Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:46.532038Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.532064Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:46.532102Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:46.532137Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |99.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> TxUsage::Sinks_Oltp_WriteToTopics_2_Query [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> TxUsage::Sinks_Oltp_WriteToTopics_3_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query [GOOD] Test command err: 2025-12-23T18:58:54.663300Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129884614734871:2155];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:54.663371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:58:54.727231Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005e39/r3tmp/tmpFRzePA/pdisk_1.dat 2025-12-23T18:58:54.916990Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:54.922953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:54.923082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:54.930296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:55.002855Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129884614734747:2081] 1766516334655051 != 1766516334655054 2025-12-23T18:58:55.013960Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25988, node 1 2025-12-23T18:58:55.206407Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:58:55.233911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005e39/r3tmp/yandexWaw0iP.tmp 2025-12-23T18:58:55.233931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005e39/r3tmp/yandexWaw0iP.tmp 2025-12-23T18:58:55.234841Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005e39/r3tmp/yandexWaw0iP.tmp 2025-12-23T18:58:55.234961Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:58:55.480391Z INFO: TTestServer started on Port 25022 GrpcPort 25988 TClient is connected to server localhost:25022 2025-12-23T18:58:55.667455Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:25988 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516335073 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:55.770039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T18:58:55.777910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:58:55.795160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:58:57.829140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129897499637676:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.829166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129897499637668:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.829408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.829936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129897499637684:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.830011Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.833722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:57.844718Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129897499637683:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:58:57.920876Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129897499637749:2639] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:58.119612Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587129897499637757:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:58:58.120176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:58.120195Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=NDBhM2UwYTctYjE5ODQ2ZTQtZDJhMDRmNi1jZjEzZmM4MA==, ActorId: [1:7587129897499637666:2329], ActorState: ExecuteState, TraceId: 01kd698b52d6gawc310dkj7s9s, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:58:58.122675Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:58:58.145551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:58.207002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7587129901794605337:2815] 2025-12-23T18:58:59.662411Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129884614734871:2155];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:59.662472Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-12-23T18:59:04.359889Z :Sinks_Oltp_WriteToTopic_1_Table INFO: TTopicSdkTestSetup started 2025-12-23T18:59:04.372963Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-12-23T18:59:04.389372Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7587129927564409352:2923] connected; ... 2-23T19:02:54.688993Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.689009Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T19:02:54.689059Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|c7eb78f9-4758d777-7e116e8f-d4a9bec7_0 grpc read done: success: 0 data: 2025-12-23T19:02:54.689076Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|c7eb78f9-4758d777-7e116e8f-d4a9bec7_0 grpc read failed 2025-12-23T19:02:54.689106Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|c7eb78f9-4758d777-7e116e8f-d4a9bec7_0 grpc closed 2025-12-23T19:02:54.689123Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|c7eb78f9-4758d777-7e116e8f-d4a9bec7_0 is DEAD 2025-12-23T19:02:54.690002Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T19:02:54.690040Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T19:02:54.695549Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037894] server disconnected, pipe [14:7587130895323614832:2528] destroyed 2025-12-23T19:02:54.695593Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037894] server disconnected, pipe [14:7587130895323614835:2528] destroyed 2025-12-23T19:02:54.695626Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2025-12-23T19:02:54.695664Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:54.695682Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.695698Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:54.695718Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.695737Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:54.751267Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:54.751320Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.751342Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:54.751366Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.751392Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T19:02:54.751454Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:54.751466Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.751478Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:54.751492Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.751503Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:54.751534Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:54.751545Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.751556Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:54.751569Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.751580Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:54.854276Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:54.854312Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.854327Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] 
Process user action and tx pending commits 2025-12-23T19:02:54.854349Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.854367Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T19:02:54.854428Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:54.854440Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.854450Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:54.854464Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.854472Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:54.854500Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:54.854510Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.854520Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:54.854533Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.854541Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:54.963730Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:54.963771Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.963788Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:54.963813Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.963831Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T19:02:54.963911Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:54.963925Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.963938Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:54.963951Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.963961Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:54.963993Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:54.964006Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.964025Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:54.964040Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:54.964052Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:02:55.067509Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:55.067553Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:55.067569Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:55.067603Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:55.067621Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T19:02:55.067695Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:55.067709Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:55.067721Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:55.067734Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:55.067746Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:02:55.067779Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:02:55.067802Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:55.067811Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:02:55.067824Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:02:55.067835Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> DSProxyStrategyTest::Restore_mirror3dc [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Table [GOOD] ------- 
[TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> DSProxyStrategyTest::Restore_mirror3dc [GOOD] Test command err: diskMask# 168 nonWorkingDomain# 0 148794 diskMask# 168 nonWorkingDomain# 1 64800 diskMask# 169 nonWorkingDomain# 0 124764 diskMask# 169 nonWorkingDomain# 1 8640 diskMask# 170 nonWorkingDomain# 0 124764 diskMask# 170 nonWorkingDomain# 1 8640 diskMask# 171 nonWorkingDomain# 0 73344 diskMask# 171 nonWorkingDomain# 1 47544 diskMask# 172 nonWorkingDomain# 0 124764 diskMask# 172 nonWorkingDomain# 1 64800 diskMask# 173 nonWorkingDomain# 0 73344 diskMask# 173 nonWorkingDomain# 1 8640 diskMask# 174 nonWorkingDomain# 0 73344 diskMask# 174 nonWorkingDomain# 1 8640 diskMask# 175 nonWorkingDomain# 0 31656 diskMask# 175 nonWorkingDomain# 1 47544 diskMask# 176 nonWorkingDomain# 0 383040 diskMask# 176 nonWorkingDomain# 1 64800 diskMask# 177 nonWorkingDomain# 0 78444 diskMask# 177 nonWorkingDomain# 1 8640 diskMask# 178 nonWorkingDomain# 0 191520 |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_strategy/unittest >> TxUsage::WriteToTopic_Demo_40_Query [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |99.0%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... results_accumulator.log} |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Query |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> DataShardWrite::UpsertWithDefaults >> TxUsage::WriteToTopic_Demo_41_Query >> DataShardWrite::UpsertImmediate |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> 
test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> YdbSdkSessions::TestActiveSessionCountAfterBadSession |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [GOOD] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> YdbSdkSessions::TestMultipleSessions >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> YdbSdkSessions::TestMultipleSessions [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] >> DataShardWrite::UpsertWithDefaults [GOOD] >> DataShardWrite::WriteImmediateBadRequest >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint16-pk_types6-all_types6-index6] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int8-pk_types3-all_types3-index3] >> YdbSdkSessions::TestSessionPool |99.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:588: Enable after accepting a pull request with merging configs |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint32-pk_types5-all_types5-index5] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint64-pk_types4-all_types4-index4] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> YdbSdkSessions::TestSessionPool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:248: Test is failing right now |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::UpsertPrepared+Volatile >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Query [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:200: Test is failing right now |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int64-pk_types0-all_types0-index0] >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Table >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int16-pk_types2-all_types2-index2] |99.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSessionPool [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] >> TxUsage::WriteToTopic_Demo_41_Query [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] [FAIL] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_41_Query [GOOD] Test command err: 2025-12-23T18:58:56.274790Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129892170775520:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:56.274907Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005e1f/r3tmp/tmpz3t5EL/pdisk_1.dat 2025-12-23T18:58:56.312610Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:58:56.447998Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:56.525556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:56.525657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:56.527703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:56.568271Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:56.570690Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129892170775492:2081] 1766516336273095 != 1766516336273098 TServer::EnableGrpc on GrpcPort 65223, node 1 
2025-12-23T18:58:56.614326Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005e1f/r3tmp/yandexxI0i7x.tmp 2025-12-23T18:58:56.614350Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005e1f/r3tmp/yandexxI0i7x.tmp 2025-12-23T18:58:56.614500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005e1f/r3tmp/yandexxI0i7x.tmp 2025-12-23T18:58:56.614597Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:58:56.647709Z INFO: TTestServer started on Port 21572 GrpcPort 65223 2025-12-23T18:58:56.675153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21572 PQClient connected to localhost:65223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516336606 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:56.861259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:58:56.878506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:58:57.282682Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T18:58:58.593796Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129900760711130:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:58.593800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129900760711118:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:58.593941Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:58.594381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129900760711133:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:58.594464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:58.597940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:58.599467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129900760711166:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:58.599575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:58.599787Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129900760711169:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:58.599852Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:58.606503Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129900760711134:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:58:58.736294Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129900760711193:2642] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:58.760262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:58.785936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:58.854710Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587129900760711209:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:58:58.855684Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=MmM0OTMzZS1iOGM4ZmJkNy1kNWU3YjIyYS0zYzI2YzU3YQ==, ActorId: [1:7587129900760711115:2327], ActorState: ExecuteState, TraceId: 01kd698bw67s9xpmq474skf8ax, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:58:58.858330Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:58:58.858630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster ... 
seqNo 81 partNo 1 2025-12-23T19:03:31.121942Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:136: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Topic 'topic_A' partition {0, {13, 281474976715674}, 100000} part blob complete sourceId '' seqNo 81 partNo 1 FormedBlobsCount 0 NewHead: Offset 79 PartNo 0 PackedSize 2000467 count 2 nextOffset 81 batches 3 2025-12-23T19:03:31.121995Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:366: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Part 80:1 appended 2025-12-23T19:03:31.123505Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:634: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Add new write blob: topic 'topic_A' partition {0, {13, 281474976715674}, 100000} compactOffset 79,2 HeadOffset 71 endOffset 72 curOffset 81 D0000100000_00000000000000000079_00000_0000000002_00002| size 2000457 WTime 1766516611120 2025-12-23T19:03:31.167363Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:03:31.167395Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:03:31.167409Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:03:31.167428Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:03:31.167443Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:03:31.167622Z node 13 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T19:03:31.167684Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 1000117 WriteNewSizeFromSupportivePartitions# 0 2025-12-23T19:03:31.167727Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] TPartition::ReplyWrite. Partition: {0, {13, 281474976715674}, 100000} 2025-12-23T19:03:31.167779Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {13, 281474976715674}, 100000}, SeqNo: 82, partNo: 0, Offset: 81 is stored on disk 2025-12-23T19:03:31.167809Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] TPartition::ReplyWrite. 
Partition: {0, {13, 281474976715674}, 100000} 2025-12-23T19:03:31.167837Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'topic_A', Partition: {0, {13, 281474976715674}, 100000}, SeqNo: 82, partNo: 1, Offset: 81 is stored on disk 2025-12-23T19:03:31.168242Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2025-12-23T19:03:31.168261Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:03:31.168278Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2025-12-23T19:03:31.168295Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:03:31.168308Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Try persist 2025-12-23T19:03:31.168329Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:178: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Blobs compaction in progress 2025-12-23T19:03:31.168386Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:428: Erasing blob in L1. Partition {0, {13, 281474976715674}, 100000} offset 64 size 2000457 cause it's been evicted from L2. Actual L1 size: 26 2025-12-23T19:03:31.168404Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:428: Erasing blob in L1. Partition {0, {13, 281474976715674}, 100000} offset 63 size 1000243 cause it's been evicted from L2. Actual L1 size: 25 2025-12-23T19:03:31.168424Z node 13 :PERSQUEUE DEBUG: read.h:371: [72075186224037894][PQCacheProxy]Have to remove new data from cache. Topic topic_A, cookie 0 2025-12-23T19:03:31.168980Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:428: Erasing blob in L1. Partition {0, {13, 281474976715674}, 100000} offset 66 size 1000243 cause it's been evicted from L2. Actual L1 size: 24 2025-12-23T19:03:31.168996Z node 13 :PERSQUEUE DEBUG: read.h:371: [72075186224037894][PQCacheProxy]Have to remove new data from cache. 
Topic topic_A, cookie 0 2025-12-23T19:03:31.169230Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:03:31.169248Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:03:31.169261Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:03:31.169277Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:03:31.169289Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:03:31.169319Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2025-12-23T19:03:31.169330Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:03:31.169340Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2025-12-23T19:03:31.169355Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:03:31.169366Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Try persist 2025-12-23T19:03:31.169418Z node 13 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T19:03:31.169556Z node 13 :PERSQUEUE DEBUG: read.h:313: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough blob. Partition 100000 offset 71 partNo 0 count 8 size 8001772 2025-12-23T19:03:31.169648Z node 13 :PERSQUEUE DEBUG: read.h:313: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough blob. Partition 100000 offset 79 partNo 0 count 2 size 2000457 2025-12-23T19:03:31.169714Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic_A' partition: 0 messageNo: 163 requestId: cookie: 82 2025-12-23T19:03:31.169782Z node 13 :PERSQUEUE DEBUG: partition.cpp:4402: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Handle TEvPQ::TEvDeletePartition 2025-12-23T19:03:31.170108Z node 13 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-12-23T19:03:31.170129Z node 13 :PERSQUEUE DEBUG: read.h:350: [72075186224037894][PQCacheProxy]CacheProxy. Delete blobs from D0000100000(+) to D0000100001(-) 2025-12-23T19:03:31.179030Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 63 count 8 actorID [13:7587131047963043415:2474] 2025-12-23T19:03:31.179062Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 71 count 1 actorID [13:7587131047963043415:2474] 2025-12-23T19:03:31.179076Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 72 count 1 actorID [13:7587131047963043415:2474] 2025-12-23T19:03:31.179087Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. 
Partition 100000 offset 73 count 1 actorID [13:7587131047963043415:2474] 2025-12-23T19:03:31.179102Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 74 count 1 actorID [13:7587131047963043415:2474] 2025-12-23T19:03:31.179115Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 75 count 1 actorID [13:7587131047963043415:2474] 2025-12-23T19:03:31.179126Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 76 count 1 actorID [13:7587131047963043415:2474] 2025-12-23T19:03:31.179137Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 77 count 1 actorID [13:7587131047963043415:2474] 2025-12-23T19:03:31.179151Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 78 count 1 actorID [13:7587131047963043415:2474] 2025-12-23T19:03:31.179162Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 79 count 2 actorID [13:7587131047963043415:2474] 2025-12-23T19:03:31.179174Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 81 count 1 actorID [13:7587131047963043415:2474] 2025-12-23T19:03:31.179253Z node 13 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Received TEvKeyValue::TEvResponse 2025-12-23T19:03:31.179402Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:178: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Blobs compaction in progress 2025-12-23T19:03:31.179430Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2025-12-23T19:03:31.179447Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:03:31.179475Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2025-12-23T19:03:31.179492Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:03:31.179505Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Try persist 2025-12-23T19:03:31.179526Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:178: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Blobs compaction in progress 2025-12-23T19:03:31.180670Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:5186: [PQ: 72075186224037894] DeletePartition {0, {13, 281474976715674}, 100000} 2025-12-23T19:03:31.180731Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:3575: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {BAZEL_UPLOAD} 
ydb/tests/datashard/partitioning/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:539: Enable after interactive tx support |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> KqpWrite::UpsertNullKey >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> KqpWrite::InsertRevert >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> KqpEffects::DeleteWithIndex+UseSecondaryIndex-UseSink >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> KqpEffects::EmptyUpdate+UseSink |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_3_Table [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_3_Query >> 
test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] >> KqpEffects::EmptyUpdate+UseSink [GOOD] >> KqpEffects::EmptyUpdate-UseSink >> KqpWrite::UpsertNullKey [GOOD] >> KqpWrite::ProjectReplace-UseSink >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> KqpWrite::InsertRevert [GOOD] >> KqpWrite::ProjectReplace+UseSink >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> DataShardWrite::WriteCommitVersion >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter >> KqpEffects::EmptyUpdate-UseSink [GOOD] >> KqpEffects::EffectWithSelect-UseSink >> KqpEffects::DeleteWithIndex+UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::DeleteWithIndex-UseSecondaryIndex+UseSink >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] >> KqpWrite::ProjectReplace-UseSink [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] >> KqpWrite::ProjectReplace+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4513, MsgBus: 21293 2025-12-23T19:03:38.224747Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587131103682959469:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:03:38.228185Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005adb/r3tmp/tmpPfdc3z/pdisk_1.dat 2025-12-23T19:03:38.726701Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:03:38.778476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:03:38.778590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:03:38.789900Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587131103682959441:2081] 1766516618222035 != 1766516618222038 2025-12-23T19:03:38.800869Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2025-12-23T19:03:38.813860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4513, node 1 2025-12-23T19:03:38.980776Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:03:39.063514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:03:39.063536Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:03:39.063543Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:03:39.063616Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:03:39.245251Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21293 TClient is connected to server localhost:21293 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516618874 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:03:40.018189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:03:40.035934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-12-23T19:03:40.045799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:40.263593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:40.524111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:40.663428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:43.227001Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587131103682959469:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:03:43.227735Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:03:43.276954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131125157797797:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:43.277072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:43.277523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131125157797807:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:43.277568Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:43.749204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:43.805658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:43.863958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:43.943974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:44.002007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:44.137501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:44.222374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:44.293687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:44.447843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131129452765985:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:44.447945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131129452765990:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:44.447985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:44.450540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131129452765992:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:44.450622Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [Wor ... latileState: Unknown -> Disconnected 2025-12-23T19:03:49.260097Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:03:49.261300Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587131147324804201:2081] 1766516628990244 != 1766516628990247 2025-12-23T19:03:49.294639Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:03:49.298548Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 62368, node 2 2025-12-23T19:03:49.615355Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:03:49.615377Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:03:49.615385Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:03:49.615461Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:03:49.730217Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:22904 2025-12-23T19:03:50.078621Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:22904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516629367 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-12-23T19:03:50.200835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:03:50.217585Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:50.315560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:50.503892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:50.598796Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:54.040581Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587131173094609860:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:54.040677Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:54.041127Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587131173094609870:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:54.041176Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:54.151270Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:54.212316Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:54.313570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:54.383412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:54.426142Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:54.494156Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:54.626337Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:54.739447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:54.891221Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587131173094610748:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:54.891314Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:54.892130Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587131173094610754:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:54.892371Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587131173094610753:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:54.892412Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:54.896470Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:03:54.913679Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-12-23T19:03:54.914713Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587131173094610757:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:03:55.008466Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587131177389578104:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12664, MsgBus: 7530 2025-12-23T19:03:38.749098Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587131103909262125:2262];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:03:38.749165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:03:38.759354Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.008795s 2025-12-23T19:03:38.802682Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ad7/r3tmp/tmpZixR4b/pdisk_1.dat 2025-12-23T19:03:39.262228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:03:39.262325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:03:39.351212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:03:39.393095Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:03:39.464275Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:03:39.466974Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587131103909261886:2081] 1766516618701206 != 1766516618701209 TServer::EnableGrpc on GrpcPort 12664, node 1 2025-12-23T19:03:39.610859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:03:39.746670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:03:39.746696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:03:39.746703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:03:39.746795Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:03:39.748339Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:7530 TClient is connected to server localhost:7530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516619490 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:03:40.971377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:03:41.003127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:03:41.020350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:41.233760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:41.503211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:41.607602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:03:43.750576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587131103909262125:2262];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:03:43.763511Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:03:44.488722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131129679067540:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:44.488840Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:44.489436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131129679067550:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:44.489522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:44.942972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:44.991866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.037367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.075682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.120492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.191288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.271352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.342852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.469383Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131133974035728:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:45.469471Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:45.470150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131133974035734:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:45.470194Z node 1 :KQP_WORKLOAD_SERVICE WARN: sche ... : Disconnected -> Connecting 2025-12-23T19:03:50.809601Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27262, node 2 2025-12-23T19:03:51.034940Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:03:51.047423Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:03:51.047446Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:03:51.047454Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:03:51.047533Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:18290 2025-12-23T19:03:51.553273Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:18290 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516630879 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:03:51.713431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:03:51.728455Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:03:51.746697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:03:51.882657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:52.199887Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:52.327743Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:55.537611Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587131155848214306:2135];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:03:55.537696Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:03:55.549693Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587131177323052551:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:55.549814Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:55.550568Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587131177323052561:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:55.550632Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:55.706273Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:55.771392Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:55.870390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:55.925465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:55.978465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:56.061798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:56.152782Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:56.281744Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:56.475914Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587131181618020737:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:56.476021Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:56.476303Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587131181618020742:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:56.476336Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587131181618020743:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:56.476581Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:56.480536Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:03:56.506286Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-12-23T19:03:56.506929Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587131181618020746:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:03:56.578522Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587131181618020803:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> DataShardWrite::WriteCommitVersion [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] Test command err: Thread 0 failed |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] >> KqpEffects::DeleteWithIndex-UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::DeleteWithIndex+UseSecondaryIndex+UseSink >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Table [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] >> KqpEffects::EffectWithSelect-UseSink [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::EffectWithSelect-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11659, MsgBus: 4198 2025-12-23T19:03:40.386350Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587131111681915401:2259];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:03:40.386432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ad5/r3tmp/tmptQcKBZ/pdisk_1.dat 2025-12-23T19:03:40.489652Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:03:41.074657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:03:41.081007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:03:41.135927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:03:41.188993Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:03:41.213460Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587131111681915167:2081] 1766516620325679 != 1766516620325682 2025-12-23T19:03:41.236356Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11659, node 1 2025-12-23T19:03:41.390855Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:03:41.505342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:03:41.552783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:03:41.552802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:03:41.552809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:03:41.552883Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4198 TClient is connected to server localhost:4198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516621331 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T19:03:42.458585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:03:45.390842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587131111681915401:2259];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:03:45.391729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131133156752536:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:45.391800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:03:45.391872Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:45.392494Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131133156752546:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:45.392538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:45.813078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.851545Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:7587131133156752585:2521], Recipient [1:7587131133156752596:2332]: NKikimr::TEvTablet::TEvBoot 2025-12-23T19:03:45.852394Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:7587131133156752585:2521], Recipient [1:7587131133156752596:2332]: NKikimr::TEvTablet::TEvRestored 2025-12-23T19:03:45.852696Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7587131133156752596:2332] 2025-12-23T19:03:45.852912Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T19:03:45.871699Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:7587131133156752585:2521], Recipient [1:7587131133156752596:2332]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T19:03:45.882912Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T19:03:45.882994Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T19:03:45.884763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T19:03:45.884804Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T19:03:45.884837Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T19:03:45.885192Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T19:03:45.885235Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T19:03:45.885259Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7587131133156752611:2332] in generation 1 2025-12-23T19:03:45.891350Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T19:03:45.998987Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T19:03:45.999135Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T19:03:45.999210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7587131133156752614:2333] 2025-12-23T19:03:45.999226Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:03:45.999237Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T19:03:45.999247Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:03:45.999410Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:7587131133156752596:2332], Recipient [1:7587131133156752596:2332]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T19:03:45.999437Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T19:03:45.999539Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T19:03:45.999634Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T19:03:45.999655Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:03:45.999676Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:03:45.999714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T19:03:45.999732Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T19:03:45.999762Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T19:03:45.999779Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T19:03:45.999794Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:03:46.001058Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:7587131133156752598:2529], Recipient [1:7587131133156752596:2332]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:03:46.001089Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:03:46.001109Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7587131133156752592:2526], serverId# [1:7587131133156752598:2529], sessionId# [0:0:0] 2025-12-23T19:03:46.001142Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:7587131111681915544:2158], Recipien ... 2025-12-23T19:03:55.847276Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:03:55.847299Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:03:55.847307Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:03:55.847384Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:03:55.847485Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:20014 2025-12-23T19:03:56.419947Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:20014 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516635737 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:03:56.488548Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:03:56.514493Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:56.608599Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:56.967530Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:57.063939Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:04:00.160251Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587131197783738925:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:00.160354Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:00.160947Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587131197783738935:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:00.161025Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:00.301967Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:00.349680Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:00.418864Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587131176308900657:2095];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:04:00.419613Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:04:00.457878Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:00.526086Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:00.583654Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:00.659438Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:00.733279Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:00.812161Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:01.059902Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587131202078707104:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:01.060021Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:01.060472Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587131202078707109:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:01.060523Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587131202078707110:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:01.060640Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:01.065660Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:04:01.088608Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587131202078707113:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:04:01.167250Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587131202078707166:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:04:04.089057Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:10.618889Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T19:04:10.618925Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |99.1%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... 
results_accumulator.log} >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit [GOOD] >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> KqpEffects::DeleteWithIndex+UseSecondaryIndex+UseSink [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeleteWithIndex+UseSecondaryIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24171, MsgBus: 6997 2025-12-23T19:03:39.243799Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587131107192054250:2166];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:03:39.243854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005ad9/r3tmp/tmp9IfT6Z/pdisk_1.dat 2025-12-23T19:03:39.722427Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:03:39.732588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:03:39.732708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:03:39.779166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:03:39.917716Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587131107192054121:2081] 1766516619222466 != 1766516619222469 2025-12-23T19:03:39.923910Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24171, node 1 2025-12-23T19:03:39.997544Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2025-12-23T19:03:40.131402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:03:40.131424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:03:40.131431Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:03:40.131527Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:03:40.268758Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:6997 TClient is connected to server localhost:6997 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516619980 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:03:41.073198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:03:41.080833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:03:41.099624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:41.378620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:41.705000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:03:41.846304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:03:44.237404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587131107192054250:2166];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:03:44.240956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:03:44.465178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131128666892477:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:44.465278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:44.465603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131128666892487:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:44.465652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:45.048606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.096036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.129146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.164222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.198842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.270101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.344966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.452331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:45.578580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131132961860659:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:45.578666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:45.579160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131132961860664:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:45.579231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587131132961860665:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:03:45.579260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [Work ... oken or outdated, will use file: (empty maybe) 2025-12-23T19:04:11.522277Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:04:11.522286Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:04:11.522375Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8608 2025-12-23T19:04:12.002339Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:8608 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516651368 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:04:12.340667Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:04:12.354237Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:04:12.493617Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:04:12.706228Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 
2025-12-23T19:04:12.808648Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:04:15.978518Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7587131240365788314:2137];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:04:15.978587Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:04:16.320231Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587131266135593882:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:16.320320Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:16.320829Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587131266135593893:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:16.320875Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:16.448336Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:16.511038Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:16.575615Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:16.636016Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:16.689816Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:16.758898Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:16.834250Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:16.936005Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:17.186945Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587131270430562060:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:17.187075Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:17.187653Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587131270430562066:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:17.187685Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7587131270430562065:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:17.187713Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:04:17.192680Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:04:17.231193Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7587131270430562069:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:04:17.314452Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7587131270430562122:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:04:20.861657Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:21.015986Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:04:26.116557Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7746: Cannot get console configs 2025-12-23T19:04:26.116582Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] |99.1%| [TA] $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::Sinks_Oltp_WriteToTopics_3_Query [GOOD] |99.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_15_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 15] [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_16_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 16] >> TxUsage::Sinks_Oltp_WriteToTopics_4_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T19:03:15.435050Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:03:15.806894Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.006064s 2025-12-23T19:03:15.839299Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:03:15.909216Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:03:15.909771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:03:15.909880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:03:16.779803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:03:16.779962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:03:16.976984Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516591855165 != 1766516591855169 2025-12-23T19:03:17.048083Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:03:17.131560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:03:17.285295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004eec/r3tmp/tmpXfNYoD/pdisk_1.dat 2025-12-23T19:03:17.825575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:03:17.852020Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:03:18.024346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:18.095301Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T19:03:18.096608Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T19:03:18.097112Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T19:03:18.097424Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T19:03:18.179925Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T19:03:18.180806Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T19:03:18.180943Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T19:03:18.190995Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T19:03:18.191127Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T19:03:18.191213Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T19:03:18.194916Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T19:03:18.195096Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T19:03:18.195189Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T19:03:18.207573Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T19:03:18.367927Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T19:03:18.374991Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T19:03:18.375267Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T19:03:18.375340Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:03:18.375419Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T19:03:18.375465Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:03:18.375814Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T19:03:18.385611Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T19:03:18.394063Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T19:03:18.394205Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T19:03:18.394710Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:03:18.394788Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:03:18.394864Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T19:03:18.394926Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T19:03:18.394991Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T19:03:18.395046Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T19:03:18.395111Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:03:18.395261Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:03:18.395316Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:03:18.395375Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T19:03:18.395481Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T19:03:18.395531Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T19:03:18.395637Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T19:03:18.395881Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T19:03:18.395965Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T19:03:18.396064Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T19:03:18.396194Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T19:03:18.396276Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T19:03:18.396322Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T19:03:18.396359Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T19:03:18.396721Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T19:03:18.396770Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T19:03:18.396806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T19:03:18.396846Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T19:03:18.396934Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T19:03:18.396976Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T19:03:18.397012Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T19:03:18.397056Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T19:03:18.397084Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T19:03:18.402357Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T19:03:18.402440Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id ... 189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:04:32.721744Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [10:1166:2974], serverId# [10:1167:2975], sessionId# [0:0:0] 2025-12-23T19:04:32.721830Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [10:1165:2973], Recipient [10:942:2796]: NKikimrTxDataShard.TEvGetInfoRequest 2025-12-23T19:04:32.722718Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1170:2978], Recipient [10:942:2796]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:04:32.723086Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:04:32.723134Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [10:1169:2977], serverId# [10:1170:2978], sessionId# [0:0:0] 2025-12-23T19:04:32.723339Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1168:2976], Recipient [10:942:2796]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-12-23T19:04:32.723472Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-12-23T19:04:32.723515Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T19:04:32.723550Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037890 changed HEAD read to non-repeatable v5000/18446744073709551615 2025-12-23T19:04:32.723596Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2025-12-23T19:04:32.723662Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2025-12-23T19:04:32.723690Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2025-12-23T19:04:32.723716Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-12-23T19:04:32.723741Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2025-12-23T19:04:32.723782Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037890 2025-12-23T19:04:32.723814Z node 10 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2025-12-23T19:04:32.723837Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-12-23T19:04:32.723859Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2025-12-23T19:04:32.723887Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2025-12-23T19:04:32.723961Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-12-23T19:04:32.724146Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037890 Complete read# {[10:1168:2976], 1002} after executionsCount# 1 2025-12-23T19:04:32.724194Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037890 read iterator# {[10:1168:2976], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T19:04:32.724257Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037890 read iterator# {[10:1168:2976], 1002} finished in read 2025-12-23T19:04:32.724302Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2025-12-23T19:04:32.724325Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2025-12-23T19:04:32.724347Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2025-12-23T19:04:32.724371Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2025-12-23T19:04:32.724410Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2025-12-23T19:04:32.724434Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2025-12-23T19:04:32.724457Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037890 has finished 2025-12-23T19:04:32.724484Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-12-23T19:04:32.724559Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-12-23T19:04:32.725217Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1173:2981], Recipient [10:936:2792]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:04:32.725261Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:04:32.725297Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [10:1172:2980], serverId# [10:1173:2981], sessionId# [0:0:0] 2025-12-23T19:04:32.725406Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: 
StateWork, received event# 269553169, Sender [10:1171:2979], Recipient [10:936:2792]: NKikimrTxDataShard.TEvGetInfoRequest 2025-12-23T19:04:32.726261Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1176:2984], Recipient [10:936:2792]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:04:32.726302Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:04:32.726336Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [10:1175:2983], serverId# [10:1176:2984], sessionId# [0:0:0] 2025-12-23T19:04:32.726487Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1174:2982], Recipient [10:936:2792]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-12-23T19:04:32.726595Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-12-23T19:04:32.726638Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037891 CompleteEdge# v2000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-12-23T19:04:32.726670Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037891 changed HEAD read to non-repeatable v5000/18446744073709551615 2025-12-23T19:04:32.726731Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2025-12-23T19:04:32.728472Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2025-12-23T19:04:32.728510Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2025-12-23T19:04:32.728537Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-12-23T19:04:32.728568Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit BuildAndWaitDependencies 2025-12-23T19:04:32.728617Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037891 2025-12-23T19:04:32.728649Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2025-12-23T19:04:32.728677Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-12-23T19:04:32.728700Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2025-12-23T19:04:32.728724Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2025-12-23T19:04:32.728810Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-12-23T19:04:32.728948Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037891 Complete 
read# {[10:1174:2982], 1003} after executionsCount# 1 2025-12-23T19:04:32.729007Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037891 read iterator# {[10:1174:2982], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T19:04:32.729073Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037891 read iterator# {[10:1174:2982], 1003} finished in read 2025-12-23T19:04:32.729123Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2025-12-23T19:04:32.729149Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2025-12-23T19:04:32.729173Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2025-12-23T19:04:32.729200Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2025-12-23T19:04:32.729242Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2025-12-23T19:04:32.729268Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2025-12-23T19:04:32.729293Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037891 has finished 2025-12-23T19:04:32.729321Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-12-23T19:04:32.729405Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] Test command err: Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. 
only write Write: 10% 3417 30% 3417 50% 3417 90% 3417 99% 3417 ms Write: 10% 2729 30% 2729 50% 2729 90% 2729 99% 2729 ms Write: 10% 2560 30% 2560 50% 2560 90% 2560 99% 2560 ms Write: 10% 3352 30% 3352 50% 3352 90% 3352 99% 3352 ms Write: 10% 7859 30% 7859 50% 7859 90% 7859 99% 7859 ms Write: 10% 8795 30% 8795 50% 8795 90% 8795 99% 8795 ms Write: 10% 6030 30% 6030 50% 6030 90% 6030 99% 6030 ms Write: 10% 6535 30% 6535 50% 6535 90% 6535 99% 6535 ms Write: 10% 8213 30% 8213 50% 8213 90% 8213 99% 8213 ms Write: 10% 7503 30% 7503 50% 7503 90% 7503 99% 7503 ms Write: 10% 10211 30% 10211 50% 10211 90% 10211 99% 10211 ms Write: 10% 7481 30% 7481 50% 7481 90% 7481 99% 7481 ms Write: 10% 12103 30% 12103 50% 12103 90% 12103 99% 12103 ms Write: 10% 10820 30% 10820 50% 10820 90% 10820 99% 10820 ms Write: 10% 10534 30% 10534 50% 10534 90% 10534 99% 10534 ms Write: 10% 10808 30% 10808 50% 10808 90% 10808 99% 10808 ms Write: 10% 10601 30% 10601 50% 10601 90% 10601 99% 10601 ms Write: 10% 13635 30% 13635 50% 13635 90% 13635 99% 13635 ms Write: 10% 14906 30% 14906 50% 14906 90% 14906 99% 14906 ms Write: 10% 14614 30% 14614 50% 14614 90% 14614 99% 14614 ms Write: 10% 13716 30% 13716 50% 13716 90% 13716 99% 13716 ms Write: 10% 12964 30% 12964 50% 12964 90% 12964 99% 12964 ms Write: 10% 14075 30% 14075 50% 14075 90% 14075 99% 14075 ms Write: 10% 12400 30% 12400 50% 12400 90% 12400 99% 12400 ms Write: 10% 12715 30% 12715 50% 12715 90% 12715 99% 12715 ms Write: 10% 14159 30% 14159 50% 14159 90% 14159 99% 14159 ms Write: 10% 11965 30% 11965 50% 11965 90% 11965 99% 11965 ms Write: 10% 13030 30% 13030 50% 13030 90% 13030 99% 13030 ms Write: 10% 14260 30% 14260 50% 14260 90% 14260 99% 14260 ms Write: 10% 10997 30% 10997 50% 10997 90% 10997 99% 10997 ms Write: 10% 9776 30% 9776 50% 9776 90% 9776 99% 9776 ms Write: 10% 3330 30% 3330 50% 3330 90% 3330 99% 3330 ms Write: 10% 11955 30% 11955 50% 11955 90% 11955 99% 11955 ms Write: 10% 9912 30% 9912 50% 9912 90% 9912 99% 9912 ms Write: 10% 4540 30% 4540 50% 4540 90% 4540 99% 4540 ms Write: 10% 15687 30% 15687 50% 15687 90% 15687 99% 15687 ms Write: 10% 8329 30% 8329 50% 8329 90% 8329 99% 8329 ms Write: 10% 5914 30% 5914 50% 5914 90% 5914 99% 5914 ms Write: 10% 2198 30% 2198 50% 2198 90% 2198 99% 2198 ms Write: 10% 2108 30% 2108 50% 2108 90% 2108 99% 2108 ms Write: 10% 10702 30% 10702 50% 10702 90% 10702 99% 10702 ms Write: 10% 4539 30% 4539 50% 4539 90% 4539 99% 4539 ms Write: 10% 7481 30% 7481 50% 7481 90% 7481 99% 7481 ms Write: 10% 3020 30% 3020 50% 3020 90% 3020 99% 3020 ms Write: 10% 9748 30% 9748 50% 9748 90% 9748 99% 9748 ms Write: 10% 3773 30% 3773 50% 3773 90% 3773 99% 3773 ms Write: 10% 2806 30% 2806 50% 2806 90% 2806 99% 2806 ms Write: 10% 7312 30% 7312 50% 7312 90% 7312 99% 7312 ms Write: 10% 4470 30% 4470 50% 4470 90% 4470 99% 4470 ms Write: 10% 10482 30% 10482 50% 10482 90% 10482 99% 10482 ms Write: 10% 3638 30% 3638 50% 3638 90% 3638 99% 3638 ms Write: 10% 4438 30% 4438 50% 4438 90% 4438 99% 4438 ms Write: 10% 10092 30% 10092 50% 10092 90% 10092 99% 10092 ms Write: 10% 10558 30% 10558 50% 10558 90% 10558 99% 10558 ms Write: 10% 4693 30% 4693 50% 4693 90% 4693 99% 4693 ms Write: 10% 2861 30% 2861 50% 2861 90% 2861 99% 2861 ms Write: 10% 12362 30% 12362 50% 12362 90% 12362 99% 12362 ms Write: 10% 12154 30% 12154 50% 12154 90% 12154 99% 12154 ms Write: 10% 2640 30% 2640 50% 2640 90% 2640 99% 2640 ms Write: 10% 4158 30% 4158 50% 4158 90% 4158 99% 4158 ms Write: 10% 10418 30% 10418 50% 10418 90% 10418 99% 10418 ms Write: 10% 12827 30% 12827 50% 12827 90% 12827 
99% 12827 ms Write: 10% 2881 30% 2881 50% 2881 90% 2881 99% 2881 ms Write: 10% 2360 30% 2360 50% 2360 90% 2360 99% 2360 ms Step 2. read write Write: 10% 3159 30% 3159 50% 3159 90% 3159 99% 3159 ms Write: 10% 3775 30% 3775 50% 3775 90% 3775 99% 3775 ms Write: 10% 3810 30% 3810 50% 3810 90% 3810 99% 3810 ms Write: 10% 4820 30% 4820 50% 4820 90% 4820 99% 4820 ms Write: 10% 4379 30% 4379 50% 4379 90% 4379 99% 4379 ms Write: 10% 5301 30% 5301 50% 5301 90% 5301 99% 5301 ms Write: 10% 13201 30% 13201 50% 13201 90% 13201 99% 13201 ms Write: 10% 12590 30% 12590 50% 12590 90% 12590 99% 12590 ms Write: 10% 13016 30% 13016 50% 13016 90% 13016 99% 13016 ms Write: 10% 14478 30% 14478 50% 14478 90% 14478 99% 14478 ms Write: 10% 17392 30% 17392 50% 17392 90% 17392 99% 17392 ms Write: 10% 18673 30% 18673 50% 18673 90% 18673 99% 18673 ms Write: 10% 18526 30% 18526 50% 18526 90% 18526 99% 18526 ms Write: 10% 19637 30% 19637 50% 19637 90% 19637 99% 19637 ms Write: 10% 21903 30% 21903 50% 21903 90% 21903 99% 21903 ms Write: 10% 20006 30% 20006 50% 20006 90% 20006 99% 20006 ms Was written: 12.5 MiB, Speed: 0.20833333333333334 MiB/s Write: 10% 20804 30% 20804 50% 20804 90% 20804 99% 20804 ms Write: 10% 22325 30% 22325 50% 22325 90% 22325 99% 22325 ms Write: 10% 20026 30% 20026 50% 20026 90% 20026 99% 20026 ms Write: 10% 20321 30% 20321 50% 20321 90% 20321 99% 20321 ms Write: 10% 19706 30% 19706 50% 19706 90% 19706 99% 19706 ms Write: 10% 11872 30% 11872 50% 11872 90% 11872 99% 11872 ms Write: 10% 17337 30% 17337 50% 17337 90% 17337 99% 17337 ms Write: 10% 20937 30% 20937 50% 20937 90% 20937 99% 20937 ms Write: 10% 22317 30% 22317 50% 22317 90% 22317 99% 22317 ms Write: 10% 6886 30% 6886 50% 6886 90% 6886 99% 6886 ms Write: 10% 18404 30% 18404 50% 18404 90% 18404 99% 18404 ms Write: 10% 18624 30% 18624 50% 18624 90% 18624 99% 18624 ms Write: 10% 19022 30% 19022 50% 19022 90% 19022 99% 19022 ms Write: 10% 13340 30% 13340 50% 13340 90% 13340 99% 13340 ms Write: 10% 19659 30% 19659 50% 19659 90% 19659 99% 19659 ms Write: 10% 13770 30% 13770 50% 13770 90% 13770 99% 13770 ms Write: 10% 10801 30% 10801 50% 10801 90% 10801 99% 10801 ms Write: 10% 3274 30% 3274 50% 3274 90% 3274 99% 3274 ms Write: 10% 4005 30% 4005 50% 4005 90% 4005 99% 4005 ms Write: 10% 10761 30% 10761 50% 10761 90% 10761 99% 10761 ms Write: 10% 3859 30% 3859 50% 3859 90% 3859 99% 3859 ms Write: 10% 4474 30% 4474 50% 4474 90% 4474 99% 4474 ms Write: 10% 4350 30% 4350 50% 4350 90% 4350 99% 4350 ms Write: 10% 10607 30% 10607 50% 10607 90% 10607 99% 10607 ms Write: 10% 2801 30% 2801 50% 2801 90% 2801 99% 2801 ms Write: 10% 6348 30% 6348 50% 6348 90% 6348 99% 6348 ms Write: 10% 7264 30% 7264 50% 7264 90% 7264 99% 7264 ms Write: 10% 11219 30% 11219 50% 11219 90% 11219 99% 11219 ms Write: 10% 1938 30% 1938 50% 1938 90% 1938 99% 1938 ms Write: 10% 2610 30% 2610 50% 2610 90% 2610 99% 2610 ms Write: 10% 9852 30% 9852 50% 9852 90% 9852 99% 9852 ms Write: 10% 6663 30% 6663 50% 6663 90% 6663 99% 6663 ms Write: 10% 4549 30% 4549 50% 4549 90% 4549 99% 4549 ms Write: 10% 5647 30% 5647 50% 5647 90% 5647 99% 5647 ms Write: 10% 2613 30% 2613 50% 2613 90% 2613 99% 2613 ms Write: 10% 9425 30% 9425 50% 9425 90% 9425 99% 9425 ms Write: 10% 4960 30% 4960 50% 4960 90% 4960 99% 4960 ms Write: 10% 2950 30% 2950 50% 2950 90% 2950 99% 2950 ms Write: 10% 11296 30% 11296 50% 11296 90% 11296 99% 11296 ms Write: 10% 5429 30% 5429 50% 5429 90% 5429 99% 5429 ms Write: 10% 9259 30% 9259 50% 9259 90% 9259 99% 9259 ms Write: 10% 2349 30% 2349 50% 2349 90% 2349 99% 2349 ms Write: 10% 8011 
30% 8011 50% 8011 90% 8011 99% 8011 ms Write: 10% 2949 30% 2949 50% 2949 90% 2949 99% 2949 ms Write: 10% 10670 30% 10670 50% 10670 90% 10670 99% 10670 ms Write: 10% 2674 30% 2674 50% 2674 90% 2674 99% 2674 ms Write: 10% 2098 30% 2098 50% 2098 90% 2098 99% 2098 ms Write: 10% 4703 30% 4703 50% 4703 90% 4703 99% 4703 ms Read: 10% 6359 30% 11159 50% 15958 90% 19861 99% 20739 ms Step 3. write modify Write: 10% 12412 30% 12412 50% 12412 90% 12412 99% 12412 ms Write: 10% 17103 30% 17103 50% 17103 90% 17103 99% 17103 ms Write: 10% 19211 30% 19211 50% 19211 90% 19211 99% 19211 ms Write: 10% 22476 30% 22476 50% 22476 90% 22476 99% 22476 ms Write: 10% 23587 30% 23587 50% 23587 90% 23587 99% 23587 ms Write: 10% 23899 30% 23899 50% 23899 90% 23899 99% 23899 ms Write: 10% 24334 30% 24334 50% 24334 90% 24334 99% 24334 ms Write: 10% 20944 30% 20944 50% 20944 90% 20944 99% 20944 ms Write: 10% 21390 30% 21390 50% 21390 90% 21390 99% 21390 ms Write: 10% 25700 30% 25700 50% 25700 90% 25700 99% 25700 ms Write: 10% 20538 30% 20538 50% 20538 90% 20538 99% 20538 ms Write: 10% 17631 30% 17631 50% 17631 90% 17631 99% 17631 ms Write: 10% 23531 30% 23531 50% 23531 90% 23531 99% 23531 ms Write: 10% 18083 30% 18083 50% 18083 90% 18083 99% 18083 ms Write: 10% 22130 30% 22130 50% 22130 90% 22130 99% 22130 ms Write: 10% 25911 30% 25911 50% 25911 90% 25911 99% 25911 ms Write: 10% 14249 30% 14249 50% 14249 90% 14249 99% 14249 ms Write: 10% 20560 30% 20560 50% 20560 90% 20560 99% 20560 ms Write: 10% 19836 30% 19836 50% 19836 90% 19836 99% 19836 ms Write: 10% 21052 30% 21052 50% 21052 90% 21052 99% 21052 ms Write: 10% 12911 30% 12911 50% 12911 90% 12911 99% 12911 ms Write: 10% 19567 30% 19567 50% 19567 90% 19567 99% 19567 ms Write: 10% 18448 30% 18448 50% 18448 90% 18448 99% 18448 ms Write: 10% 11632 30% 11632 50% 11632 90% 11632 99% 11632 ms Write: 10% 18354 30% 18354 50% 18354 90% 18354 99% 18354 ms Write: 10% 11524 30% 11524 50% 11524 90% 11524 99% 11524 ms Write: 10% 11579 30% 11579 50% 11579 90% 11579 99% 11579 ms Write: 10% 18005 30% 18005 50% 18005 90% 18005 99% 18005 ms Write: 10% 9314 30% 9314 50% 9314 90% 9314 99% 9314 ms Write: 10% 16733 30% 16733 50% 16733 90% 16733 99% 16733 ms Write: 10% 18103 30% 18103 50% 18103 90% 18103 99% 18103 ms Write: 10% 17684 30% 17684 50% 17684 90% 17684 99% 17684 ms Write: 10% 8657 30% 8657 50% 8657 90% 8657 99% 8657 ms Write: 10% 7518 30% 7518 50% 7518 90% 7518 99% 7518 ms Write: 10% 15395 30% 15395 50% 15395 90% 15395 99% 15395 ms Write: 10% 15239 30% 15239 50% 15239 90% 15239 99% 15239 ms Write: 10% 21113 30% 21113 50% 21113 90% 21113 99% 21113 ms Write: 10% 5811 30% 5811 50% 5811 90% 5811 99% 5811 ms Write: 10% 20341 30% 20341 50% 20341 90% 20341 99% 20341 ms Write: 10% 12488 30% 12488 50% 12488 90% 12488 99% 12488 ms Write: 10% 19379 30% 19379 50% 19379 90% 19379 99% 19379 ms Write: 10% 3739 30% 3739 50% 3739 90% 3739 99% 3739 ms Write: 10% 10591 30% 10591 50% 10591 90% 10591 99% 10591 ms Write: 10% 16679 30% 16679 50% 16679 90% 16679 99% 16679 ms Write: 10% 3618 30% 3618 50% 3618 90% 3618 99% 3618 ms Write: 10% 9782 30% 9782 50% 9782 90% 9782 99% 9782 ms Write: 10% 7143 30% 7143 50% 7143 90% 7143 99% 7143 ms Write: 10% 6084 30% 6084 50% 6084 90% 6084 99% 6084 ms Write: 10% 5907 30% 5907 50% 5907 90% 5907 99% 5907 ms Write: 10% 5815 30% 5815 50% 5815 90% 5815 99% 5815 ms Write: 10% 5101 30% 5101 50% 5101 90% 5101 99% 5101 ms Write: 10% 5264 30% 5264 50% 5264 90% 5264 99% 5264 ms Write: 10% 4822 30% 4822 50% 4822 90% 4822 99% 4822 ms Write: 10% 5080 30% 5080 50% 5080 90% 5080 99% 
5080 ms Write: 10% 4338 30% 4338 50% 4338 90% 4338 99% 4338 ms Write: 10% 4505 30% 4505 50% 4505 90% 4505 99% 4505 ms Write: 10% 19322 30% 19322 50% 19322 90% 19322 99% 19322 ms Write: 10% 8639 30% 8639 50% 8639 90% 8639 99% 8639 ms Write: 10% 3875 30% 3875 50% 3875 90% 3875 99% 3875 ms Write: 10% 7840 30% 7840 50% 7840 90% 7840 99% 7840 ms Write: 10% 4102 30% 4102 50% 4102 90% 4102 99% 4102 ms Write: 10% 4199 30% 4199 50% 4199 90% 4199 99% 4199 ms Write: 10% 6835 30% 6835 50% 6835 90% 6835 99% 6835 ms Write: 10% 10688 30% 10688 50% 10688 90% 10688 99% 10688 ms Update: 10% 1797 30% 1797 50% 1797 90% 1797 99% 1797 ms Step 4. read modify write Write: 10% 3297 30% 3297 50% 3297 90% 3297 99% 3297 ms Write: 10% 6618 30% 6618 50% 6618 90% 6618 99% 6618 ms Write: 10% 8263 30% 8263 50% 8263 90% 8263 99% 8263 ms Was written: 23.33984375 MiB, Speed: 0.1806640625 MiB/s Write: 10% 15717 30% 15717 50% 15717 90% 15717 99% 15717 ms Write: 10% 16475 30% 16475 50% 16475 90% 16475 99% 16475 ms Write: 10% 19014 30% 19014 50% 19014 90% 19014 99% 19014 ms Write: 10% 17037 30% 17037 50% 17037 90% 17037 99% 17037 ms Write: 10% 18293 30% 18293 50% 18293 90% 18293 99% 18293 ms Write: 10% 21340 30% 21340 50% 21340 90% 21340 99% 21340 ms Write: 10% 20568 30% 20568 50% 20568 90% 20568 99% 20568 ms Write: 10% 18507 30% 18507 50% 18507 90% 18507 99% 18507 ms Write: 10% 21024 30% 21024 50% 21024 90% 21024 99% 21024 ms Write: 10% 19359 30% 19359 50% 19359 90% 19359 99% 19359 ms Write: 10% 20722 30% 20722 50% 20722 90% 20722 99% 20722 ms Write: 10% 21375 30% 21375 50% 21375 90% 21375 99% 21375 ms Write: 10% 21023 30% 21023 50% 21023 90% 21023 99% 21023 ms Write: 10% 21841 30% 21841 50% 21841 90% 21841 99% 21841 ms Write: 10% 16379 30% 16379 50% 16379 90% 16379 99% 16379 ms Write: 10% 17059 30% 17059 50% 17059 90% 17059 99% 17059 ms Write: 10% 15568 30% 15568 50% 15568 90% 15568 99% 15568 ms Write: 10% 20480 30% 20480 50% 20480 90% 20480 99% 20480 ms Write: 10% 16539 30% 16539 50% 16539 90% 16539 99% 16539 ms Write: 10% 17470 30% 17470 50% 17470 90% 17470 99% 17470 ms Write: 10% 14935 30% 14935 50% 14935 90% 14935 99% 14935 ms Write: 10% 7554 30% 7554 50% 7554 90% 7554 99% 7554 ms Write: 10% 7728 30% 7728 50% 7728 90% 7728 99% 7728 ms Write: 10% 19300 30% 19300 50% 19300 90% 19300 99% 19300 ms Write: 10% 16930 30% 16930 50% 16930 90% 16930 99% 16930 ms Write: 10% 19168 30% 19168 50% 19168 90% 19168 99% 19168 ms Write: 10% 18734 30% 18734 50% 18734 90% 18734 99% 18734 ms Write: 10% 14036 30% 14036 50% 14036 90% 14036 99% 14036 ms Write: 10% 3506 30% 3506 50% 3506 90% 3506 99% 3506 ms Write: 10% 6918 30% 6918 50% 6918 90% 6918 99% 6918 ms Write: 10% 11514 30% 11514 50% 11514 90% 11514 99% 11514 ms Write: 10% 5728 30% 5728 50% 5728 90% 5728 99% 5728 ms Write: 10% 15431 30% 15431 50% 15431 90% 15431 99% 15431 ms Write: 10% 4167 30% 4167 50% 4167 90% 4167 99% 4167 ms Write: 10% 13482 30% 13482 50% 13482 90% 13482 99% 13482 ms Write: 10% 9856 30% 9856 50% 9856 90% 9856 99% 9856 ms Write: 10% 7935 30% 7935 50% 7935 90% 7935 99% 7935 ms Write: 10% 4068 30% 4068 50% 4068 90% 4068 99% 4068 ms Write: 10% 3912 30% 3912 50% 3912 90% 3912 99% 3912 ms Write: 10% 6784 30% 6784 50% 6784 90% 6784 99% 6784 ms Write: 10% 7246 30% 7246 50% 7246 90% 7246 99% 7246 ms Write: 10% 3888 30% 3888 50% 3888 90% 3888 99% 3888 ms Write: 10% 7446 30% 7446 50% 7446 90% 7446 99% 7446 ms Write: 10% 11429 30% 11429 50% 11429 90% 11429 99% 11429 ms Write: 10% 5367 30% 5367 50% 5367 90% 5367 99% 5367 ms Write: 10% 4809 30% 4809 50% 4809 90% 4809 99% 4809 ms 
Write: 10% 6308 30% 6308 50% 6308 90% 6308 99% 6308 ms Write: 10% 9656 30% 9656 50% 9656 90% 9656 99% 9656 ms Write: 10% 3470 30% 3470 50% 3470 90% 3470 99% 3470 ms Write: 10% 4537 30% 4537 50% 4537 90% 4537 99% 4537 ms Write: 10% 9934 30% 9934 50% 9934 90% 9934 99% 9934 ms Write: 10% 4413 30% 4413 50% 4413 90% 4413 99% 4413 ms Write: 10% 7020 30% 7020 50% 7020 90% 7020 99% 7020 ms Write: 10% 14526 30% 14526 50% 14526 90% 14526 99% 14526 ms Write: 10% 5482 30% 5482 50% 5482 90% 5482 99% 5482 ms Write: 10% 6446 30% 6446 50% 6446 90% 6446 99% 6446 ms Write: 10% 10103 30% 10103 50% 10103 90% 10103 99% 10103 ms Write: 10% 5549 30% 5549 50% 5549 90% 5549 99% 5549 ms Write: 10% 3686 30% 3686 50% 3686 90% 3686 99% 3686 ms Write: 10% 4402 30% 4402 50% 4402 90% 4402 99% 4402 ms Write: 10% 5287 30% 5287 50% 5287 90% 5287 99% 5287 ms Read: 10% 7293 30% 12186 50% 17080 90% 26867 99% 29069 ms Update: 10% 2027 30% 2027 50% 2027 90% 2027 99% 2027 ms ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T19:03:15.973622Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:03:16.107498Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:03:16.121193Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:03:16.121698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:03:16.121810Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:03:16.776047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:03:16.776234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:03:16.993348Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766516591854803 != 1766516591854807 2025-12-23T19:03:17.051764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:03:17.124140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:03:17.285474Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004ee9/r3tmp/tmpb9b0jB/pdisk_1.dat 2025-12-23T19:03:17.829955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:03:17.855342Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:03:18.027455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:03:18.096731Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T19:03:18.097794Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T19:03:18.098098Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T19:03:18.098388Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T19:03:18.189140Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T19:03:18.189979Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T19:03:18.190126Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T19:03:18.192256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T19:03:18.192390Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T19:03:18.192462Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T19:03:18.194125Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T19:03:18.194300Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T19:03:18.194420Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T19:03:18.206122Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T19:03:18.384412Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T19:03:18.384748Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T19:03:18.384906Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T19:03:18.384954Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:03:18.384990Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T19:03:18.385056Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:03:18.385351Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T19:03:18.385416Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T19:03:18.391542Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T19:03:18.391691Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T19:03:18.392218Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:03:18.392271Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:03:18.392327Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T19:03:18.392367Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T19:03:18.392409Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T19:03:18.392471Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T19:03:18.392547Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:03:18.392676Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:03:18.392721Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:03:18.392786Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T19:03:18.392903Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T19:03:18.392955Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T19:03:18.393066Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T19:03:18.393522Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T19:03:18.397732Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T19:03:18.397891Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T19:03:18.397957Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T19:03:18.398001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T19:03:18.398089Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T19:03:18.398137Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T19:03:18.398564Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T19:03:18.398620Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T19:03:18.398656Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T19:03:18.398723Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T19:03:18.398807Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T19:03:18.398862Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T19:03:18.398911Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T19:03:18.398949Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T19:03:18.398975Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T19:03:18.400821Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T19:03:18.400889Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-12-23T19:04:34.103811Z node 10 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-12-23T19:04:34.103969Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:918:2783], Recipient [10:1151:2956]: {TEvReadSet step# 3001 txid# 1234567890012 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2025-12-23T19:04:34.104013Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-12-23T19:04:34.104055Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890012 ... validating table 2025-12-23T19:04:34.601166Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1203:2995], Recipient [10:1151:2956]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:04:34.601272Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:04:34.601347Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [10:1202:2994], serverId# [10:1203:2995], sessionId# [0:0:0] 2025-12-23T19:04:34.625035Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1209:2998], Recipient [10:1151:2956]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-12-23T19:04:34.625251Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-12-23T19:04:34.625368Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-12-23T19:04:34.625510Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T19:04:34.625568Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-12-23T19:04:34.625629Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T19:04:34.625679Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 
72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T19:04:34.625745Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2025-12-23T19:04:34.625831Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T19:04:34.625867Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-12-23T19:04:34.625892Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T19:04:34.625917Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-12-23T19:04:34.626073Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-12-23T19:04:34.626409Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-12-23T19:04:34.626486Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[10:1209:2998], 0} after executionsCount# 1 2025-12-23T19:04:34.626554Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[10:1209:2998], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T19:04:34.626666Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[10:1209:2998], 0} finished in read 2025-12-23T19:04:34.626778Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T19:04:34.626810Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T19:04:34.626839Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T19:04:34.626865Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-12-23T19:04:34.626947Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2025-12-23T19:04:34.626978Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T19:04:34.627024Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2025-12-23T19:04:34.627074Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T19:04:34.627208Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T19:04:34.628253Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, 
Sender [10:1209:2998], Recipient [10:918:2783]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-12-23T19:04:34.628409Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [10:1209:2998], Recipient [10:1151:2956]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T19:04:34.628466Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-12-23T19:04:34.628702Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-12-23T19:04:34.628762Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-12-23T19:04:34.628823Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2025-12-23T19:04:34.628849Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-12-23T19:04:34.628876Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-12-23T19:04:34.628902Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-12-23T19:04:34.628948Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2025-12-23T19:04:34.629002Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2025-12-23T19:04:34.629030Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-12-23T19:04:34.629051Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2025-12-23T19:04:34.629074Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-12-23T19:04:34.629169Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-12-23T19:04:34.629392Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-12-23T19:04:34.629438Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[10:1209:2998], 1} after executionsCount# 1 2025-12-23T19:04:34.629477Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[10:1209:2998], 1} sends rowCount# 3, bytes# 96, quota rows left# 996, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-12-23T19:04:34.629539Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# 
{[10:1209:2998], 1} finished in read 2025-12-23T19:04:34.629587Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2025-12-23T19:04:34.629652Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-12-23T19:04:34.629678Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-12-23T19:04:34.629723Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-12-23T19:04:34.629770Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2025-12-23T19:04:34.629792Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-12-23T19:04:34.629818Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2025-12-23T19:04:34.629845Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-12-23T19:04:34.629920Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-12-23T19:04:34.630584Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [10:1209:2998], Recipient [10:918:2783]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-12-23T19:04:34.630636Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1004 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1003 } }, { items { int32_value: 13 } items { int32_value: 1004 } } |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] [GOOD] |99.1%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_6_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 6] [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] |99.1%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} >> TxUsage::Sinks_Oltp_WriteToTopics_4_Table [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_4_Query |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |99.1%| [TA] $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... 
results_accumulator.log} >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_compression-COMPRESSION = "zstd"] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[lz4_compression-COMPRESSION = "lz4"] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_10_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 10] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query [GOOD] Test command err: 2025-12-23T18:58:54.660390Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129883130520615:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:54.660466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005e51/r3tmp/tmpLCqMZP/pdisk_1.dat 2025-12-23T18:58:54.729190Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T18:58:54.918116Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:54.953845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:54.953941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:54.999450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:58:55.005006Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587129883130520510:2081] 1766516334655032 != 1766516334655035 2025-12-23T18:58:55.006102Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25288, node 1 2025-12-23T18:58:55.116476Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:58:55.234032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005e51/r3tmp/yandexSsNYUX.tmp 2025-12-23T18:58:55.234062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005e51/r3tmp/yandexSsNYUX.tmp 2025-12-23T18:58:55.234861Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005e51/r3tmp/yandexSsNYUX.tmp 2025-12-23T18:58:55.235019Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:58:55.480314Z INFO: TTestServer started on Port 5910 GrpcPort 25288 TClient is connected to server localhost:5910 2025-12-23T18:58:55.668191Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:25288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766516335087 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:55.775311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:58:55.800597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... 2025-12-23T18:58:55.903056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2025-12-23T18:58:57.827910Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129896015423426:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.828017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129896015423445:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.828078Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.829013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129896015423452:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.829124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.831863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:57.841020Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129896015423450:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-12-23T18:58:58.056527Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129896015423516:2643] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:58.087163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:58.118154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:58.187858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7587129900310391107:2821] 2025-12-23T18:58:59.660481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587129883130520615:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:59.660524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-12-23T18:59:03.572287Z :Sinks_Oltp_WriteToTopicAndTable_1_Table INFO: TTopicSdkTestSetup started 2025-12-23T18:59:03.591345Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-12-23T18:59:03.618566Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7587129921785227810:2920] connected; active server actors: 1 2025-12-23T18:59:03.619574Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-12-23T18:59:03.625781Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-12-23T18:59:03.629426Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-12-23T18:59:03.635230Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2972: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-12-23T18:59:03.645836Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3004: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-12-23T18:59:03.647386Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:787: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-12-23T18:59:03.647461Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:926: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-12-23T18:59:03.647504Z node 1 :PERSQUEUE INFO: pq_impl.cpp:612: [PQ: 72075186224037892] doesn't have tx writes info 2025-12-23T18:59:03.647751Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-12-23T18:59:03.648778Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037892] server connected, pipe [1:7587129921785227809:2919], now have 1 active actors on pipe 2025-12-23T18:59:03.648815Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2733: [PQ: 72075186224037892] server connected, pipe [1:7587129921785227828:2447], now have 1 active actors on pipe 2025-12-23T18:59:03.686483Z node 1 :PERSQUEUE DEBUG: ... 76520:3296] destroyed 2025-12-23T19:04:57.995511Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037896] server disconnected, pipe [14:7587131396551076517:3296] destroyed 2025-12-23T19:04:57.995548Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 
2025-12-23T19:04:57.995584Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:04:57.995606Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:57.995623Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:04:57.995645Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:57.995662Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T19:04:57.996333Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:04:57.996364Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:57.996382Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:04:57.996403Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:57.996422Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:04:57.998051Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:04:57.998077Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:57.998092Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:04:57.998111Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:57.998126Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:04:58.003256Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|55d89cdb-6a482036-34d0d95-89d7498a_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2025-12-23T19:04:58.003315Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|55d89cdb-6a482036-34d0d95-89d7498a_0] PartitionId [0] Generation [1] Write session will now close 2025-12-23T19:04:58.003355Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|55d89cdb-6a482036-34d0d95-89d7498a_0] PartitionId [0] Generation [1] Write session: aborting 2025-12-23T19:04:58.004621Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|55d89cdb-6a482036-34d0d95-89d7498a_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-12-23T19:04:58.004641Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|55d89cdb-6a482036-34d0d95-89d7498a_0 grpc read done: success: 0 data: 2025-12-23T19:04:58.004675Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|55d89cdb-6a482036-34d0d95-89d7498a_0 grpc read failed 2025-12-23T19:04:58.004718Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|55d89cdb-6a482036-34d0d95-89d7498a_0 grpc closed 2025-12-23T19:04:58.004740Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|55d89cdb-6a482036-34d0d95-89d7498a_0 is DEAD 2025-12-23T19:04:58.005799Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T19:04:58.005847Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-12-23T19:04:58.007015Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|55d89cdb-6a482036-34d0d95-89d7498a_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-12-23T19:04:58.007066Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2025-12-23T19:04:58.007099Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|55d89cdb-6a482036-34d0d95-89d7498a_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2025-12-23T19:04:58.007199Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|55d89cdb-6a482036-34d0d95-89d7498a_0] PartitionId [0] Generation [1] Write session: destroy 2025-12-23T19:04:58.009699Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037894] server disconnected, pipe [14:7587131396551076472:3288] destroyed 2025-12-23T19:04:58.009752Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2758: [PQ: 72075186224037894] server disconnected, pipe [14:7587131396551076475:3288] destroyed 2025-12-23T19:04:58.009788Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2025-12-23T19:04:58.009829Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:04:58.009851Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.009870Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:04:58.009893Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.009911Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:04:58.016486Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:04:58.016527Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.016545Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:04:58.016567Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.016586Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T19:04:58.098058Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:04:58.098103Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.098121Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:04:58.098147Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.098166Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:04:58.099163Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:04:58.099206Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.099224Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:04:58.099248Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.099278Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:04:58.117654Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:04:58.117702Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.117723Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] 
Process user action and tx pending commits 2025-12-23T19:04:58.117749Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.117770Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2025-12-23T19:04:58.199324Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:04:58.199368Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.199387Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:04:58.199421Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.199450Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:04:58.199557Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:04:58.199571Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.199584Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:04:58.199599Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.199626Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2025-12-23T19:04:58.219226Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:04:58.219288Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.219307Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:04:58.219331Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:04:58.219353Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint16-pk_types6-all_types6-index6] [GOOD] >> YdbSdkSessionsPool::WaitQueue/1 >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint32-pk_types5-all_types5-index5] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__SYNC-pk_types3-all_types3-index3-pgint8--SYNC] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int8-pk_types3-all_types3-index3] [GOOD] >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> 
test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int64-pk_types0-all_types0-index0] [GOOD] >> alter_compression.py::TestAlterCompression::test_availability_data >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint64-pk_types4-all_types4-index4] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |99.1%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int16-pk_types2-all_types2-index2] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__ASYNC-pk_types34-all_types34-index34-Date--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int8-pk_types3-all_types3-index3] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId 2025-12-23 19:05:18,827 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-12-23 19:05:19,086 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1570663 58.6M 58.4M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/8ptz/004941/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.a 1570926 1.7G 1.7G 1.2G └─ ydb-core-statistics-aggregator-ut --trace-path-append /home/runner/.ya/build/build_root/8ptz/004941/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_ Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T18:55:23.009127Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:23.104841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:23.112482Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:55:23.112801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:55:23.112898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:55:23.453071Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:23.535832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:23.535951Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:23.594547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:23.711910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004941/r3tmp/tmpsJPgze/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3635, node 1 2025-12-23T18:55:24.343686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:55:24.344763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:55:24.344813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:55:24.344844Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:55:24.345073Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:55:24.410267Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:4794 2025-12-23T18:55:24.948080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:55:27.307246Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:55:27.312534Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:55:27.315216Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:55:27.339387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:27.339480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:27.376526Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:55:27.378287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:27.606779Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.607258Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.607830Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.608306Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.608489Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.608616Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.608761Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.608858Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.608956Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:55:27.719824Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:55:27.731108Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:55:27.731196Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:55:27.743882Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:55:27.906351Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:55:27.943728Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:55:27.943803Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:55:27.967279Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:55:27.968831Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:55:27.969043Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:55:27.969097Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:55:27.969152Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:55:27.969205Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:55:27.969263Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:55:27.969312Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:55:27.969702Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:55:28.010153Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:28.010243Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:55:28.020484Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:55:28.029930Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:55:28.030993Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:55:28.034913Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:55:28.060538Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:55:28.060587Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:55:28.060661Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:55:28.078664Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:55:28.086528Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:55:28.092540Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:55:28.092632Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:55:28.101578Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:55:28.198216Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:55:28.531455Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:55:28.632278Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:55:28.632347Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:265 ... tor] OwnerId: [2:5450:4464], ActorId: [2:5451:4465], Got response [2:5452:4466] SUCCESS 2025-12-23T19:01:35.657215Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T19:01:35.679142Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T19:01:35.679217Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId2, ActorId=[1:3190:3444] 2025-12-23T19:01:36.780283Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T19:01:36.782206Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5519:4511], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T19:01:36.789539Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5519:4511], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2025-12-23T19:01:36.789749Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5519:4511], Start read next stream part 2025-12-23T19:01:37.074237Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 8 ], ReplyToActorId[ [2:5530:4521]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T19:01:37.074614Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 8 ] 2025-12-23T19:01:37.074667Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 8, ReplyToActorId = [2:5530:4521], StatRequests.size() = 1 2025-12-23T19:03:36.421737Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5519:4511], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T19:03:36.421943Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5519:4511], Start read next stream part 2025-12-23T19:03:36.422721Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd69d6cnbjte9yfeems8sxtk", SessionId: ydb://session/3?node_id=2&id=ODAzMDQzZmYtM2M4OTQzMjMtYTNkNDAzMGQtZGU0NTQ5MjA=, Slow query, duration: 119.617632s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T19:03:36.423470Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5687:4605], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T19:03:36.428057Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5519:4511], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T19:03:36.428125Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5519:4511], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTAxZDVjOGQtMTlmYzAwYjQtYmI4MWYyYzYtMTNjZDE0ZTc=, TxId: 2025-12-23T19:03:36.428229Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 35000, txId: 18446744073709551615] shutting down 2025-12-23T19:03:36.431300Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5687:4605], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table` 2025-12-23T19:03:36.431441Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5687:4605], Start read next stream part 2025-12-23T19:03:36.516001Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 9 ], ReplyToActorId[ [2:5706:4620]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T19:03:36.516409Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 9 ] 2025-12-23T19:03:36.516459Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 9, ReplyToActorId = [2:5706:4620], StatRequests.size() = 1 2025-12-23T19:04:59.435490Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5687:4605], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T19:04:59.435669Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5687:4605], Start read next stream part 2025-12-23T19:04:59.436655Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd69gv7f95hthqk4adr602qy", SessionId: ydb://session/3?node_id=2&id=YTllNDdhNzYtNDM5OWVhNDktYzNlMWIwLWJmMTIyZmM0, Slow query, duration: 83.000012s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(256,8)) FROM `/Root/Database/Table`", parameters: 0b 2025-12-23T19:04:59.439963Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5860:4703], ActorId: [2:5862:4704], Starting query actor #1 [2:5863:4705] 2025-12-23T19:04:59.440047Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5862:4704], ActorId: [2:5863:4705], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-12-23T19:04:59.440606Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5687:4605], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T19:04:59.440671Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5517:2473], ActorId: [2:5687:4605], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGY5Yzg5YmMtZjU5NDAwOTEtNDU0N2ZiNGQtNmIwNjRlNzI=, TxId: 2025-12-23T19:04:59.440805Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 35100, txId: 18446744073709551615] shutting down 2025-12-23T19:04:59.445985Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5862:4704], ActorId: [2:5863:4705], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NWUyNDkxODEtOWM5ZmMxNDQtMmE3OWUzZWYtZDBhYjU0ZjI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2025-12-23T19:04:59.567493Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5862:4704], ActorId: [2:5863:4705], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWUyNDkxODEtOWM5ZmMxNDQtMmE3OWUzZWYtZDBhYjU0ZjI=, TxId: 2025-12-23T19:04:59.567582Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5862:4704], ActorId: [2:5863:4705], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWUyNDkxODEtOWM5ZmMxNDQtMmE3OWUzZWYtZDBhYjU0ZjI=, TxId: 2025-12-23T19:04:59.568089Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5860:4703], ActorId: [2:5862:4704], Got response [2:5863:4705] SUCCESS 2025-12-23T19:04:59.568543Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2025-12-23T19:04:59.616076Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2025-12-23T19:04:59.616159Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId3, ActorId=[1:3190:3444] 2025-12-23T19:04:59.738600Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T19:04:59.741488Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5904:2473], ActorId: [2:5906:4727], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T19:04:59.751836Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5904:2473], ActorId: [2:5906:4727], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2025-12-23T19:04:59.752066Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5904:2473], ActorId: [2:5906:4727], Start read next stream part 2025-12-23T19:04:59.850197Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 10 ], ReplyToActorId[ [2:5917:4737]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T19:04:59.850677Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 10 ] 2025-12-23T19:04:59.851055Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 10, ReplyToActorId = [2:5917:4737], StatRequests.size() = 1 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/004941/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/004941/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopics_4_Query [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint32-pk_types5-all_types5-index5] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_20_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 20] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] [GOOD] |99.1%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint16-pk_types6-all_types6-index6] [GOOD] |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int64-pk_types0-all_types0-index0] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_12_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 12] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_13_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 13] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0_UNIQUE_SYNC-pk_types32-all_types32-index32-Date-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint64-pk_types4-all_types4-index4] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> 
test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int16-pk_types2-all_types2-index2] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0_UNIQUE_SYNC-pk_types2-all_types2-index2-Datetime-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__ASYNC-pk_types1-all_types1-index1-pgint4--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0_UNIQUE_SYNC-pk_types8-all_types8-index8-pgdate-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Date_0__SYNC-pk_types30-all_types30-index30-Date--SYNC] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] [GOOD] >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__ASYNC-pk_types10-all_types10-index10-pgtimestamp--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] Test command err: 2025-12-23T18:58:54.660624Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587129883554640514:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T18:58:54.660697Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T18:58:54.726951Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005e31/r3tmp/tmpWKchkT/pdisk_1.dat 2025-12-23T18:58:54.941322Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:58:54.941438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:58:54.967325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T18:58:54.984730Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:58:54.998402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11151, node 1 2025-12-23T18:58:55.207742Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T18:58:55.234455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005e31/r3tmp/yandexRz7WoK.tmp 2025-12-23T18:58:55.234482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005e31/r3tmp/yandexRz7WoK.tmp 2025-12-23T18:58:55.234918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005e31/r3tmp/yandexRz7WoK.tmp 2025-12-23T18:58:55.235064Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:58:55.480320Z INFO: TTestServer started on Port 28801 GrpcPort 11151 TClient is connected to server localhost:28801 2025-12-23T18:58:55.671003Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:11151 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766516335066 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T18:58:55.769896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T18:58:55.796329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T18:58:57.669481Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129896439543345:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.669516Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129896439543312:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.669589Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.670016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129896439543348:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.670092Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.674229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:58:57.676200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129896439543384:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.676319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.677866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587129896439543386:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.677937Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:58:57.682846Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587129896439543349:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T18:58:57.748353Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587129896439543410:2644] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:58:57.995346Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587129896439543426:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T18:58:57.995836Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=NGNhZTNjYTEtYjE0ODM0OTgtN2Y0ZDlkOTYtNDgzZWZmNTA=, ActorId: [1:7587129896439543307:2327], ActorState: ExecuteState, TraceId: 01kd698azeen0w550b8c14be6n, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T18:58:57.997775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:57.998238Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T18:58:58.025970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:58:58.090972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:75 ... 
:05:49.202018Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2025-12-23T19:05:49.214861Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:05:49.214899Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.214917Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.214937Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.214953Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2025-12-23T19:05:49.215199Z node 14 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2025-12-23T19:05:49.230959Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:05:49.230998Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.231016Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.231039Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.231058Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:05:49.263229Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2025-12-23T19:05:49.263264Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.263281Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.263301Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.263335Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2025-12-23T19:05:49.263385Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2025-12-23T19:05:49.263398Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.263410Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.263424Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.263437Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{1, {1, 281474976715674}, 
100000}][StateIdle] Try persist 2025-12-23T19:05:49.302410Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2025-12-23T19:05:49.302445Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.302464Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.302487Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.302506Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2025-12-23T19:05:49.316044Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:05:49.316083Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.316102Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.316126Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.316146Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2025-12-23T19:05:49.330994Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:05:49.331037Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.331055Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.331078Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.331095Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:05:49.366949Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2025-12-23T19:05:49.366990Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.367010Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.367034Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.367054Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2025-12-23T19:05:49.367108Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2025-12-23T19:05:49.367122Z node 14 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.367134Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.367147Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.367159Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2025-12-23T19:05:49.402874Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2025-12-23T19:05:49.402911Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.402927Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.402949Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.402964Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2025-12-23T19:05:49.418862Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:05:49.418904Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.418922Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.418945Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.418962Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2025-12-23T19:05:49.430983Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2025-12-23T19:05:49.431026Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.431043Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.431065Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.431081Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2025-12-23T19:05:49.467125Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2025-12-23T19:05:49.467172Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.467192Z node 14 
:PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.467217Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.467236Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2025-12-23T19:05:49.467298Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2025-12-23T19:05:49.467329Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.467342Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2025-12-23T19:05:49.467357Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2025-12-23T19:05:49.467369Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist |99.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |99.2%| [TA] $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.2%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_18_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 18] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_10_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 10] [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] >> test_select.py::TestDML::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__SYNC-pk_types3-all_types3-index3-pgint8--SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] >> 
test_ttl.py::TestTTL::test_ttl[table_Datetime_0__SYNC-pk_types0-all_types0-index0-Datetime--SYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::EqWidthHistogram 2025-12-23 19:06:54,245 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-12-23 19:06:54,772 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1587445 58.6M 58.6M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/8ptz/00324d/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk17/testing_out_stuff/test_tool.arg 1587612 2.0G 2.0G 1.5G └─ ydb-core-statistics-service-ut --trace-path-append /home/runner/.ya/build/build_root/8ptz/00324d/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T18:56:57.838265Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:56:57.909663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:56:57.913894Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:448:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T18:56:57.914190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T18:56:57.914259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T18:56:58.233851Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:56:58.328352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:56:58.328479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:56:58.387820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:56:58.487147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00324d/r3tmp/tmpTFEFIh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 61971, node 1 2025-12-23T18:56:59.123423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T18:56:59.124538Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T18:56:59.124581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T18:56:59.124609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T18:56:59.124857Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T18:56:59.192301Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:64865 2025-12-23T18:56:59.784102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-12-23T18:57:02.276853Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T18:57:02.284519Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2025-12-23T18:57:02.288217Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: 
[WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T18:57:02.323446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:02.323561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:02.363454Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T18:57:02.365939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:02.631612Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:57:02.632470Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:57:02.633407Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:57:02.634216Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:57:02.634562Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:57:02.634697Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:57:02.634913Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:57:02.635085Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:57:02.635263Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-12-23T18:57:02.745096Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-12-23T18:57:02.768532Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T18:57:02.768662Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T18:57:02.781815Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T18:57:03.006110Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T18:57:03.059784Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2025-12-23T18:57:03.059881Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-12-23T18:57:03.085914Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-12-23T18:57:03.087218Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-12-23T18:57:03.087458Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-12-23T18:57:03.087507Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: 
column count# 0 2025-12-23T18:57:03.087565Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-12-23T18:57:03.087605Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-12-23T18:57:03.087664Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-12-23T18:57:03.087712Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2025-12-23T18:57:03.088029Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2025-12-23T18:57:03.130601Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8329: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:57:03.130713Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8359: ConnectToSA(), pipe client id: [2:2184:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-12-23T18:57:03.145020Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2192:2608] 2025-12-23T18:57:03.155280Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2228:2641] 2025-12-23T18:57:03.156740Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2228:2641], schemeshard id = 72075186224037897 2025-12-23T18:57:03.162847Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-12-23T18:57:03.204278Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Describe result: PathErrorUnknown 2025-12-23T18:57:03.204368Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Creating table 2025-12-23T18:57:03.204465Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2025-12-23T18:57:03.236218Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2354:2697], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:57:03.250455Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T18:57:03.260947Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2025-12-23T18:57:03.261121Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Subscribe on create table tx: 281474976720657 2025-12-23T18:57:03.277575Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. 
Subscribe on tx: 281474976720657 registered 2025-12-23T18:57:03.424166Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-12-23T18:57:03.821452Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-12-23T18:57:03.892889Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2652]. Table already exists, number of columns: 5, has SecurityObject: true 2025-12-23T18:57:03.892990Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2245:2653] Owner: [2:2244:2 ... shard: 72057594046644480 2025-12-23T18:57:28.086484Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8461: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2025-12-23T18:57:28.086560Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8298: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-12-23T18:57:28.086802Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2025-12-23T18:57:28.099970Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-12-23T18:57:28.261837Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2025-12-23T18:57:28.266205Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4516:2473], ActorId: [2:4526:4041], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T18:57:28.269804Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4516:2473], ActorId: [2:4526:4041], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1` 2025-12-23T18:57:28.269983Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4516:2473], ActorId: [2:4526:4041], Start read next stream part 2025-12-23T18:57:28.281804Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4539:4047], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:28.281928Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4550:4052], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:28.282469Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:28.283654Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4555:4057], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:28.284075Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T18:57:28.289980Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4571:4061], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:57:28.293564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T18:57:28.353642Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4553:4055], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-12-23T18:57:28.459216Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4630:4104], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T18:57:28.519291Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4629:4103] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T18:57:28.973346Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4651:4117]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T18:57:28.973738Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-12-23T18:57:28.973845Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4653:4119] 2025-12-23T18:57:28.973925Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4653:4119] 2025-12-23T18:57:28.974363Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4654:4120] 2025-12-23T18:57:28.974507Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4654:4120], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-12-23T18:57:28.974582Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-12-23T18:57:28.974694Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4653:4119], server id = [2:4654:4120], tablet id = 72075186224037894, status = OK 2025-12-23T18:57:28.974824Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-12-23T18:57:28.974925Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4651:4117], StatRequests.size() = 1 2025-12-23T18:57:28.975090Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-12-23T19:00:51.864413Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4516:2473], ActorId: [2:4526:4041], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T19:00:51.864885Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4516:2473], ActorId: [2:4526:4041], Start read next stream part 2025-12-23T19:00:51.866018Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01kd695kpd7jzv1erqkv2r3wsj", SessionId: ydb://session/3?node_id=2&id=ZGZhOGIyMmMtYTk4NmM3N2YtZjM4MDI1MWUtYTRjNDg0MDI=, Slow query, duration: 203.588956s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1`", parameters: 0b 2025-12-23T19:00:51.866840Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: 
[2:4516:2473], ActorId: [2:4850:4239], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2025-12-23T19:00:51.870648Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2025-12-23T19:00:51.870801Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4516:2473], ActorId: [2:4850:4239], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(256,8)),AGGREGATE_BY(LowCardinalityInt,$f0(256,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0,9)),AGGREGATE_BY(Float,$f0(256,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(256,8)),AGGREGATE_BY(Date,$f1(32,10000,10099)),AGGREGATE_BY(NearNumericLimits,$f0(256,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808,9223372036854775807)) FROM `/Root/Database/Table1` 2025-12-23T19:00:51.870921Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4516:2473], ActorId: [2:4850:4239], Start read next stream part 2025-12-23T19:00:51.874493Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4516:2473], ActorId: [2:4526:4041], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T19:00:51.874566Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4516:2473], ActorId: [2:4526:4041], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzdhODk4MmYtOTUxNTg4NDItMzFjMDdlMWEtZDJhYzc3NmQ=, TxId: 2025-12-23T19:00:52.155818Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4877:4262]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-12-23T19:00:52.156141Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-12-23T19:00:52.156195Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4877:4262], StatRequests.size() = 1 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/00324d/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk17/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most 
recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10658668958/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/8ptz/00324d/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk17/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |99.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] |99.2%| [TA] $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.2%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__ASYNC-pk_types4-all_types4-index4-Datetime--ASYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__ASYNC-pk_types1-all_types1-index1-pgint4--ASYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__SYNC-pk_types6-all_types6-index6-pgdate--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__ASYNC-pk_types34-all_types34-index34-Date--ASYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0_UNIQUE_SYNC-pk_types5-all_types5-index5-pgint8-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0_UNIQUE_SYNC-pk_types8-all_types8-index8-pgdate-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_16_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 16] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__ASYNC-pk_types10-all_types10-index10-pgtimestamp--ASYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] >> 
test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_13_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 13] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0_UNIQUE_SYNC-pk_types32-all_types32-index32-Date-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] [GOOD] >> 
test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0_UNIQUE_SYNC-pk_types2-all_types2-index2-Datetime-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Date_0__SYNC-pk_types30-all_types30-index30-Date--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] >> test_select.py::TestDML::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_select.py::TestDML::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types12-all_types12-index12---] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.2%| [TA] $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |99.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |99.2%| [TA] {RESULT} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> 
test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_select.py::TestDML::test_as_table |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] [GOOD] >> test_select.py::TestDML::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] [GOOD] >> 
test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_select.py::TestDML::test_as_table [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] |99.2%| [TA] $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} |99.2%| [TA] {RESULT} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Utf8-pk_types9-all_types9-index9] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_select.py::TestDML::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__SYNC-pk_types6-all_types6-index6-pgdate--SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] 
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__SYNC-pk_types0-all_types0-index0-Datetime--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_as_table [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0_UNIQUE_SYNC-pk_types5-all_types5-index5-pgint8-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] >> test_select.py::TestDML::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] >> test_select.py::TestDML::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] [FAIL] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__ASYNC-pk_types4-all_types4-index4-Datetime--ASYNC] [GOOD] >> 
test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] [FAIL] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] |99.3%| [TM] {BAZEL_UPLOAD} 
ydb/tests/datashard/ttl/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] |99.3%| [TA] $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log} |99.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log} |99.3%| [TA] {RESULT} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |99.3%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_select.py::TestDML::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_select.py::TestDML::test_select[table_all_types-pk_types12-all_types12-index12---] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Utf8-pk_types9-all_types9-index9] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |99.3%| [TM] {BAZEL_UPLOAD} 
ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message >> test_select.py::TestDML::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_select.py::TestDML::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] |99.3%| [TA] $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] [GOOD] |99.3%| [TA] {RESULT} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] |99.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] >> test_select.py::TestDML::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_select.py::TestDML::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> TestFilterSet::FilterGroup >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] >> BulkUpsert::BulkUpsert >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Utf8-pk_types9-all_types9-index9] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} 
ydb/tests/datashard/partitioning/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> KqpTpch::Query01 |99.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |99.3%| [TS] {RESULT} ydb/tests/library/ut/py3test |99.3%| [TS] {BAZEL_UPLOAD} ydb/tests/library/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] |99.3%| [TA] $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... results_accumulator.log} |99.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... results_accumulator.log} |99.3%| [TA] {RESULT} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... results_accumulator.log} >> TMLPConsumerTests::ReloadPQTablet >> TestFilterSet::FilterGroup [GOOD] >> TestFilterSet::DuplicationValidation >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> LongTxService::BasicTransactions >> TestFilterSet::DuplicationValidation [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] >> TestFilterSet::CompilationValidation >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] >> LongTxService::BasicTransactions [GOOD] >> LongTxService::AcquireSnapshot |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> KqpTpch::Query01 
[GOOD] >> KqpTpch::Query02 |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> CoordinatorTests::Route >> CoordinatorTests::Route [GOOD] >> TestFilterSet::CompilationValidation [GOOD] >> CoordinatorTests::RouteTwoTopicWichSameName [GOOD] >> CoordinatorTests::WaitNodesConnected >> TestFilterSet::Watermark >> LongTxService::AcquireSnapshot [GOOD] >> LongTxService::LockSubscribe >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> CoordinatorTests::WaitNodesConnected [GOOD] >> CoordinatorTests::ProcessMappingWithNodeIds >> CoordinatorTests::ProcessMappingWithNodeIds [GOOD] >> CoordinatorTests::RebalanceAfterNewNodeConnected >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column [GOOD] >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> CoordinatorTests::RebalanceAfterNewNodeConnected [GOOD] >> CoordinatorTests::RebalanceAfterNodeDisconnected >> CoordinatorTests::RebalanceAfterNodeDisconnected [GOOD] >> Discovery::DelayedNameserviceResponse >> LeaderElectionTests::Test1 >> SdkCredProvider::PingFromProviderSyncDiscovery >> LeaderElectionTests::Test1 [GOOD] >> LeaderElectionTests::TestLocalMode >> LeaderElectionTests::TestLocalMode [GOOD] >> LongTxService::LockSubscribe [GOOD] >> TopicSessionTests::TwoSessionsWithoutOffsets >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/ut/unittest >> LongTxService::LockSubscribe [GOOD] Test command err: 2025-12-23T19:10:09.428930Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T19:10:09.429488Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c6d/r3tmp/tmpm3eK7G/pdisk_1.dat": 
unknown reason, errno# 0. PDiskId# 1000 2025-12-23T19:10:09.430367Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c6d/r3tmp/tmpm3eK7G/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c6d/r3tmp/tmpm3eK7G/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 556265007402337687 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T19:10:09.519967Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:94: TLongTxService [Node 1] Received TEvBeginTx from [1:442:2332] 2025-12-23T19:10:09.520135Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:123: TLongTxService [Node 1] Created new LongTxId# ydb://long-tx/000000001k1j7dz2td1v7tyhs1?node_id=1 2025-12-23T19:10:09.531540Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:265: TLongTxService [Node 2] Received TEvAttachColumnShardWrites from [2:443:2101] LongTxId# ydb://long-tx/000000001k1j7dz2td1v7tyhs1?node_id=1 2025-12-23T19:10:09.531706Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 2] Received TEvNodeConnected for NodeId# 1 from session [2:103:2048] 2025-12-23T19:10:09.531885Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:265: TLongTxService [Node 1] Received TEvAttachColumnShardWrites from [2:154:2090] LongTxId# ydb://long-tx/000000001k1j7dz2td1v7tyhs1?node_id=1 2025-12-23T19:10:09.532162Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:139: TLongTxService [Node 2] Received TEvCommitTx from [2:443:2101] LongTxId# ydb://long-tx/000000001k1j7dz2td1v7tyhs1?node_id=1 2025-12-23T19:10:09.532359Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:139: TLongTxService [Node 1] Received TEvCommitTx from [2:154:2090] LongTxId# ydb://long-tx/000000001k1j7dz2td1v7tyhs1?node_id=1 2025-12-23T19:10:09.532419Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:162: TLongTxService [Node 1] Committed LongTxId# ydb://long-tx/000000001k1j7dz2td1v7tyhs1?node_id=1 without side-effects 
2025-12-23T19:10:09.532605Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 2] Received TEvRollbackTx from [2:443:2101] LongTxId# ydb://long-tx/000000001k1j7dz2td1v7tyhs1?node_id=1 2025-12-23T19:10:09.532791Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 1] Received TEvRollbackTx from [2:154:2090] LongTxId# ydb://long-tx/000000001k1j7dz2td1v7tyhs1?node_id=1 2025-12-23T19:10:09.534197Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 2] Received TEvRollbackTx from [2:443:2101] LongTxId# ydb://long-tx/000000001k1j7dz2td1v7tyhs1?node_id=1 2025-12-23T19:10:09.534388Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 1] Received TEvRollbackTx from [2:154:2090] LongTxId# ydb://long-tx/000000001k1j7dz2td1v7tyhs1?node_id=1 2025-12-23T19:10:09.535768Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2025-12-23T19:10:09.535890Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2025-12-23T19:10:09.535978Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 2 2025-12-23T19:10:09.536391Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 1 from session [2:103:2048] 2025-12-23T19:10:09.536875Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:75:2076] ServerId# [1:364:2281] TabletId# 72057594037932033 PipeClientId# [2:75:2076] 2025-12-23T19:10:09.537092Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:152:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-12-23T19:10:09.541715Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:139: TLongTxService [Node 2] Received TEvCommitTx from [2:443:2101] LongTxId# ydb://long-tx/000000001k1j7dz2td1v7tyhs1?node_id=3 2025-12-23T19:10:09.542016Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 3 from session [2:481:2103] 2025-12-23T19:10:10.319879Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7754: Cannot subscribe to console configs 2025-12-23T19:10:10.319947Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:10.658324Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:10:11.500550Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T19:10:11.501089Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c6d/r3tmp/tmpV6BWRZ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T19:10:11.501346Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c6d/r3tmp/tmpV6BWRZ/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c6d/r3tmp/tmpV6BWRZ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10968860816303971740 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T19:10:11.995940Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:346: TLongTxService [Node 3] Received TEvAcquireReadSnapshot from [3:624:2496] for database /dc-1 2025-12-23T19:10:11.996038Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:381: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-12-23T19:10:12.006559Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:388: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-12-23T19:10:12.006800Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:48: LongTxService.AcquireSnapshot [3:820:2677] Sending navigate request for /dc-1 2025-12-23T19:10:12.016023Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:75: LongTxService.AcquireSnapshot [3:820:2677] Received navigate response status Ok 2025-12-23T19:10:12.016110Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:129: LongTxService.AcquireSnapshot [3:820:2677] Sending acquire step to coordinator 72057594046316545 2025-12-23T19:10:12.016761Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:165: LongTxService.AcquireSnapshot [3:820:2677] Received read step 1500 2025-12-23T19:10:12.016888Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:400: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 1 2025-12-23T19:10:12.017153Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:94: TLongTxService [Node 3] Received TEvBeginTx from [3:624:2496] 2025-12-23T19:10:12.017210Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:381: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-12-23T19:10:12.027643Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:388: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-12-23T19:10:12.027892Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:48: 
LongTxService.AcquireSnapshot [3:833:2684] Sending navigate request for /dc-1 2025-12-23T19:10:12.028156Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:75: LongTxService.AcquireSnapshot [3:833:2684] Received navigate response status Ok 2025-12-23T19:10:12.028214Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:129: LongTxService.AcquireSnapshot [3:833:2684] Sending acquire step to coordinator 72057594046316545 2025-12-23T19:10:12.028430Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:165: LongTxService.AcquireSnapshot [3:833:2684] Received read step 1500 2025-12-23T19:10:12.028520Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:400: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 2 2025-12-23T19:10:12.028578Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:425: TLongTxService [Node 3] Created new read-only LongTxId# ydb://long-tx/read-only?snapshot=1500%3Amax 2025-12-23T19:10:12.028809Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:94: TLongTxService [Node 3] Received TEvBeginTx from [3:624:2496] 2025-12-23T19:10:12.028860Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:381: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-12-23T19:10:12.041147Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:388: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-12-23T19:10:12.041377Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:48: LongTxService.AcquireSnapshot [3:835:2686] Sending navigate request for /dc-1 2025-12-23T19:10:12.041649Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:75: LongTxService.AcquireSnapshot [3:835:2686] Received navigate response status Ok 2025-12-23T19:10:12.041707Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:129: LongTxService.AcquireSnapshot [3:835:2686] Sending acquire step to coordinator 72057594046316545 2025-12-23T19:10:12.041953Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:165: LongTxService.AcquireSnapshot [3:835:2686] Received read step 1500 2025-12-23T19:10:12.042041Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:400: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 3 2025-12-23T19:10:12.042117Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:423: TLongTxService [Node 3] Created new read-write LongTxId# ydb://long-tx/00000001efa8a1fnrdpxd6648f?node_id=3&snapshot=1500%3Amax 2025-12-23T19:10:12.940173Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-12-23T19:10:12.940625Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c6d/r3tmp/tmpTBWp5W/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-12-23T19:10:12.940824Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/8ptz/003c6d/r3tmp/tmpTBWp5W/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/8ptz/003c6d/r3tmp/tmpTBWp5W/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12367791861952005573 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-12-23T19:10:13.001490Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:468: TLongTxService [Node 5] Received TEvRegisterLock for LockId# 123 2025-12-23T19:10:13.001977Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [5:443:2333] for LockId# 987 LockNode# 5 2025-12-23T19:10:13.035025Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 6] Received TEvSubscribeLock from [6:444:2101] for LockId# 987 LockNode# 5 2025-12-23T19:10:13.035186Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:100:2048] 2025-12-23T19:10:13.036848Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [6:153:2090] for LockId# 987 LockNode# 5 2025-12-23T19:10:13.038276Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:611: TLongTxService [Node 6] Received TEvLockStatus from [5:152:2138] for LockId# 987 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-12-23T19:10:13.038515Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [5:443:2333] for LockId# 123 LockNode# 5 2025-12-23T19:10:13.038692Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 6] Received TEvSubscribeLock from [6:444:2101] for LockId# 123 LockNode# 5 2025-12-23T19:10:13.039989Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [6:153:2090] for LockId# 123 LockNode# 5 2025-12-23T19:10:13.040244Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:611: TLongTxService [Node 6] Received TEvLockStatus from [5:152:2138] for LockId# 123 LockNode# 5 LockStatus# STATUS_SUBSCRIBED 2025-12-23T19:10:13.040403Z node 5 :LONG_TX_SERVICE DEBUG: 
long_tx_service_impl.cpp:479: TLongTxService [Node 5] Received TEvUnregisterLock for LockId# 123 2025-12-23T19:10:13.040536Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:611: TLongTxService [Node 6] Received TEvLockStatus from [5:152:2138] for LockId# 123 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-12-23T19:10:13.040678Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 6] Received TEvSubscribeLock from [6:444:2101] for LockId# 234 LockNode# 5 2025-12-23T19:10:13.040998Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 6 2025-12-23T19:10:13.041093Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2025-12-23T19:10:13.041186Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2025-12-23T19:10:13.041576Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:100:2048] 2025-12-23T19:10:13.042075Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:77:2077] ServerId# [5:365:2282] TabletId# 72057594037932033 PipeClientId# [6:77:2077] 2025-12-23T19:10:13.042245Z node 6 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [6:151:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-12-23T19:10:13.246996Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:474:2048] 2025-12-23T19:10:13.247288Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2025-12-23T19:10:13.247340Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2025-12-23T19:10:13.247860Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:474:2048] 2025-12-23T19:10:13.248078Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:475:2102] ServerId# [5:479:2352] TabletId# 72057594037932033 PipeClientId# [6:475:2102] 2025-12-23T19:10:13.477313Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:494:2048] 2025-12-23T19:10:13.478052Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:494:2048] 2025-12-23T19:10:13.478196Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2025-12-23T19:10:13.478258Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2025-12-23T19:10:13.478694Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:495:2103] ServerId# [5:499:2363] TabletId# 72057594037932033 PipeClientId# [6:495:2103] 2025-12-23T19:10:13.731731Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:517:2048] 2025-12-23T19:10:13.732047Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2025-12-23T19:10:13.732119Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] 
NodeDisconnected NodeId# 6 2025-12-23T19:10:13.732381Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:518:2105] ServerId# [5:522:2377] TabletId# 72057594037932033 PipeClientId# [6:518:2105] 2025-12-23T19:10:13.732681Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:517:2048] |99.3%| [TS] {RESULT} ydb/core/tx/long_tx_service/ut/unittest |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |99.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/long_tx_service/ut/unittest |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> TestFilterSet::Watermark [GOOD] >> BasicExample::BasicExample >> TestFilterSet::WatermarkWhere >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] >> SdkCredProvider::PingFromProviderSyncDiscovery [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> TGenerateQueueIdTests::MakeQueueIdBasic [GOOD] >> TParseParamsTests::CreateUser [GOOD] >> TParseParamsTests::ChangeMessageVisibilityBatchRequest [GOOD] >> TParseParamsTests::DeleteMessageBatchRequest [GOOD] >> TParseParamsTests::MessageBody [GOOD] >> TParseParamsTests::SendMessageBatchRequest [GOOD] >> TParseParamsTests::DeleteQueueBatchRequest [GOOD] >> TParseParamsTests::PurgeQueueBatchRequest [GOOD] >> TParseParamsTests::GetQueueAttributesBatchRequest [GOOD] >> TParseParamsTests::UnnumberedAttribute [GOOD] >> TParseParamsTests::UnnumberedAttributeName [GOOD] >> TParseParamsTests::FailsOnInvalidDeduplicationId [GOOD] >> TParseParamsTests::FailsOnInvalidGroupId [GOOD] >> TParseParamsTests::FailsOnInvalidReceiveRequestAttemptId [GOOD] >> TParseParamsTests::FailsOnInvalidMaxNumberOfMessages [GOOD] >> TParseParamsTests::FailsOnInvalidWaitTime [GOOD] >> TParseParamsTests::FailsOnInvalidDelaySeconds [GOOD] >> ConfigGRPCService::ReplaceConfig >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |99.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/ut/unittest >> TParseParamsTests::FailsOnInvalidDelaySeconds [GOOD] |99.3%| [TS] {RESULT} ydb/core/ymq/ut/unittest |99.3%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/ut/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] >> TestFilterSet::WatermarkWhere [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] >> TestFilterSet::WatermarkWhereFalse |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] >> TabletService_ChangeSchema::Basics >> KqpTpch::Query02 [GOOD] >> KqpTpch::Query03 >> TCreateAndDropViewTest::CheckCreatedView >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> BasicExample::BasicExample [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default [GOOD] >> ConfigGRPCService::ReplaceConfig [GOOD] >> ConfigGRPCService::ReplaceConfigWithInvalidHostConfig >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default_not_null |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_example.py::TestExample::test_example >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] [GOOD] >> TestFilterSet::WatermarkWhereFalse [GOOD] >> KqpTpch::Query03 [GOOD] >> KqpTpch::Query04 >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] >> Discovery::DelayedNameserviceResponse [GOOD] >> Discovery::SecondDiscovererRequestBeforeNameserviceResponse >> TestFormatHandler::ManyJsonClients >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |99.4%| [TM] {RESULT} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> 
KqpTpch::Query04 [GOOD] >> KqpTpch::Query05 |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> MediatorTimeCast::ReadStepSubscribe >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> BasicExample::BasicExample [GOOD] >> TCreateAndDropViewTest::CheckCreatedView [GOOD] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |99.4%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/basic_example/gtest |99.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> ConfigGRPCService::ReplaceConfigWithInvalidHostConfig [GOOD] >> ConfigGRPCService::FetchConfig >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> TestFormatHandler::ManyJsonClients [GOOD] >> TabletService_ChangeSchema::Basics [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] >> TestFormatHandler::ManyRawClients >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> QueryActorTest::SimpleQuery >> test.py::TestViewer::test_whoami_root >> TopicSessionTests::TwoSessionsWithoutOffsets [GOOD] >> KqpTpch::Query05 [GOOD] >> KqpTpch::Query06 >> test_timeout.py::TestTimeout::test_timeout >> Coordinator::ReadStepSubscribe >> TopicSessionTests::TwoSessionWithoutPredicate >> DataShardBackgroundCompaction::ShouldCompact >> ReadUpdateWrite::Load >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default_not_null [GOOD] >> KqpTpch::Query06 [GOOD] >> KqpTpch::Query07 >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::InvalidQuery >> TMLPConsumerTests::ReloadPQTablet [GOOD] >> TMLPConsumerTests::AlterConsumer >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] >> test_example.py::TestExample::test_example [GOOD] >> TestFormatHandler::ManyRawClients [GOOD] >> ConfigGRPCService::FetchConfig [GOOD] >> ConfigGRPCService::CheckV1IsBlocked >> Login::TokenIsNotExpired >> TestFormatHandler::ClientValidation >> Discovery::SecondDiscovererRequestBeforeNameserviceResponse [GOOD] >> 
test.py::test_order_conflict [GOOD] >> test.py::test_missing_value [GOOD] >> test.py::test_unexpected_value [GOOD] >> test.py::test_local >> TabletService_ChangeSchema::OnlyAdminsAllowed [GOOD] >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead >> Login::TokenIsNotExpired [GOOD] >> MediatorTimeCast::ReadStepSubscribe [GOOD] >> MediatorTimeCast::GranularTimecast >> QueryActorTest::SimpleQuery [GOOD] >> QueryActorTest::Rollback >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> test_commit.py::TestCommit::test_commit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/discovery/unittest >> Discovery::SecondDiscovererRequestBeforeNameserviceResponse [GOOD] Test command err: Trying to start YDB, gRPC: 22638, MsgBus: 64184 2025-12-23T19:10:13.942164Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587132802869312934:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:13.942228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0062d7/r3tmp/tmpEwLfye/pdisk_1.dat 2025-12-23T19:10:14.291650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:10:14.344807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:14.344985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:14.403652Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:14.412413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22638, node 1 2025-12-23T19:10:14.505748Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:10:14.831480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:10:14.831502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:10:14.831571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:10:14.831738Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:10:14.968060Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:64184 TClient is connected to server localhost:64184 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766517014479 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:10:15.875076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:10:15.900325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:10:16.125627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:10:16.326166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:10:16.408444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:10:17.697230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132820049183958:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:17.697328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:17.697807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132820049183967:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:17.697854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:18.439690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:18.494122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:18.533541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:18.575608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:18.618093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:18.696542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:18.742967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:18.799900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:18.919899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132824344152146:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:18.920022Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:18.920514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132824344152151:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:18.920573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132824344152152:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:18.920648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:18.933515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:10:18.941887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587132802869312934:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:18.9 ... e(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:23.108375Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:23.110801Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587132838486682282:2081] 1766517022961079 != 1766517022961082 2025-12-23T19:10:23.125206Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18495, node 2 2025-12-23T19:10:23.207815Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:10:23.267111Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:10:23.267134Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:10:23.267142Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:10:23.267215Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30998 TClient is connected to server localhost:30998 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766517023222 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:10:23.684358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T19:10:23.693880Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:10:23.758508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:10:23.974266Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; waiting... 2025-12-23T19:10:24.014245Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:24.078662Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:10:26.415535Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587132855666553350:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:26.415627Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:26.419399Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587132855666553360:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:26.419504Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:26.504682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:26.539666Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:26.576435Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:26.615140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:26.660551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:26.703976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:26.739495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:26.790445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:26.869744Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587132855666554233:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:26.869824Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:26.870017Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587132855666554238:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:26.870077Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587132855666554239:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:26.870149Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:26.873796Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:10:26.884405Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7587132855666554242:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-12-23T19:10:26.946991Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7587132855666554293:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:10:27.962550Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7587132838486682321:2070];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:27.963191Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |99.4%| [TM] {RESULT} ydb/core/kqp/ut/discovery/unittest |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/discovery/unittest >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] >> test_commit.py::TestCommit::test_commit [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber >> TCreateAndDropViewTest::InvalidQuery [GOOD] >> TCreateAndDropViewTest::ParsingSecurityInvoker >> TestFormatHandler::ClientValidation [GOOD] >> TestFormatHandler::ClientError >> KqpTpch::Query07 [GOOD] >> KqpTpch::Query08 |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] >> TDqPqRdReadActorTests::TestReadFromTopic2 >> TDqPqRdReadActorTests::TestReadFromTopic2 [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> TopicSessionTests::TwoSessionWithoutPredicate [GOOD] >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration [GOOD] >> DataShardStats::OneChannelStatsCorrect >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate >> TDqPqRdReadActorTests::SessionError [GOOD] >> DataShardBackgroundCompaction::ShouldCompact [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed >> TDqPqRdReadActorTests::ReadWithFreeSpace [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/auth/gtest >> Login::TokenIsNotExpired [GOOD] |99.4%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/auth/gtest >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead |99.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/auth/gtest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/config/ut/unittest >> ConfigGRPCService::CheckV1IsBlocked Test command err: 2025-12-23T19:10:17.338071Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587132819328752070:2071];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:17.338144Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:10:17.466164Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/002e56/r3tmp/tmpRBPraM/pdisk_1.dat 2025-12-23T19:10:17.751396Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:10:18.030556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:18.030679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:18.040797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:18.091153Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:18.094941Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587132819328752033:2082] 1766517017326662 != 1766517017326665 2025-12-23T19:10:18.098870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 65035, node 1 2025-12-23T19:10:18.173007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/.sys 2025-12-23T19:10:18.173826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T19:10:18.173866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T19:10:18.174034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-12-23T19:10:18.175079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T19:10:18.175121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:63: MkDir::TPropose operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T19:10:18.175182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-12-23T19:10:18.175360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:175: SysViewsRosterUpdate# [1:7587132823623720028:2395] at schemeshard: 72057594046644480 Handle TEvModifySchemeTransactionResult, mkdir '/Root/.sys', status: StatusAccepted 2025-12-23T19:10:18.175378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:158: SysViewsRosterUpdate# [1:7587132823623720028:2395] at schemeshard: 72057594046644480 Send TEvNotifyTxCompletion, txId 281474976715657 
2025-12-23T19:10:18.175497Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2025-12-23T19:10:18.175849Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2025-12-23T19:10:18.178143Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2025-12-23T19:10:18.178186Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2025-12-23T19:10:18.178344Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2025-12-23T19:10:18.178508Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2025-12-23T19:10:18.179630Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2025-12-23T19:10:18.179658Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2025-12-23T19:10:18.181088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T19:10:18.192821Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-12-23T19:10:18.193168Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } VDiskStatus { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1001 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } VDiskStatus { VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1002 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } VDiskStatus { VDiskId { GroupID: 2181038083 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1003 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } VDiskStatus { VDiskId { GroupID: 2181038084 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1004 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2025-12-23T19:10:18.195455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:10:18.195482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: false 2025-12-23T19:10:18.195504Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:10:18.196298Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2025-12-23T19:10:18.196358Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:425: Got proxy service configuration 2025-12-23T19:10:18.196393Z node 1 
:GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T19:10:18.196422Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } State: Initial Replicated: false DiskSpace: Green } } 2025-12-23T19:10:18.196432Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T19:10:18.196452Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:425: Got proxy service configuration 2025-12-23T19:10:18.196458Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T19:10:18.196498Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T19:10:18.196504Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } State: Initial Replicated: false DiskSpace: Green } } 2025-12-23T19:10:18.196623Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038083 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } State: Initial Replicated: false DiskSpace: Green } } 2025-12-23T19:10:18.196717Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038084 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1004 } State: Initial Replicated: false DiskSpace: Green } } 2025-12-23T19:10:18.197388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 3, at schemeshard: 72057594046644480 2025-12-23T19:10:18.197678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/auth_permissions 2025-12-23T19:10:18.197709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715658:0, there is await operations num 1 2025-12-23T19:10:18.197785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:175: SysViewsRosterUpdate# [1:7587132823623720028:2395] at schemeshard: 72057594046644480 Handle TEvModifySchemeTransactionResult, create sys view '/Root/.sys/auth_permissions', status: StatusAccepted 2025-12-23T19:10:18.197805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:158: SysViewsRosterUpdate# [1:7587132823623720028:2395] at schemeshard: 72057594046644480 Send TEvNotifyTxCompletion, txId 281474976715658 2025-12-23T19:10:18.197900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715659, response: Status: StatusAccepted TxId: 281474976715659 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-12-23T19:10:18.198084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: 
StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/auth_effective_permissions 2025-12-23T19:10:18.198101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715659:0, there is await operations num 1 2025-12-23T19:10:18.198172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715660, response: Status: StatusAccepted TxId: 281474976715660 SchemeshardId: 72057594046644480 PathId: 5, at schemeshard: 72057594046644480 2025-12-23T19:10:18.198326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/auth_users 2025-12-2 ... 8 -> 240 2025-12-23T19:10:31.515092Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-12-23T19:10:31.515253Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-12-23T19:10:31.515300Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-12-23T19:10:31.517278Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-12-23T19:10:31.517309Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-12-23T19:10:31.517503Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-12-23T19:10:31.517528Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:7587132879412092795:2392], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-12-23T19:10:31.517574Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-12-23T19:10:31.517604Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-12-23T19:10:31.517701Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710657:0 progress is 1/1 2025-12-23T19:10:31.517722Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-12-23T19:10:31.517744Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710657:0 progress is 1/1 2025-12-23T19:10:31.517755Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-12-23T19:10:31.517772Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: 
false 2025-12-23T19:10:31.517794Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-12-23T19:10:31.517817Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2025-12-23T19:10:31.517828Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5576: RemoveTx for txid 281474976710657:0 2025-12-23T19:10:31.517881Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2025-12-23T19:10:31.517904Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2025-12-23T19:10:31.517917Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 6 2025-12-23T19:10:31.519294Z node 7 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T19:10:31.519415Z node 7 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T19:10:31.519431Z node 7 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2025-12-23T19:10:31.519470Z node 7 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2025-12-23T19:10:31.519752Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6273: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-12-23T19:10:31.520543Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-12-23T19:10:31.520566Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-12-23T19:10:31.520586Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2025-12-23T19:10:31.520607Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-12-23T19:10:31.520686Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2025-12-23T19:10:31.520703Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [7:7587132879412093396:2304] 2025-12-23T19:10:31.522101Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710657 2025-12-23T19:10:31.563413Z node 7 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc 
request# ReplaceConfigRequest, traceId# 01kd69xgmb5cp7982m754804pa, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:38912, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-12-23T19:10:31.580880Z node 7 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { GetInterfaceVersion { } } } 2025-12-23T19:10:31.581424Z node 7 :BS_CONTROLLER DEBUG: {BSC24@console_interaction.cpp:224} Console replace config request Request# {ClusterYaml: "\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\n\nconfig:\n host_configs:\n - host_config_id: 1\n drive:\n - path: SectorMap:1:64\n type: SSD\n - path: SectorMap:2:64\n type: SSD\n - host_config_id: 2\n drive:\n - path: SectorMap:3:64\n type: SSD\n hosts:\n - host: ::1\n port: 12001\n host_config_id: 2\n feature_flags:\n switch_to_config_v2: true\n" DedicatedConfigMode: false AllowUnknownFields: false BypassMetadataChecks: false PeerName: "ipv6:[::1]:38912" UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" } 2025-12-23T19:10:31.582896Z node 7 :BS_CONTROLLER DEBUG: {BSC36@console_interaction.cpp:407} Sending TEvControllerValidateConfigRequest to console ConsolePipe# [7:7587132879412092879:2472] 2025-12-23T19:10:31.595179Z node 7 :BS_CONTROLLER DEBUG: {BSC27@console_interaction.cpp:478} Console validate config response Response# {Status: ConfigIsValid YAML: "\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 1\n\nconfig:\n host_configs:\n - host_config_id: 1\n drive:\n - path: SectorMap:1:64\n type: SSD\n - path: SectorMap:2:64\n type: SSD\n - host_config_id: 2\n drive:\n - path: SectorMap:3:64\n type: SSD\n hosts:\n - host: ::1\n port: 12001\n host_config_id: 2\n feature_flags:\n switch_to_config_v2: true\n" } 2025-12-23T19:10:31.601247Z node 7 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:431} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:1:64" Type: SSD } Drive { Path: "SectorMap:2:64" Type: SSD } } } Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "SectorMap:3:64" Type: SSD } } } Command { DefineBox { BoxId: 999 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } ItemConfigGeneration: 1 } } Command { DeleteHostConfig { HostConfigId: 7 ItemConfigGeneration: 1 } } Rollback: true StorageConfig { type_url: "type.googleapis.com/NKikimrBlobStorage.TStorageConfig" value: "\032\234\001\nL\n\030\010\001\020\001\032\016SectorMap:1:64(\0010\001\022\030\n\n\010\000\020\001\030\000 \000(\000\022\010\010\001\020\001\030\000 \001\030\000\032\024\010\000\020\001\030\000\"\014\n\n\n\010\010\001\020\001\030\000 \001 \001R*\010\001\032\022\n\016SectorMap:1:64\020\001\032\022\n\016SectorMap:2:64\020\001R\026\010\002\032\022\n\016SectorMap:3:64\020\001Z\010\010\001\"\004\020\002\030\001*,\n\003::1\020\341]\030\001\" R\007default\362\001\020generated-rack-1\240\2111\001J\010\010\001\022\004\010\001\030\001R\010\010\001\022\004\010\001\030\001Z\010\010\001\022\004\010\001\030\001" } } 2025-12-23T19:10:31.601600Z node 7 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# SectorMap:3:64 2025-12-23T19:10:31.601626Z node 7 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 7:1 2025-12-23T19:10:31.604734Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:10:31.821085Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; VERIFY failed (2025-12-23T19:10:31.606499Z): ydb/core/mind/bscontroller/bsc.cpp:528 BuildConfigRequestFromStorageConfig(): requirement resolved failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+888 (0x1B2CEC28) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x1B2BC95B) NKikimr::NBsController::TBlobStorageController::BuildConfigRequestFromStorageConfig(NKikimrBlobStorage::TStorageConfig const&, std::__y1::shared_ptr const&, bool)+7396 (0x3EE323B4) NKikimr::NBsController::TBlobStorageController::ApplyStorageConfig(bool)+474 (0x3EE2C82A) NKikimr::NBsController::TBlobStorageController::TTxCommitConfig::Complete(NActors::TActorContext const&)+4022 (0x3EEC85D6) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x223F5023) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3818 (0x222D892A) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+1566 (0x22033C4E) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+635 (0x21FD31CB) NActors::IActor::Receive(TAutoPtr&)+744 (0x1D2D8158) NActors::TExecutorThread::Execute(NActors::TMailbox*, bool)+8322 (0x1D3B16B2) ??+0 (0x1D3BB2D7) NActors::TExecutorThread::ProcessExecutorPool()+1486 (0x1D3BA88E) NActors::TExecutorThread::ThreadProc()+399 (0x1D3BC8FF) ??+0 (0x1B2D3C85) ??+0 (0x1AF75967) ??+0 (0x7F6632364AC3) ??+0 (0x7F66323F68C0) |99.4%| [TM] {RESULT} ydb/services/config/ut/unittest |99.4%| [TM] {BAZEL_UPLOAD} ydb/services/config/ut/unittest >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead >> QueryActorTest::Rollback [GOOD] >> QueryActorTest::Commit |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/add_column/py3test >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default_not_null [GOOD] |99.4%| [TM] {RESULT} ydb/tests/datashard/add_column/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/add_column/py3test >> MediatorTimeCast::GranularTimecast [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead [GOOD] >> TDqPqRdReadActorTests::CoordinatorChanged >> TestFormatHandler::ClientError [GOOD] >> TestFormatHandler::ClientErrorWithEmptyFilter >> test_http_api.py::TestHttpApi::test_simple_analytics_query |99.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/py3test >> test_commit.py::TestCommit::test_commit [GOOD] |99.4%| [TS] {RESULT} ydb/tests/tools/pq_read/test/py3test |99.4%| [TS] {BAZEL_UPLOAD} ydb/tests/tools/pq_read/test/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/time_cast/ut/unittest >> MediatorTimeCast::GranularTimecast [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2025-12-23T19:10:28.534320Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:28.715497Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:10:28.767430Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:10:28.768530Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:28.768653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:29.195489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:29.195607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:29.293733Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766517025320677 != 1766517025320681 2025-12-23T19:10:29.310298Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:29.360376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:29.478404Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00426d/r3tmp/tmpia3GnN/pdisk_1.dat 2025-12-23T19:10:29.807419Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:922: Actor# [1:26:2073] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2025-12-23T19:10:29.808186Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [1:26:2073] HANDLE EvClientConnected 2025-12-23T19:10:29.823859Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:999: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 1 LastAcquireStep: 0 NextAcquireStep: 1000 2025-12-23T19:10:29.835749Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:10:29.946581Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 1500 2025-12-23T19:10:30.037672Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 2000 2025-12-23T19:10:30.213186Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2025-12-23T19:10:30.343542Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 4000 2025-12-23T19:10:30.508064Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 5000 2025-12-23T19:10:30.689390Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# 
[1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 6000 2025-12-23T19:10:30.700129Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:970: Actor# [1:26:2073] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 8000 } 2025-12-23T19:10:30.849074Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 7000 2025-12-23T19:10:30.980427Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 8000 2025-12-23T19:10:30.982953Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:593: Actor# [1:26:2073] HANDLE EvClientDestroyed 2025-12-23T19:10:31.013404Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [1:26:2073] HANDLE EvClientConnected 2025-12-23T19:10:31.014304Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:999: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 0 NextAcquireStep: 8000 2025-12-23T19:10:31.027651Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:970: Actor# [1:26:2073] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 13000 } 2025-12-23T19:10:31.122970Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 8500 2025-12-23T19:10:31.220010Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 9000 2025-12-23T19:10:31.371153Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 10000 2025-12-23T19:10:31.539886Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 11000 2025-12-23T19:10:31.700880Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 12000 2025-12-23T19:10:31.839879Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 13000 ... waiting for SysViewsRoster update finished 2025-12-23T19:10:35.385944Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:35.396530Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:10:35.396925Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:35.397029Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:35.645065Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:35.645220Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:35.666622Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766517032350988 != 1766517032350991 2025-12-23T19:10:35.674705Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:35.727033Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:35.799215Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/00426d/r3tmp/tmprHwOdq/pdisk_1.dat 2025-12-23T19:10:36.169923Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:460: Actor# [2:26:2073] HANDLE {TEvRegisterTablet TabletId# 72057594047365120 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-12-23T19:10:36.170796Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:276: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 1 Tablets: 72057594047365120 MinStep: 0 2025-12-23T19:10:36.170873Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:378: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-12-23T19:10:36.170933Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:508: Actor# [2:26:2073] SEND to Sender# [2:885:2753] {TEvRegisterTabletResult TabletId# 72057594047365120 Entry# 0} 2025-12-23T19:10:36.171294Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [2:26:2073] HANDLE EvClientConnected 2025-12-23T19:10:36.171520Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 1000 2025-12-23T19:10:36.171700Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1000} 2025-12-23T19:10:36.171916Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:460: Actor# [2:26:2073] HANDLE {TEvRegisterTablet TabletId# 72057594047365121 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-12-23T19:10:36.172009Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: 
time_cast.cpp:304: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 2 AddTablets: 72057594047365121 2025-12-23T19:10:36.172061Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:508: Actor# [2:26:2073] SEND to Sender# [2:888:2755] {TEvRegisterTabletResult TabletId# 72057594047365121 Entry# 1000} 2025-12-23T19:10:36.172339Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 2 LatestStep: 1000 2025-12-23T19:10:36.172557Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:460: Actor# [2:26:2073] HANDLE {TEvRegisterTablet TabletId# 72057594047365123 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-12-23T19:10:36.172643Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:304: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72057594047365123 2025-12-23T19:10:36.172701Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:508: Actor# [2:2 ... GranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 0 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-12-23T19:10:36.874661Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 3500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 3499 FrozenSteps: 3499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-12-23T19:10:36.885776Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 3500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 3499 FrozenSteps: 3499 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4000 ... 
unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-12-23T19:10:36.896454Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4000 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-12-23T19:10:36.907441Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 3999 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-12-23T19:10:36.928896Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 3999 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 4499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-12-23T19:10:36.950371Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 4499 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 UnfrozenTablets: 72057594047365121 ... 
unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-12-23T19:10:36.973414Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 UnfrozenTablets: 72057594047365121 ... restarting mediator 2025-12-23T19:10:36.985632Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:593: Actor# [2:26:2073] HANDLE EvClientDestroyed 2025-12-23T19:10:36.985813Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:276: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 6 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 4500 2025-12-23T19:10:36.985869Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:361: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-12-23T19:10:36.986918Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [2:26:2073] HANDLE EvClientConnected 2025-12-23T19:10:36.987074Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:276: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 7 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 4500 2025-12-23T19:10:36.987138Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:361: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-12-23T19:10:37.005519Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [2:26:2073] HANDLE EvClientConnected ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 2025-12-23T19:10:37.005944Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... fully unblocking tx1 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 2025-12-23T19:10:37.028667Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3500} ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... tablet1 at 3500 ... tablet2 at 4500 ... tablet3 at 4500 ... fully unblocking tx2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 2025-12-23T19:10:37.039769Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} ... tablet1 at 4000 ... tablet2 at 4500 ... tablet3 at 4500 ... fully unblocking tx3 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 2025-12-23T19:10:37.050810Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4500} ... tablet1 at 4500 ... tablet2 at 4500 ... 
tablet3 at 4500 |99.4%| [TM] {RESULT} ydb/core/tx/time_cast/ut/unittest |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/time_cast/ut/unittest >> ServerRestartTest::RestartOnGetSession >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] >> TCreateAndDropViewTest::ParsingSecurityInvoker [GOOD] >> TCreateAndDropViewTest::ListCreatedView >> TDqPqRdReadActorTests::CoordinatorChanged [GOOD] >> TDqPqRdReadActorTests::Backpressure >> TMLPConsumerTests::AlterConsumer [GOOD] >> TMLPConsumerTests::RecreateConsumer >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::MalformedParams >> QueryActorTest::Commit [GOOD] >> QueryActorTest::StreamQuery >> test_query_cache.py::TestQueryCache::test >> TestFormatHandler::ClientErrorWithEmptyFilter [GOOD] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] >> TestFormatHandler::Watermark >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] >> KqpTpch::Query08 [GOOD] >> KqpTpch::Query09 |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TCreateAndDropViewTest::ListCreatedView [GOOD] >> TCreateAndDropViewTest::CreateSameViewTwice >> TCloudEventsProcessorTests::TestCreateCloudEventProcessor |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> TestFormatHandler::Watermark [GOOD] >> Coordinator::ReadStepSubscribe [GOOD] >> Coordinator::LastStepSubscribe >> TestFormatHandler::WatermarkWhere >> TabletService_ExecuteMiniKQL::MalformedParams [GOOD] >> TabletService_ExecuteMiniKQL::MalformedProgram |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows >> test_example.py::TestExample::test_example2 |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/hive/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] >> KqpTpch::Query09 [GOOD] >> KqpTpch::Query10 |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] >> TCreateAndDropViewTest::CreateSameViewTwice [GOOD] >> TCreateAndDropViewTest::CreateViewOccupiedName >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] >> TestFormatHandler::WatermarkWhere [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected [GOOD] >> TestFormatHandler::WatermarkWhereFalse >> TopicSessionTests::TwoSessionsWithOffsets |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> DataShardStats::OneChannelStatsCorrect [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] >> TabletService_ExecuteMiniKQL::MalformedProgram [GOOD] >> TabletService_ExecuteMiniKQL::DryRunEraseRow >> ServerRestartTest::RestartOnGetSession [GOOD] >> test_query_cache.py::TestQueryCache::test [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime >> ReadUpdateWrite::Load [GOOD] >> QueryActorTest::StreamQuery [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] >> TMLPConsumerTests::RecreateConsumer [GOOD] >> TMLPConsumerTests::ReloadPQTabletAfterAlterConsumer >> KqpTpch::Query10 [GOOD] >> KqpTpch::Query11 >> test_example.py::TestExample::test_example2 [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] >> TestFormatHandler::WatermarkWhereFalse [GOOD] >> TCloudEventsProcessorTests::TestCreateCloudEventProcessor [GOOD] >> 
test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] >> TestJsonParser::Simple1 >> TestJsonParser::Simple1 [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TestJsonParser::Simple2 >> TestJsonParser::Simple2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/query_actor/ut/unittest >> QueryActorTest::StreamQuery [GOOD] Test command err: 2025-12-23T19:10:27.727071Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587132858906768168:2072];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:27.727128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005936/r3tmp/tmp2U8rRi/pdisk_1.dat 2025-12-23T19:10:28.036028Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T19:10:28.088477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:28.088593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:28.171559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:28.174683Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:28.247017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions TClient is connected to server localhost:14615 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766517028234 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-12-23T19:10:28.488067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:10:28.495387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-12-23T19:10:28.505461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:10:28.634920Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7587132863201736291:2535], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2025-12-23T19:10:28.735496Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:10:30.575857Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T19:10:30.587458Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977556678.964193s seconds to be completed 2025-12-23T19:10:30.604275Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=1&id=YmNmZmY4MjYtZjQyMjM5YTUtYjZlZTIxNWItM2IwNTlkYjg=, workerId: [1:7587132871791670939:2306], database: /dc-1, longSession: 1, local sessions count: 1 2025-12-23T19:10:30.604474Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T19:10:30.604569Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T19:10:30.604588Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T19:10:30.604613Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T19:10:30.605580Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7587132863201736291:2535], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=YmNmZmY4MjYtZjQyMjM5YTUtYjZlZTIxNWItM2IwNTlkYjg=, TxId: , text: SELECT 42 2025-12-23T19:10:30.609612Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=1&id=YmNmZmY4MjYtZjQyMjM5YTUtYjZlZTIxNWItM2IwNTlkYjg=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7587132871791670939:2306] 2025-12-23T19:10:30.609643Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7587132871791670941:2547] 2025-12-23T19:10:31.975960Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [1:7587132871791670940:2307], selfId: [1:7587132858906768389:2265], source: [1:7587132871791670939:2306] 2025-12-23T19:10:31.979189Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7587132863201736291:2535], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YmNmZmY4MjYtZjQyMjM5YTUtYjZlZTIxNWItM2IwNTlkYjg=, TxId: 2025-12-23T19:10:31.986935Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7587132863201736291:2535], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=YmNmZmY4MjYtZjQyMjM5YTUtYjZlZTIxNWItM2IwNTlkYjg=, TxId: 2025-12-23T19:10:31.992012Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=YmNmZmY4MjYtZjQyMjM5YTUtYjZlZTIxNWItM2IwNTlkYjg=, workerId: [1:7587132871791670939:2306], local sessions count: 0 2025-12-23T19:10:32.671229Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587132881194836619:2067];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:32.671285Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:10:32.688484Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005936/r3tmp/tmpYeNP5Q/pdisk_1.dat 2025-12-23T19:10:32.793203Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2025-12-23T19:10:32.805118Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:32.805201Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:32.812601Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:32.815027Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587132881194836592:2081] 1766517032670298 != 1766517032670301 2025-12-23T19:10:32.827587Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31299 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766517032917 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-12-23T19:10:33.071264Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:10:33.078211Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:10:33.085240Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:10:33.095578Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2025-12-23T19:10:33.127796Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7587132885489804754:2537], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2025-12-23T19:10:33.680016Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:10:35.880370Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T19:10:35.882855Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977556673.668787s seconds to be completed 2025-12-23T19:10:35.885080Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new sessi ... n part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-12-23T19:10:42.293541Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:10:42.296834Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... 2025-12-23T19:10:42.359158Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132925640083507:2537], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2025-12-23T19:10:42.902888Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:10:45.366480Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T19:10:45.369779Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977556664.181854s seconds to be completed 2025-12-23T19:10:45.372213Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=4&id=ZjVjNjk0ZjQtMjY4NDIzZmMtZjdmYWZlZGYtNDQyNzdkMGU=, workerId: [4:7587132938524985454:2307], database: /dc-1, longSession: 1, local sessions count: 1 2025-12-23T19:10:45.372406Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T19:10:45.372451Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2025-12-23T19:10:45.372474Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2025-12-23T19:10:45.372493Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2025-12-23T19:10:45.372633Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132925640083507:2537], RunStreamQuery with text: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-12-23T19:10:45.373806Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132925640083507:2537], Start read next stream part 2025-12-23T19:10:45.388937Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd69xy3x0rxjdqbm4hgqn9w8", Created new session, sessionId: ydb://session/3?node_id=4&id=YTJmY2JiZDEtODAzNDM0YTMtZDI5ZTY2MjUtYTBiYTFmYmY=, workerId: [4:7587132938524985457:2308], database: /dc-1, longSession: 0, local sessions count: 2 2025-12-23T19:10:45.389232Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kd69xy3x0rxjdqbm4hgqn9w8, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=YTJmY2JiZDEtODAzNDM0YTMtZDI5ZTY2MjUtYTBiYTFmYmY=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [4:7587132938524985457:2308] 2025-12-23T19:10:45.389277Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [4:7587132938524985458:2552] 2025-12-23T19:10:45.391298Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587132938524985459:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:45.391394Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:45.391777Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587132938524985472:2313], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:45.391843Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7587132938524985470:2311], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:45.392169Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:45.405626Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:10:45.419876Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7587132938524985475:2314], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-12-23T19:10:45.483933Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7587132938524985526:2588] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:10:46.860813Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7587132921345115412:2086];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:46.860889Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:10:50.599098Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132925640083507:2537], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T19:10:50.602815Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:333: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132925640083507:2537], Cancel stream request 2025-12-23T19:10:50.602892Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132925640083507:2537], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=ZjVjNjk0ZjQtMjY4NDIzZmMtZjdmYWZlZGYtNDQyNzdkMGU=, TxId: 2025-12-23T19:10:50.615112Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132959999822067:2620], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2025-12-23T19:10:50.813820Z node 4 :RPC_REQUEST WARN: rpc_stream_execute_scan_query.cpp:410: Client lost 2025-12-23T19:10:50.977531Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444977556658.574112s seconds to be completed 2025-12-23T19:10:50.981198Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=4&id=M2I5MWFiMTEtYmIwOWNmMGYtYjFiNzA0MC1lYTdiNGE1Yg==, workerId: [4:7587132959999822072:2334], database: /dc-1, longSession: 1, local sessions count: 3 2025-12-23T19:10:50.981417Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2025-12-23T19:10:50.981931Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=ZjVjNjk0ZjQtMjY4NDIzZmMtZjdmYWZlZGYtNDQyNzdkMGU=, workerId: [4:7587132938524985454:2307], local sessions count: 2 2025-12-23T19:10:50.983084Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132959999822067:2620], RunStreamQuery with text: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-12-23T19:10:50.983189Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132959999822067:2620], Start read next stream part 2025-12-23T19:10:50.985057Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01kd69y3k73k14xmpy455he132", Created new session, sessionId: ydb://session/3?node_id=4&id=OTBlZjlkYmEtMmEyOTI1NDYtOTU1ZGZhYmMtM2RmMDA1OTM=, workerId: [4:7587132959999822076:2335], database: /dc-1, longSession: 0, local sessions count: 3 2025-12-23T19:10:50.985290Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01kd69y3k73k14xmpy455he132, 
Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=OTBlZjlkYmEtMmEyOTI1NDYtOTU1ZGZhYmMtM2RmMDA1OTM=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [4:7587132959999822076:2335] 2025-12-23T19:10:50.985321Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 5 timeout: 600.000000s actor id: [4:7587132959999822077:2624] 2025-12-23T19:10:51.077770Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132959999822067:2620], StreamQueryResultPart #1 finished SUCCESS, Issues: 2025-12-23T19:10:51.080094Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1766517051065, txId: 281474976715663] shutting down 2025-12-23T19:10:51.086405Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132959999822067:2620], Start read next stream part 2025-12-23T19:10:51.089056Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01kd69y3k73k14xmpy455he132", Forwarded response to sender actor, requestId: 5, sender: [4:7587132959999822074:2620], selfId: [4:7587132921345115516:2177], source: [4:7587132959999822076:2335] 2025-12-23T19:10:51.089706Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=OTBlZjlkYmEtMmEyOTI1NDYtOTU1ZGZhYmMtM2RmMDA1OTM=, workerId: [4:7587132959999822076:2335], local sessions count: 2 2025-12-23T19:10:51.089957Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132959999822067:2620], StreamQueryResultPart #2 finished SUCCESS, Issues: 2025-12-23T19:10:51.090057Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7587132959999822067:2620], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=M2I5MWFiMTEtYmIwOWNmMGYtYjFiNzA0MC1lYTdiNGE1Yg==, TxId: 2025-12-23T19:10:51.090951Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=M2I5MWFiMTEtYmIwOWNmMGYtYjFiNzA0MC1lYTdiNGE1Yg==, workerId: [4:7587132959999822072:2334], local sessions count: 1 |99.4%| [TM] {RESULT} ydb/library/query_actor/ut/unittest |99.4%| [TM] {BAZEL_UPLOAD} ydb/library/query_actor/ut/unittest >> TestJsonParser::Simple3 >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> test.py::test_local [GOOD] >> ProtoTests::CreateQueueFiller [GOOD] >> TestJsonParser::Simple3 [GOOD] >> TestJsonParser::Simple4 >> ProtoTests::UpdateQueueFiller [GOOD] >> ProtoTests::DeleteQueueFiller [GOOD] >> TestJsonParser::Simple4 [GOOD] >> TestJsonParser::LargeStrings >> TestJsonParser::LargeStrings [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] >> TestJsonParser::ManyValues >> TCreateAndDropViewTest::CreateViewOccupiedName [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists >> TestJsonParser::ManyValues [GOOD] >> TestJsonParser::MissingFields >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> ServerRestartTest::RestartOnGetSession [GOOD] |99.4%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/server_restart/gtest |99.4%| [TM] {BAZEL_UPLOAD} 
ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] |99.4%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] [GOOD] >> TestJsonParser::MissingFields [GOOD] >> test_http_api.py::TestHttpApi::test_simple_analytics_query [GOOD] >> test_http_api.py::TestHttpApi::test_empty_query [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] >> test_http_api.py::TestHttpApi::test_warning >> TestJsonParser::NestedTypes >> TestJsonParser::NestedTypes [GOOD] >> TestJsonParser::SimpleBooleans >> TestJsonParser::SimpleBooleans [GOOD] >> TestJsonParser::ChangeParserSchema >> TestJsonParser::ChangeParserSchema [GOOD] >> TestJsonParser::ManyBatches |99.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> TestJsonParser::ManyBatches [GOOD] >> TestJsonParser::LittleBatches >> TestJsonParser::LittleBatches [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest >> ProtoTests::DeleteQueueFiller [GOOD] Test command err: 2025-12-23T19:10:43.712070Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587132929813366674:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:43.712143Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005b3f/r3tmp/tmpzRZzL5/pdisk_1.dat 2025-12-23T19:10:44.030697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:10:44.035143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:44.035235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:44.079345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:44.149916Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587132929813366648:2081] 1766517043711138 != 1766517043711141 2025-12-23T19:10:44.157943Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9338, node 1 2025-12-23T19:10:44.232646Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:10:44.232669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:10:44.232680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:10:44.232769Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:10:44.291071Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10988 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766517044173 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:10:44.646780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:10:44.655918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-12-23T19:10:44.731164Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10988 waiting... 2025-12-23T19:10:44.932325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-12-23T19:10:46.988005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132942698269481:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:46.988230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132942698269489:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:46.989147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:46.989448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132942698269497:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:46.989527Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:46.995625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:10:47.004736Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587132942698269495:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-12-23T19:10:47.031692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:47.105053Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587132946993236896:2607] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===Execute query: UPSERT INTO`/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq` (CreatedAt,Id,QueueName,Type,CloudId,FolderId,ResourceId,UserSID,MaskedToken,AuthType,PeerName,RequestId,Labels)VALUES(1766517048265,8620932143851333843,'queue1','CreateMessageQueue','cloud1','folder1','/Root/sqs/folder/queue1','username','maskedToken123','authtype','localhost:8000','req1','{"k1" : "v1"}'); End execute query=== ===Execute query: UPSERT INTO`/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq` (CreatedAt,Id,QueueName,Type,CloudId,FolderId,ResourceId,UserSID,MaskedToken,AuthType,PeerName,RequestId,Labels)VALUES(1766517048584,8987604509765593902,'queue1','UpdateMessageQueue','cloud1','folder1','/Root/sqs/folder/queue1','username','maskedToken123','authtype','localhost:8000','req1','{"k1" : "v1"}'); End execute query=== ===Execute query: UPSERT INTO`/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq` (CreatedAt,Id,QueueName,Type,CloudId,FolderId,ResourceId,UserSID,MaskedToken,AuthType,PeerName,RequestId,Labels)VALUES(1766517048698,11552522045715212277,'queue1','DeleteMessageQueue','cloud1','folder1','/Root/sqs/folder/queue1','username','maskedToken123','authtype','localhost:8000','req1','{"k1" : "v1"}'); 2025-12-23T19:10:48.715703Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587132929813366674:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:48.715805Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; End execute query=== 2025-12-23T19:10:44.107172Z: component=schemeshard, tx_id=281474976710657, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/.sys], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.107655Z: component=schemeshard, tx_id=281474976710658, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_permissions], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.107914Z: component=schemeshard, tx_id=281474976710659, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_effective_permissions], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.108088Z: component=schemeshard, tx_id=281474976710660, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_users], status=SUCCESS, 
detailed_status=StatusAccepted 2025-12-23T19:10:44.108248Z: component=schemeshard, tx_id=281474976710661, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_tables], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.108356Z: component=schemeshard, tx_id=281474976710662, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_pdisks], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.108484Z: component=schemeshard, tx_id=281474976710663, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_group_members], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.108640Z: component=schemeshard, tx_id=281474976710664, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.108752Z: component=schemeshard, tx_id=281474976710665, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.108843Z: component=schemeshard, tx_id=281474976710666, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_vslots], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.109024Z: component=schemeshard, tx_id=281474976710667, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.109191Z: component=schemeshard, tx_id=281474976710668, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/nodes], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.109332Z: component=schemeshard, tx_id=281474976710669, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.109465Z: component=schemeshard, tx_id=281474976710670, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.109604Z: component=schemeshard, tx_id=281474976710671, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.109725Z: component=schemeshard, tx_id=281474976710672, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/streaming_queries], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.109839Z: component=schemeshard, tx_id=281474976710673, remote_address={none}, subject=metadata@system, 
sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/hive_tablets], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.109935Z: component=schemeshard, tx_id=281474976710674, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_pools], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.110043Z: component=schemeshard, tx_id=281474976710675, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.110173Z: component=schemeshard, tx_id=281474976710676, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pool_classifiers], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.112323Z: component=schemeshard, tx_id=281474976710677, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/compile_cache_queries], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.112709Z: component=schemeshard, tx_id=281474976710678, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_owners], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.112857Z: component=schemeshard, tx_id=281474976710679, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_sessions], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.112993Z: component=schemeshard, tx_id=281474976710680, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.113162Z: component=schemeshard, tx_id=281474976710681, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_class], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.113306Z: component=schemeshard, tx_id=281474976710682, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/tables], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.113458Z: component=schemeshard, tx_id=281474976710683, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.113596Z: component=schemeshard, tx_id=281474976710684, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.113733Z: component=schemeshard, tx_id=281474976710685, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/partition_stats], status=SUCCESS, detailed_status=StatusAccepted 
2025-12-23T19:10:44.113847Z: component=schemeshard, tx_id=281474976710686, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_groups], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.113953Z: component=schemeshard, tx_id=281474976710687, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_groups], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.114097Z: component=schemeshard, tx_id=281474976710688, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pools], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.114236Z: component=schemeshard, tx_id=281474976710689, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_metrics_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.114359Z: component=schemeshard, tx_id=281474976710690, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_stats], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.114464Z: component=schemeshard, tx_id=281474976710691, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.114616Z: component=schemeshard, tx_id=281474976710692, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.647420Z: component=schemeshard, tx_id=281474976715657, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//Root], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.669285Z: component=schemeshard, tx_id=281474976715658, remote_address={none}, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/SQS], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:44.920792Z: component=schemeshard, tx_id=281474976715659, remote_address={none}, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/SQS/Root/SQS/CreateCloudEventProcessor], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:46.996678Z: component=schemeshard, tx_id=281474976715660, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE RESOURCE POOL, paths=[.metadata/workload_manager/pools/default], status=SUCCESS, detailed_status=StatusAccepted, new_owner=metadata@system, acl_add=[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin] 2025-12-23T19:10:47.032511Z: component=schemeshard, tx_id=281474976715661, remote_address=::1, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE TABLE, paths=[/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq], status=SUCCESS, detailed_status=StatusAccepted 2025-12-23T19:10:47.104537Z: component=schemeshard, tx_id=281474976715662, remote_address={none}, subject=metadata@system, sanitized_token={none}, 
database=/Root, operation=CREATE RESOURCE POOL, paths=[default], status=SUCCESS, detailed_status=StatusAlreadyExists, reason=Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges), new_owner=metadata@system, acl_add=[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin] 2025-12-23T19:10:50.346645Z: component=ymq, id=8620932143851333843$CreateMessageQueue$2025-12-23T19:10:50.346285Z, operation=CreateMessageQueue, status=SUCCESS, remote_address=localhost:8000, subject=username, masked_token=maskedToken123, auth_type=authtype, permission=ymq.queues.create, created_at=2025-12-23T19:10:48.265000Z, cloud_id=cloud1, folder_id=folder1, resource_id=/Root/sqs/folder/queue1, request_id=req1, idempotency_id=8620932143851333843$CreateMessageQueue$2025-12-23T19:10:48.265000Z, queue=queue1, labels={"k1" : "v1"} 2025-12-23T19:10:50.349707Z: component=ymq, id=8987604509765593902$UpdateMessageQueue$2025-12-23T19:10:50.346401Z, operation=UpdateMessageQueue, status=SUCCESS, remote_address=localhost:8000, subject=username, masked_token=maskedToken123, auth_type=authtype, permission=ymq.queues.setAttributes, created_at=2025-12-23T19:10:48.584000Z, cloud_id=cloud1, folder_id=folder1, resource_id=/Root/sqs/folder/queue1, request_id=req1, idempotency_id=8987604509765593902$UpdateMessageQueue$2025-12-23T19:10:48.584000Z, queue=queue1, labels={"k1" : "v1"} 2025-12-23T19:10:50.349903Z: component=ymq, id=11552522045715212277$DeleteMessageQueue$2025-12-23T19:10:50.346451Z, operation=DeleteMessageQueue, status=SUCCESS, remote_address=localhost:8000, subject=username, masked_token=maskedToken123, auth_type=authtype, permission=ymq.queues.delete, created_at=2025-12-23T19:10:48.698000Z, cloud_id=cloud1, folder_id=folder1, resource_id=/Root/sqs/folder/queue1, request_id=req1, idempotency_id=11552522045715212277$DeleteMessageQueue$2025-12-23T19:10:48.698000Z, queue=queue1, labels={"k1" : "v1"} |99.4%| [TS] {RESULT} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest >> TabletService_ExecuteMiniKQL::DryRunEraseRow [GOOD] >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed >> TestJsonParser::MissingFieldsValidation >> TestJsonParser::MissingFieldsValidation [GOOD] >> TestJsonParser::TypeKindsValidation |99.4%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest >> TestJsonParser::TypeKindsValidation [GOOD] >> TestJsonParser::NumbersValidation [GOOD] >> TestJsonParser::StringsValidation >> TestJsonParser::StringsValidation [GOOD] >> TestJsonParser::NestedJsonValidation >> TestJsonParser::NestedJsonValidation [GOOD] >> TestJsonParser::BoolsValidation >> TestJsonParser::BoolsValidation [GOOD] >> TestJsonParser::JsonStructureValidation >> TopicSessionTests::TwoSessionsWithOffsets [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] >> TestJsonParser::JsonStructureValidation [GOOD] >> TestJsonParser::SkipErrors_Simple1 >> TestJsonParser::SkipErrors_Simple1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/high_load/unittest >> ReadUpdateWrite::Load [GOOD] Test command err: Step 1. 
only write Was written: 0 MiB, Speed: 0 MiB/s Write: 10% 0.344926s 30% 0.344926s 50% 0.344926s 90% 0.344926s 99% 0.344926s Write: 10% 0.737568s 30% 0.737568s 50% 0.737568s 90% 0.737568s 99% 0.737568s Write: 10% 0.842014s 30% 0.842014s 50% 0.842014s 90% 0.842014s 99% 0.842014s Write: 10% 0.798598s 30% 0.798598s 50% 0.798598s 90% 0.798598s 99% Write: 10% 0.432310s 30% 0.432310s 50% 0.432310s 90% 0.432310s 99% 0.432310s 0.798598s Write: 10% 0.691224s 30% 0.691224s 50% Write: 10% Write: 10% 0.691224s 90% 0.691224s 99% 0.691224s0.414293s 30% 0.414293s 50% 0.414293s 90% 0.417414s 30% 0.417414s 50% 0.417414s 90% 0.417414s 99% 0.417414s 0.414293s 99% 0.414293s Write: 10% 0.530812s 30% 0.530812s 50% 0.530812s 90% 0.530812s 99% 0.530812s Write: 10% 0.535378s 30% 0.535378s 50% 0.535378s 90% 0.535378s 99% 0.535378s Write: 10% 0.972244s 30% 0.972244s 50% 0.972244s 90% 0.972244s 99% 0.972244s Write: 10% 0.983213s 30% 0.983213s 50% 0.983213s 90% 0.983213s 99% 0.983213s Write: 10% 0.724409s 30% 0.724409s 50% 0.724409s 90% 0.724409s 99% 0.724409s Write: 10% 0.796850s 30% 0.796850s 50% 0.796850s 90% 0.796850s 99% 0.796850s Write: 10% 1.000053s 30% 1.000053s 50% 1.000053s 90% 1.000053s 99% 1.000053s Write: 10% 0.678852s 30% 0.678852s 50% 0.678852s 90% 0.678852s 99% 0.678852s Write: 10% 0.995669s 30% 0.995669s 50% 0.995669s 90% 0.995669s 99% 0.995669s Write: 10% 0.696556s 30% 0.696556s 50% 0.696556s 90% 0.696556s 99% 0.696556s Write: 10% 0.859570s 30% 0.859570s 50% 0.859570s 90% 0.859570s 99% 0.859570s Write: 10% 0.802279s 30% 0.802279s 50% 0.802279s 90% 0.802279s 99% 0.802279s Write: 10% 0.756471s 30% 0.756471s 50% 0.756471s 90% 0.756471s 99% 0.756471s Write: 10% 0.752920s 30% 0.752920s 50% 0.752920s 90% 0.752920s 99% 0.752920s Write: 10% Write: 10% 0.880872s 30% 0.880872s 50% 0.880872s 90% 0.880872s 99% 0.736165s0.880872s 30% 0.736165s 50% 0.736165s 90% 0.736165s 99% 0.736165s Write: 10% 0.784997s 30% 0.784997s 50% 0.784997s 90% 0.784997s 99% 0.784997s Write: 10% 0.717546s 30% 0.717546s 50% 0.717546s 90% 0.717546s 99% 0.717546s Write: 10% 0.862302s 30% 0.862302s 50% 0.862302s 90% 0.862302s 99% 0.862302s Write: 10% 0.886049s 30% 0.886049s 50% 0.886049s 90% 0.886049s 99% 0.886049s Write: 10% 0.767073s 30% 0.767073s 50% 0.767073s 90% 0.767073s 99% 0.767073s Write: 10% 0.792848s 30% 0.792848s 50% 0.792848s 90% 0.792848s 99% 0.792848s Write: 10% 0.869864s 30% 0.869864s 50% 0.869864s 90% 0.869864s 99% 0.869864s Write: 10% 0.808651s 30% 0.808651s 50% 0.808651s 90% 0.808651s 99% 0.808651s Write: 10% 1.132225s 30% 1.132225s 50% 1.132225s 90% 1.132225s 99% 1.132225s Write: 10% 1.141336s 30% 1.141336s 50% 1.141336s 90% 1.141336s 99% 1.141336s Write: 10% 0.759438s 30% 0.759438s 50% 0.759438s 90% 0.759438s 99% 0.759438s Write: 10% 0.849092s 30% 0.849092s 50% 0.849092s 90% 0.849092s 99% 0.849092s Write: 10% 0.881617s 30% 0.881617s 50% 0.881617s 90% 0.881617s 99% 0.881617s Write: 10% 0.822314s 30% 0.822314s 50% 0.822314s 90% 0.822314s 99% 0.822314s Write: 10% 1.145181s 30% 1.145181s 50% 1.145181s 90% 1.145181s 99% 1.145181s Write: 10% 0.805825s 30% 0.805825s 50% 0.805825s 90% 0.805825s 99% 0.805825s Write: 10% 0.815744s 30% 0.815744s 50% 0.815744s 90% 0.815744s 99% 0.815744s Write: 10% 0.927157s 30% 0.927157s 50% 0.927157s 90% 0.927157s 99% 0.927157s Write: 10% 1.151402s 30% 1.151402s 50% 1.151402s 90% 1.151402s 99% 1.151402s Write: 10% 0.835638s 30% 0.835638s 50% 0.835638s 90% 0.835638s 99% 0.835638s Write: 10% 0.840084s 30% 0.840084s 50% 0.840084s 90% 0.840084s 99% 0.840084s Write: 10% 0.720483s 30% 0.720483s 50% 
0.720483s 90% 0.720483s 99% 0.720483s Write: 10% 0.766519s 30% 0.766519s 50% 0.766519s 90% 0.766519s 99% 0.766519s Write: 10% 0.848998s 30% 0.848998s 50% 0.848998s 90% 0.848998s 99% 0.848998s Write: 10% 0.852685s 30% 0.852685s 50% 0.852685s 90% 0.852685s 99% 0.852685s Write: 10% 0.839168s 30% 0.839168s 50% 0.839168s 90% 0.839168s 99% 0.839168s Write: 10% 0.875697s 30% 0.875697s 50% 0.875697s 90% 0.875697s 99% 0.875697s Write: 10% 0.699625s 30% 0.699625s 50% 0.699625s 90% 0.699625s 99% 0.699625s Write: 10% 0.765244s 30% 0.765244s 50% 0.765244s 90% 0.765244s 99% 0.765244s Write: 10% 0.972536s 30% 0.972536s 50% 0.972536s 90% 0.972536s 99% Write: 10% 0.724572s 30% 0.724572s0.972536s 50% 0.724572s 90% 0.724572s 99% 0.724572s Write: 10% Write: 10% 1.218744s 30% 1.218744s 50% 1.218744s0.835547s 90% 30% 1.218744s 99% 0.835547s 50% 0.835547s 90% 0.835547s 99% 0.835547s Write: 10% 1.062293s 30% 1.062293s 50% 1.062293s 90% 1.062293s 99% 1.062293s 1.218744s Write: 10% 0.757131s 30% 0.757131s 50% 0.757131s 90% 0.757131s 99% 0.757131s Write: 10% 1.031016s 30% 1.031016s 50% 1.031016s 90% 1.031016s 99% 1.031016s Write: 10% 0.902574s 30% 0.902574s 50% 0.902574s 90% 0.902574s 99% 0.902574s Write: 10% 0.758666s 30% 0.758666s 50% 0.758666s 90% 0.758666s 99% 0.758666s Write: 10% 0.894651s 30% 0.894651s 50% 0.894651s 90% 0.894651s 99% 0.894651s Write: 10% 0.763047s 30% 0.763047s 50% 0.763047s 90% 0.763047s 99% 0.763047s Step 2. read write Write: 10% 0.286987s 30% 0.286987s 50% 0.286987s 90% 0.286987s 99% 0.286987s Write: 10% 0.437602s 30% 0.437602s 50% 0.437602s 90% 0.437602s 99% 0.437602s Write: 10% 0.491965s 30% 0.491965s 50% 0.491965s 90% 0.491965s 99% 0.491965s Write: 10% 0.439786s 30% 0.439786s 50% 0.439786s 90% 0.439786s 99% 0.439786s Write: 10% 0.270457s 30% 0.270457s 50% 0.270457s 90% 0.270457s 99% 0.270457s Write: 10% 0.473808s 30% 0.473808s 50% 0.473808s 90% 0.473808s 99% 0.473808s Write: 10% 0.563552s 30% 0.563552s 50% 0.563552s 90% 0.563552s 99% 0.563552s Write: 10% 0.780477s 30% 0.780477s 50% 0.780477s 90% 0.780477s 99% 0.780477s Write: 10% 0.822899s 30% 0.822899s 50% 0.822899s 90% 0.822899s 99% 0.822899s Write: 10% 0.680530s 30% 0.680530s 50% 0.680530s 90% 0.680530s 99% 0.680530s Write: 10% 0.820916s 30% 0.820916s 50% 0.820916s 90% 0.820916s 99% 0.820916s Write: 10% 0.786960s 30% 0.786960s 50% 0.786960s 90% 0.786960s 99% 0.786960s Write: 10% 0.823832s 30% 0.823832s 50% 0.823832s 90% 0.823832s 99% 0.823832s Write: 10% 0.938988s 30% 0.938988s 50% 0.938988s 90% 0.938988s 99% 0.938988s Write: 10% 0.987523s 30% 0.987523s 50% 0.987523s 90% 0.987523s 99% 0.987523s Write: 10% 0.825847s 30% 0.825847s 50% 0.825847s 90% 0.825847s 99% 0.825847s Write: 10% 0.921904s 30% 0.921904s 50% 0.921904s 90% 0.921904s 99% 0.921904s Write: 10% 0.838022s 30% 0.838022s 50% 0.838022s 90% 0.838022s 99% 0.838022s Write: 10% 0.819503s 30% 0.819503s 50% 0.819503s 90% 0.819503s 99% 0.819503s Write: 10% 0.869232s 30% 0.869232s 50% 0.869232s 90% 0.869232s 99% 0.869232s Write: 10% Write: 10% 1.005259s 30% 1.005259s 50% 0.879781s 30% 0.879781s 50% 0.879781s 90% 0.879781s 99% 0.879781s 1.005259s 90% 1.005259s 99% 1.005259s Write: 10% 0.939178s 30% 0.939178s 50% 0.939178s 90% 0.939178s 99% 0.939178s Write: 10% 1.063870s 30% 1.063870s 50% 1.063870s 90% 1.063870s 99% 1.063870s Write: 10% 0.974250s 30% 0.974250s 50% 0.974250s 90% 0.974250s 99% 0.974250s Write: 10% 0.997786s 30% 0.997786s 50% 0.997786s 90% 0.997786s 99% 0.997786s Write: 10% 1.027959s 30% 1.027959s 50% 1.027959s 90% 1.027959s 99% 1.027959s Write: 10% 0.875782s 30% 0.875782s 
50% 0.875782s 90% 0.875782s 99% 0.875782s Write: 10% 0.858008s 30% 0.858008s 50% 0.858008s 90% 0.858008s 99% 0.858008s Write: 10% 0.915577s 30% 0.915577s 50% 0.915577s 90% 0.915577s 99% 0.915577s Write: 10% 0.983460s 30% 0.983460s 50% 0.983460s 90% 0.983460s 99% 0.983460s Write: 10% 0.887033s 30% 0.887033s 50% 0.887033s 90% 0.887033s 99% 0.887033s Write: 10% 1.039047s 30% 1.039047s 50% 1.039047s 90% 1.039047s 99% 1.039047s Write: 10% 0.937066s 30% 0.937066s 50% 0.937066s 90% 0.937066s 99% 0.937066s Write: 10% 1.015512s 30% 1.015512s 50% 1.015512s 90% 1.015512s 99% 1.015512s Write: 10% 1.037466s 30% 1.037466s 50% 1.037466s 90% 1.037466s 99% 1.037466s Write: 10% 0.923664s 30% 0.923664s 50% 0.923664s 90% 0.923664s 99% 0.923664s Write: 10% 1.072638s 30% 1.072638s 50% 1.072638s 90% 1.072638s 99% 1.072638s Write: 10% 1.024066s 30% 1.024066s 50% 1.024066s 90% 1.024066s 99% 1.024066s Write: 10% 0.694881s 30% 0.694881s 50% 0.694881s 90% 0.694881s 99% 0.694881s Write: 10% 0.705339s 30% 0.705339s 50% 0.705339s 90% 0.705339s 99% 0.705339s Write: 10% 0.714450s 30% 0.714450s 50% 0.714450s 90% 0.714450s 99% 0.714450s Write: 10% 0.947519s 30% 0.947519s 50% 0.947519s 90% 0.947519s 99% 0.947519s Write: 10% 0.742867s 30% 0.742867s 50% 0.742867s 90% 0.742867s 99% 0.742867s Write: 10% 0.911779s 30% 0.911779s 50% 0.911779s 90% 0.911779s 99% 0.911779s Write: 10% 0.660660s 30% 0.660660s 50% 0.660660s 90% 0.660660s 99% 0.660660s Write: 10% 0.701633s 30% 0.701633s 50% 0.701633s 90% 0.701633s 99% 0.701633s Write: 10% 0.724555s 30% 0.724555s 50% 0.724555s 90% 0.724555s 99% 0.724555s Write: 10% 0.661452s 30% 0.661452s 50% 0.661452s 90% 0.661452s 99% 0.661452s Write: 10% 0.661670s 30% 0.661670s 50% 0.661670s 90% 0.661670s 99% 0.661670s Write: 10% 0.733704s 30% 0.733704s 50% 0.733704s 90% 0.733704s 99% 0.733704s Write: 10% 0.669598s 30% 0.669598s 50% 0.669598s 90% 0.669598s 99% 0.669598s Write: 10% 0.699822s 30% 0.699822s 50% 0.699822s 90% 0.699822s 99% 0.699822s Write: 10% 0.930969s 30% 0.930969s 50% 0.930969s 90% 0.930969s 99% 0.930969s Write: 10% 0.987295s 30% 0.987295s 50% 0.987295s 90% 0.987295s 99% 0.987295s Write: 10% 0.748977s 30% 0.748977s 50% 0.748977s 90% 0.748977s 99% 0.748977s Write: 10% 1.028354s 30% 1.028354s 50% 1.028354s 90% 1.028354s 99% 1.028354s Write: 10% 0.760603s 30% 0.760603s 50% 0.760603s 90% 0.760603s 99% 0.760603s Write: 10% Write: 10% 0.753530s 30% 0.989799s 30% 0.989799s 50% 0.989799s 90% 0.989799s 99% 0.989799s 0.753530s 50% 0.753530s 90% 0.753530s 99% 0.753530s Write: 10% 0.793349s 30% 0.793349s 50% 0.793349s 90% 0.793349s 99% 0.793349s Write: 10% 0.798691s 30% 0.798691s 50% 0.798691s 90% 0.798691s 99% 0.798691s Write: 10% 1.151525s 30% 1.151525s 50% 1.151525s 90% 1.151525s 99% 1.151525s Write: 10% 0.756487s 30% 0.756487s 50% 0.756487s 90% 0.756487s 99% 0.756487s Read: 10% 1.779646s 30% 1.779646s 50% 1.779646s 90% 1.779646s 99% 1.779646s Step 3. 
write modify Write: 10% 0.263693s 30% 0.263693s 50% Write: 10% 0.288709s 30% 0.288709s 50% 0.288709s 90% 0.288709s 99% 0.288709s 0.263693s 90% 0.263693s 99% 0.263693s Write: 10% 0.249444s 30% 0.249444s 50% 0.249444s 90% 0.249444s 99% 0.249444s Write: 10% 0.255806s 30% 0.255806s 50% 0.255806s 90% 0.255806s 99% 0.255806s Write: 10% 0.299078s 30% 0.299078s 50% 0.299078s 90% 0.299078s 99% 0.299078s Write: 10% 0.454693s 30% 0.454693s 50% 0.454693s 90% 0.454693s 99% 0.454693s Write: 10% 0.487324s 30% 0.487324s 50% 0.487324s 90% 0.487324s 99% 0.487324s Write: 10% 0.564663s 30% 0.564663s 50% 0.564663s 90% 0.564663s 99% 0.564663s Write: 10% 0.529321s 30% 0.529321s 50% 0.529321s 90% 0.529321s 99% 0.529321s Write: 10% 0.646470s 30% 0.646470s 50% 0.646470s 90% 0.646470s 99% 0.646470s Write: 10% 0.651164s 30% 0.651164s 50% 0.651164s 90% 0.651164s 99% 0.651164s Write: 10% 0.695833s 30% 0.695833s 50% 0.695833s 90% 0.695833s 99% 0.695833s Write: 10% 0.637064s 30% 0.637064s 50% 0.637064s 90% 0.637064s 99% 0.637064s Write: 10% 0.654692s 30% 0.654692s 50% 0.654692s 90% 0.654692s 99% 0.654692s Write: 10% 0.616914s 30% 0.616914s 50% 0.616914s 90% 0.616914s 99% 0.616914s Write: 10% 0.657177s 30% 0.657177s 50% 0.657177s 90% 0.657177s 99% 0.657177s Write: 10% 0.698082s 30% 0.698082s 50% 0.698082s 90% 0.698082s 99% 0.698082s Write: 10% 0.718280s 30% 0.718280s 50% 0.718280s 90% 0.718280s 99% 0.718280s Write: 10% 0.712634s 30% 0.712634s 50% 0.712634s 90% 0.712634s 99% 0.712634s Write: 10% 0.715691s 30% 0.715691s 50% 0.715691s 90% 0.715691s 99% 0.715691s Write: 10% 0.702310s 30% 0.702310s 50% 0.702310s 90% 0.702310s 99% 0.702310s Write: 10% 0.670024s 30% 0.670024s 50% 0.670024s 90% 0.670024s 99% 0.670024s Write: 10% 0.666755s 30% 0.666755s 50% 0.666755s 90% 0.666755s 99% 0.666755s Write: 10% 0.719624s 30% 0.719624s 50% 0.719624s 90% 0.719624s 99% 0.719624s Write: 10% 0.638599s 30% 0.638599s 50% 0.638599s 90% 0.638599s 99% 0.638599s Write: 10% 0.656814s 30% 0.656814s 50% 0.656814s 90% 0.656814s 99% 0.656814s Write: 10% 0.680646s 30% 0.680646s 50% 0.680646s 90% 0.680646s 99% 0.680646s Write: 10% 0.711861s 30% 0.711861s 50% 0.711861s 90% 0.711861s 99% 0.711861s Write: 10% 0.676251s 30% 0.676251s 50% 0.676251s 90% 0.676251s 99% 0.676251s Write: 10% 0.676483s 30% 0.676483s 50% 0.676483s 90% 0.676483s 99% 0.676483s Write: 10% 0.708359s 30% 0.708359s 50% 0.708359s 90% 0.708359s 99% 0.708359s Write: 10% 0.727376s 30% 0.727376s 50% 0.727376s 90% 0.727376s 99% 0.727376s Write: 10% 0.700588s 30% 0.700588s 50% 0.700588s 90% 0.700588s 99% 0.700588s Write: 10% 0.695750s 30% 0.695750s 50% 0.695750s 90% 0.695750s 99% 0.695750s Write: 10% 0.745433s 30% 0.745433s 50% 0.745433s 90% 0.745433s 99% 0.745433s Write: 10% 0.687708s 30% 0.687708s 50% 0.687708s 90% 0.687708s 99% 0.687708s Write: 10% 0.734036s 30% 0.734036s 50% 0.734036s 90% 0.734036s 99% 0.734036s Write: 10% Write: 10% 0.746283s 30% 0.746283s0.681527s 30% 0.681527s 50% 0.681527s 90% 0.681527s 99% 0.681527s 50% 0.746283s 90% 0.746283s 99% 0.746283s Write: 10% Write: 10% 0.693529s0.492581s 30% 0.492581s 50% 30% 0.693529s 50% 0.693529s 90% 0.693529s 99% 0.492581s 90% 0.492581s 99% 0.492581s0.693529s Write: 10% 0.751418s 30% 0.751418s 50% 0.751418s 90% 0.751418s 99% 0.751418s Write: 10% 0.768633s 30% 0.768633s 50% 0.768633s 90% 0.768633s 99% 0.768633s Write: 10% 0.757224s 30% 0.757224s 50% 0.757224s 90% 0.757224s 99% 0.757224s Write: 10% 0.461208s 30% 0.461208s 50% 0.461208s 90% 0.461208s 99% 0.461208s Write: 10% 0.731515s 30% 0.731515s 50% 0.731515s 90% 0.731515s 99% 0.731515s 
Write: 10% 0.770276s 30% 0.770276s 50% 0.770276s 90% 0.770276s 99% 0.770276s Write: 10% 0.739882s 30% 0.739882s 50% 0.739882s 90% 0.739882s 99% 0.739882s Write: 10% 0.750500s 30% 0.750500s 50% 0.750500s 90% 0.750500s 99% 0.750500s Write: 10% 0.750067s 30% 0.750067s 50% 0.750067s 90% 0.750067s 99% 0.750067s Write: 10% 0.476174s 30% 0.476174s 50% 0.476174s 90% 0.476174s 99% 0.476174s Write: 10% 0.470560s 30% 0.470560s 50% 0.470560s 90% 0.470560s 99% 0.470560s Write: 10% 0.725471s 30% 0.725471s 50% 0.725471s 90% 0.725471s 99% 0.725471s Write: 10% 0.552235s 30% 0.552235s 50% 0.552235s 90% 0.552235s 99% 0.552235s Write: 10% 0.586979s 30% 0.586979s 50% 0.586979s 90% 0.586979s 99% 0.586979s Write: 10% 0.516410s 30% 0.516410s 50% 0.516410s 90% 0.516410s 99% 0.516410s Write: 10% 0.481116s 30% 0.481116s 50% 0.481116s 90% 0.481116s 99% 0.481116s Write: 10% 0.487655s 30% 0.487655s 50% 0.487655s 90% 0.487655s 99% 0.487655s Write: 10% 0.497622s 30% 0.497622s 50% 0.497622s 90% 0.497622s 99% 0.497622s Write: 10% 0.841390s 30% 0.841390s 50% 0.841390s 90% 0.841390s 99% 0.841390s Write: 10% 0.538758s 30% 0.538758s 50% 0.538758s 90% 0.538758s 99% 0.538758s Write: 10% 0.499245s 30% 0.499245s 50% Write: 10% 0.493406s 30% 0.493406s 50% 0.499245s 90% 0.499245s 99% 0.499245s 0.493406s 90% 0.493406s 99% 0.493406sWrite: 10% 0.535392s 30% 0.535392s 50% 0.535392s 90% 0.535392s 99% 0.535392s Update: 10% 0.486052s 30% 0.486052s 50% 0.486052s 90% 0.486052s 99% 0.486052s Step 4. read modify write Write: 10% 0.238388s 30% 0.238388s 50% 0.238388s 90% 0.238388s 99% 0.238388s Write: 10% 0.279076s 30% 0.279076s 50% 0.279076s 90% 0.279076s 99% 0.279076s Write: 10% 0.273285s 30% 0.273285s 50% 0.273285s 90% 0.273285s 99% 0.273285s Write: 10% 0.438584s 30% 0.438584s 50% 0.438584s 90% 0.438584s 99% 0.438584s Write: 10% 0.554682s 30% 0.554682s 50% 0.554682s 90% 0.554682s 99% 0.554682s Write: 10% 0.618166s 30% 0.618166s 50% 0.618166s 90% 0.618166s 99% 0.618166s Write: 10% 0.668096s 30% 0.668096s 50% Write: 10% 0.745444s 30% 0.745444s 50% 0.745444s 90% 0.745444s 99% 0.745444s 0.668096s 90% 0.668096s 99% 0.668096s Write: 10% 0.688636s 30% 0.688636s 50% 0.688636s 90% 0.688636s 99% 0.688636s Write: 10% 0.485934sWrite: 10% 0.578629s 30% 0.578629s 50% 30% 0.485934s 50% 0.485934s 90% 0.485934s 99% 0.485934s0.578629s 90% 0.578629s 99% 0.578629s Write: 10% 0.523740s 30% 0.523740s 50% 0.523740s 90% 0.523740s 99% 0.523740s Write: 10% 0.551210s 30% 0.551210s 50% 0.551210s 90% 0.551210s 99% 0.551210s Write: 10% 0.472455sWrite: 10% 0.470514s 30% 0.470514s 50% 0.470514s 90% 0.470514s 99% 0.470514s 30% 0.472455s 50% 0.472455s 90% 0.472455s 99% 0.472455s Write: 10% 2.773539s 30% 2.773539s 50% 2.773539s 90% 2.773539s 99% 2.773539s Write: 10% 2.851720s 30% 2.851720s 50% 2.851720s 90% 2.851720s 99% 2.851720s Write: 10% 2.880922s 30% 2.880922s 50% 2.880922s 90% 2.880922s 99% 2.880922s Write: 10% 3.131511s 30% 3.131511s 50% 3.131511s 90% 3.131511s 99% 3.131511s Write: 10% 3.149928s 30% 3.149928s 50% 3.149928s 90% 3.149928s 99% 3.149928s Write: 10% 3.409432s 30% 3.409432s 50% 3.409432s 90% 3.409432s 99% 3.409432s Write: 10% 3.393817s 30% 3.393817s 50% 3.393817s 90% 3.393817s 99% 3.393817s Write: 10% 3.496971s 30% 3.496971s 50% 3.496971s 90% 3.496971s 99% 3.496971s Write: 10% 3.471180s 30% 3.471180s 50% 3.471180s 90% 3.471180s 99% 3.471180s Write: 10% 3.564477s 30% 3.564477s 50% 3.564477s 90% 3.564477s 99% 3.564477s Write: 10% 3.495322s 30% 3.495322s 50% 3.495322s 90% 3.495322s 99% 3.495322s Write: 10% 3.662458s 30% 3.662458s 50% 3.662458s 90% 3.662458s 99% 
3.662458s Write: 10% 3.821835s 30% 3.821835s 50% 3.821835s 90% 3.821835s 99% 3.821835s Write: 10% 3.658079s 30% 3.658079s 50% 3.658079s 90% 3.658079s 99% 3.658079s Write: 10% 3.686660s 30% 3.686660s 50% 3.686660s 90% 3.686660s 99% 3.686660s Write: 10% 3.588669s 30% 3.588669s 50% 3.588669s 90% 3.588669s 99% 3.588669s Write: 10% 3.578192s 30% 3.578192s 50% 3.578192s 90% 3.578192s 99% 3.578192s Write: 10% 3.639459s 30% 3.639459s 50% 3.639459s 90% 3.639459s 99% 3.639459s Write: 10% 3.669662s 30% 3.669662s 50% 3.669662s 90% 3.669662s 99% 3.669662s Write: 10% 3.637751s 30% 3.637751s 50% 3.637751s 90% 3.637751s 99% 3.637751s Write: 10% 3.619457s 30% 3.619457s 50% 3.619457s 90% 3.619457s 99% 3.619457s Write: 10% 3.745255s 30% 3.745255s 50% 3.745255s 90% 3.745255s 99% 3.745255s Write: 10% 3.733358s 30% 3.733358s 50% 3.733358s 90% 3.733358s 99% 3.733358s Write: 10% 3.750572s 30% 3.750572s 50% 3.750572s 90% 3.750572s 99% 3.750572s Write: 10% 3.732744s 30% 3.732744s 50% 3.732744s 90% 3.732744s 99% 3.732744s Write: 10% 3.820745s 30% 3.820745s 50% 3.820745s 90% 3.820745s 99% 3.820745s Write: 10% 3.857879s 30% 3.857879s 50% 3.857879s 90% 3.857879s 99% 3.857879s Write: 10% 3.837198s 30% 3.837198s 50% 3.837198s 90% 3.837198s 99% 3.837198s Write: 10% 3.856514s 30% 3.856514s 50% 3.856514s 90% 3.856514s 99% 3.856514s Write: 10% 3.868865s 30% 3.868865s 50% 3.868865s 90% 3.868865s 99% 3.868865s Write: 10% 3.883938s 30% 3.883938s 50% 3.883938s 90% 3.883938s 99% 3.883938s Write: 10% 3.959357s 30% 3.959357s 50% 3.959357s 90% 3.959357s 99% 3.959357s Write: 10% 3.963824s 30% 3.963824s 50% 3.963824s 90% 3.963824s 99% 3.963824s Write: 10% 4.059174s 30% 4.059174s 50% 4.059174s 90% 4.059174s 99% 4.059174s Write: 10% 4.039217s 30% 4.039217s 50% 4.039217s 90% 4.039217s 99% 4.039217s Write: 10% 4.109262s 30% 4.109262s 50% 4.109262s 90% 4.109262s 99% 4.109262s Write: 10% 4.086682s 30% 4.086682s 50% 4.086682s 90% 4.086682s 99% 4.086682s Write: 10% 4.111697s 30% 4.111697s 50% 4.111697s 90% 4.111697s 99% 4.111697s Write: 10% 4.503415s 30% 4.503415s 50% 4.503415s 90% 4.503415s 99% 4.503415s Write: 10% 4.572558s 30% 4.572558s 50% 4.572558s 90% 4.572558s 99% 4.572558s Write: 10% 4.691618s 30% 4.691618s 50% 4.691618s 90% 4.691618s 99% 4.691618s Was written: 87 MiB, Speed: 8 MiB/s Write: 10% 4.605094s 30% 4.605094s 50% 4.605094s 90% 4.605094s 99% 4.605094s Write: 10% 4.637657s 30% 4.637657s 50% 4.637657s 90% 4.637657s 99% 4.637657s Write: 10% 4.801058s 30% 4.801058s 50% 4.801058s 90% 4.801058s 99% 4.801058s Write: 10% 4.696682s 30% 4.696682s 50% 4.696682s 90% 4.696682s 99% 4.696682s Write: 10% 4.985636s 30% 4.985636s 50% 4.985636s 90% 4.985636s 99% 4.985636s Write: 10% 5.047959s 30% 5.047959s 50% 5.047959s 90% 5.047959s 99% 5.047959s Write: 10% 4.912787s 30% 4.912787s 50% 4.912787s 90% 4.912787s 99% 4.912787s Write: 10% 4.832152s 30% 4.832152s 50% 4.832152s 90% 4.832152s 99% 4.832152s Update: 10% 4.835566s 30% 4.835566s 50% 4.835566s 90% 4.835566s 99% 4.835566s Read: 10% 1.957244s 30% 1.957244s 50% 3.665898s 90% 3.665898s 99% 3.665898s |99.4%| [TM] {RESULT} ydb/tests/olap/high_load/unittest >> TestJsonParser::SkipErrors_StringValidation [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/high_load/unittest >> TestJsonParser::SkipErrors_NoField >> TestJsonParser::SkipErrors_NoField [GOOD] >> TestJsonParser::SkipErrors_NoJson >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] >> TestJsonParser::SkipErrors_NoJson [GOOD] >> TestJsonParser::SkipErrors_Optional >> TopicSessionTests::BadDataSessionError >> 
TestJsonParser::SkipErrors_Optional [GOOD] >> TestJsonParser::SkipErrors1JsonIn2Messages >> TestJsonParser::SkipErrors1JsonIn2Messages [GOOD] >> TestPurecalcFilter::Simple1 |99.4%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> Coordinator::LastStepSubscribe [GOOD] >> test_http_api.py::TestHttpApi::test_warning [GOOD] >> Coordinator::RestoreDomainConfiguration >> test_http_api.py::TestHttpApi::test_get_unknown_query [GOOD] >> test_http_api.py::TestHttpApi::test_unauthenticated >> test_http_api.py::TestHttpApi::test_unauthenticated [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency >> KqpTpch::Query11 [GOOD] >> KqpTpch::Query12 >> test_example.py::TestExample::test_linked_with_testcase >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_background_compaction/unittest >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T19:10:31.896363Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:32.076141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:10:32.100331Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:10:32.100976Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:32.101078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:32.721330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:32.721464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:32.817011Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766517028914929 != 1766517028914933 2025-12-23T19:10:32.834500Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:32.886901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:33.016052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003d08/r3tmp/tmpCaWH8m/pdisk_1.dat 2025-12-23T19:10:33.351293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:10:33.368412Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:10:33.486938Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-12-23T19:10:33.487051Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-12-23T19:10:33.490944Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:860:2743] 2025-12-23T19:10:33.616649Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:860:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-12-23T19:10:33.616786Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:860:2743] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-12-23T19:10:33.617502Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2025-12-23T19:10:33.617636Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:860:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-12-23T19:10:33.618027Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-12-23T19:10:33.618185Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:860:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-12-23T19:10:33.618328Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:860:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-12-23T19:10:33.618653Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:860:2743] txid# 281474976715657 HANDLE EvClientConnected 2025-12-23T19:10:33.625263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:33.627324Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:860:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-12-23T19:10:33.627419Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:860:2743] txid# 281474976715657 SEND to# [1:828:2723] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-12-23T19:10:33.676077Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T19:10:33.676995Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T19:10:33.679110Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T19:10:33.679510Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T19:10:33.727877Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T19:10:33.728900Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T19:10:33.729046Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T19:10:33.732614Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T19:10:33.732727Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T19:10:33.732814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T19:10:33.734956Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T19:10:33.735139Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T19:10:33.735266Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T19:10:33.747652Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T19:10:33.811152Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T19:10:33.812678Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T19:10:33.812875Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T19:10:33.812929Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:10:33.813005Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T19:10:33.813050Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:10:33.813374Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T19:10:33.813434Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T19:10:33.815410Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T19:10:33.815540Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T19:10:33.816133Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:10:33.816182Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:10:33.816314Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T19:10:33.816357Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T19:10:33.816394Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T19:10:33.816428Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T19:10:33.816498Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:10:33.816612Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:10:33.816668Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:10:33.816724Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T19:10:33.816861Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T19:10:33.816912Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T19:10:33.817033Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T19:10:33.817430Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T19:10:33.817505Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2 ... Z node 5 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2025-12-23T19:10:58.253791Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T19:10:58.253825Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-12-23T19:10:58.253869Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-12-23T19:10:58.253911Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T19:10:58.254006Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T19:10:58.254043Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-12-23T19:10:58.254086Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T19:10:58.254125Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-12-23T19:10:58.254177Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2025-12-23T19:10:58.254200Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T19:10:58.254233Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2025-12-23T19:10:58.265202Z node 5 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-12-23T19:10:58.265307Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-12-23T19:10:58.265378Z node 5 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-12-23T19:10:58.265491Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:10:58.267994Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [5:67:2114] Handle TEvNavigate describe path /Root/table-1 2025-12-23T19:10:58.268139Z node 5 :TX_PROXY DEBUG: describe.cpp:270: Actor# [5:1057:2875] HANDLE EvNavigateScheme /Root/table-1 
2025-12-23T19:10:58.274821Z node 5 :TX_PROXY DEBUG: describe.cpp:354: Actor# [5:1057:2875] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-12-23T19:10:58.275017Z node 5 :TX_PROXY DEBUG: describe.cpp:433: Actor# [5:1057:2875] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table-1" Options { ShowPrivateTable: true } 2025-12-23T19:10:58.276500Z node 5 :TX_PROXY DEBUG: describe.cpp:446: Actor# [5:1057:2875] Handle TEvDescribeSchemeResult Forward to# [5:828:2723] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 2000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 
MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 2025-12-23T19:10:58.278050Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [5:1061:2879], Recipient [5:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:10:58.278132Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:10:58.278199Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [5:1060:2878], serverId# [5:1061:2879], sessionId# [0:0:0] 2025-12-23T19:10:58.278347Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [5:1059:2877], Recipient [5:885:2764]: NKikimrTxDataShard.TEvGetInfoRequest 2025-12-23T19:10:58.279547Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [5:1064:2882], Recipient [5:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:10:58.279614Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:10:58.279669Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [5:1063:2881], serverId# [5:1064:2882], sessionId# [0:0:0] 2025-12-23T19:10:58.279871Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [5:1062:2880], Recipient [5:885:2764]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2025-12-23T19:10:58.280031Z 
node 5 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186224037888 tableId# 38 localTid# 1001, requested from [5:1062:2880], partsCount# 0, memtableSize# 728, memtableWaste# 3880, memtableRows# 3 2025-12-23T19:10:58.291820Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:02.520309Z 2025-12-23T19:10:58.291938Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-12-23T19:10:58.292001Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [5:1062:2880]pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2025-12-23T19:10:58.292883Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [5:876:2758], Recipient [5:885:2764]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-12-23T19:10:58.293408Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [5:1071:2888], Recipient [5:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:10:58.293477Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:10:58.293533Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [5:1070:2887], serverId# [5:1071:2888], sessionId# [0:0:0] 2025-12-23T19:10:58.293750Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [5:1069:2886], Recipient [5:885:2764]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2025-12-23T19:10:58.293892Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:118: Background compaction of tablet# 72075186224037888 of path# [OwnerId: 72057594046644480, LocalPathId: 38], requested from# [5:1069:2886] is not needed |99.4%| [TM] {RESULT} ydb/core/tx/datashard/ut_background_compaction/unittest |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_background_compaction/unittest |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/py3test >> test.py::test_local [GOOD] |99.4%| [TM] {RESULT} ydb/tests/functional/serializable/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serializable/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TCreateAndDropViewTest::CreateViewIfNotExists [GOOD] >> TCreateAndDropViewTest::DropView >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] >> KqpTpch::Query12 [GOOD] >> KqpTpch::Query13 >> DataShardStats::MultipleChannelsStatsCorrect [GOOD] >> DataShardStats::HistogramStatsCorrect >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed [GOOD] >> TabletService_Restart::Basics |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TestPurecalcFilter::Simple1 [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test |99.4%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/limits/py3test >> TestPurecalcFilter::Simple2 |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/py3test >> test_query_cache.py::TestQueryCache::test [GOOD] |99.4%| [TM] {RESULT} ydb/tests/functional/query_cache/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/query_cache/py3test >> Coordinator::RestoreDomainConfiguration [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false >> KqpTpch::Query13 [GOOD] >> KqpTpch::Query14 >> TopicSessionTests::BadDataSessionError [GOOD] >> test.py::TestViewer::test_whoami_root [GOOD] >> test.py::TestViewer::test_whoami_database [GOOD] >> test.py::TestViewer::test_whoami_viewer >> test.py::TestViewer::test_whoami_viewer [GOOD] >> test.py::TestViewer::test_whoami_monitoring [GOOD] >> test.py::TestViewer::test_counter >> test_http_api.py::TestHttpApi::test_create_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TopicSessionTests::WrongFieldType >> test.py::TestViewer::test_counter [GOOD] >> test.py::TestViewer::test_viewer_nodelist >> test.py::TestViewer::test_viewer_nodelist [GOOD] >> test.py::TestViewer::test_viewer_nodes >> test.py::TestViewer::test_viewer_nodes [GOOD] >> test.py::TestViewer::test_viewer_nodes_all |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TestPurecalcFilter::Simple2 [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test.py::TestViewer::test_viewer_nodes_all [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_no_database [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_no_database_filter_node_id [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes >> test.py::TestViewer::test_viewer_storage_nodes [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_all >> KqpTpch::Query14 [GOOD] >> KqpTpch::Query15 >> TestPurecalcFilter::ManyValues >> test.py::TestViewer::test_viewer_storage_nodes_all [GOOD] >> test.py::TestViewer::test_viewer_nodes_group >> test.py::TestViewer::test_viewer_nodes_group [GOOD] >> test.py::TestViewer::test_storage_groups [GOOD] >> test.py::TestViewer::test_viewer_groups_group_by_pool_name [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large >> test.py::TestViewer::test_viewer_groups_group_by_capacity_alert [GOOD] >> test.py::TestViewer::test_viewer_sysinfo [GOOD] >> test.py::TestViewer::test_viewer_vdiskinfo [GOOD] >> TCreateAndDropViewTest::DropView [GOOD] >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag >> test.py::TestViewer::test_viewer_pdiskinfo [GOOD] >> test.py::TestViewer::test_viewer_bsgroupinfo [GOOD] >> test.py::TestViewer::test_viewer_tabletinfo >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] >> test.py::TestViewer::test_viewer_tabletinfo [GOOD] >> test.py::TestViewer::test_viewer_describe >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] [GOOD] >> 
test_postgres.py::TestPostgresSuite::test_postgres_suite[char] >> test_example.py::TestExample::test_linked_with_testcase [GOOD] >> test.py::TestViewer::test_viewer_describe [GOOD] >> test.py::TestViewer::test_viewer_cluster [GOOD] >> test.py::TestViewer::test_viewer_tenantinfo >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] >> test.py::TestViewer::test_viewer_tenantinfo [GOOD] >> test.py::TestViewer::test_viewer_tenantinfo_db >> test.py::TestViewer::test_viewer_tenantinfo_db [GOOD] >> test.py::TestViewer::test_viewer_healthcheck |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test.py::TestViewer::test_viewer_healthcheck [GOOD] >> test.py::TestViewer::test_viewer_acl |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TabletService_Restart::Basics [GOOD] >> TabletService_Restart::OnlyAdminsAllowed |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls >> test.py::TestViewer::test_viewer_acl [GOOD] >> test.py::TestViewer::test_viewer_acl_write |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test.py::TestViewer::test_viewer_acl_write [GOOD] >> test.py::TestViewer::test_viewer_autocomplete [GOOD] >> test.py::TestViewer::test_viewer_check_access >> test.py::TestViewer::test_viewer_check_access [GOOD] >> test.py::TestViewer::test_viewer_query >> test.py::TestViewer::test_viewer_query [GOOD] >> test.py::TestViewer::test_viewer_query_from_table >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] >> test.py::TestViewer::test_viewer_query_from_table [GOOD] >> test.py::TestViewer::test_viewer_query_from_table_different_schemas >> KqpTpch::Query15 [GOOD] >> KqpTpch::Query16 >> test.py::TestViewer::test_viewer_query_from_table_different_schemas [GOOD] >> test.py::TestViewer::test_viewer_query_issue_13757 >> TestPurecalcFilter::ManyValues [GOOD] >> test.py::TestViewer::test_viewer_query_issue_13757 [GOOD] >> test.py::TestViewer::test_viewer_query_issue_13945 >> test_drain.py::TestHive::test_drain_tablets >> TestPurecalcFilter::NullValues |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test.py::TestViewer::test_viewer_query_issue_13945 [GOOD] >> test.py::TestViewer::test_pqrb_tablet |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] |99.4%| [TM] {RESULT} ydb/tests/functional/scheme_tests/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_tests/py3test >> test.py::TestViewer::test_pqrb_tablet [GOOD] >> test.py::TestViewer::test_viewer_nodes_issue_14992 >> test.py::TestViewer::test_viewer_nodes_issue_14992 [GOOD] >> test.py::TestViewer::test_operations_list [GOOD] >> test.py::TestViewer::test_operations_list_page >> test.py::TestViewer::test_operations_list_page [GOOD] >> test.py::TestViewer::test_operations_list_page_bad [GOOD] >> test.py::TestViewer::test_scheme_directory >> test.py::TestViewer::test_scheme_directory [GOOD] >> 
test.py::TestViewer::test_topic_data >> TopicSessionTests::WrongFieldType [GOOD] >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::DropNonexistingView >> test_select.py::TestDML::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> TopicSessionTests::RestartSessionIfNewClientWithOffset >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TabletService_Restart::OnlyAdminsAllowed [GOOD] >> KqpTpch::Query16 [GOOD] >> KqpTpch::Query17 >> TestPurecalcFilter::NullValues [GOOD] >> TestPurecalcFilter::PartialPush ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/tablet/ut/unittest >> TabletService_Restart::OnlyAdminsAllowed [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T19:10:23.802180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:23.983948Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:10:24.012117Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:10:24.012726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:24.012830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:24.495318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:24.495496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:24.586125Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766517020698322 != 1766517020698326 2025-12-23T19:10:24.601638Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:24.651278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:24.776686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bef/r3tmp/tmpTgUhYR/pdisk_1.dat 2025-12-23T19:10:25.094196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:10:25.111738Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... reading schema ... changing schema (dry run) ... reading schema ... changing schema ... reading schema ... waiting for SysViewsRoster update finished 2025-12-23T19:10:29.917107Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:29.930264Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:295:2339], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:10:29.930689Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:29.930792Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:30.208807Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:30.208994Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:30.244428Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1766517026879370 != 1766517026879373 2025-12-23T19:10:30.253764Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:30.308449Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:30.388944Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bef/r3tmp/tmpz6MY0H/pdisk_1.dat 2025-12-23T19:10:30.730727Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:10:30.758826Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... reading schema (without token) ... reading schema (non-admin token) ... reading schema (admin token) ... waiting for SysViewsRoster update finished 2025-12-23T19:10:35.178003Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:35.188165Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:10:35.191173Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:10:35.191495Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:35.191563Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:35.460267Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:35.460431Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:35.498598Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:34:2081] 1766517031858542 != 1766517031858546 2025-12-23T19:10:35.508080Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:35.555194Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:35.626593Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bef/r3tmp/tmpPu4N14/pdisk_1.dat 2025-12-23T19:10:35.947505Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:10:35.966147Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2025-12-23T19:10:40.151400Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:40.157746Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:10:40.164210Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:314:2357], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:10:40.164508Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:40.164571Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:40.499987Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:40.500111Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:40.535614Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1766517036946469 != 1766517036946473 2025-12-23T19:10:40.541233Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:40.591654Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:40.664929Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bef/r3tmp/tmpWfhAFW/pdisk_1.dat 2025-12-23T19:10:40.990470Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:10:41.005393Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2025-12-23T19:10:45.112704Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:45.117918Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] ... 1) 2025-12-23T19:10:50.759403Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2025-12-23T19:10:55.946675Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:55.972261Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:10:55.978109Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:314:2357], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:10:55.978474Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:55.978777Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:56.390701Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:56.390880Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:56.420739Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:34:2081] 1766517051667053 != 1766517051667057 2025-12-23T19:10:56.431619Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:56.484125Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:56.568754Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bef/r3tmp/tmp95ndLj/pdisk_1.dat 2025-12-23T19:10:56.916868Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:10:56.933714Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2025-12-23T19:11:02.383456Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:11:02.394390Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:292:2337], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:11:02.394643Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:11:02.394784Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:11:02.753570Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:11:02.753736Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:11:02.757903Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:34:2081] 1766517057803373 != 1766517057803377 2025-12-23T19:11:02.767310Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:11:02.816224Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:11:02.891354Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bef/r3tmp/tmp3kJn9b/pdisk_1.dat 2025-12-23T19:11:03.329430Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:11:03.343433Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2025-12-23T19:11:08.353349Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:11:08.368049Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:11:08.371003Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:11:08.371490Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:11:08.371644Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:11:08.674019Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:11:08.674178Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:11:08.706206Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:34:2081] 1766517064161279 != 1766517064161283 2025-12-23T19:11:08.715619Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:11:08.764371Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:11:08.849594Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bef/r3tmp/tmpx58s3V/pdisk_1.dat 2025-12-23T19:11:09.156097Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:11:09.170309Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... restarting tablet 72057594046644480 2025-12-23T19:11:09.369277Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished 2025-12-23T19:11:14.594793Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:11:14.604099Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:11:14.609336Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:314:2357], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:11:14.609499Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:11:14.609570Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:11:14.973732Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:11:14.973901Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:11:15.009729Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:34:2081] 1766517070352693 != 1766517070352697 2025-12-23T19:11:15.015971Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:11:15.068900Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:11:15.151002Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004bef/r3tmp/tmp3PUOeX/pdisk_1.dat 2025-12-23T19:11:15.484858Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:11:15.501611Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... restarting tablet 72057594046644480 (without token) ... restarting tablet 72057594046644480 (non-admin token) ... 
restarting tablet 72057594046644480 (admin token) 2025-12-23T19:11:15.904383Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |99.4%| [TM] {RESULT} ydb/core/grpc_services/tablet/ut/unittest |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/tablet/ut/unittest >> TMLPConsumerTests::ReloadPQTabletAfterAlterConsumer [GOOD] >> TMLPConsumerTests::RetentionStorage >> test.py::TestViewer::test_topic_data [GOOD] >> test.py::TestViewer::test_topic_data_cdc |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_example.py::TestExample::test_skipped_with_issue [SKIPPED] >> test.py::TestViewer::test_topic_data_cdc [GOOD] >> test.py::TestViewer::test_async_replication_describe >> test.py::TestViewer::test_async_replication_describe [GOOD] >> test.py::TestViewer::test_transfer_describe >> test_http_api.py::TestHttpApi::test_stop_idempotency [GOOD] >> KqpTpch::Query17 [GOOD] >> KqpTpch::Query18 >> test_http_api.py::TestHttpApi::test_restart_idempotency |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/example/py3test >> test_example.py::TestExample::test_skipped_with_issue [SKIPPED] |99.4%| [TM] {RESULT} ydb/tests/example/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/example/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions >> TestPurecalcFilter::PartialPush [GOOD] >> test.py::TestViewer::test_transfer_describe [GOOD] >> test.py::TestViewer::test_viewer_query_long >> TCreateAndDropViewTest::DropNonexistingView [GOOD] >> TCreateAndDropViewTest::CallDropViewOnTable |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TestPurecalcFilter::CompilationValidation |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] >> KqpTpch::Query18 [GOOD] >> KqpTpch::Query19 |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TopicSessionTests::RestartSessionIfNewClientWithOffset [GOOD] >> TopicSessionTests::ReadNonExistentTopic >> TestPurecalcFilter::CompilationValidation [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] >> TestPurecalcFilter::Emtpy [GOOD] >> 
test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update >> test.py::TestViewer::test_viewer_query_long [GOOD] >> test.py::TestViewer::test_viewer_query_long_multipart >> TestPurecalcFilter::Watermark |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test.py::TestViewer::test_viewer_query_long_multipart [GOOD] >> test.py::TestViewer::test_viewer_query_event_stream >> test.py::TestViewer::test_viewer_query_event_stream [GOOD] >> test.py::TestViewer::test_security >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] >> test.py::TestViewer::test_security [GOOD] >> test.py::TestViewer::test_storage_stats >> TCreateAndDropViewTest::CallDropViewOnTable [GOOD] >> TCreateAndDropViewTest::DropSameViewTwice |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> KqpTpch::Query19 [GOOD] >> KqpTpch::Query20 >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them >> test.py::TestViewer::test_storage_stats [GOOD] >> test.py::TestViewer::test_viewer_peers [GOOD] >> DataShardStats::HistogramStatsCorrect [GOOD] >> DataShardStats::BlobsStatsCorrect >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true >> TestPurecalcFilter::Watermark [GOOD] >> TestPurecalcFilter::WatermarkWhere |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] [FAIL] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/common/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] |99.5%| [TM] {RESULT} ydb/tests/fq/common/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/common/py3test >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> TopicSessionTests::ReadNonExistentTopic [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] [FAIL] >> TopicSessionTests::SlowSession |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/hive/py3test >> TMLPConsumerTests::RetentionStorage [GOOD] >> TMLPConsumerTests::RetentionStorageAfterReload >> KqpTpch::Query20 [GOOD] >> KqpTpch::Query21 >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> TestPurecalcFilter::WatermarkWhere [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TestPurecalcFilter::WatermarkWhereFalse >> test_http_api.py::TestHttpApi::test_restart_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query >> TCreateAndDropViewTest::DropSameViewTwice [GOOD] >> TCreateAndDropViewTest::DropViewIfExists >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values >> test_http_api.py::TestHttpApi::test_integral_results |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results [GOOD] >> test_http_api.py::TestHttpApi::test_optional_results |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TestPurecalcFilter::WatermarkWhereFalse [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted >> TestRawParser::Simple >> TestRawParser::Simple [GOOD] >> TestRawParser::ManyValues >> test_http_api.py::TestHttpApi::test_optional_results [GOOD] >> test_http_api.py::TestHttpApi::test_pg_results >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> TestRawParser::ManyValues [GOOD] >> TestRawParser::ChangeParserSchema >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple >> TestRawParser::ChangeParserSchema [GOOD] >> TestRawParser::TypeKindsValidation >> TestRawParser::TypeKindsValidation [GOOD] >> DataShardStats::BlobsStatsCorrect [GOOD] >> DataShardStats::SharedCacheGarbage >> test_http_api.py::TestHttpApi::test_pg_results [GOOD] >> test_http_api.py::TestHttpApi::test_set_result |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] [FAIL] >> TopicSessionTests::SlowSession [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> TestRawParser::TypeKindsValidation [GOOD] Test command err: 2025-12-23T19:10:02.306945Z node 1 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA 
config.flags("LLVM", "OFF"); SELECT COALESCE(col_0 == "str1", FALSE) AS _filter, _offset FROM Input; 2025-12-23T19:10:02.311044Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2025-12-23T19:10:02.311067Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_0 == "str1", FALSE) AS _filter, _offset FROM Input; ' (client id: [0:0:0]) 2025-12-23T19:10:02.311095Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2025-12-23T19:10:02.311223Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [1:7587132753546631287:2051] 2025-12-23T19:10:05.064644Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [1:7587132753546631287:2051] [id 1]: Started compile request 2025-12-23T19:10:05.843581Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [1:7587132753546631287:2051] [id 1]: Compilation completed for request 2025-12-23T19:10:05.843698Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [1:7587132753546631287:2051] 2025-12-23T19:10:05.843844Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2025-12-23T19:10:05.843870Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2025-12-23T19:10:05.843907Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [0:0:0] 2025-12-23T19:10:05.844002Z node 1 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_1 == "str2", FALSE) AS _filter, _offset FROM Input; 2025-12-23T19:10:05.844233Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [1:0:0] 2025-12-23T19:10:05.844253Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_1 == "str2", FALSE) AS _filter, _offset FROM Input; ' (client id: [1:0:0]) 2025-12-23T19:10:05.844283Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 2 2025-12-23T19:10:05.844338Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 2 from [1:7587132753546631287:2051] 2025-12-23T19:10:05.844442Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [1:7587132753546631287:2051] [id 2]: Started compile request 2025-12-23T19:10:05.864529Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [1:7587132753546631287:2051] [id 2]: Compilation completed for request 2025-12-23T19:10:05.864607Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 2 from [1:7587132753546631287:2051] 2025-12-23T19:10:05.864674Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 2 2025-12-23T19:10:05.864690Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2025-12-23T19:10:05.864715Z node 
1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [1:0:0] 2025-12-23T19:10:05.864744Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:439: GenerateSql: No sql was generated 2025-12-23T19:10:05.864755Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [2:0:0] 2025-12-23T19:10:05.864770Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [2:0:0] 2025-12-23T19:10:05.864839Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:34: TTopicFilters: ProcessData for 3 clients, number rows: 3 2025-12-23T19:10:05.864848Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 3 rows to purecalc filter (client id: [1:0:0]) 2025-12-23T19:10:05.864853Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 3 rows 2025-12-23T19:10:05.867516Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 3 rows to purecalc filter (client id: [2:0:0]) 2025-12-23T19:10:05.867539Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 3 rows 2025-12-23T19:10:05.867550Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:411: TProgramRunHandler: Add 3 rows to client [2:0:0] without processing 2025-12-23T19:10:05.867564Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 3 rows to purecalc filter (client id: [0:0:0]) 2025-12-23T19:10:05.867568Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 3 rows 2025-12-23T19:10:05.867623Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:94: TTopicFilters: Remove program with client id [2:0:0] 2025-12-23T19:10:05.867662Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:34: TTopicFilters: ProcessData for 2 clients, number rows: 1 2025-12-23T19:10:05.867689Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 1 rows to purecalc filter (client id: [1:0:0]) 2025-12-23T19:10:05.867713Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2025-12-23T19:10:05.867749Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 1 rows to purecalc filter (client id: [0:0:0]) 2025-12-23T19:10:05.867759Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2025-12-23T19:10:06.080930Z node 2 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a1 = "str1", FALSE) AS _filter, _offset FROM Input; 2025-12-23T19:10:06.081254Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2025-12-23T19:10:06.081290Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a1 = "str1", FALSE) AS _filter, _offset FROM Input; ' (client id: [0:0:0]) 2025-12-23T19:10:06.081335Z node 2 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2025-12-23T19:10:06.081441Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [2:7587132770811192848:2051] 2025-12-23T19:10:08.606828Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [2:7587132770811192848:2051] [id 1]: Started compile request 
2025-12-23T19:10:08.629492Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [2:7587132770811192848:2051] [id 1]: Compilation completed for request 2025-12-23T19:10:08.629623Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [2:7587132770811192848:2051] 2025-12-23T19:10:08.629764Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2025-12-23T19:10:08.629817Z node 2 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2025-12-23T19:10:08.629882Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [0:0:0] 2025-12-23T19:10:08.629919Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2025-12-23T19:10:08.629983Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:94: TTopicFilters: Remove program with client id [0:0:0] 2025-12-23T19:10:09.001505Z node 3 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 50, FALSE) AS _filter, _offset FROM Input; 2025-12-23T19:10:09.001719Z node 3 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2025-12-23T19:10:09.001736Z node 3 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 50, FALSE) AS _filter, _offset FROM Input; ' (client id: [0:0:0]) 2025-12-23T19:10:09.001765Z node 3 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2025-12-23T19:10:09.001828Z node 3 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [3:7587132782361888301:2051] 2025-12-23T19:10:11.909895Z node 3 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [3:7587132782361888301:2051] [id 1]: Started compile request 2025-12-23T19:10:11.938667Z node 3 :FQ_ROW_DISPATCHER ERROR: compile_service.cpp:67: TPurecalcCompileActor [3:7587132782361888301:2051] [id 1]: Compilation failed for request 2025-12-23T19:10:11.938804Z node 3 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [3:7587132782361888301:2051] 2025-12-23T19:10:11.938953Z node 3 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2025-12-23T19:10:11.939074Z node 3 :FQ_ROW_DISPATCHER ERROR: purecalc_filter.cpp:375: TProgramCompileHandler: Program compilation error: {
: Error: Failed to compile purecalc program subissue: {
: Error: Compile issues: generated.sql:3:27: Error: extraneous input '(' expecting {, ';'} } subissue: {
: Error: Final yql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 50, FALSE) AS _filter, _offset FROM Input; } } 2025-12-23T19:10:12.278913Z node 4 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(TRUE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; 2025-12-23T19:10:12.279293Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2025-12-23T19:10:12.279321Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(TRUE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; ' (client id: [0:0:0]) 2025-12-23T19:10:12.279385Z node 4 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2025-12-23T19:10:12.279472Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [4:7587132797708833126:2051] 2025-12-23T19:10:15.324395Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [4:7587132797708833126:2051] [id 1]: Started compile request 2025-12-23T19:10:15.390860Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [4:758713279 ... 2025-12-23T19:10:59.488358Z node 39 :FQ_ROW_DISPATCHER TRACE: json_parser.cpp:600: TJsonParser: Init parser, skipped 0, outputRowId 0 size 151 2025-12-23T19:10:59.726406Z node 40 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 > 100, FALSE) AS _filter, _offset FROM Input; 2025-12-23T19:10:59.726700Z node 40 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2025-12-23T19:10:59.727776Z node 40 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [40:7587132999174786232:2051] 2025-12-23T19:11:04.391434Z node 40 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [40:7587132999174786232:2051] [id 0]: Started compile request 2025-12-23T19:11:04.420785Z node 40 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [40:7587132999174786232:2051] [id 0]: Compilation completed for request 2025-12-23T19:11:04.420881Z node 40 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [40:7587132999174786232:2051] 2025-12-23T19:11:04.787122Z node 41 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 > 100, FALSE) AS _filter, _offset FROM Input; 2025-12-23T19:11:04.787993Z node 41 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2025-12-23T19:11:04.788087Z node 41 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [41:7587133019327629576:2051] 2025-12-23T19:11:08.402609Z node 41 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [41:7587133019327629576:2051] [id 0]: Started compile request 2025-12-23T19:11:08.424790Z node 41 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [41:7587133019327629576:2051] [id 0]: 
Compilation completed for request 2025-12-23T19:11:08.424909Z node 41 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [41:7587133019327629576:2051] 2025-12-23T19:11:08.424986Z node 41 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2025-12-23T19:11:08.425043Z node 41 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2025-12-23T19:11:08.425125Z node 41 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2025-12-23T19:11:08.926095Z node 42 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 > 100, FALSE) AS _filter, _offset FROM Input; 2025-12-23T19:11:08.926551Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2025-12-23T19:11:08.926615Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [42:7587133038149624062:2051] 2025-12-23T19:11:12.748674Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [42:7587133038149624062:2051] [id 0]: Started compile request 2025-12-23T19:11:12.778426Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [42:7587133038149624062:2051] [id 0]: Compilation completed for request 2025-12-23T19:11:12.778700Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [42:7587133038149624062:2051] 2025-12-23T19:11:12.778869Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2025-12-23T19:11:12.778981Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2025-12-23T19:11:12.779073Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2025-12-23T19:11:12.779113Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2025-12-23T19:11:12.779142Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2025-12-23T19:11:12.779177Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2025-12-23T19:11:13.447382Z node 43 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a1 is null, FALSE) AS _filter, _offset FROM Input; 2025-12-23T19:11:13.447695Z node 43 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2025-12-23T19:11:13.447800Z node 43 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [43:7587133059841581957:2051] 2025-12-23T19:11:17.326469Z node 43 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [43:7587133059841581957:2051] [id 0]: Started compile request 2025-12-23T19:11:17.941374Z node 44 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 > 50, FALSE) AS _filter, _offset FROM Input; 2025-12-23T19:11:17.941693Z node 44 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request 
with id 0 2025-12-23T19:11:17.942010Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [44:7587133076781505241:2051] 2025-12-23T19:11:23.404844Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [44:7587133076781505241:2051] [id 0]: Started compile request 2025-12-23T19:11:23.452656Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [44:7587133076781505241:2051] [id 0]: Compilation completed for request 2025-12-23T19:11:23.452815Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [44:7587133076781505241:2051] 2025-12-23T19:11:24.093300Z node 45 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 50, FALSE) AS _filter, _offset FROM Input; 2025-12-23T19:11:24.093632Z node 45 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2025-12-23T19:11:24.094502Z node 45 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [45:7587133105637053193:2051] 2025-12-23T19:11:28.497490Z node 45 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [45:7587133105637053193:2051] [id 0]: Started compile request 2025-12-23T19:11:28.501980Z node 45 :FQ_ROW_DISPATCHER ERROR: compile_service.cpp:67: TPurecalcCompileActor [45:7587133105637053193:2051] [id 0]: Compilation failed for request 2025-12-23T19:11:28.502137Z node 45 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [45:7587133105637053193:2051] 2025-12-23T19:11:29.316547Z node 47 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(TRUE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) 
- Interval("PT5S")) AS _watermark FROM Input; 2025-12-23T19:11:29.316897Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2025-12-23T19:11:29.317015Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [47:7587133127157471175:2051] 2025-12-23T19:11:33.598632Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [47:7587133127157471175:2051] [id 0]: Started compile request 2025-12-23T19:11:33.625850Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [47:7587133127157471175:2051] [id 0]: Compilation completed for request 2025-12-23T19:11:33.625949Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [47:7587133127157471175:2051] 2025-12-23T19:11:33.626231Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2025-12-23T19:11:33.626318Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2025-12-23T19:11:33.626402Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2025-12-23T19:11:34.218074Z node 48 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(pass > 0, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; 2025-12-23T19:11:34.218405Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2025-12-23T19:11:34.221859Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [48:7587133148205105808:2051] 2025-12-23T19:11:38.862072Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [48:7587133148205105808:2051] [id 0]: Started compile request 2025-12-23T19:11:38.898191Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [48:7587133148205105808:2051] [id 0]: Compilation completed for request 2025-12-23T19:11:38.898292Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [48:7587133148205105808:2051] 2025-12-23T19:11:39.588431Z node 49 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(FALSE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) 
- Interval("PT5S")) AS _watermark FROM Input; 2025-12-23T19:11:39.595764Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2025-12-23T19:11:39.597393Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [49:7587133168351287549:2051] 2025-12-23T19:11:44.786632Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [49:7587133168351287549:2051] [id 0]: Started compile request 2025-12-23T19:11:44.815920Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [49:7587133168351287549:2051] [id 0]: Compilation completed for request 2025-12-23T19:11:44.816027Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [49:7587133168351287549:2051] 2025-12-23T19:11:44.816360Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2025-12-23T19:11:44.816447Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2025-12-23T19:11:44.816557Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows |99.5%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest |99.5%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> TopicSessionTests::TwoSessionsWithDifferentSchemes >> TCreateAndDropViewTest::DropViewIfExists [FAIL] >> TCreateAndDropViewTest::DropViewInFolder >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_http_api.py::TestHttpApi::test_set_result [GOOD] >> test_http_api.py::TestHttpApi::test_complex_results >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data >> KqpTpch::Query21 [GOOD] >> KqpTpch::Query22 >> test_drain.py::TestHive::test_drain_on_stop |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/py3test >> test.py::TestViewer::test_viewer_peers [GOOD] |99.5%| [TM] {RESULT} ydb/core/viewer/tests/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/core/viewer/tests/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] |99.5%| [TM] {RESULT} 
ydb/tests/functional/wardens/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/wardens/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> test_http_api.py::TestHttpApi::test_complex_results [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] [FAIL] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_http_api.py::TestHttpApi::test_result_offset_limit [GOOD] >> test_http_api.py::TestHttpApi::test_openapi_spec >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] |99.5%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] |99.5%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] |99.5%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/postgresql/py3test >> test_select.py::TestDML::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> KqpTpch::Query22 [GOOD] >> TopicSessionTests::TwoSessionsWithDifferentSchemes [GOOD] >> TCreateAndDropViewTest::DropViewInFolder [GOOD] >> TCreateAndDropViewTest::ContextPollution >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation |99.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TMLPConsumerTests::RetentionStorageAfterReload [GOOD] >> TMLPCountersTests::SimpleCounters |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, pic, release} ydb/core/kqp/tests/kikimr_tpch/unittest >> KqpTpch::Query22 [GOOD] Test command err: -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 5 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 20 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 28 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 37 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 5 |99.5%| [TM] {RESULT} ydb/core/kqp/tests/kikimr_tpch/unittest |99.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/tests/kikimr_tpch/unittest >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true [GOOD] >> Coordinator::LastEmptyStepResent |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_select.py::TestDML::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TCreateAndDropViewTest::ContextPollution [GOOD] >> TEvaluateExprInViewTest::EvaluateExpr >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test 
>> TopicSessionTests::RestartSessionIfQueryStopped |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] |99.5%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> Coordinator::LastEmptyStepResent [GOOD] >> CoordinatorVolatile::PlanResentOnReboots |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [FAIL] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/py3test >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] 
|99.5%| [TM] {RESULT} ydb/tests/fq/http_api/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/http_api/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> CoordinatorVolatile::PlanResentOnReboots [GOOD] >> CoordinatorVolatile::MediatorReconnectPlanRace |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TEvaluateExprInViewTest::EvaluateExpr [GOOD] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TopicSessionTests::RestartSessionIfQueryStopped [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [FAIL] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> TopicSessionTests::WrongJson |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TMLPCountersTests::SimpleCounters [GOOD] >> 
TMLPDLQMoverTests::MoveToDLQ_ShortMessage >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session >> CoordinatorVolatile::MediatorReconnectPlanRace [GOOD] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> BulkUpsert::BulkUpsert [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx [GOOD] >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> 
test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction [GOOD] >> TSelectFromViewTest::OneTable >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_select.py::TestDML::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |99.6%| [TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> BulkUpsert::BulkUpsert [GOOD] |99.6%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest |99.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest |99.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario >> DataShardStats::SharedCacheGarbage [GOOD] >> DataShardStats::CollectStatsForSeveralParts |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/coordinator/ut/unittest >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T19:10:35.063515Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:35.064683Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:35.075722Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:678:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T19:10:35.230487Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:10:35.231742Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:10:35.250371Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:683:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:10:35.250939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:35.251294Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:35.253301Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:35.253449Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:35.895298Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:36.051664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:36.051821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:36.052799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:36.052881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:36.105094Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T19:10:36.107628Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:36.111268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:36.293849Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:10:36.306980Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0051f1/r3tmp/tmpblNdag/pdisk_1.dat ... waiting for the first mediator step 2025-12-23T19:10:37.039895Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:10:37.041395Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... found first step to be 1500 2025-12-23T19:10:37.379715Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 1500 ... waiting for the next mediator step ... 
found second step to be 2000 ... read step subscribe result: [1500, 2000] ... read step subscribe update: 3000 2025-12-23T19:10:38.439417Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 3000 ... read step subscribe result: [3000, 3000] ... read step subscribe update: 3500 ... read step subscribe update: 3500 ... read step subscribe update: 4000 ... read step subscribe update: 5000 ... read step subscribe update: 6000 ... read step subscribe update: 7000 ... read step subscribe result: [3000, 7000] 2025-12-23T19:10:41.296004Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2025-12-23T19:10:41.296101Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037968897] NodeDisconnected NodeId# 2 2025-12-23T19:10:41.296138Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936131] NodeDisconnected NodeId# 2 2025-12-23T19:10:41.296174Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2025-12-23T19:10:41.296373Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:96:2091] ServerId# [1:1127:2696] TabletId# 72057594037932033 PipeClientId# [2:96:2091] 2025-12-23T19:10:41.297329Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936129] NodeDisconnected NodeId# 2 2025-12-23T19:10:41.297581Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:257:2137] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-12-23T19:10:41.298472Z node 1 :HIVE WARN: hive_impl.cpp:824: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeDisconnected, NodeId 2 2025-12-23T19:10:41.298925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnecting 2025-12-23T19:10:41.299665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnecting -> Disconnected 2025-12-23T19:10:41.308387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:41.333619Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T19:10:41.334299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... read step subscribe update: 8000 ... read step subscribe update: 9000 ... read step subscribe update: 10000 ... read step subscribe update: 11000 ... read step subscribe update: 12000 ... waiting for SysViewsRoster update finished 2025-12-23T19:10:52.494973Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:52.496110Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:52.499942Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:679:2346], Scheduled retry for error: {
: Error: Scheme service not found } 2025-12-23T19:10:52.511077Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:10:52.512586Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:10:52.514171Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:684:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:10:52.515378Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:52.515631Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:52.517163Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:52.517218Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:52.922553Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:53.046436Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:53.046583Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:53.047091Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:53.047169Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:53.121269Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-12-23T19:10:53.121835Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:53.122283Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:53.232645Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:10:53.246981Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/0051f1/r3tmp/tmpxEAyso/pdisk_1.dat 2025-12-23T19:10:54.067574Z node 3 :TX_COORDINATOR DEBUG: coordinator__last_step_subscriptions.cpp:52: Processing TEvSubscribeLastStep from [4:1492:2374] at coordinator 72057594046316545 with seqNo 123 and cookie 234 2025-12-23T19:10:54.123551Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:10:54.123697Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: ... 
ut responses 2025-12-23T19:12:31.991933Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 10000000 HANDLE EvProposeTransaction marker# C0 2025-12-23T19:12:31.992027Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 10000000 step# 200 Status# 16 SEND to# [20:829:2724] Proxy marker# C1 ... coordinator 72057594046316545 gen 2 is planning step 200 2025-12-23T19:12:32.002790Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 10000000 has been planned 2025-12-23T19:12:32.002922Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 10000000 for mediator 72057594046382081 tablet 72057594047365120 ... blocking put [72057594046316545:2:10:1:24576:168:0] response ... waiting for planning for the required step ... coordinator 72057594046316545 gen 2 is planning step 250 ... starting a new coordinator instance ... waiting for migrated state 2025-12-23T19:12:32.037139Z node 20 :TX_COORDINATOR INFO: coordinator_impl.cpp:615: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-12-23T19:12:32.037720Z node 20 :TX_COORDINATOR INFO: coordinator_impl.cpp:615: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-12-23T19:12:32.048118Z node 20 :TX_COORDINATOR INFO: coordinator__init.cpp:120: tablet# 72057594046316545 CreateTxInit Complete ... blocking state response from [20:535:2394] to [20:873:2744] LastSentStep: 150 LastAcquiredStep: 0 LastConfirmedStep: 150 ... unblocking put responses and requests 2025-12-23T19:12:32.048946Z node 20 :TX_COORDINATOR INFO: coordinator_impl.cpp:615: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-12-23T19:12:32.049237Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 10000000 stepId# 200 Status# 17 SEND EvProposeTransactionStatus to# [20:829:2724] Proxy 2025-12-23T19:12:32.053269Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:322: tablet# 72057594046382081 server# [20:539:2480] disconnnected 2025-12-23T19:12:32.053708Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:201: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvServerDisconnected server# [20:539:2480] ... trying to plan tx 10000011 ... waiting for planned another persistent tx 2025-12-23T19:12:32.077818Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594046382081 server# [20:880:2754] connected 2025-12-23T19:12:32.078163Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:139: tablet# 72057594046382081 HANDLE EvCoordinatorSync 2025-12-23T19:12:32.078247Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594046382081 SEND EvCoordinatorSyncResult to# [20:876:2752] Cookie# 1 CompleteStep# 150 LatestKnownStep# 150 SubjectiveTime# 102 Coordinator# 72057594046316545 2025-12-23T19:12:32.078506Z node 20 :TX_COORDINATOR NOTICE: coordinator_impl.cpp:412: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-12-23T19:12:32.078578Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 200, txid# 10000000 marker# C2 ... 
observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000000 } Step: 200 PrevStep: 0 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-12-23T19:12:32.086163Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 200 2025-12-23T19:12:32.086259Z node 20 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [200] transactions [1] 2025-12-23T19:12:32.086440Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594046382081 SEND EvCommitStep to# [20:557:2490] ExecQueue {TMediateStep From 150 To# 200Steps: {{TCoordinatorStep step# 200 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M0 2025-12-23T19:12:32.086602Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 10000011 HANDLE EvProposeTransaction marker# C0 2025-12-23T19:12:32.086671Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 10000011 step# 250 Status# 16 SEND to# [20:829:2724] Proxy marker# C1 2025-12-23T19:12:32.086915Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 150 To# 200Steps: {{TCoordinatorStep step# 200 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M1 2025-12-23T19:12:32.087035Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND Ev to# [20:558:2491] step# 200 forTablet# 72057594047365120 txid# 10000000 marker# M3 2025-12-23T19:12:32.087131Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:558:2491] bucket.ActiveActor step# 200 2025-12-23T19:12:32.087197Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:559:2492] bucket.ActiveActor step# 200 2025-12-23T19:12:32.087342Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 200 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000000 AckTo# [20:876:2752]}}} marker# M4 2025-12-23T19:12:32.087523Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:559:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 200} 2025-12-23T19:12:32.087678Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 200} 2025-12-23T19:12:32.088390Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365120 Status: OK ServerId: [20:884:2757] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-12-23T19:12:32.088522Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [20:558:2491] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 200 MediatorId# 72057594046382081 TabletID 72057594047365120} ... 
observed tablet step: Transactions { TxId: 10000000 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... coordinator 72057594046316545 gen 3 is planning step 250 2025-12-23T19:12:32.101738Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 10000011 has been planned 2025-12-23T19:12:32.101882Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 10000011 for mediator 72057594046382081 tablet 72057594047365120 2025-12-23T19:12:32.102914Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 250, txid# 10000011 marker# C2 2025-12-23T19:12:32.103019Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 10000011 stepId# 250 Status# 17 SEND EvProposeTransactionStatus to# [20:829:2724] Proxy ... observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000011 } Step: 250 PrevStep: 200 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-12-23T19:12:32.103393Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 250 2025-12-23T19:12:32.103481Z node 20 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [250] transactions [1] 2025-12-23T19:12:32.103651Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594046382081 SEND EvCommitStep to# [20:557:2490] ExecQueue {TMediateStep From 200 To# 250Steps: {{TCoordinatorStep step# 250 PrevStep# 200Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M0 2025-12-23T19:12:32.103850Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 200 To# 250Steps: {{TCoordinatorStep step# 250 PrevStep# 200Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M1 2025-12-23T19:12:32.103948Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND Ev to# [20:558:2491] step# 250 forTablet# 72057594047365120 txid# 10000011 marker# M3 2025-12-23T19:12:32.104048Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:558:2491] bucket.ActiveActor step# 250 2025-12-23T19:12:32.104130Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:559:2492] bucket.ActiveActor step# 250 2025-12-23T19:12:32.104263Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 250 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000011 AckTo# [20:876:2752]}}} marker# M4 2025-12-23T19:12:32.104358Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [20:558:2491] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 250 MediatorId# 72057594046382081 TabletID 72057594047365120} 2025-12-23T19:12:32.104470Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: 
tablet_queue.cpp:319: Actor# [20:559:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 250} 2025-12-23T19:12:32.104595Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 250} ... observed tablet step: Transactions { TxId: 10000011 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... coordinator 72057594046316545 gen 3 is planning step 300 ... observed step: Step: 300 PrevStep: 250 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-12-23T19:12:32.118905Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 250 To# 300Steps: {{TCoordinatorStep step# 300 PrevStep# 250}}} marker# M1 2025-12-23T19:12:32.118988Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:558:2491] bucket.ActiveActor step# 300 2025-12-23T19:12:32.119053Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:559:2492] bucket.ActiveActor step# 300 2025-12-23T19:12:32.119121Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 300} 2025-12-23T19:12:32.119169Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:559:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 300} |99.6%| [TM] {RESULT} ydb/core/tx/coordinator/ut/unittest |99.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/coordinator/ut/unittest >> TopicSessionTests::WrongJson [GOOD] >> TopicSessionTests::WrongJsonOffset >> TMLPDLQMoverTests::MoveToDLQ_ShortMessage [GOOD] >> TMLPDLQMoverTests::MoveToDLQ_BigMessage >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> 
test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> DataShardStats::CollectStatsForSeveralParts [GOOD] >> DataShardStats::NoData >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range >> test_select.py::TestDML::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> TSelectFromViewTest::OneTable [GOOD] >> TSelectFromViewTest::OneTableUsingRelativeName >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] >> TopicSessionTests::WrongJsonOffset [GOOD] >> RowDispatcherTests::OneClientOneSession >> RowDispatcherTests::OneClientOneSession [GOOD] >> RowDispatcherTests::TwoClientOneSession >> RowDispatcherTests::TwoClientOneSession [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> RowDispatcherTests::SessionError >> RowDispatcherTests::SessionError [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns >> RowDispatcherTests::CoordinatorSubscribe >> RowDispatcherTests::CoordinatorSubscribe [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged [GOOD] >> RowDispatcherTests::TwoClients4Sessions >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] >> RowDispatcherTests::TwoClients4Sessions [GOOD] >> RowDispatcherTests::ReinitConsumerIfNewGeneration >> RowDispatcherTests::ReinitConsumerIfNewGeneration [GOOD] >> RowDispatcherTests::HandleTEvUndelivered >> RowDispatcherTests::HandleTEvUndelivered [GOOD] >> RowDispatcherTests::TwoClientTwoConnection >> RowDispatcherTests::TwoClientTwoConnection [GOOD] >> RowDispatcherTests::ProcessNoSession >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> RowDispatcherTests::ProcessNoSession [GOOD] >> RowDispatcherTests::IgnoreWrongPartitionId >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] >> RowDispatcherTests::SessionFatalError >> RowDispatcherTests::SessionFatalError [GOOD] >> test_select.py::TestDML::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok >> TDqPqRdReadActorTests::Backpressure [GOOD] >> 
TDqPqRdReadActorTests::RowDispatcherIsRestarted2 >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD] >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 [GOOD] >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted [GOOD] >> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/ut/unittest >> RowDispatcherTests::SessionFatalError [GOOD] Test command err: 2025-12-23T19:10:12.010634Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [1:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2025-12-23T19:10:12.011513Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2025-12-23T19:10:12.011588Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [1:25:2054] 2025-12-23T19:10:12.011633Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [1:25:2054] 2025-12-23T19:10:12.011664Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [1:25:2054] 2025-12-23T19:10:12.011699Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [2:26:2054] 2025-12-23T19:10:12.011741Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [2:26:2054] 2025-12-23T19:10:12.011764Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2025-12-23T19:10:12.011795Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [2:26:2054] 2025-12-23T19:10:12.011826Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [3:27:2054] 2025-12-23T19:10:12.011845Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [3:27:2054] 2025-12-23T19:10:12.011866Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2025-12-23T19:10:12.011888Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2025-12-23T19:10:12.011966Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [3:27:2054] 2025-12-23T19:10:12.012089Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-12-23T19:10:12.012203Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-12-23T19:10:12.013397Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 0 2025-12-23T19:10:12.013462Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-12-23T19:10:12.025763Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-12-23T19:10:12.025906Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-12-23T19:10:12.026081Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [2:32:2055] 
2025-12-23T19:10:12.026139Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [2:32:2055] 2025-12-23T19:10:12.026178Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:313: Coordinator: Move all Locations from old actor [2:26:2054] to new [2:32:2055] 2025-12-23T19:10:12.026210Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [2:32:2055] 2025-12-23T19:10:12.026265Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [2:33:2056] 2025-12-23T19:10:12.026296Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [2:33:2056] 2025-12-23T19:10:12.026329Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:313: Coordinator: Move all Locations from old actor [2:32:2055] to new [2:33:2056] 2025-12-23T19:10:12.026363Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [2:33:2056] 2025-12-23T19:10:12.026471Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-12-23T19:10:12.026520Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-12-23T19:10:12.026626Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-12-23T19:10:12.026686Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-12-23T19:10:12.120259Z node 5 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [5:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2025-12-23T19:10:12.120787Z node 5 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2025-12-23T19:10:12.120894Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [5:25:2054] 2025-12-23T19:10:12.120935Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [5:25:2054] 2025-12-23T19:10:12.120965Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [5:25:2054] 2025-12-23T19:10:12.121004Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [6:26:2054] 2025-12-23T19:10:12.121024Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [6:26:2054] 2025-12-23T19:10:12.121044Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2025-12-23T19:10:12.121086Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [6:26:2054] 2025-12-23T19:10:12.121134Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [7:27:2054] 2025-12-23T19:10:12.121155Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [7:27:2054] 2025-12-23T19:10:12.121178Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2025-12-23T19:10:12.121199Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2025-12-23T19:10:12.121225Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [7:27:2054] 2025-12-23T19:10:12.121321Z node 5 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: 
TEvCoordinatorRequest from [5:28:2055], topic1, partIds: 0, 1, 2 2025-12-23T19:10:12.121465Z node 5 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [5:28:2055] 2025-12-23T19:10:12.121654Z node 5 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [5:29:2056], topic1, partIds: 3 2025-12-23T19:10:12.121731Z node 5 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [5:29:2056] 2025-12-23T19:10:12.212462Z node 9 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [9:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2025-12-23T19:10:12.212809Z node 9 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2025-12-23T19:10:12.212860Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [10:26:2054] 2025-12-23T19:10:12.212901Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [10:26:2054] 2025-12-23T19:10:12.212944Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2025-12-23T19:10:12.212996Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [10:26:2054] 2025-12-23T19:10:12.213110Z node 9 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [9:28:2055], topic1, partIds: 0 2025-12-23T19:10:12.213184Z node 9 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:514: Coordinator: Not all nodes connected, nodes count: 3, known rd count: 2, add request into pending queue 2025-12-23T19:10:13.214483Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [11:27:2054] 2025-12-23T19:10:13.214588Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [11:27:2054] 2025-12-23T19:10:13.214630Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2025-12-23T19:10:13.214660Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2025-12-23T19:10:13.214766Z node 9 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [9:28:2055] 2025-12-23T19:10:13.214835Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [11:27:2054] 2025-12-23T19:10:13.303213Z node 13 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [13:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2025-12-23T19:10:13.303895Z node 13 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2025-12-23T19:10:13.303974Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [13:25:2054] 2025-12-23T19:10:13.304021Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [13:25:2054] 2025-12-23T19:10:13.304057Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [13:25:2054] 2025-12-23T19:10:13.304092Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [14:26:2054] 2025-12-23T19:10:13.304112Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [14:26:2054] 2025-12-23T19:10:13.304136Z node 13 
:FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2025-12-23T19:10:13.304164Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [14:26:2054] 2025-12-23T19:10:13.304210Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [15:27:2054] 2025-12-23T19:10:13.304229Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [15:27:2054] 2025-12-23T19:10:13.304248Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2025-12-23T19:10:13.304274Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2025-12-23T19:10:13.304315Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [15:27:2054] 2025-12-23T19:10:13.304410Z node 13 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [13:28:2055], topic1, partIds: 0, 1, 2 2025-12-23T19:10:13.304529Z node 13 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [13:28:2055] 2025-12-23T19:10:13.304703Z node 13 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [13:28:2055], topic1, partIds: 0, 1, 2 2025-12-23T19:10:13.304833Z node 13 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [13:28:2055] 2025-12-23T19:10:13.394539Z node 17 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [17:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2025-12-23T19:10:13.395435Z node 17 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 1, AssignedNodes: 0 2025-12-23T19:10:13.395483Z node 17 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2025-12-23T19:10:13.395532Z ... topic part id 100 query id QueryId cookie 42 2025-12-23T19:12:51.517740Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2025-12-23T19:12:51.521952Z node 60 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "YDB_DATABASE/RowDispatcher/Tenant" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): DNS resolution failed for YDB_ENDPOINT: C-ares status is not ARES_SUCCESS qtype=A name=YDB_ENDPOINT is_balancer=0: DNS server returned general failure } {
: Error: Grpc error response on endpoint YDB_ENDPOINT } ] 2025-12-23T19:12:51.522973Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1052: RowDispatcher: TEvTryConnect to node id 61 2025-12-23T19:12:51.526189Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:587: RowDispatcher: EvNodeConnected, node id 61 2025-12-23T19:12:51.528115Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [60:22:2063] to [61:16:2053] query id QueryId 2025-12-23T19:12:51.528540Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [61:16:2053] part id 100 query id QueryId 2025-12-23T19:12:51.528674Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [60:22:2063] to [61:16:2053] query id QueryId 2025-12-23T19:12:51.528935Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:967: RowDispatcher: Received TEvNoSession from [61:16:2053], generation 41 2025-12-23T19:12:51.529026Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [60:22:2063] to [61:16:2053] query id QueryId 2025-12-23T19:12:51.529312Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [61:16:2053] part id 100 query id QueryId 2025-12-23T19:12:51.529414Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [60:22:2063] to [61:16:2053] query id QueryId 2025-12-23T19:12:51.529675Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:967: RowDispatcher: Received TEvNoSession from [61:16:2053], generation 42 2025-12-23T19:12:51.529748Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [61:16:2053] query id QueryId, partitions size 1 2025-12-23T19:12:51.529846Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1036: RowDispatcher: Session is not used, sent TEvPoisonPill to [60:22:2063] 2025-12-23T19:12:51.762112Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:544: RowDispatcher: Successfully bootstrapped row dispatcher, id [62:17:2058], tenant Tenant 2025-12-23T19:12:51.762234Z node 62 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [62:17:2058] 2025-12-23T19:12:51.762277Z node 62 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2025-12-23T19:12:51.762596Z node 62 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [62:18:2059], NodesManagerId [0:0:0], rebalancing timeout 120.000000s 2025-12-23T19:12:51.762697Z node 62 :FQ_ROW_DISPATCHER DEBUG: leader_election.cpp:232: TLeaderElection [62:19:2060] Successfully bootstrapped, local coordinator id [62:18:2059], tenant id Tenant, local mode 0, coordination node path YDB_DATABASE/RowDispatcher/Tenant, endpoint YDB_ENDPOINT 2025-12-23T19:12:51.772770Z node 62 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:71: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-12-23T19:12:51.772847Z node 62 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:113: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-12-23T19:12:51.772893Z node 62 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:411: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-12-23T19:12:51.773872Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:637: RowDispatcher: TEvCoordinatorChangesSubscribe from [62:18:2059] 
2025-12-23T19:12:51.776151Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [62:14:2056], read group connection_id1, topicPath topic part id 100 query id QueryId cookie 1 2025-12-23T19:12:51.776337Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2025-12-23T19:12:51.776820Z node 62 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [62:22:2063] to [62:14:2056] query id QueryId 2025-12-23T19:12:51.776970Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1001: RowDispatcher: Received TEvStopSession from [62:14:2056] topic topic query id QueryId 2025-12-23T19:12:51.777064Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [62:14:2056] query id QueryId, partitions size 1 2025-12-23T19:12:51.777196Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1036: RowDispatcher: Session is not used, sent TEvPoisonPill to [62:22:2063] 2025-12-23T19:12:52.091872Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:544: RowDispatcher: Successfully bootstrapped row dispatcher, id [64:17:2058], tenant Tenant 2025-12-23T19:12:52.092041Z node 64 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [64:17:2058] 2025-12-23T19:12:52.092090Z node 64 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2025-12-23T19:12:52.092381Z node 64 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [64:18:2059], NodesManagerId [0:0:0], rebalancing timeout 120.000000s 2025-12-23T19:12:52.092461Z node 64 :FQ_ROW_DISPATCHER DEBUG: leader_election.cpp:232: TLeaderElection [64:19:2060] Successfully bootstrapped, local coordinator id [64:18:2059], tenant id Tenant, local mode 0, coordination node path YDB_DATABASE/RowDispatcher/Tenant, endpoint YDB_ENDPOINT 2025-12-23T19:12:52.104948Z node 64 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:71: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-12-23T19:12:52.105020Z node 64 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:113: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-12-23T19:12:52.105055Z node 64 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:411: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-12-23T19:12:52.105441Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:637: RowDispatcher: TEvCoordinatorChangesSubscribe from [64:18:2059] 2025-12-23T19:12:52.106304Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:14:2056], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2025-12-23T19:12:52.106556Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2025-12-23T19:12:52.106855Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 101 2025-12-23T19:12:52.107345Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:15:2057], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2025-12-23T19:12:52.107853Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1120: RowDispatcher: Forward TEvSessionError from 
[64:22:2063] to [64:14:2056] query id QueryId 2025-12-23T19:12:52.107969Z node 64 :FQ_ROW_DISPATCHER WARN: row_dispatcher.cpp:1140: RowDispatcher: Fatal session error, remove session [64:22:2063] 2025-12-23T19:12:52.108064Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [64:14:2056] query id QueryId, partitions size 2 2025-12-23T19:12:52.108359Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:23:2064] to [64:15:2057] query id QueryId 2025-12-23T19:12:52.108489Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:15:2057] part id 101 query id QueryId 2025-12-23T19:12:52.108619Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:23:2064] to [64:15:2057] query id QueryId 2025-12-23T19:12:52.108782Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:14:2056], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2025-12-23T19:12:52.108947Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2025-12-23T19:12:52.109351Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1120: RowDispatcher: Forward TEvSessionError from [64:22:2063] to [64:15:2057] query id QueryId 2025-12-23T19:12:52.109457Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [64:15:2057] query id QueryId, partitions size 2 2025-12-23T19:12:52.109569Z node 64 :FQ_ROW_DISPATCHER ERROR: row_dispatcher.cpp:1033: RowDispatcher: Wrong readActorId [64:15:2057], no such consumer 2025-12-23T19:12:52.109642Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1036: RowDispatcher: Session is not used, sent TEvPoisonPill to [64:22:2063] 2025-12-23T19:12:52.109934Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:15:2057], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2025-12-23T19:12:52.110439Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:24:2065] to [64:14:2056] query id QueryId 2025-12-23T19:12:52.110560Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:14:2056] part id 100 query id QueryId 2025-12-23T19:12:52.110700Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:24:2065] to [64:14:2056] query id QueryId 2025-12-23T19:12:52.110845Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:24:2065] to [64:15:2057] query id QueryId 2025-12-23T19:12:52.110982Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:15:2057] part id 100 query id QueryId 2025-12-23T19:12:52.111324Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:24:2065] to [64:15:2057] query id QueryId 2025-12-23T19:12:52.111475Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:23:2064] to [64:14:2056] query id QueryId 2025-12-23T19:12:52.111629Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received 
TEvGetNextBatch from [64:14:2056] part id 101 query id QueryId 2025-12-23T19:12:52.111865Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:23:2064] to [64:14:2056] query id QueryId 2025-12-23T19:12:52.112073Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:23:2064] to [64:15:2057] query id QueryId 2025-12-23T19:12:52.112293Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:15:2057] part id 101 query id QueryId 2025-12-23T19:12:52.112407Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:23:2064] to [64:15:2057] query id QueryId |99.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/row_dispatcher/ut/unittest |99.6%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/ut/unittest >> TDqPqRdReadActorTests::MetadataFields [GOOD] >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> TSelectFromViewTest::OneTableUsingRelativeName [GOOD] >> TSelectFromViewTest::DisabledFeatureFlag |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD] >> TDqPqRdReadActorTests::TestReadFromTopicFirstWatermark [GOOD] >> TDqPqRdReadActorTests::TestReadFromTopicWatermarks1 >> TDqPqRdReadActorTests::TestReadFromTopicWatermarks1 [GOOD] >> TDqPqRdReadActorTests::TestWatermarksWhere >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 >> TMLPDLQMoverTests::MoveToDLQ_BigMessage [GOOD] >> TMLPDLQMoverTests::MoveToDLQ_ManyMessages |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [FAIL] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TSelectFromViewTest::DisabledFeatureFlag [GOOD] >> TSelectFromViewTest::ReadTestCasesFromFiles >> TDqPqRdReadActorTests::TestWatermarksWhere [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success >> TDqPqRdReadActorTests::TestWatermarksWhereFalse [GOOD] >> TDqPqRdReadActorTests::WatermarkCheckpointWithItemsInReadyBuffer >> test_crud.py::TestYdbCrudOperations::test_crud_operations >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> TDqPqRdReadActorTests::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 >> 
TDqPqRdReadActorTests::RebalanceAfterDistributionReset >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success >> DataShardStats::NoData [GOOD] >> DataShardStats::Follower |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> TDqPqRdReadActorTests::RebalanceAfterDistributionReset [GOOD] >> TDqPqReadActorTest::TestReadFromTopic >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] [FAIL] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 >> test_public_api.py::TestExplain::test_explain_data_query >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> 
test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >> test_drain.py::TestHive::test_drain_on_stop [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate >> TDqPqReadActorTest::TestReadFromTopic [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [FAIL] >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item >> TDqPqReadActorTest::TestReadFromTopicFromNow >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] >> TMLPDLQMoverTests::MoveToDLQ_ManyMessages [GOOD] >> TMLPDLQMoverTests::MoveToDLQ_TopicNotExists >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFromNow [GOOD] >> DataShardStats::Follower [GOOD] >> DataShardStats::Tli >> TDqPqReadActorTest::ReadWithFreeSpace >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] [FAIL] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> TDqPqReadActorTest::ReadWithFreeSpace [GOOD] >> TDqPqReadActorTest::ReadNonExistentTopic >> TDqPqReadActorTest::ReadNonExistentTopic [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [GOOD] |99.6%| [TA] $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/sql/py3test >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] |99.6%| [TM] {RESULT} ydb/tests/sql/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/sql/py3test >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> DataShardStats::Tli [GOOD] >> DataShardStats::HasSchemaChanges_BTreeIndex >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] >> TMLPDLQMoverTests::MoveToDLQ_TopicNotExists [GOOD] >> TMLPStorageTests::NextFromEmptyStorage [GOOD] >> TMLPStorageTests::CommitToEmptyStorage [GOOD] >> TMLPStorageTests::UnlockToEmptyStorage [GOOD] >> TMLPStorageTests::ChangeDeadlineEmptyStorage [GOOD] >> TMLPStorageTests::AddMessageToEmptyStorage [GOOD] >> TMLPStorageTests::AddNotFirstMessageToEmptyStorage [GOOD] >> TMLPStorageTests::AddMessageWithSkippedMessage [GOOD] >> TMLPStorageTests::AddMessageWithDelay [GOOD] >> TMLPStorageTests::AddMessageWithBigDelay [GOOD] >> TMLPStorageTests::AddMessageWithZeroDelay [GOOD] >> TMLPStorageTests::AddMessageWithDelay_Unlock [GOOD] >> TMLPStorageTests::NextWithoutKeepMessageOrderStorage [GOOD] >> TMLPStorageTests::NextWithKeepMessageOrderStorage [GOOD] >> TMLPStorageTests::NextWithWriteRetentionPeriod [GOOD] >> TMLPStorageTests::NextWithInfinityRetentionPeriod [GOOD] >> TMLPStorageTests::SkipLockedMessage [GOOD] >> TMLPStorageTests::SkipLockedMessageGroups [GOOD] >> TMLPStorageTests::CommitLockedMessage_WithoutKeepMessageOrder [GOOD] >> TMLPStorageTests::CommitLockedMessage_WithKeepMessageOrder [GOOD] >> TMLPStorageTests::CommitUnlockedMessage [GOOD] >> TMLPStorageTests::CommitCommittedMessage [GOOD] >> TMLPStorageTests::UnlockLockedMessage_WithoutKeepMessageOrder [GOOD] >> TMLPStorageTests::UnlockLockedMessage_WithKeepMessageOrder [GOOD] >> TMLPStorageTests::UnlockUnlockedMessage [GOOD] >> TMLPStorageTests::UnlockCommittedMessage [GOOD] >> TMLPStorageTests::ChangeDeadlineLockedMessage [GOOD] >> TMLPStorageTests::ChangeDeadlineUnlockedMessage [GOOD] >> TMLPStorageTests::EmptyStorageSerialization [GOOD] >> TMLPStorageTests::StorageSerialization [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_Unlocked [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_Locked [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_Committed [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_DLQ [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_DeadLetterPolicy_Delete [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_WithHole [GOOD] >> TMLPStorageTests::StorageSerialization_WAL_WithMoveBaseTime_Deadline [GOOD] >> 
TMLPStorageTests::CompactStorage_ByCommittedOffset [GOOD] >> TMLPStorageTests::CompactStorage_ByRetention [GOOD] >> TMLPStorageTests::CompactStorage_ByDeadline [GOOD] >> TMLPStorageTests::CompactStorage_WithDLQ [GOOD] >> TMLPStorageTests::ProccessDeadlines [GOOD] >> TMLPStorageTests::MoveBaseDeadline [GOOD] >> TMLPStorageTests::SlowZone_MoveUnprocessedToSlowZone [GOOD] >> TMLPStorageTests::SlowZone_MoveLockedToSlowZone [GOOD] >> TMLPStorageTests::SlowZone_MoveCommittedToSlowZone [GOOD] >> TMLPStorageTests::SlowZone_MoveDLQToSlowZone [GOOD] >> TMLPStorageTests::SlowZone_MoveToSlowZoneAndLock [GOOD] >> TMLPStorageTests::SlowZone_MoveToSlowZoneAndCommit [GOOD] >> TMLPStorageTests::SlowZone_MoveToSlowZoneAndDLQ [GOOD] >> TMLPStorageTests::SlowZone_Lock [GOOD] >> TMLPStorageTests::SlowZone_Commit_First [GOOD] >> TMLPStorageTests::SlowZone_Commit [GOOD] >> TMLPStorageTests::SlowZone_DLQ [GOOD] >> TMLPStorageTests::SlowZone_CommitToFast [GOOD] >> TMLPStorageTests::SlowZone_CommitAndAdd [GOOD] >> TMLPStorageTests::SlowZone_Retention_1message [GOOD] >> TMLPStorageTests::SlowZone_Retention_2message [GOOD] >> TMLPStorageTests::SlowZone_Retention_3message [GOOD] >> TMLPStorageTests::ChangeDeadLetterPolicy_Delete [GOOD] >> TMLPStorageTests::ChangeDeadLetterPolicy_Unspecified [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest >> TMLPStorageTests::ChangeDeadLetterPolicy_Unspecified [GOOD] Test command err: 2025-12-23T19:10:05.921391Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587132765675567162:2069];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:05.921454Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:10:06.000049Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # 
/home/runner/.ya/build/build_root/8ptz/003d63/r3tmp/tmpZeXZr9/pdisk_1.dat 2025-12-23T19:10:06.223305Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:10:06.264490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:06.264605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:06.295936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:06.367596Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:06.368845Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587132765675567127:2081] 1766517005917225 != 1766517005917228 TServer::EnableGrpc on GrpcPort 29127, node 1 2025-12-23T19:10:06.519686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:10:06.574874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/003d63/r3tmp/yandexxgRocC.tmp 2025-12-23T19:10:06.574992Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/003d63/r3tmp/yandexxgRocC.tmp 2025-12-23T19:10:06.576709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/003d63/r3tmp/yandexxgRocC.tmp 2025-12-23T19:10:06.576816Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:10:06.806031Z INFO: TTestServer started on Port 14314 GrpcPort 29127 2025-12-23T19:10:06.930266Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:14314 PQClient connected to localhost:29127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766517006422 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-12-23T19:10:07.107428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-12-23T19:10:07.124808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-12-23T19:10:07.135839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) waiting... waiting... waiting... 2025-12-23T19:10:09.099505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132782855437362:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:09.099521Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132782855437350:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:09.099709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:09.100199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132782855437366:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:09.100314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:09.108335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:10:09.113771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132782855437398:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:09.113897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:09.121925Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7587132782855437365:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-12-23T19:10:09.222168Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7587132782855437433:2646] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-12-23T19:10:09.545181Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7587132782855437448:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-12-23T19:10:09.558844Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2579: SessionId: ydb://session/3?node_id=1&id=YmE3NjhkOWQtMzkwYmMwMWEtYzlhOTMyNTMtYTRhODIxNTA=, ActorId: [1:7587132782855437348:2329], ActorState: ExecuteState, TraceId: 01kd69wtnk64ev8me5ba0989nm, ReplyQueryCompileError, status: SCHEME_ERROR, issues: { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } }, remove tx with tx_id: 2025-12-23T19:10:09.563973Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-12-23T19:10:09.616881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:09.649763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:09.741798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7587132787150405035:2827] 2025-12-23T19:10:10.922884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7587132765675567162:2069];send_to=[0:730719953665814 ... 
, 0, 1970-01-01T00:00:00.000000Z, 0} s{1, Unprocessed, 0, 1, 1970-01-01T00:00:00.000000Z, 1} f{2, Unprocessed, 0, 2, 1970-01-01T00:00:00.000000Z, 2} f{3, Unprocessed, 0, 3, 1970-01-01T00:00:00.000000Z, 3} f{4, Unprocessed, 0, 4, 1970-01-01T00:00:00.000000Z, 4} f{5, Unprocessed, 0, 5, 1970-01-01T00:00:00.000000Z, 5} f{6, Unprocessed, 0, 6, 1970-01-01T00:00:00.000000Z, 6} f{7, Unprocessed, 0, 7, 1970-01-01T00:00:00.000000Z, 7} ] LockedGroups [] DLQQueue [] DLQMessages [] Metrics {Inflight: 8, Unprocessed: 8, Locked: 0, LockedGroups: 0, Committed: 0, DLQ: 0} LOAD < WAL: FormatVersion: 1 Metrics { TotalScheduledToDLQMessageCount: 0 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 0 TotalDeletedByRetentionMessageCount: 3 } FirstOffset: 3 < STORAGE DUMP: FirstOffset: 3 FirstUncommittedOffset: 3 FirstUnlockedOffset: 3 BaseDeadline: 2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{3, Unprocessed, 0, 3, 1970-01-01T00:00:00.000000Z, 3} f{4, Unprocessed, 0, 4, 1970-01-01T00:00:00.000000Z, 4} f{5, Unprocessed, 0, 5, 1970-01-01T00:00:00.000000Z, 5} f{6, Unprocessed, 0, 6, 1970-01-01T00:00:00.000000Z, 6} f{7, Unprocessed, 0, 7, 1970-01-01T00:00:00.000000Z, 7} ] LockedGroups [] DLQQueue [] DLQMessages [] Metrics {Inflight: 5, Unprocessed: 5, Locked: 0, LockedGroups: 0, Committed: 0, DLQ: 0} LOAD < SNAPSHOT: FormatVersion: 1 Meta { FirstOffset: 3 FirstUncommittedOffset: 3 BaseDeadlineSeconds: 1761034384 BaseWriteTimestampSeconds: 1761034376 } Metrics { TotalScheduledToDLQMessageCount: 0 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 0 TotalDeletedByRetentionMessageCount: 3 } Messages: "\000\000\000\000\007\000\000\000\003\000\000\000\000\t\000\000\000\001\000\000\000\000\013\000\000\000\001\000\000\000\000\r\000\000\000\001\000\000\000\000\017\000\000\000\001" SlowMessages: "" DLQMessages: "" < STORAGE DUMP: FirstOffset: 3 FirstUncommittedOffset: 3 FirstUnlockedOffset: 3 BaseDeadline: 2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{3, Unprocessed, 0, 3, 1970-01-01T00:00:00.000000Z, 3} f{4, Unprocessed, 0, 4, 1970-01-01T00:00:00.000000Z, 4} f{5, Unprocessed, 0, 5, 1970-01-01T00:00:00.000000Z, 5} f{6, Unprocessed, 0, 6, 1970-01-01T00:00:00.000000Z, 6} f{7, Unprocessed, 0, 7, 1970-01-01T00:00:00.000000Z, 7} ] LockedGroups [] DLQQueue [] DLQMessages [] Metrics {Inflight: 5, Unprocessed: 5, Locked: 0, LockedGroups: 0, Committed: 0, DLQ: 0} CREATE > STORAGE DUMP: FirstOffset: 0 FirstUncommittedOffset: 0 FirstUnlockedOffset: 1 BaseDeadline: 2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{0, DLQ, 0, 0, 1970-01-01T00:00:00.000000Z, 0} ] LockedGroups [] DLQQueue [(0, 1)] DLQMessages [(0, 1)] Metrics {Inflight: 1, Unprocessed: 0, Locked: 0, LockedGroups: 0, Committed: 0, DLQ: 1} > SNAPSHOT: FormatVersion: 1 Meta { FirstOffset: 0 FirstUncommittedOffset: 0 BaseDeadlineSeconds: 1761034384 BaseWriteTimestampSeconds: 1761034376 } Metrics { TotalScheduledToDLQMessageCount: 1 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 0 TotalDeletedByRetentionMessageCount: 0 } Messages: "C\000\000\000\001\000\000\000\000" SlowMessages: "" DLQMessages: "\000\001" CREATE > STORAGE DUMP: FirstOffset: 0 FirstUncommittedOffset: 1 FirstUnlockedOffset: 1 BaseDeadline: 
2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{0, Committed, 0, 0, 1970-01-01T00:00:00.000000Z, 0} ] LockedGroups [] DLQQueue [(0, 1)] DLQMessages [] Metrics {Inflight: 1, Unprocessed: 0, Locked: 0, LockedGroups: 0, Committed: 1, DLQ: 0} > WAL: FormatVersion: 1 Metrics { TotalScheduledToDLQMessageCount: 1 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 1 TotalDeletedByRetentionMessageCount: 0 } FirstOffset: 0 ChangedMessages: "\000B\000\000\000" AddedToDLQMessages: "" FirstDLQSeqNo: 1 CREATE > STORAGE DUMP: FirstOffset: 0 FirstUncommittedOffset: 1 FirstUnlockedOffset: 1 BaseDeadline: 2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{0, Committed, 0, 0, 1970-01-01T00:00:00.000000Z, 0} ] LockedGroups [] DLQQueue [(0, 1)] DLQMessages [] Metrics {Inflight: 1, Unprocessed: 0, Locked: 0, LockedGroups: 0, Committed: 1, DLQ: 0} > SNAPSHOT: FormatVersion: 1 Meta { FirstOffset: 0 FirstUncommittedOffset: 1 BaseDeadlineSeconds: 1761034384 BaseWriteTimestampSeconds: 1761034376 } Metrics { TotalScheduledToDLQMessageCount: 1 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 1 TotalDeletedByRetentionMessageCount: 0 } Messages: "B\000\000\000\001\000\000\000\000" SlowMessages: "" DLQMessages: "" LOAD < SNAPSHOT: FormatVersion: 1 Meta { FirstOffset: 0 FirstUncommittedOffset: 0 BaseDeadlineSeconds: 1761034384 BaseWriteTimestampSeconds: 1761034376 } Metrics { TotalScheduledToDLQMessageCount: 1 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 0 TotalDeletedByRetentionMessageCount: 0 } Messages: "C\000\000\000\001\000\000\000\000" SlowMessages: "" DLQMessages: "\000\001" < STORAGE DUMP: FirstOffset: 0 FirstUncommittedOffset: 0 FirstUnlockedOffset: 1 BaseDeadline: 2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{0, DLQ, 0, 0, 1970-01-01T00:00:00.000000Z, 0} ] LockedGroups [] DLQQueue [(0, 1)] DLQMessages [(0, 1)] Metrics {Inflight: 1, Unprocessed: 0, Locked: 0, LockedGroups: 0, Committed: 0, DLQ: 1} LOAD < WAL: FormatVersion: 1 Metrics { TotalScheduledToDLQMessageCount: 1 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 1 TotalDeletedByRetentionMessageCount: 0 } FirstOffset: 0 ChangedMessages: "\000B\000\000\000" AddedToDLQMessages: "" FirstDLQSeqNo: 1 < STORAGE DUMP: FirstOffset: 0 FirstUncommittedOffset: 0 FirstUnlockedOffset: 1 BaseDeadline: 2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{0, Committed, 0, 0, 1970-01-01T00:00:00.000000Z, 0} ] LockedGroups [] DLQQueue [(0, 1)] DLQMessages [] Metrics {Inflight: 1, Unprocessed: 0, Locked: 0, LockedGroups: 0, Committed: 1, DLQ: 0} LOAD < SNAPSHOT: FormatVersion: 1 Meta { FirstOffset: 0 FirstUncommittedOffset: 1 BaseDeadlineSeconds: 1761034384 BaseWriteTimestampSeconds: 1761034376 } Metrics { TotalScheduledToDLQMessageCount: 1 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 1 TotalDeletedByRetentionMessageCount: 0 } Messages: "B\000\000\000\001\000\000\000\000" SlowMessages: "" DLQMessages: "" < STORAGE DUMP: FirstOffset: 0 FirstUncommittedOffset: 1 FirstUnlockedOffset: 1 BaseDeadline: 
2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{0, Committed, 0, 0, 1970-01-01T00:00:00.000000Z, 0} ] LockedGroups [] DLQQueue [] DLQMessages [] Metrics {Inflight: 1, Unprocessed: 0, Locked: 0, LockedGroups: 0, Committed: 1, DLQ: 0} CREATE > STORAGE DUMP: FirstOffset: 0 FirstUncommittedOffset: 0 FirstUnlockedOffset: 1 BaseDeadline: 2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{0, DLQ, 0, 0, 1970-01-01T00:00:00.000000Z, 0} ] LockedGroups [] DLQQueue [(0, 1)] DLQMessages [(0, 1)] Metrics {Inflight: 1, Unprocessed: 0, Locked: 0, LockedGroups: 0, Committed: 0, DLQ: 1} > SNAPSHOT: FormatVersion: 1 Meta { FirstOffset: 0 FirstUncommittedOffset: 0 BaseDeadlineSeconds: 1761034384 BaseWriteTimestampSeconds: 1761034376 } Metrics { TotalScheduledToDLQMessageCount: 1 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 0 TotalDeletedByRetentionMessageCount: 0 } Messages: "C\000\000\000\001\000\000\000\000" SlowMessages: "" DLQMessages: "\000\001" CREATE > STORAGE DUMP: FirstOffset: 0 FirstUncommittedOffset: 0 FirstUnlockedOffset: 0 BaseDeadline: 2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{0, Unprocessed, 0, 0, 1970-01-01T00:00:00.000000Z, 0} ] LockedGroups [] DLQQueue [] DLQMessages [] Metrics {Inflight: 1, Unprocessed: 1, Locked: 0, LockedGroups: 0, Committed: 0, DLQ: 0} > WAL: FormatVersion: 1 Metrics { TotalScheduledToDLQMessageCount: 1 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 0 TotalDeletedByRetentionMessageCount: 0 } FirstOffset: 0 ChangedMessages: "\000@\000\000\000" CREATE > STORAGE DUMP: FirstOffset: 0 FirstUncommittedOffset: 0 FirstUnlockedOffset: 0 BaseDeadline: 2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{0, Unprocessed, 0, 0, 1970-01-01T00:00:00.000000Z, 0} ] LockedGroups [] DLQQueue [] DLQMessages [] Metrics {Inflight: 1, Unprocessed: 1, Locked: 0, LockedGroups: 0, Committed: 0, DLQ: 0} > SNAPSHOT: FormatVersion: 1 Meta { FirstOffset: 0 FirstUncommittedOffset: 0 BaseDeadlineSeconds: 1761034384 BaseWriteTimestampSeconds: 1761034376 } Metrics { TotalScheduledToDLQMessageCount: 1 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 0 TotalDeletedByRetentionMessageCount: 0 } Messages: "@\000\000\000\001\000\000\000\000" SlowMessages: "" DLQMessages: "" LOAD < SNAPSHOT: FormatVersion: 1 Meta { FirstOffset: 0 FirstUncommittedOffset: 0 BaseDeadlineSeconds: 1761034384 BaseWriteTimestampSeconds: 1761034376 } Metrics { TotalScheduledToDLQMessageCount: 1 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 0 TotalDeletedByRetentionMessageCount: 0 } Messages: "C\000\000\000\001\000\000\000\000" SlowMessages: "" DLQMessages: "\000\001" < STORAGE DUMP: FirstOffset: 0 FirstUncommittedOffset: 0 FirstUnlockedOffset: 1 BaseDeadline: 2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{0, DLQ, 0, 0, 1970-01-01T00:00:00.000000Z, 0} ] LockedGroups [] DLQQueue [(0, 1)] DLQMessages [(0, 1)] Metrics {Inflight: 1, Unprocessed: 0, Locked: 0, LockedGroups: 0, Committed: 0, DLQ: 1} LOAD < WAL: FormatVersion: 1 Metrics { TotalScheduledToDLQMessageCount: 1 TotalCommittedMessageCount: 0 
TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 0 TotalDeletedByRetentionMessageCount: 0 } FirstOffset: 0 ChangedMessages: "\000@\000\000\000" < STORAGE DUMP: FirstOffset: 0 FirstUncommittedOffset: 0 FirstUnlockedOffset: 1 BaseDeadline: 2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{0, Unprocessed, 0, 0, 1970-01-01T00:00:00.000000Z, 0} ] LockedGroups [] DLQQueue [] DLQMessages [] Metrics {Inflight: 1, Unprocessed: 1, Locked: 0, LockedGroups: 0, Committed: 0, DLQ: 0} LOAD < SNAPSHOT: FormatVersion: 1 Meta { FirstOffset: 0 FirstUncommittedOffset: 0 BaseDeadlineSeconds: 1761034384 BaseWriteTimestampSeconds: 1761034376 } Metrics { TotalScheduledToDLQMessageCount: 1 TotalCommittedMessageCount: 0 TotalMovedToDLQMessageCount: 0 TotalPurgedMessageCount: 0 TotalDeletedByDeadlinePolicyMessageCount: 0 TotalDeletedByRetentionMessageCount: 0 } Messages: "@\000\000\000\001\000\000\000\000" SlowMessages: "" DLQMessages: "" < STORAGE DUMP: FirstOffset: 0 FirstUncommittedOffset: 0 FirstUnlockedOffset: 0 BaseDeadline: 2025-10-21T08:13:04.000000Z BaseWriteTimestamp: 2025-10-21T08:12:56.000000Z Messages: [f{0, Unprocessed, 0, 0, 1970-01-01T00:00:00.000000Z, 0} ] LockedGroups [] DLQQueue [] DLQMessages [] Metrics {Inflight: 1, Unprocessed: 1, Locked: 0, LockedGroups: 0, Committed: 0, DLQ: 0} |99.6%| [TM] {RESULT} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest |99.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest >> TSelectFromViewTest::ReadTestCasesFromFiles [GOOD] >> TSelectFromViewTest::QueryCacheIsUpdated |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] >> test_select.py::TestDML::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline >> DataShardStats::HasSchemaChanges_BTreeIndex [GOOD] >> DataShardStats::HasSchemaChanges_ByKeyFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] Test command err: Trying to start YDB, gRPC: 9003, MsgBus: 21581 2025-12-23T19:10:21.049106Z node 1 :METADATA_PROVIDER 
WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587132835982373430:2139];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:21.052499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003b81/r3tmp/tmpUnOPXT/pdisk_1.dat 2025-12-23T19:10:21.379617Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:10:21.477229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:21.477370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:21.542865Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587132835982373326:2081] 1766517021028910 != 1766517021028913 2025-12-23T19:10:21.552134Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:21.556377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:21.590603Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 9003, node 1 2025-12-23T19:10:21.907563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:10:21.907654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:10:21.907665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:10:21.907754Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:10:22.058951Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:21581 TClient is connected to server localhost:21581 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766517021640 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:10:22.757694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:10:22.765322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:10:24.369048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132848867276104:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:24.369205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:24.369677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7587132848867276114:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:24.369763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 24601, MsgBus: 8566 2025-12-23T19:10:25.910715Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587132851968388713:2066];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:25.910796Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003b81/r3tmp/tmpQsbXId/pdisk_1.dat 2025-12-23T19:10:25.932467Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:10:26.021414Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:26.024772Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7587132851968388687:2081] 1766517025909785 != 1766517025909788 2025-12-23T19:10:26.048862Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:26.048962Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:26.063638Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24601, node 2 2025-12-23T19:10:26.111065Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:10:26.111095Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:10:26.111109Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:10:26.111194Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:10:26.204476Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:8566 TClient is connected to server localhost:8566 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766517026113 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-12-23T19:10:26.589669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:10:26.923027Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:10:29.091480Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587132869148258758:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:29.091564Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:29.091861Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7587132869148258768:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:10:29.091922Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 11079, MsgBus: 5154 2025-12-23T19:10:30.085141Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7587132874808907252:2075];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:10:30.090080Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:10:30.100259Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003b81/r3tmp/tmpPDXMoF/pdisk_1.dat 2025-12-23T19:10:30.225103Z node 3 :KQP_PROXY ... utoref)))) (return (Commit! $4 $2)) ) Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (DataSource '"kikimr" '"db")) (let $3 '('"season_id" '"series_id")) (let $4 (AggApply 'count_all (StructType) (lambda '($31) (Void)))) (let $5 (Aggregate (ExtractMembers (Right! (KiReadTable! world $2 (Key '('table (String '"/Root/episodes"))) (Void) '())) $3) $3 '('('Count0 $4)) '())) (let $6 '('"season_id" '"series_id" '"title")) (let $7 '('Inner '"episodes" '"series" '('"episodes" '"series_id") '('"series" '"series_id") '())) (let $8 '('"episodes" '"series_id" '"episodes" '"season_id")) (let $9 '('"seasons" '"series_id" '"seasons" '"season_id")) (let $10 '('Inner $7 '"seasons" $8 $9 '())) (let $11 '('"rename" '"episodes.Count0" '"episode_count")) (let $12 '('"rename" '"episodes.season_id" '"")) (let $13 '('"rename" '"episodes.series_id" '"")) (let $14 '('"rename" '"seasons.season_id" '"")) (let $15 '('"rename" '"seasons.series_id" '"")) (let $16 '('"rename" '"seasons.title" '"season")) (let $17 '('"rename" '"series.series_id" '"")) (let $18 '('"rename" '"series.title" '"series")) (let $19 '($11 $12 $13 $14 $15 $16 $17 $18)) (let $20 (EquiJoin '($5 '"episodes") '((ExtractMembers (Right! (KiReadTable! world $2 (Key '('table (String '"/Root/series"))) (Void) '())) '('"series_id" '"title")) '"series") '((ExtractMembers (Right! (KiReadTable! world $2 (Key '('table (String '"/Root/seasons"))) (Void) '())) $6) '"seasons") $10 $19)) (let $21 (Bool 'true)) (let $22 (Sort $20 '($21 $21) (lambda '($32) '((Member $32 '"series") (Member $32 '"season"))))) (let $23 '('"db" '"/Root/episodes" '"Select")) (let $24 '('"db" '"/Root/series" '"Select")) (let $25 '('"db" '"/Root/seasons" '"Select")) (let $26 '($23 $24 $25)) (let $27 '('('"mode" '"flush"))) (let $28 (KiExecDataQuery! world $1 (DataQueryBlocks (TKiDataQueryBlock '('($22 '() '0 '0)) (KiEffects) $26 '())) $27 (Void))) (let $29 (DataSink 'result)) (let $30 (ResPull! (Left! $28) $29 (Key) (Nth (Right! $28) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! 
$30 $29) $1 $27)) ) KqpLogical-ApplyExtractMembersToReadTableKqpLogical-ApplyExtractMembersToReadTableKqpLogical-ApplyExtractMembersToReadTableKqpLogical-RewriteAggregateKqpLogical-RewriteEquiJoinKqpLogical-JoinToIndexLookupKqpLogical-JoinToIndexLookupKqpPhysical-BuildReadTableRangesStageKqpPhysical-PushAggregateCombineToStageKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-BuildShuffleStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushFlatmapToStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushExtractMembersToStageKqpPhysical-PushFlatmapToStageKqpPhysical-BuildSortStageKqpPhysical-RewriteKqpReadTableKqpPeepholeFinal-SetCombinerMemoryLimitKqpPeepholeNewOperator-RewriteWideCombinerToDqHashCombinerCompiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/count_episodes_with_titles")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/count_episodes_with_titles" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) Trying to start YDB, gRPC: 23636, MsgBus: 10013 2025-12-23T19:13:39.145177Z node 23 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7587133687314694212:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:13:39.145409Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/003b81/r3tmp/tmpcQ2WzI/pdisk_1.dat 2025-12-23T19:13:39.193013Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:13:39.315611Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:13:39.326045Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:13:39.326163Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:13:39.352379Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:13:39.372667Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 23636, node 23 2025-12-23T19:13:39.444926Z node 23 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-12-23T19:13:39.444957Z node 23 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-12-23T19:13:39.444979Z node 23 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-12-23T19:13:39.445142Z node 23 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2025-12-23T19:13:39.747259Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:10013 2025-12-23T19:13:40.155945Z node 23 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:10013 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1766517219425 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:13:40.361264Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-12-23T19:13:44.145400Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[23:7587133687314694212:2068];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:13:44.145534Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-12-23T19:13:44.615233Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7587133708789531553:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:13:44.615373Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:13:44.615705Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7587133708789531562:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:13:44.615774Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:13:44.684289Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7587133708789531583:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:13:44.684415Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7587133708789531588:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:13:44.684448Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:13:44.684813Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7587133708789531590:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:13:44.684917Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-12-23T19:13:44.689372Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-12-23T19:13:44.701213Z node 23 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [23:7587133708789531591:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-12-23T19:13:44.803671Z node 23 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [23:7587133708789531643:2559] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |99.6%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest |99.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/view/unittest >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD] >> test_public_api.py::TestCRUDOperations::test_none_values >> test_public_api.py::TestCRUDOperations::test_none_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_list_type >> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple >> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type >> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] 
|99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead [GOOD] >> TDqPqReadActorTest::LoadCorruptedState >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> TDqPqReadActorTest::LoadCorruptedState [GOOD] >> TDqPqReadActorTest::TestLoadFromSeveralStates >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success [GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] >> TDqPqReadActorTest::TestLoadFromSeveralStates [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 >> DataShardStats::HasSchemaChanges_ByKeyFilter [GOOD] >> DataShardStats::HasSchemaChanges_Columns >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure >> test_select.py::TestDML::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] 
[GOOD] >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer >> test_select.py::TestDML::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> TPqWriterTest::TestWriteToTopic >> TPqWriterTest::TestWriteToTopic [GOOD] >> TPqWriterTest::TestWriteToTopicMultiBatch >> TPqWriterTest::TestWriteToTopicMultiBatch [GOOD] >> TPqWriterTest::TestDeferredWriteToTopic >> TPqWriterTest::TestDeferredWriteToTopic [GOOD] >> TPqWriterTest::WriteNonExistentTopic [GOOD] >> TPqWriterTest::TestCheckpoints >> DataShardStats::HasSchemaChanges_Columns [GOOD] >> DataShardStats::HasSchemaChanges_Families >> test_select.py::TestDML::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> TPqWriterTest::TestCheckpoints [GOOD] >> TPqWriterTest::TestCheckpointWithEmptyBatch >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/pq_async_io/ut/unittest >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] Test command err: 2025-12-23T19:10:35.128657Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:557: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [1:7587132893532430411:2054], metadatafields: , partitions: 666, skip json errors: 0 2025-12-23T19:10:35.142733Z node 1 :KQP_COMPUTE DEBUG: dq_pq_read_actor_base.cpp:153: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. SessionId: empty Watermarks enabled: 1 granularity: 0.000100s late arrival delay: 0.000000s idle: 0 idle timeout: 86400.000000s 2025-12-23T19:10:35.143135Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-12-23T19:10:35.143193Z node 1 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1515: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2025-12-23T19:10:35.143223Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:608: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Send TEvCoordinatorChangesSubscribe to local RD ([1:7587132893532430411:2054]) 2025-12-23T19:10:35.143252Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:635: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [1:7587132893532430417:2048] 2025-12-23T19:10:35.143455Z node 1 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1009: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [1:7587132893532430412:2055] 2025-12-23T19:10:35.143504Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:655: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [1:7587132893532430412:2055], partIds: 666 cookie 1 2025-12-23T19:10:35.143706Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1053: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [1:7587132893532430412:2055], cookie 1 2025-12-23T19:10:35.143742Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1352: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2025-12-23T19:10:35.143755Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1355: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2025-12-23T19:10:35.143814Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1374: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [1:7587132893532430414:2057], generation 1 2025-12-23T19:10:35.143863Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:719: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [1:7587132893532430414:2057], connection id 1 partitions offsets (666 / ), 2025-12-23T19:10:35.144450Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:864: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [1:7587132893532430414:2057], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-12-23T19:10:35.147066Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:945: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [1:7587132893532430414:2057], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-12-23T19:10:35.154252Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1123: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [1:7587132893532430414:2057], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-12-23T19:10:35.155829Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2025-12-23T19:10:35.155847Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2025-12-23T19:10:35.155881Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
GetAsyncInputData freeSpace = 1000 2025-12-23T19:10:35.156101Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:807: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2025-12-23T19:10:35.156115Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:811: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, watermark (empty maybe), buffer size 0, free space 948, result size 52 2025-12-23T19:10:35.158684Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:744: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. PassAway 2025-12-23T19:10:35.158790Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1242: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. State: used buffer size 0 ready buffer event size 0 state 5 InFlyAsyncInputData 0 Counters: CoordinatorChanged 1 CoordinatorResult 1 MessageBatch 1 StartSessionAck 1 NewDataArrived 1 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 0 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 GetAsyncInputData 2 NotifyCA 1 [1:7587132893532430414:2057] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partitions 666 offsets 666=2 has pending data 2025-12-23T19:10:35.158844Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:736: SelfId: [1:7587132893532430417:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send StopSession to [1:7587132893532430414:2057] generation 1 2025-12-23T19:10:35.485270Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:557: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [2:7587132896893667573:2054], metadatafields: , partitions: 666, skip json errors: 0 2025-12-23T19:10:35.486038Z node 2 :KQP_COMPUTE DEBUG: dq_pq_read_actor_base.cpp:153: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. SessionId: empty Watermarks enabled: 1 granularity: 0.000100s late arrival delay: 0.000000s idle: 0 idle timeout: 86400.000000s 2025-12-23T19:10:35.487105Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-12-23T19:10:35.487481Z node 2 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1515: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2025-12-23T19:10:35.487509Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:608: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([2:7587132896893667573:2054]) 2025-12-23T19:10:35.487612Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:635: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [2:7587132896893667579:2048] 2025-12-23T19:10:35.490570Z node 2 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1009: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [2:7587132896893667574:2055] 2025-12-23T19:10:35.490617Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:655: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Send TEvCoordinatorRequest to coordinator [2:7587132896893667574:2055], partIds: 666 cookie 1 2025-12-23T19:10:35.490935Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1053: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [2:7587132896893667574:2055], cookie 1 2025-12-23T19:10:35.490953Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1352: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2025-12-23T19:10:35.490964Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1355: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2025-12-23T19:10:35.490983Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1374: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [2:7587132896893667576:2057], generation 1 2025-12-23T19:10:35.491048Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:719: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [2:7587132896893667576:2057], connection id 1 partitions offsets (666 / ), 2025-12-23T19:10:35.491361Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:864: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [2:7587132896893667576:2057], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-12-23T19:10:35.494228Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:945: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [2:7587132896893667576:2057], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-12-23T19:10:35.496387Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1123: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [2:7587132896893667576:2057], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-12-23T19:10:35.496421Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2025-12-23T19:10:35.496431Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2025-12-23T19:10:35.497694Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2025-12-23T19:10:35.497814Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:807: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2025-12-23T19:10:35.497832Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:811: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, watermark (empty maybe), buffer size 0, free space 948, result size 52 2025-12-23T19:10:35.499725Z node 2 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1097: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvUndelivered, TSystem::Undelivered from [2:7587132896893667576:2057], reason Disconnected, cookie 999 2025-12-23T19:10:35.499789Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:945: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Received TEvNewDataArrived from [2:7587132896893667576:2057], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-12-23T19:10:35.500103Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:963: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvRetry, EventQueueId 1 2025-12-23T19:10:35.500941Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1123: SelfId: [2:7587132896893667579:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [2:7587132896893667576:2057 ... 7b-70f34d59] Closing read session. Close timeout: 0.000000s 2025-12-23T19:14:27.151739Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-12-23T19:14:27.151831Z :INFO: [local] [local] [3d3bf0b6-d992389-430cb37b-70f34d59] Counters: { Errors: 0 CurrentSessionLifetimeMs: 114 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T19:14:27.152008Z :NOTICE: [local] [local] [3d3bf0b6-d992389-430cb37b-70f34d59] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T19:14:27.156068Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf|43d0d08b-3de1dbbb-3b185857-e36da5a9_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-12-23T19:14:27.156128Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf|43d0d08b-3de1dbbb-3b185857-e36da5a9_0] PartitionId [0] Generation [1] Write session will now close 2025-12-23T19:14:27.156201Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf|43d0d08b-3de1dbbb-3b185857-e36da5a9_0] PartitionId [0] Generation [1] Write session: aborting 2025-12-23T19:14:27.156714Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf|43d0d08b-3de1dbbb-3b185857-e36da5a9_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-12-23T19:14:27.156758Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf|43d0d08b-3de1dbbb-3b185857-e36da5a9_0] PartitionId [0] Generation [1] Write session: destroy 2025-12-23T19:14:27.591521Z node 46 :KQP_COMPUTE DEBUG: dq_pq_write_actor.cpp:268: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Load state: { SourceId: "95e0d85b-9fd3d1f3-d6af71d3-fa963aaf" ConfirmedSeqNo: 3 EgressBytes: 3 } 2025-12-23T19:14:27.591725Z node 46 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:195: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 2. Checkpoint: 0. Finished: 0 2025-12-23T19:14:27.607502Z node 46 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:221: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Received data for sending: 4 2025-12-23T19:14:27.607539Z node 46 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:221: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Received data for sending: 5 2025-12-23T19:14:27.627393Z :INFO: [local] OnFederationDiscovery fall back to single mode, database=local [] [] Start federated write session to database '' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "" DbInfos: [ { path: "local" endpoint: "localhost:29687" status: AVAILABLE weight: 100 } ] ControlPlaneEndpoint: localhost:29687 }2025-12-23T19:14:27.628038Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Write session: try to update token 2025-12-23T19:14:27.628396Z :INFO: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] Starting read session 2025-12-23T19:14:27.628443Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Start write session. 
Will connect to nodeId: 0 2025-12-23T19:14:27.628481Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] Starting single session 2025-12-23T19:14:27.629488Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T19:14:27.629585Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T19:14:27.629737Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] Reconnecting session to cluster in 0.000000s 2025-12-23T19:14:27.638519Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] Successfully connected. Initializing session 2025-12-23T19:14:27.638533Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Write session: write to message_group: 95e0d85b-9fd3d1f3-d6af71d3-fa963aaf 2025-12-23T19:14:27.638756Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Write session: send init request: init_request { path: "Checkpoints" producer_id: "95e0d85b-9fd3d1f3-d6af71d3-fa963aaf" message_group_id: "95e0d85b-9fd3d1f3-d6af71d3-fa963aaf" } 2025-12-23T19:14:27.638797Z :TRACE: [local] TRACE_EVENT InitRequest 2025-12-23T19:14:27.639187Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Write session: OnWriteDone gRpcStatusCode: 0 2025-12-23T19:14:27.641441Z :INFO: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] Got InitResponse. ReadSessionId: test_client_1_22_13961146939318876157_v1 2025-12-23T19:14:27.641506Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T19:14:27.641663Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-12-23T19:14:27.645123Z :INFO: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "Checkpoints". Partition: 0. Read offset: (NULL) 2025-12-23T19:14:27.647952Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] Got ReadResponse, serverBytesSize = 936, now ReadSizeBudget = 0, ReadSizeServerDelta = 52427864 2025-12-23T19:14:27.648088Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52427864 2025-12-23T19:14:27.648405Z :DEBUG: [local] Decompression task done. Partition/PartitionSessionId: 1 (0-4) 2025-12-23T19:14:27.648496Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] Returning serverBytesSize = 936 to budget 2025-12-23T19:14:27.648565Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] In ContinueReadingDataImpl, ReadSizeBudget = 936, ReadSizeServerDelta = 52427864 2025-12-23T19:14:27.648809Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-12-23T19:14:27.648894Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-12-23T19:14:27.648927Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-12-23T19:14:27.648963Z :DEBUG: [local] Take Data. Partition 0. 
Read: {1, 1} (2-2) 2025-12-23T19:14:27.649001Z :DEBUG: [local] Take Data. Partition 0. Read: {2, 0} (3-3) 2025-12-23T19:14:27.649048Z :DEBUG: [local] Take Data. Partition 0. Read: {3, 0} (4-4) 2025-12-23T19:14:27.649195Z :INFO: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] Closing read session. Close timeout: 0.000000s 2025-12-23T19:14:27.649200Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] The application data is transferred to the client. Number of messages 5, size 5 bytes 2025-12-23T19:14:27.649286Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] Returning serverBytesSize = 0 to budget 2025-12-23T19:14:27.649329Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-12-23T19:14:27.649383Z :INFO: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 20 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T19:14:27.649513Z :NOTICE: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T19:14:27.649567Z :DEBUG: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] [] Abort session to cluster 2025-12-23T19:14:27.650194Z :INFO: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] Closing read session. Close timeout: 0.000000s 2025-12-23T19:14:27.650258Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-12-23T19:14:27.650325Z :INFO: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 21 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T19:14:27.650455Z :NOTICE: [local] [local] [1d977752-b04a7a0-fa804bf4-f9337de2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T19:14:27.653487Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Write session: close. Timeout 0.000000s 2025-12-23T19:14:27.653537Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Write session will now close 2025-12-23T19:14:27.653593Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Write session: aborting 2025-12-23T19:14:27.654070Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Write session: gracefully shut down, all writes complete 2025-12-23T19:14:27.654132Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-12-23T19:14:27.654219Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1766517267654 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T19:14:27.654274Z :TRACE: [local] TRACE_EVENT Error status=CLIENT_CANCELLED 2025-12-23T19:14:27.654318Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Write session is aborting and will not restart 2025-12-23T19:14:27.654403Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [95e0d85b-9fd3d1f3-d6af71d3-fa963aaf] Write session: destroy 2025-12-23T19:14:28.254700Z node 47 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:195: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 0. Checkpoint: 1. Finished: 0 2025-12-23T19:14:28.269247Z node 47 :KQP_COMPUTE DEBUG: dq_pq_write_actor.cpp:240: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. [Checkpoint 0.0] Send checkpoint state immediately 2025-12-23T19:14:28.269492Z node 47 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:418: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Save checkpoint { Id: 0 Generation: 0 } state: { SourceId: "ca210c76-377ef01b-79f73345-44924253" } |99.6%| [TM] {RESULT} ydb/tests/fq/pq_async_io/ut/unittest |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/pq_async_io/ut/unittest |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] |99.6%| [TA] $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_public_api.py::TestSessionNotFound::test_session_not_found |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] |99.7%| [TM] {RESULT} ydb/tests/fq/restarts/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/restarts/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> DataShardStats::HasSchemaChanges_Families [GOOD] >> DataShardStats::BackupTableStatsReportInterval |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> DataShardStats::BackupTableStatsReportInterval [GOOD] >> DataShardStats::CollectKeySampleLeader |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> 
test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_select.py::TestDML::test_select[table_all_types-pk_types12-all_types12-index12---] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can >> DataShardStats::CollectKeySampleLeader [GOOD] >> DataShardStats::CollectKeySampleFollower |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes |99.7%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestDML::test_select[table_all_types-pk_types12-all_types12-index12---] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |99.7%| [TA] $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_public_api.py::TestBadSession::test_simple |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> DataShardStats::CollectKeySampleFollower [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_stats/unittest >> DataShardStats::CollectKeySampleFollower [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2025-12-23T19:10:39.105076Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-12-23T19:10:39.269030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-12-23T19:10:39.297680Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:109:2156], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-12-23T19:10:39.298082Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-12-23T19:10:39.298185Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-12-23T19:10:39.820370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:10:39.820542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:10:39.937940Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1766517035943679 != 1766517035943683 2025-12-23T19:10:39.961191Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:10:40.028379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:10:40.154110Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/004707/r3tmp/tmpHO6zJG/pdisk_1.dat 2025-12-23T19:10:40.510134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:10:40.530821Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:10:40.648598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2025-12-23T19:10:40.714047Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvBoot 2025-12-23T19:10:40.719947Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvRestored 2025-12-23T19:10:40.720234Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2764] 2025-12-23T19:10:40.720469Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-12-23T19:10:40.773077Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2758], Recipient [1:885:2764]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-12-23T19:10:40.773949Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2025-12-23T19:10:40.774075Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-12-23T19:10:40.777147Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-12-23T19:10:40.777244Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-12-23T19:10:40.777308Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-12-23T19:10:40.779360Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-12-23T19:10:40.779568Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-12-23T19:10:40.779661Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2764] in generation 1 2025-12-23T19:10:40.790550Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-12-23T19:10:40.826812Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-12-23T19:10:40.828085Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-12-23T19:10:40.828226Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2774] 2025-12-23T19:10:40.828278Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-12-23T19:10:40.828319Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-12-23T19:10:40.828396Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-12-23T19:10:40.828657Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2764], Recipient [1:885:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-12-23T19:10:40.829546Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-12-23T19:10:40.830884Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-12-23T19:10:40.831003Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-12-23T19:10:40.831469Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-12-23T19:10:40.831515Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:10:40.831568Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T19:10:40.831614Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T19:10:40.831653Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T19:10:40.831690Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-12-23T19:10:40.831752Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-12-23T19:10:40.831854Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2765], Recipient [1:885:2764]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:10:40.831895Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:10:40.831939Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:881:2761], serverId# [1:887:2765], sessionId# [0:0:0] 2025-12-23T19:10:40.832021Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2765] 2025-12-23T19:10:40.832078Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-12-23T19:10:40.832192Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-12-23T19:10:40.832581Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-12-23T19:10:40.832639Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-12-23T19:10:40.832739Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-12-23T19:10:40.832787Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-12-23T19:10:40.832830Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-12-23T19:10:40.832884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-12-23T19:10:40.832953Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-12-23T19:10:40.833250Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-12-23T19:10:40.833287Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-12-23T19:10:40.833337Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-12-23T19:10:40.833369Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-12-23T19:10:40.833450Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-12-23T19:10:40.833480Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-12-23T19:10:40.833510Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-12-23T19:10:40.833560Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-12-23T19:10:40.833586Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-12-23T19:10:40.836494Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:903:2775], Recipient [1:885:2764]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-12-23T19:10:40.836577Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 20 ... 46644480 FollowerId: 1 2025-12-23T19:15:50.821018Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [16:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T19:15:50.831519Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [16:923:2785]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T19:15:50.831605Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3312: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-12-23T19:15:51.571300Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [16:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T19:15:51.571545Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 38 2025-12-23T19:15:51.690807Z node 16 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-12-23T19:15:51.690894Z node 16 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-12-23T19:15:51.691453Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 268828680, Sender [16:915:2781], Recipient [16:923:2785]: NKikimr::TEvTablet::TEvFUpdate 2025-12-23T19:15:51.691828Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 268828680, Sender [16:915:2781], Recipient [16:923:2785]: NKikimr::TEvTablet::TEvFUpdate 2025-12-23T19:15:51.692382Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [16:876:2758], Recipient [16:884:2764]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-12-23T19:15:51.702829Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [16:923:2785]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T19:15:51.702919Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3312: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-12-23T19:15:51.703087Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 1, tableId 38 2025-12-23T19:15:52.494787Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [16:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-12-23T19:15:52.494902Z node 
16 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-12-23T19:15:52.494995Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 76000 last cleanup 0 2025-12-23T19:15:52.495077Z node 16 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-12-23T19:15:52.495141Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-12-23T19:15:52.495205Z node 16 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-12-23T19:15:52.495277Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-12-23T19:15:52.495464Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [16:884:2764]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T19:15:52.506374Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [16:923:2785]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-12-23T19:15:52.506460Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3312: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-12-23T19:15:53.522516Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553215, Sender [16:1634:3415], Recipient [16:923:2785]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 KeysSize: 3 2025-12-23T19:15:53.522623Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3305: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2025-12-23T19:15:53.522903Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 1 2025-12-23T19:15:53.522983Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to repeatable v2500/18446744073709551615 2025-12-23T19:15:53.523111Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-12-23T19:15:53.523287Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T19:15:53.523359Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-12-23T19:15:53.523427Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-12-23T19:15:53.523489Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-12-23T19:15:53.523541Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2025-12-23T19:15:53.523612Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T19:15:53.523646Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on 
unit BuildAndWaitDependencies 2025-12-23T19:15:53.523672Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-12-23T19:15:53.523700Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-12-23T19:15:53.523844Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-12-23T19:15:53.524164Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Restart 2025-12-23T19:15:53.524196Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037888 2025-12-23T19:15:53.524500Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 1 2025-12-23T19:15:53.524534Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-12-23T19:15:53.524618Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-12-23T19:15:53.524898Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[16:1634:3415], 0} after executionsCount# 2 2025-12-23T19:15:53.524990Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[16:1634:3415], 0} sends rowCount# 3, bytes# 48, quota rows left# 998, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-12-23T19:15:53.525097Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[16:1634:3415], 0} finished in read 2025-12-23T19:15:53.525187Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T19:15:53.525214Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-12-23T19:15:53.525236Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-12-23T19:15:53.525257Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-12-23T19:15:53.525296Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2025-12-23T19:15:53.525311Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-12-23T19:15:53.525342Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2025-12-23T19:15:53.525393Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-12-23T19:15:53.525534Z node 16 :TX_DATASHARD 
TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-12-23T19:15:53.526244Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553219, Sender [16:1634:3415], Recipient [16:923:2785]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-12-23T19:15:53.526295Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3308: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-12-23T19:15:53.526363Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 42 } }, { items { uint32_value: 44 } }, { items { uint32_value: 46 } } TEST 9: EvGetTableStats(collectKeySample=false) after the collected key sample becomes invalid TEST Sending the EvGetTableStats message to the tablet 72075186224037888, tableId=38, collectKeySample=0, toFollower=1 2025-12-23T19:15:53.528896Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269877761, Sender [16:1637:3418], Recipient [16:923:2785]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-12-23T19:15:53.528992Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3303: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2025-12-23T19:15:53.529087Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at follower 1 tablet# 72075186224037888, clientId# [16:1636:3417], serverId# [16:1637:3418], sessionId# [0:0:0] 2025-12-23T19:15:53.529297Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553160, Sender [16:828:2723], Recipient [16:923:2785]: NKikimrTxDataShard.TEvGetTableStats TableId: 38 CollectKeySample: false TEST Received the TEvGetTableStatsResult response from the tablet 72075186224037888, tableId=38, collectKeySample=0, toFollower=1 DatashardId: 72075186224037888 TableLocalId: 38 TableStats { InMemSize: 0 LastAccessTime: 80533 LastUpdateTime: 0 } TabletMetrics { CPU: 130 } FullStatsReady: false TableOwnerId: 72057594046644480 FollowerId: 1 |99.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_stats/unittest |99.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_stats/unittest >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] |99.7%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_public_api.py::TestBadSession::test_simple [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_public_api.py::TestDriverCanRecover::test_driver_recovery >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> 
test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [FAIL] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> 
test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Offset >> test_public_api.py::TestJsonExample::test_json_unexpected_failure >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD] >> test_public_api.py::TestJsonExample::test_json_success >> test_public_api.py::TestJsonExample::test_json_success [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Offset [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset [GOOD] >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-12-23T19:17:31.179803Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587134683884143272:2077];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:17:31.179882Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:17:31.240643Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T19:17:31.243394Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587134681546056825:2155];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:17:31.243518Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:17:31.248808Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005f07/r3tmp/tmp8Mh1Ty/pdisk_1.dat 2025-12-23T19:17:31.372798Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:17:31.373269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:17:31.547234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:17:31.547350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:17:31.548856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-12-23T19:17:31.548924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:17:31.571884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:17:31.572271Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T19:17:31.573198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:17:31.579809Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:17:31.580016Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2025-12-23T19:17:31.589019Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7587134683884143221:2082] 1766517451175005 != 1766517451175008 TServer::EnableGrpc on GrpcPort 13967, node 1 2025-12-23T19:17:31.671019Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:17:31.819220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005f07/r3tmp/yandexoKYQIw.tmp 2025-12-23T19:17:31.819303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005f07/r3tmp/yandexoKYQIw.tmp 2025-12-23T19:17:31.820455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005f07/r3tmp/yandexoKYQIw.tmp 2025-12-23T19:17:31.820558Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-12-23T19:17:31.943339Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:17:32.020190Z INFO: TTestServer started on Port 19291 GrpcPort 13967 TClient is connected to server localhost:19291 PQClient connected to localhost:13967 === TenantModeEnabled() = 0 === Init PQ - start server on port 13967 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-12-23T19:17:32.185898Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766517451713 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:17:32.246845Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-12-23T19:17:32.315295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T19:17:32.315476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T19:17:32.315722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T19:17:32.315754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T19:17:32.316823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T19:17:32.316888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:17:32.318814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-12-23T19:17:32.318947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T19:17:32.319077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 
2025-12-23T19:17:32.319110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T19:17:32.319123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-12-23T19:17:32.319137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-12-23T19:17:32.320090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:17:32.320118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-12-23T19:17:32.320134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:17:32.320538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T19:17:32.320594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T19:17:32.320624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2025-12-23T19:17:32.321813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T19:17:32.321849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T19:17:32.321876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-12-23T19:17:32.321925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-12-23T19:17:32.335693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T19:17:32.337454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-12-23T19:17:32.337571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-12-23T19:17:32.339271Z node 1 :FL ... sfully connected. 
Initializing session 2025-12-23T19:18:04.083995Z node 5 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:109: new grpc connection 2025-12-23T19:18:04.084012Z node 5 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:131: new session created cookie 1 2025-12-23T19:18:04.084406Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { path: "rt3.dc1--topic1" } consumer: "user" } } 2025-12-23T19:18:04.084562Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:941: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 read init: from# ipv6:[::1]:49578, request# { init_request { topics_read_settings { path: "rt3.dc1--topic1" } consumer: "user" } } 2025-12-23T19:18:04.084891Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 auth for : user 2025-12-23T19:18:04.085231Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:131: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 Handle describe topics response 2025-12-23T19:18:04.085316Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 auth is DEAD 2025-12-23T19:18:04.085370Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1058: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 auth ok: topics# 1, initDone# 0 2025-12-23T19:18:04.086161Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1229: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 register session: topic# rt3.dc1--topic1 2025-12-23T19:18:04.086384Z :INFO: [] [] [e3440745-a657d8fb-363fba9d-523da0c5] [] Got InitResponse. 
ReadSessionId: shared/user_5_1_2024667091809648086_v1 2025-12-23T19:18:04.086415Z :DEBUG: [] [] [e3440745-a657d8fb-363fba9d-523da0c5] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-12-23T19:18:04.086576Z :DEBUG: [] [] [e3440745-a657d8fb-363fba9d-523da0c5] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-12-23T19:18:04.086669Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037898][rt3.dc1--topic1] pipe [5:7587134825600391700:2564] connected; active server actors: 1 2025-12-23T19:18:04.086834Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2025-12-23T19:18:04.086958Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 got read request: guid# ca43936c-63d22ca-dbfdefab-ed48a586 2025-12-23T19:18:04.086966Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1708: [72075186224037898][rt3.dc1--topic1] consumer "user" register session for pipe [5:7587134825600391700:2564] session shared/user_5_1_2024667091809648086_v1 2025-12-23T19:18:04.087007Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:639: [72075186224037898][rt3.dc1--topic1] consumer user register readable partition 0 2025-12-23T19:18:04.087060Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:669: [72075186224037898][rt3.dc1--topic1] consumer user family created family=1 (Status=Free, Partitions=[0]) 2025-12-23T19:18:04.087089Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:871: [72075186224037898][rt3.dc1--topic1] consumer user register reading session ReadingSession "shared/user_5_1_2024667091809648086_v1" (Sender=[5:7587134825600391697:2564], Pipe=[5:7587134825600391700:2564], Partitions=[], ActiveFamilyCount=0) 2025-12-23T19:18:04.087106Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037898][rt3.dc1--topic1] consumer user rebalancing was scheduled 2025-12-23T19:18:04.087142Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037898][rt3.dc1--topic1] consumer user balancing. Sessions=1, Families=1, UnreadableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-12-23T19:18:04.087179Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1306: [72075186224037898][rt3.dc1--topic1] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_5_1_2024667091809648086_v1" (Sender=[5:7587134825600391697:2564], Pipe=[5:7587134825600391700:2564], Partitions=[], ActiveFamilyCount=0) 2025-12-23T19:18:04.087226Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:549: [72075186224037898][rt3.dc1--topic1] consumer user family 1 status Active partitions [0] session "shared/user_5_1_2024667091809648086_v1" sender [5:7587134825600391697:2564] lock partition 0 for ReadingSession "shared/user_5_1_2024667091809648086_v1" (Sender=[5:7587134825600391697:2564], Pipe=[5:7587134825600391700:2564], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-12-23T19:18:04.087267Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037898][rt3.dc1--topic1] consumer user start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-12-23T19:18:04.087285Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037898][rt3.dc1--topic1] consumer user balancing duration: 0.000128s 2025-12-23T19:18:04.088179Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1347: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/user_5_1_2024667091809648086_v1" ClientId: "user" PipeClient { RawX1: 7587134825600391700 RawX2: 4503621102209540 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-12-23T19:18:04.088290Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1144: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-12-23T19:18:04.088471Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:984: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7587134825600391703:2567] 2025-12-23T19:18:04.089207Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/user_5_1_2024667091809648086_v1:1 with generation 1 2025-12-23T19:18:04.090823Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 6 WriteTimestampMS: 1766517483977 CreateTimestampMS: 1766517483975 SizeLag: 280 WriteTimestampEstimateMS: 1766517484078 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-12-23T19:18:04.090865Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 6 readOffset 0 committedOffset 0 2025-12-23T19:18:04.090911Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 sending to client partition status 2025-12-23T19:18:04.091396Z :INFO: [] [] [e3440745-a657d8fb-363fba9d-523da0c5] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: 10 2025-12-23T19:18:04.091827Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 10 } } 2025-12-23T19:18:04.091941Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:551: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 10, commitOffset# (empty maybe) 2025-12-23T19:18:04.091980Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1024: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 6 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 10 2025-12-23T19:18:04.092017Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 10 2025-12-23T19:18:04.182956Z :INFO: [] [] [e3440745-a657d8fb-363fba9d-523da0c5] Closing read session. Close timeout: 0.000000s 2025-12-23T19:18:04.183034Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:0:0 2025-12-23T19:18:04.183073Z :INFO: [] [] [e3440745-a657d8fb-363fba9d-523da0c5] Counters: { Errors: 0 CurrentSessionLifetimeMs: 104 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T19:18:04.183174Z :NOTICE: [] [] [e3440745-a657d8fb-363fba9d-523da0c5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T19:18:04.183211Z :DEBUG: [] [] [e3440745-a657d8fb-363fba9d-523da0c5] [] Abort session to cluster 2025-12-23T19:18:04.183919Z :NOTICE: [] [] [e3440745-a657d8fb-363fba9d-523da0c5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T19:18:04.184167Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 grpc read done: success# 0, data# { } 2025-12-23T19:18:04.184192Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 grpc read failed 2025-12-23T19:18:04.184213Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 grpc closed 2025-12-23T19:18:04.184237Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_5_1_2024667091809648086_v1 is DEAD 2025-12-23T19:18:04.185022Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_5_1_2024667091809648086_v1 2025-12-23T19:18:04.185116Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037898][rt3.dc1--topic1] pipe [5:7587134825600391700:2564] disconnected. 2025-12-23T19:18:04.185147Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037898][rt3.dc1--topic1] pipe [5:7587134825600391700:2564] disconnected; active server actors: 1 2025-12-23T19:18:04.185160Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037898][rt3.dc1--topic1] pipe [5:7587134825600391700:2564] client user disconnected session shared/user_5_1_2024667091809648086_v1 |99.7%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> 
test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_public_api.py::TestRecursiveCreation::test_mkdir |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> test_public_api.py::TestRecursiveCreation::test_create_table >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestOnceSharedCache >> BuildStatsHistogram::Ten_Mixed >> TFlatTableExecutor_TryKeepInMemory::TestOnceSharedCache [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemory >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryMain >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryMain [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_FlatIndex >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_FlatIndex [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAll >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAll [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyTryKeepInMemory >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryAll >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryAll [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryPartially [GOOD] >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRows >> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-12-23T19:18:03.010430Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587134818640177225:2080];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:18:03.011150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:18:03.039443Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T19:18:03.040476Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587134820820279852:2173];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:18:03.040545Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005f02/r3tmp/tmpZjpPVo/pdisk_1.dat 2025-12-23T19:18:03.047264Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T19:18:03.158867Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:18:03.163848Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:18:03.201032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:18:03.201119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:18:03.201367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:18:03.201408Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:18:03.224875Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T19:18:03.225028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:18:03.227045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:18:03.281731Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21498, node 1 2025-12-23T19:18:03.329449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005f02/r3tmp/yandexA3SIoj.tmp 2025-12-23T19:18:03.329475Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005f02/r3tmp/yandexA3SIoj.tmp 2025-12-23T19:18:03.329692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005f02/r3tmp/yandexA3SIoj.tmp 2025-12-23T19:18:03.329857Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2025-12-23T19:18:03.357017Z INFO: TTestServer started on Port 18343 GrpcPort 21498 2025-12-23T19:18:03.438857Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:18:03.459167Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:18343 PQClient connected to localhost:21498 === TenantModeEnabled() = 0 === Init PQ - start server on port 21498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766517483297 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:18:03.623186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T19:18:03.623315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-12-23T19:18:03.623468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T19:18:03.623494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976720657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T19:18:03.623619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T19:18:03.623650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:18:03.625141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 
PathId: 1, at schemeshard: 72057594046644480 2025-12-23T19:18:03.625236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T19:18:03.625391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-12-23T19:18:03.625416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T19:18:03.625430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-12-23T19:18:03.625442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 2 -> 3 waiting... 2025-12-23T19:18:03.626118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-12-23T19:18:03.626162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-12-23T19:18:03.626176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-12-23T19:18:03.626676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-12-23T19:18:03.626704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T19:18:03.626713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976720657:0 3 -> 128 2025-12-23T19:18:03.627760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-12-23T19:18:03.627801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-12-23T19:18:03.627826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-12-23T19:18:03.627844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-12-23T19:18:03.630535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T19:18:03.631684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 
2025-12-23T19:18:03.631826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-12-23T19:18:03.633283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766517483675, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T19:18:03.633393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1766517483675 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-12-23T19:18:03.633411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-12-23T19:18:03.633635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_ ... 0(assignId:1) 2025-12-23T19:18:23.690195Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:984: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [3:7587134904403456417:2576] 2025-12-23T19:18:23.690239Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/user_3_1_9837974744007567956_v1:1 with generation 1 2025-12-23T19:18:23.692205Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 WriteTimestampMS: 1766517503580 CreateTimestampMS: 1766517503580 SizeLag: 280 WriteTimestampEstimateMS: 1766517503682 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-12-23T19:18:23.692233Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-12-23T19:18:23.692265Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 sending to client partition status 2025-12-23T19:18:23.692671Z :INFO: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-12-23T19:18:23.693060Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-12-23T19:18:23.693132Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:551: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-12-23T19:18:23.693166Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:1024: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-12-23T19:18:23.693181Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:970: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-12-23T19:18:23.693228Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2343: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 1766517503580, sizeLag# 280 2025-12-23T19:18:23.693240Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2354: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1TEvPartitionReady. Aval parts: 1 2025-12-23T19:18:23.693262Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2277: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 performing read request: guid# bfd7460b-98129bad-a4699b0-3bee54a5, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 336, partitionsAsked# 1, maxTimeLag# 0ms 2025-12-23T19:18:23.693327Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1396: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 336 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid bfd7460b-98129bad-a4699b0-3bee54a5 2025-12-23T19:18:23.694370Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1766517503580 CreateTimestampMS: 1766517503580 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1766517503584 CreateTimestampMS: 1766517503580 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1766517503624 CreateTimestampMS: 1766517503580 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551530 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-12-23T19:18:23.694485Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 3 2025-12-23T19:18:23.694517Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid bfd7460b-98129bad-a4699b0-3bee54a5 has messages 1 2025-12-23T19:18:23.694587Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 read done: guid# bfd7460b-98129bad-a4699b0-3bee54a5, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 490 2025-12-23T19:18:23.694614Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 response to read: guid# bfd7460b-98129bad-a4699b0-3bee54a5 2025-12-23T19:18:23.694760Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 Process answer. Aval parts: 0 2025-12-23T19:18:23.694923Z :DEBUG: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] [] Got ReadResponse, serverBytesSize = 490, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-12-23T19:18:23.694992Z :DEBUG: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-12-23T19:18:23.695148Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-12-23T19:18:23.695182Z :DEBUG: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] [] Returning serverBytesSize = 490 to budget 2025-12-23T19:18:23.695204Z :DEBUG: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] [] In ContinueReadingDataImpl, ReadSizeBudget = 490, ReadSizeServerDelta = 52428310 2025-12-23T19:18:23.695371Z :DEBUG: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-12-23T19:18:23.695478Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-12-23T19:18:23.695515Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-12-23T19:18:23.695531Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-12-23T19:18:23.695561Z :DEBUG: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] [] The application data is transferred to the client. 
Number of messages 3, size 24 bytes 2025-12-23T19:18:23.695515Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 grpc read done: success# 1, data# { read_request { bytes_size: 490 } } 2025-12-23T19:18:23.695581Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 got read request: guid# 36bd121d-7fdae215-2c72eee4-707cfc72 2025-12-23T19:18:23.695599Z :DEBUG: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] [] Returning serverBytesSize = 0 to budget 2025-12-23T19:18:23.695683Z :DEBUG: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] [] Requesting status for partition stream id: 1 2025-12-23T19:18:23.695864Z :INFO: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] Closing read session. Close timeout: 0.000000s 2025-12-23T19:18:23.695904Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-12-23T19:18:23.695940Z :INFO: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] Counters: { Errors: 0 CurrentSessionLifetimeMs: 13 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T19:18:23.695941Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 grpc read done: success# 1, data# { partition_session_status_request { partition_session_id: 1 } } 2025-12-23T19:18:23.696013Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 sending to client partition status 2025-12-23T19:18:23.696025Z :NOTICE: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T19:18:23.696059Z :DEBUG: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] [] Abort session to cluster 2025-12-23T19:18:23.696189Z :NOTICE: [] [] [bcc5e565-1b0d11dd-73186eee-41ed6edb] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T19:18:23.697389Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 grpc read done: success# 0, data# { } 2025-12-23T19:18:23.697414Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 grpc read failed 2025-12-23T19:18:23.697459Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 closed 2025-12-23T19:18:23.697605Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_9837974744007567956_v1 is DEAD 2025-12-23T19:18:23.697821Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_9837974744007567956_v1 2025-12-23T19:18:23.698046Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037898][rt3.dc1--topic1] pipe [3:7587134904403456415:2573] disconnected. 2025-12-23T19:18:23.698089Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037898][rt3.dc1--topic1] pipe [3:7587134904403456415:2573] disconnected; active server actors: 1 2025-12-23T19:18:23.698112Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037898][rt3.dc1--topic1] pipe [3:7587134904403456415:2573] client user disconnected session shared/user_3_1_9837974744007567956_v1 |99.7%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Serial >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Crossed >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> TieredCache::Touch [GOOD] >> TieredCache::Erase [GOOD] >> TieredCache::EvictNext [GOOD] >> TieredCache::UpdateLimit [GOOD] >> TieredCache::InsertUntouched [GOOD] >> TieredCache::EnsureLimits [GOOD] >> TSharedPageCache_Actor::Request_Basics >> TSharedPageCache_Actor::Request_Basics [GOOD] >> TSharedPageCache_Actor::Request_Failed >> TSharedPageCache_Actor::Request_Failed [GOOD] >> TSharedPageCache_Actor::Request_Queue >> TSharedPageCache_Actor::Request_Queue [GOOD] >> TSharedPageCache_Actor::Request_Queue_Failed [GOOD] >> TSharedPageCache_Actor::Request_Queue_Fast >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> TSharedPageCache_Actor::Request_Queue_Fast [GOOD] >> TSharedPageCache_Actor::Request_Sequential [GOOD] >> TSharedPageCache_Actor::Request_Cached >> TSharedPageCache_Actor::Request_Cached [GOOD] >> TSharedPageCache_Actor::Request_Different_Collections >> TSharedPageCache_Actor::Request_Different_Collections [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages >> TSharedPageCache_Actor::Request_Different_Pages [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages_Reversed >> TSharedPageCache_Actor::Request_Different_Pages_Reversed [GOOD] >> 
TSharedPageCache_Actor::Request_Subset [GOOD] >> TSharedPageCache_Actor::Request_Subset_Shuffled >> TSharedPageCache_Actor::Request_Subset_Shuffled [GOOD] >> TSharedPageCache_Actor::Request_Superset >> TSharedPageCache_Actor::Request_Superset [GOOD] >> TSharedPageCache_Actor::Request_Superset_Reversed >> TSharedPageCache_Actor::Request_Superset_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Crossing [GOOD] >> TSharedPageCache_Actor::Request_Crossing_Reversed >> TSharedPageCache_Actor::Request_Crossing_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Crossing_Shuffled >> TSharedPageCache_Actor::Request_Crossing_Shuffled [GOOD] >> TSharedPageCache_Actor::Unregister_Basics >> TSharedPageCache_Actor::Unregister_Basics [GOOD] >> TSharedPageCache_Actor::Unregister_Cached [GOOD] >> TSharedPageCache_Actor::Unregister_Expired >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log >> TSharedPageCache_Actor::Unregister_Expired [GOOD] >> TSharedPageCache_Actor::Unregister_InFly >> TSharedPageCache_Actor::Unregister_InFly [GOOD] >> TSharedPageCache_Actor::Unregister_Queued >> TSharedPageCache_Actor::Unregister_Queued [GOOD] >> TSharedPageCache_Actor::Unregister_Queued_Pending >> TSharedPageCache_Actor::Unregister_Queued_Pending [GOOD] >> TSharedPageCache_Actor::InMemory_Preemption >> TSharedPageCache_Actor::InMemory_Preemption [GOOD] >> TSharedPageCache_Actor::InMemory_Unregister >> TSharedPageCache_Actor::InMemory_Unregister [GOOD] >> TSharedPageCache_Actor::InMemory_ReloadPages >> TSharedPageCache_Actor::InMemory_ReloadPages [GOOD] >> TSharedPageCache_Actor::InMemory_ReloadPagesLimitedInFly >> TSharedPageCache_Actor::InMemory_ReloadPagesLimitedInFly [GOOD] >> TSharedPageCache_Actor::IncrementFrequency_Active [GOOD] >> TSharedPageCache_Actor::IncrementFrequency_Passive >> TSharedPageCache_Actor::IncrementFrequency_Passive [GOOD] >> TSharedPageCache_Transactions::One_Transaction_One_Key >> TSharedPageCache_Transactions::One_Transaction_One_Key [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys >> TSharedPageCache_Transactions::One_Transaction_Two_Keys [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys_Many_Parts >> TSharedPageCache_Transactions::One_Transaction_Two_Keys_Many_Parts [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_One_Key >> TSharedPageCache_Transactions::Two_Transactions_One_Key [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_Two_Keys >> TSharedPageCache_Transactions::Two_Transactions_Two_Keys [GOOD] >> TSharedPageCache_Transactions::Compaction >> TSharedPageCache_Transactions::Compaction [GOOD] >> Vacuum::StartVacuumNoTables [GOOD] >> Vacuum::StartVacuumNoTablesWithRestart [GOOD] >> Vacuum::StartVacuumLog [GOOD] >> Vacuum::StartVacuum [GOOD] >> Vacuum::StartVacuumMultipleFamilies >> Vacuum::StartVacuumMultipleFamilies [GOOD] >> Vacuum::StartVacuumMultipleTables [GOOD] >> Vacuum::StartVacuumWithFollowers >> Vacuum::StartVacuumWithFollowers [GOOD] >> Vacuum::StartVacuumMultipleTimes [GOOD] >> Vacuum::StartVacuumEmptyTable [GOOD] >> Vacuum::StartVacuumWithRestarts >> Vacuum::StartVacuumWithRestarts [GOOD] >> Vacuum::StartVacuumRetryWithNotGreaterGenerations [GOOD] >> Vacuum::StartVacuumWithTabletGCErrors [GOOD] >> Vacuum::StartVacuumWithSysTabletGCErrors >> Vacuum::StartVacuumWithSysTabletGCErrors [GOOD] >> TVersions::WreckHead >> TExecutorDb::RandomOps >> BuildStatsHistogram::Ten_Mixed_Log [GOOD] >> BuildStatsHistogram::Ten_Serial_Log >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> 
TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics [GOOD] >> TIterator::External >> TIterator::External [GOOD] >> TIterator::Single |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TIterator::Single [GOOD] >> TIterator::SingleReverse >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> test_public_api.py::TestAttributes::test_create_table |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> BuildStatsHistogram::Ten_Crossed_Log [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level >> TIterator::Mixed [GOOD] >> TIterator::MixedReverse >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels >> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level >> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD] >> TFlatTableExecutor_LongTx::LongTxBorrow >> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD] >> BuildStatsMixedIndex::Single >> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD] >> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::SmallValues >> BuildStatsMixedIndex::Single [GOOD] >> BuildStatsMixedIndex::Single_Slices >> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues >> BuildStatsMixedIndex::Single_Slices [GOOD] >> BuildStatsMixedIndex::Single_History >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD] >> 
TFlatTableExecutor_LowPriorityTxs::TestEnqueueCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScan >> BuildStatsMixedIndex::Single_History [GOOD] >> BuildStatsMixedIndex::Single_History_Slices >> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan >> BuildStatsMixedIndex::Single_History_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC >> BuildStatsMixedIndex::Single_Groups [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups_History >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability >> BuildStatsMixedIndex::Single_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices >> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsMixedIndex::Mixed >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::SomeRejectProbability >> BuildStatsMixedIndex::Mixed [GOOD] >> BuildStatsMixedIndex::Mixed_Groups >> BuildStatsMixedIndex::Mixed_Groups [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History >> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability >> BuildStatsMixedIndex::Mixed_Groups_History [GOOD] >> BuildStatsMixedIndex::Serial >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables >> BuildStatsMixedIndex::Serial [GOOD] >> BuildStatsMixedIndex::Serial_Groups >> BuildStatsMixedIndex::Serial_Groups [GOOD] >> BuildStatsMixedIndex::Serial_Groups_History >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile >> DBase::Select >> BuildStatsMixedIndex::Serial_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_LowResolution >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded >> DBase::Select [GOOD] >> DBase::Defaults [GOOD] >> DBase::Subsets [GOOD] >> DBase::Garbage [GOOD] >> DBase::WideKey >> BuildStatsMixedIndex::Single_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Slices_LowResolution >> TVersions::WreckHead 
[GOOD] >> TVersions::WreckHeadReverse >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory >> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower >> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData >> DBase::WideKey [GOOD] >> DBase::Outer [GOOD] >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD] >> Charge::Lookups [GOOD] >> Charge::ByKeysBasics [GOOD] >> Charge::ByKeysGroups >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts >> Charge::ByKeysGroups [GOOD] >> Charge::ByKeysGroupsLimits [GOOD] >> Charge::ByKeysLimits [GOOD] >> Charge::ByKeysReverse [GOOD] >> Charge::ByKeysHistory [GOOD] >> Charge::ByKeysIndex [GOOD] >> Charge::ByRows [GOOD] >> Charge::ByRowsReverse [GOOD] >> Charge::ByRowsLimits [GOOD] >> Charge::ByRowsLimitsReverse [GOOD] >> DBase::Basics [GOOD] >> DBase::Affects [GOOD] >> DBase::Annex [GOOD] >> DBase::AnnexRollbackChanges [GOOD] >> DBase::AlterAndUpsertChangesVisibility [GOOD] >> DBRowLocks::LockSurvivesCompactions [GOOD] >> DBRowLocks::LockOverCompactedErase [GOOD] >> DBRowLocks::CommitTxAfterLockThenCompact >> DBRowLocks::CommitTxAfterLockThenCompact [GOOD] >> DBRowLocks::CommitLockThenCompactRowVersion [GOOD] >> DBRowLocks::OverwriteLockThenCompact [GOOD] >> DBRowLocks::LockOpenTxAndTxDataAccounting [GOOD] >> DBRowLocks::MultipleCommittedRowLocks [GOOD] >> DBRowLocks::LocksCommittedRemovedIteration [GOOD] >> DBRowLocks::LocksReplay [GOOD] >> DBRowLocks::LocksMvccCompact [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> TIterator::MixedReverse [GOOD] >> TIterator::Serial >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> TExecutorDb::RandomOps [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest >> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> 
TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD] >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits >> DBase::VersionCompactedMem [GOOD] >> DBase::VersionCompactedParts >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatExecutorLeases::Basics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> DBRowLocks::LocksMvccCompact [GOOD] Test command err: 10 parts: [0:0:1:0:0:0:0] 23928 rows, 1015 pages, 5 levels: (157741, 52588) (315424, 105149) (479998, 160007) (639733, 213252) (799132, 266385) [0:0:2:0:0:0:0] 24216 rows, 1025 pages, 5 levels: (158968, 52997) (323176, 107733) (478777, 159600) (638131, 212718) (798961, 266328) [0:0:3:0:0:0:0] 23857 rows, 1008 pages, 5 levels: (161719, 53914) (324091, 108038) (482023, 160682) (640741, 213588) (798478, 266167) [0:0:4:0:0:0:0] 24184 rows, 1023 pages, 5 levels: (160366, 53463) (321823, 107282) (478882, 159635) (641413, 213812) (799024, 266349) [0:0:5:0:0:0:0] 23945 rows, 1019 pages, 5 levels: (160006, 53343) (321943, 107322) (483100, 161041) (641107, 213710) (799117, 266380) [0:0:6:0:0:0:0] 23619 rows, 1005 pages, 5 levels: (161371, 53798) (319855, 106626) (480928, 160317) (636934, 212319) (799258, 266427) [0:0:7:0:0:0:0] 23988 rows, 1019 pages, 5 levels: (154531, 51518) (314071, 104698) (475438, 158487) (636523, 212182) (798766, 266263) [0:0:8:0:0:0:0] 23770 rows, 1013 pages, 5 levels: (160948, 53657) (318202, 106075) (477640, 159221) (640657, 213560) (799090, 266371) [0:0:9:0:0:0:0] 24256 rows, 1029 pages, 5 levels: (157747, 52590) (320038, 106687) (482770, 160931) (638905, 212976) (799195, 266406) [0:0:10:0:0:0:0] 24237 rows, 1026 pages, 5 levels: (162409, 54144) (317659, 105894) (477673, 159232) (637528, 212517) (798748, 266257) Checking BTree: Touched 1% bytes, 37 pages RowCountHistogram: 9% (actual 6%) key = (54346, 18123) value = 23288 (actual 16627 - 2% error) 10% (actual 12%) key = (155473, 51832) value = 47414 (actual 46964 - 0% error) 10% (actual 6%) key = (208177, 69400) value = 71513 (actual 62823 - 3% error) 10% (actual 12%) key = (310048, 103357) value = 95735 (actual 93347 - 0% error) 10% (actual 11%) key = (400444, 133489) value = 119910 (actual 120422 - 0% error) 10% (actual 7%) key = (460861, 153628) value = 144061 (actual 138588 - 2% error) 10% (actual 12%) key = (563395, 187806) value = 168190 (actual 169409 - 0% error) 10% (actual 5%) key = (611236, 203753) value = 192378 (actual 183755 - 3% error) 9% (actual 11%) key = (706417, 235480) value = 215613 (actual 212448 - 1% error) 10% (actual 11%) DataSizeHistogram: 9% (actual 6%) key = (54346, 18123) value = 1986792 (actual 1422570 - 2% error) 10% (actual 12%) key = (155473, 51832) value = 4036041 (actual 4004054 - 0% error) 10% (actual 6%) key = (208177, 69400) value = 6085370 (actual 5348583 - 3% error) 10% (actual 12%) key = (310048, 103357) value = 8135866 (actual 7931163 - 1% error) 10% (actual 11%) key = (400444, 133489) value = 10187497 (actual 10227908 - 0% error) 10% (actual 7%) key = (460861, 153628) value = 12238449 (actual 11773611 - 2% error) 10% (actual 12%) key = (563395, 187806) value = 14287401 (actual 14387497 - 0% error) 10% (actual 6%) key = 
(611236, 203753) value = 16340389 (actual 15610901 - 3% error) 9% (actual 11%) key = (706417, 235480) value = 18309634 (actual 18041898 - 1% error) 10% (actual 11%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (79777, 26600) value = 24001 (actual 24257 - 0% error) 10% (actual 10%) key = (159688, 53237) value = 48010 (actual 48277 - 0% error) 10% (actual 10%) key = (239839, 79954) value = 72013 (actual 72278 - 0% error) 10% (actual 9%) key = (319807, 106610) value = 96022 (actual 96277 - 0% error) 10% (actual 10%) key = (399964, 133329) value = 120041 (actual 120304 - 0% error) 10% (actual 10%) key = (479791, 159938) value = 144061 (actual 144321 - 0% error) 10% (actual 10%) key = (559867, 186630) value = 168077 (actual 168330 - 0% error) 10% (actual 10%) key = (639661, 213228) value = 192085 (actual 192333 - 0% error) 10% (actual 10%) key = (719458, 239827) value = 216091 (actual 216348 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79318, 26447) value = 2038035 (actual 2060169 - 0% error) 10% (actual 10%) key = (159028, 53017) value = 4076033 (actual 4098046 - 0% error) 10% (actual 10%) key = (239581, 79868) value = 6115440 (actual 6137485 - 0% error) 10% (actual 10%) key = (319516, 106513) value = 8153742 (actual 8175567 - 0% error) 10% (actual 10%) key = (399841, 133288) value = 10191957 (actual 10213746 - 0% error) 10% (actual 10%) key = (479734, 159919) value = 12230556 (actual 12252749 - 0% error) 10% (actual 10%) key = (559552, 186525) value = 14269383 (actual 14291350 - 0% error) 10% (actual 10%) key = (639193, 213072) value = 16307737 (actual 16329710 - 0% error) 10% (actual 10%) key = (719326, 239783) value = 18346896 (actual 18369051 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 43 pages RowCountHistogram: 10% (actual 6%) key = (50749, 16924) value = 24065 (actual 15550 - 3% error) 10% (actual 12%) key = (148027, 49350) value = 48098 (actual 44756 - 1% error) 10% (actual 7%) key = (205003, 68342) value = 72300 (actual 61833 - 4% error) 10% (actual 12%) key = (301159, 100394) value = 96516 (actual 90698 - 2% error) 10% (actual 11%) key = (396778, 132267) value = 120685 (actual 119332 - 0% error) 10% (actual 7%) key = (454192, 151405) value = 144842 (actual 136562 - 3% error) 10% (actual 11%) key = (548890, 182971) value = 168942 (actual 165043 - 1% error) 10% (actual 7%) key = (609940, 203321) value = 193089 (actual 183462 - 4% error) 10% (actual 11%) key = (701434, 233819) value = 218665 (actual 210950 - 3% error) 8% (actual 12%) DataSizeHistogram: 10% (actual 6%) key = (50749, 16924) value = 2051869 (actual 1330161 - 3% error) 10% (actual 12%) key = (148027, 49350) value = 4100433 (actual 3812568 - 1% error) 10% (actual 7%) key = (205003, 68342) value = 6148888 (actual 5264750 - 4% error) 10% (actual 11%) key = (301159, 100394) value = 8200933 (actual 7706870 - 2% error) 10% (actual 11%) key = (396778, 132267) value = 10251926 (actual 10135710 - 0% error) 10% (actual 7%) key = (454192, 151405) value = 12302580 (actual 11601475 - 3% error) 10% (actual 11%) key = (548890, 182971) value = 14351377 (actual 14019410 - 1% error) 10% (actual 7%) key = (609940, 203321) value = 16401437 (actual 15584938 - 4% error) 10% (actual 11%) key = (701434, 233819) value = 18568091 (actual 17915901 - 3% error) 8% (actual 12%) 10 parts: [0:0:1:0:0:0:0] 24000 rows, 1023 pages, 5 levels: (15913, 5312) (32008, 10677) (48025, 16016) (64045, 21356) (79984, 26669) [0:0:2:0:0:0:0] 24000 rows, 1022 pages, 5 levels: (95941, 
31988) (111937, 37320) (127807, 42610) (143950, 47991) (159964, 53329) [0:0:3:0:0:0:0] 24000 rows, 1017 pages, 5 levels: (175939, 58654) (191968, 63997) (208006, 69343) (224077, 74700) (240169, 80064) [0:0:4:0:0:0:0] 24000 rows, 1018 pages, 5 levels: (256087, 85370) (272023, 90682) (288022, 96015) (304021, 101348) (320014, 106679) [0:0:5:0:0:0:0] 24000 rows, 1017 pages, 5 levels: (336010, 112011) (352159, 117394) (368092, 122705) (384097, 128040) (400177, 133400) [0:0:6:0:0:0:0] 24000 rows, 1020 pages, 5 levels: (416161, 138728) (432139, 144054) (447946, 149323) (463885, 154636) (479950, 159991) [0:0:7:0:0:0:0] 24000 rows, 1018 pages, 5 levels: (495844, 165289) (511843, 170622) (527917, 175980) (543799, 181274) (559849, 186624) [0:0:8:0:0:0:0] 24000 rows, 1020 pages, 5 levels: (575899, 191974) (591895, 197306) (607765, 202596) (623614, 207879) (639565, 213196) [0:0:9:0:0:0:0] 24000 rows, 1016 pages, 5 levels: (655633, 218552) (671707, 223910) (687631, 229218) (703516, 234513) (719437, 239820) [0:0:10:0:0:0:0] 24000 rows, 1015 pages, 5 levels: (735415, 245146) (751432, 250485) (767404, 255809) (783427, 261150) (799303, 266442) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 10% (actual 10%) key = (80038, 26687) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160003, 53342) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240184, 80069) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320035, 106686) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400189, 133404) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559891, 186638) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719461, 239828) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80038, 26687) value = 2046023 (actual 2082181 - 0% error) 10% (actual 10%) key = (160003, 53342) value = 4091184 (actual 4123481 - 0% error) 9% (actual 9%) key = (240184, 80069) value = 6122543 (actual 6150789 - 0% error) 9% (actual 9%) key = (320035, 106686) value = 8157907 (actual 8182094 - 0% error) 9% (actual 9%) key = (400189, 133404) value = 10192958 (actual 10213133 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 12231984 (actual 12248119 - 0% error) 9% (actual 9%) key = (559891, 186638) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 16310625 (actual 16318673 - 0% error) 9% (actual 9%) key = (719461, 239828) value = 18343487 (actual 18347516 - 0% error) 9% (actual 9%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (80041, 26688) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160006, 53343) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240187, 80070) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320038, 106687) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400192, 133405) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479959, 159994) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639649, 213224) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719467, 239830) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 
10%) key = (79732, 26585) value = 2038706 (actual 2078803 - 0% error) 10% (actual 9%) key = (159427, 53150) value = 4076220 (actual 4112556 - 0% error) 10% (actual 9%) key = (239872, 79965) value = 6113960 (actual 6146198 - 0% error) 10% (actual 9%) key = (319849, 106624) value = 8153334 (actual 8181530 - 0% error) 10% (actual 9%) key = (400177, 133400) value = 10192636 (actual 10213133 - 0% error) 10% (actual 9%) key = (479950, 159991) value = 12231529 (actual 12248119 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639481, 213168) value = 16306978 (actual 16318673 - 0% error) 10% (actual 9%) key = (719554, 239859) value = 18345472 (actual 18349551 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 40 pages RowCountHistogram: 10% (actual 4%) key = (34876, 11633) value = 24122 (actual 10941 - 5% error) 10% (actual 10%) key = (120064, 40029) value = 48205 (actual 36464 - 4% error) 10% (actual 10%) key = (205300, 68441) value = 72229 (actual 61965 - 4% error) 10% (actual 10%) key = (290449, 96824) value = 96245 (actual 87436 - 3% error) 10% (actual 11%) key = (380554, 126859) value = 121759 (actual 114432 - 3% error) 10% (actual 11%) ... 85 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27765 (actual 27678 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 5% (actual 5%) key = (91, 38) value = 25 (actual 25 - 0% error) 5% (actual 5%) key = (166, 63) value = 50 (actual 50 - 0% error) 4% (actual 4%) key = (253, 92) value = 74 (actual 74 - 0% error) 4% (actual 4%) key = (325, 116) value = 96 (actual 96 - 0% error) 4% (actual 4%) key = (394, 139) value = 119 (actual 119 - 0% error) 5% (actual 5%) key = (481, 168) value = 144 (actual 144 - 0% error) 4% (actual 4%) key = (556, 193) value = 167 (actual 167 - 0% error) 4% (actual 4%) key = (631, 218) value = 191 (actual 191 - 0% error) 4% (actual 4%) key = (709, 244) value = 215 (actual 215 - 0% error) 3% (actual 3%) key = (766, 263) value = 234 (actual 234 - 0% error) 5% (actual 5%) key = (853, 292) value = 261 (actual 261 - 0% error) 4% (actual 4%) key = (934, 319) value = 285 (actual 285 - 0% error) 4% (actual 4%) key = (1006, 343) value = 309 (actual 309 - 0% error) 4% (actual 4%) key = (1087, 370) value = 332 (actual 332 - 0% error) 0% (actual 0%) key = (1090, 371) value = 333 (actual 333 - 0% error) 4% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 
0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 4% (actual 4%) key = (91, 38) value = 1974 (actual 1974 - 0% error) 4% (actual 4%) key = (166, 63) value = 3992 (actual 3992 - 0% error) 4% (actual 4%) key = (253, 92) value = 5889 (actual 5889 - 0% error) 4% (actual 4%) key = (325, 116) value = 7868 (actual 7868 - 0% error) 4% (actual 4%) key = (394, 139) value = 9910 (actual 9910 - 0% error) 4% (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (556, 193) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27678 (actual 27678 - 0% error) 0% (actual 0%) key = (1090, 371) value = 27765 (actual 27765 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Mixed: Touched 100% bytes, 3 pages RowCountHistogram: 14% (actual 5%) key = (91, 38) value = 70 (actual 25 - 9% error) 5% (actual 5%) key = (166, 63) value = 95 (actual 50 - 9% error) 4% (actual 4%) key = (253, 92) value = 119 (actual 74 - 9% error) 4% (actual 4%) key = (325, 116) value = 141 (actual 96 - 9% error) 4% (actual 4%) key = (394, 139) value = 164 (actual 119 - 9% error) 5% (actual 5%) key = (481, 168) value = 189 (actual 144 - 9% error) 4% (actual 9%) key = (631, 218) value = 212 (actual 191 - 4% error) 4% (actual 4%) key = (709, 244) value = 236 (actual 215 - 4% error) 3% (actual 3%) key = (766, 263) value = 255 (actual 234 - 4% error) 5% (actual 5%) key = (853, 292) value = 282 (actual 261 - 4% error) 4% (actual 4%) key = (934, 319) value = 306 (actual 285 - 4% error) 4% (actual 4%) key = (1006, 343) value = 330 (actual 309 - 4% error) 4% (actual 4%) key = (1087, 370) value = 353 (actual 332 - 4% error) 0% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 14% (actual 4%) key = (91, 38) value = 5939 (actual 1974 - 9% error) 4% (actual 4%) key = (166, 63) value = 7957 (actual 3992 - 9% error) 4% (actual 4%) key = (253, 92) value = 9854 
(actual 5889 - 9% error) 4% (actual 4%) key = (325, 116) value = 11833 (actual 7868 - 9% error) 4% (actual 4%) key = (394, 139) value = 13875 (actual 9910 - 9% error) 4% (actual 4%) key = (481, 168) value = 15903 (actual 11938 - 9% error) 4% (actual 8%) key = (631, 218) value = 17650 (actual 15674 - 4% error) 4% (actual 4%) key = (709, 244) value = 19685 (actual 17709 - 4% error) 4% (actual 4%) key = (766, 263) value = 21640 (actual 19664 - 4% error) 4% (actual 4%) key = (853, 292) value = 23649 (actual 21673 - 4% error) 4% (actual 4%) key = (934, 319) value = 25688 (actual 23712 - 4% error) 4% (actual 4%) key = (1006, 343) value = 27663 (actual 25687 - 4% error) 4% (actual 4%) key = (1087, 370) value = 29654 (actual 27678 - 4% error) 0% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) 3 parts: [0:0:1:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () [0:0:2:0:0:0:0] 166 rows, 1 pages, 0 levels: () () () () () [0:0:3:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error) 33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error) 33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) Got : 24000 2106439 49449 38 44 Expected: 24000 2106439 49449 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 20 23 Expected: 12816 1121048 49449 20 23 Got : 24000 3547100 81694 64 44 Expected: 24000 3547100 81694 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 81694 26 17 Expected: 9582 1425198 81694 26 17 Got : 24000 2460139 23760 42 41 Expected: 24000 2460139 23760 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 23760 18 18 Expected: 10440 1060798 23760 18 18 Got : 24000 4054050 46562 68 43 Expected: 24000 4054050 46562 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), 
[14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 46562 38 24 Expected: 13570 2277890 46562 38 24 Got : 24000 2106459 49449 38 44 Expected: 24000 2106459 49449 38 44 Got : 24000 2460219 23555 41 41 Expected: 24000 2460219 23555 41 41 Got : 24000 4054270 46543 66 43 Expected: 24000 4054270 46543 66 43 Got : 24000 2106479 49555 38 44 Expected: 24000 2106479 49555 38 44 Got : 24000 2460259 23628 41 41 Expected: 24000 2460259 23628 41 41 Got : 24000 4054290 46640 65 43 Expected: 24000 4054290 46640 65 43 Got : 24000 2106439 66674 3 4 Expected: 24000 2106439 66674 3 4 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 66674 2 2 Expected: 12816 1121048 66674 2 2 Got : 24000 2460139 33541 4 4 Expected: 24000 2460139 33541 4 4 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 33541 1 1 Expected: 10440 1060798 33541 1 1 Got : 24000 4054050 64742 7 4 Expected: 24000 4054050 64742 7 4 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2234982 64742 4 2 Expected: 13570 2234982 64742 4 2 |99.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestSticky [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex >> DBase::VersionCompactedParts [GOOD] >> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::EraseCacheWithUncommittedChanges [GOOD] >> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] >> DBase::DropModifiedTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyMain >> TFlatTableExecutor_StickyPages::TestStickyMain [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex >> test_public_api.py::TestAttributes::test_copy_table >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAll [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD] >> 
TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky >> TIterator::Serial [GOOD] >> TIterator::SerialReverse >> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD] >> TFlatTableExecutor_Truncate::Truncate [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWrite [GOOD] >> TFlatTableExecutor_Truncate::TruncateWhileCompacting >> test_public_api.py::TestAttributes::test_alter_table >> TFlatTableExecutor_Truncate::TruncateWhileCompacting [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteWhileCompacting [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncate [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncateAndWrite >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] >> TFlatTableExecutor_Truncate::CompactThenTruncateAndWrite [GOOD] >> TFlatTableExecutor_Truncate::TruncateAtFollower [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteAtFollower >> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteAtFollower [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteThenAttachFollower [GOOD] >> TFlatTableExecutor_Truncate::PartiallyCommitThenTruncateAndWrite [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyPartiallyTryKeepInMemory >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyPartiallyTryKeepInMemory [GOOD] >> TFlatExecutorLeases::Basics [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyPartiallyTryKeepInMemory [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:42.362271Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.014 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.014 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.014 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.014 II| FAKE_ENV: DS.1 gone, left {582b, 8}, put {582b, 8} 00000.014 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.014 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.015 II| FAKE_ENV: DS.0 gone, left {525b, 8}, put {545b, 9} 00000.015 II| FAKE_ENV: All BS storage groups are stopped 00000.015 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.015 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:42.381804Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 
00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.007 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.007 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.007 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 84b annex 0, ~{ } -{ }, 0 gb} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.008 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.008 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.008 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{2, redo 78b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{3, redo 78b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.010 DD| 
TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{4, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{5, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{6, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 ...compacting 00000.011 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Mem, forced state None, forced mode Full 00000.011 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.011 II| TABLET_EXECUTOR: Leader{1:2:8} starting compaction 00000.011 II| TABLET_EXECUTOR: Leader{1:2:9} starting Scan{1 on 101, Compact{1.2.8, eph 1}} 00000.011 II| TABLET_EXECUTOR: Leader{1:2:9} started compaction 1 00000.011 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.012 DD| OPS_COMPACT: Compact{1.2.8, eph 1} saving [1:2:8:1:69632:397:0] left 397b 00000.012 DD| OPS_COMPACT: Compact{1.2.8, eph 1} saving [1:2:8:1:12288:211:0] left 608b 00000.013 DD| OPS_COMPACT: Compact{1.2.8, eph 1} put [1:2:8:1:69632:397:0] result OK flags { Valid } left 211b 00000.013 DD| OPS_COMPACT: Compact{1.2.8, eph 1} put [1:2:8:1:12288:211:0] result OK flags { Valid } left 0b 00000.013 II| OPS_COMPACT: Compact{1.2.8, eph 1} end=Done, 2 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (397 0 
0)b }, ecr=1.000 00000.014 II| TABLET_EXECUTOR: Leader{1:2:9} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 8, product {1 parts epoch 2} done 00000.014 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.014 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.014 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 8 for step 8 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 3 for step 9 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:10} switch applied on followers, step 9 ...waiting until compacted 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} hope 1 -> done Change{8, redo 72b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} release 4194304b of static, Memory{0 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} ...making snapshot with concurrent commit 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxM ... 
tion [1:2:143:1:12288:758:0] status OK pages [ 22 23 ] 00000.052 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:147} got result TEvResult{2 pages [1:2:143:1:12288:758:0] ok OK}, type 5 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:147} commited cookie 1 for step 146 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:147} Tx{144, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan 00000.052 DD| TABLET_EXECUTOR: Leader{1:2:147} Tx{144, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:147} Tx{144, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} hope 1 -> done Change{145, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.053 DD| TABLET_EXECUTOR: Leader{1:2:147} Tx{144, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} release 4194304b of static, Memory{0 dyn 0} ... waiting for NActors::TEvents::TEvWakeup 00000.053 II| TABLET_SAUSAGECACHE: Wakeup DoGCManual 00000.053 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NActors::TEvents::TEvWakeup (done) 00000.053 II| TABLET_EXECUTOR: Leader{1:2:147} suiciding, Waste{2:0, 141849b +(140, 14018b), 146 trc, -14018b acc} 00000.054 DD| TABLET_SAUSAGECACHE: Unregister owner [63:30:2062] 00000.054 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:143:1:12288:758:0] owner [63:30:2062] 00000.054 DD| TABLET_SAUSAGECACHE: Change mode of page collection [1:2:143:1:12288:758:0] to Regular 00000.054 DD| TABLET_SAUSAGECACHE: Remove owner [63:30:2062] 00000.054 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NMemory::TEvConsumerLimit 00000.054 II| TABLET_SAUSAGECACHE: Limit memory consumer with 0B 00000.054 DD| TABLET_SAUSAGECACHE: Drop expired page collection [1:2:143:1:12288:758:0] 00000.054 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 0B Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NMemory::TEvConsumerLimit (done) ... waiting for NKikimr::NMemory::TEvConsumerLimit 00000.054 II| TABLET_SAUSAGECACHE: Limit memory consumer with 8MiB 00000.054 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for NKikimr::NMemory::TEvConsumerLimit (done) 00000.057 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.058 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 344 bytes, 344 total, blobs: { [1:2:2:1:8192:209:0], [1:2:145:1:8192:20:0], [1:2:146:1:8192:11:0], [1:2:144:1:32768:104:0] } 00000.058 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 14896 bytes, 14896 total, blobs: { [1:2:143:1:12288:758:0], [1:2:3:1:24576:90:0], [1:2:4:1:24576:96:0], [1:2:5:1:24576:96:0], [1:2:6:1:24576:96:0], [1:2:7:1:24576:97:0], [1:2:8:1:24576:95:0], [1:2:9:1:24576:91:0], [1:2:10:1:24576:93:0], [1:2:11:1:24576:96:0], [1:2:12:1:24576:96:0], [1:2:13:1:24576:93:0], [1:2:14:1:24576:97:0], [1:2:15:1:24576:97:0], [1:2:16:1:24576:97:0], [1:2:17:1:24576:96:0], [1:2:18:1:24576:100:0], [1:2:19:1:24576:97:0], [1:2:20:1:24576:96:0], [1:2:21:1:24576:97:0], [1:2:22:1:24576:97:0], [1:2:23:1:24576:97:0], [1:2:24:1:24576:97:0], [1:2:25:1:24576:97:0], [1:2:26:1:24576:97:0], [1:2:27:1:24576:97:0], [1:2:28:1:24576:97:0], [1:2:29:1:24576:96:0], [1:2:30:1:24576:100:0], [1:2:31:1:24576:97:0], [1:2:32:1:24576:95:0], [1:2:33:1:24576:104:0], [1:2:34:1:24576:97:0], [1:2:35:1:24576:99:0], [1:2:36:1:24576:97:0], [1:2:37:1:24576:97:0], [1:2:38:1:24576:97:0], [1:2:39:1:24576:97:0], [1:2:40:1:24576:97:0], [1:2:41:1:24576:97:0], [1:2:42:1:24576:97:0], [1:2:43:1:24576:97:0], [1:2:44:1:24576:97:0], [1:2:45:1:24576:97:0], [1:2:46:1:24576:97:0], [1:2:47:1:24576:97:0], [1:2:48:1:24576:97:0], [1:2:49:1:24576:97:0], [1:2:50:1:24576:97:0], [1:2:51:1:24576:97:0], [1:2:52:1:24576:97:0], [1:2:53:1:24576:97:0], [1:2:54:1:24576:97:0], [1:2:55:1:24576:97:0], [1:2:56:1:24576:97:0], [1:2:57:1:24576:97:0], [1:2:58:1:24576:97:0], [1:2:59:1:24576:97:0], [1:2:60:1:24576:97:0], [1:2:61:1:24576:97:0], [1:2:62:1:24576:97:0], [1:2:63:1:24576:97:0], [1:2:64:1:24576:97:0], [1:2:65:1:24576:97:0], [1:2:66:1:24576:97:0], [1:2:67:1:24576:97:0], [1:2:68:1:24576:97:0], [1:2:69:1:24576:97:0], [1:2:70:1:24576:97:0], [1:2:71:1:24576:97:0], [1:2:72:1:24576:97:0], [1:2:73:1:24576:101:0], [1:2:74:1:24576:102:0], [1:2:75:1:24576:101:0], [1:2:76:1:24576:102:0], [1:2:77:1:24576:104:0], [1:2:78:1:24576:104:0], [1:2:79:1:24576:104:0], [1:2:80:1:24576:104:0], [1:2:81:1:24576:103:0], [1:2:82:1:24576:101:0], [1:2:83:1:24576:104:0], [1:2:84:1:24576:104:0], [1:2:85:1:24576:104:0], [1:2:86:1:24576:104:0], [1:2:87:1:24576:104:0], [1:2:88:1:24576:104:0], [1:2:89:1:24576:104:0], [1:2:90:1:24576:101:0], [1:2:91:1:24576:104:0], [1:2:92:1:24576:104:0], [1:2:93:1:24576:98:0], [1:2:94:1:24576:104:0], [1:2:95:1:24576:104:0], [1:2:96:1:24576:104:0], [1:2:97:1:24576:104:0], [1:2:98:1:24576:104:0], [1:2:99:1:24576:104:0], [1:2:100:1:24576:104:0], [1:2:101:1:24576:100:0], [1:2:102:1:24576:97:0], [1:2:103:1:24576:104:0], [1:2:104:1:24576:104:0], [1:2:105:1:24576:104:0], [1:2:106:1:24576:104:0], [1:2:107:1:24576:104:0], [1:2:108:1:24576:104:0], [1:2:109:1:24576:104:0], [1:2:110:1:24576:104:0], [1:2:111:1:24576:104:0], [1:2:112:1:24576:104:0], [1:2:113:1:24576:104:0], [1:2:114:1:24576:104:0], [1:2:115:1:24576:104:0], [1:2:116:1:24576:104:0], [1:2:117:1:24576:104:0], [1:2:118:1:24576:104:0], [1:2:119:1:24576:104:0], [1:2:120:1:24576:104:0], [1:2:121:1:24576:104:0], [1:2:122:1:24576:104:0], [1:2:123:1:24576:104:0], [1:2:124:1:24576:104:0], [1:2:125:1:24576:104:0], [1:2:126:1:24576:104:0], [1:2:127:1:24576:104:0], [1:2:128:1:24576:104:0], [1:2:129:1:24576:104:0], [1:2:130:1:24576:104:0], [1:2:131:1:24576:104:0], 
[1:2:132:1:24576:104:0], [1:2:133:1:24576:104:0], [1:2:134:1:24576:104:0], [1:2:135:1:24576:104:0], [1:2:136:1:24576:104:0], [1:2:137:1:24576:104:0], [1:2:138:1:24576:104:0], [1:2:139:1:24576:104:0], [1:2:140:1:24576:104:0], [1:2:141:1:24576:104:0], [1:2:142:1:24576:104:0], [1:2:145:1:24576:60:0], [1:2:146:1:24576:60:0] } 00000.060 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:143:1:12288:758:0] 00000.060 DD| TABLET_SAUSAGECACHE: Add page collection [1:2:143:1:12288:758:0] owner [63:212:2237] 00000.060 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [63:212:2237] cookie 4 class Online from cache [ ] already requested [ ] to request [ 20 21 24 25 ] 00000.060 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 1.42KiB EvictedInMemoryBytes: 0B 00000.060 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:143:1:12288:758:0] status OK pages [ 20 21 24 25 ] 00000.060 TT| TABLET_SAUSAGECACHE: Send page collection result [1:2:143:1:12288:758:0] owner [63:212:2237] class Online pages [ 20 21 24 25 ] cookie 4 00000.060 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 1.42KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.060 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.061 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 1881b, wait} done, Waste{2:0, 141849b +(140, 14018b), 146 trc} 00000.061 DD| TABLET_SAUSAGECACHE: Attach page collection [1:2:143:1:12288:758:0] owner [63:212:2237] cache mode TryKeepInMemory 00000.061 DD| TABLET_SAUSAGECACHE: Change mode of page collection [1:2:143:1:12288:758:0] to TryKeepInMemory 00000.061 TT| TABLET_SAUSAGECACHE: Try move collection [1:2:143:1:12288:758:0] in memory, total pages: 26 (140KiB), pages already loaded: 4 00000.061 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 1.42KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.061 TT| TABLET_SAUSAGECACHE: Try load in-memory collection [1:2:143:1:12288:758:0], total pages: 26 (140KiB), pages to request: 22 (136KiB, remain pages in queue: 0 00000.061 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [63:212:2237] class Scan pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 22 23 ] 00000.061 TT| TABLET_SAUSAGECACHE: Remain in-memory collections to load: 0 00000.061 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{4 pages [1:2:143:1:12288:758:0] ok OK}, type 5 00000.061 TT| TABLET_SAUSAGECACHE: Sync page collection [1:2:143:1:12288:758:0] owner [63:212:2237] pages [ 21 24 25 20 ] 00000.062 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 1.42KiB Passive: 0B LoadInFly: 139KiB EvictedInMemoryBytes: 0B 00000.062 TT| TABLET_SAUSAGECACHE: Receive page collection [1:2:143:1:12288:758:0] status OK pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 22 23 ] 00000.062 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.062 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{22 pages [1:2:143:1:12288:758:0] ok OK}, type 5 00000.062 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 00000.063 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan 00000.063 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.063 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} hope 1 -> done Change{145, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.063 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxFullScan} release 4194304b of static, Memory{0 dyn 0} ... waiting for NActors::TEvents::TEvWakeup 00000.063 II| TABLET_SAUSAGECACHE: Wakeup DoGCManual 00000.063 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NActors::TEvents::TEvWakeup (done) 00000.063 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.064 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 141849b +(0, 0b), 1 trc, -14018b acc} 00000.064 DD| TABLET_SAUSAGECACHE: Unregister owner [63:212:2237] 00000.064 DD| TABLET_SAUSAGECACHE: Remove page collection [1:2:143:1:12288:758:0] owner [63:212:2237] 00000.064 DD| TABLET_SAUSAGECACHE: Change mode of page collection [1:2:143:1:12288:758:0] to Regular 00000.064 DD| TABLET_SAUSAGECACHE: Remove owner [63:212:2237] 00000.064 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.064 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {0 0b} miss {4 1002b} in-memory miss {0 0b} 00000.064 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 140KiB Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.064 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.064 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14354b, 149} 00000.064 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.064 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.064 II| FAKE_ENV: DS.1 gone, left {143730b, 8}, put {157887b, 150} 00000.064 II| FAKE_ENV: All BS storage groups are stopped 00000.064 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.064 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 823}, stopped |99.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> TIterator::SerialReverse [GOOD] >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> TLegacy::IndexIter >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLease >> DBase::KIKIMR_15598_Many_MemTables [GOOD] >> Memtable::Basics [GOOD] >> Memtable::BasicsReverse [GOOD] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck >> Memtable::Wreck [GOOD] >> Memtable::Erased >> TLegacy::IndexIter [GOOD] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter >> Memtable::Erased [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> NFwd_TBlobs::Trace [GOOD] >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> 
NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD] >> NFwd_TFlatIndexCache::GetTwice [GOOD] >> NFwd_TFlatIndexCache::ForwardTwice [GOOD] >> NFwd_TFlatIndexCache::Skip_Done [GOOD] >> NFwd_TFlatIndexCache::Skip_Done_None [GOOD] >> NFwd_TFlatIndexCache::Skip_Keep [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] >> NFwd_TFlatIndexCache::Trace [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> NFwd_TFlatIndexCache::Slices [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD] >> NOther::Blocks [GOOD] >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck >> TPart::MassCheck [GOOD] >> TPart::PageFailEnv >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> NOther::Blocks [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 
DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | 
> {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | 
ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | ... 
3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 
Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 
: 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, 
[38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} |99.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> TPart::ForwardEnv [GOOD] >> TPart::ForwardEnvColumnGroups >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::ManyVersions >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] >> Backup::GenerationDirs [GOOD] >> Backup::SnapshotIOError >> Backup::SnapshotIOError [GOOD] >> Backup::EmptyData >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleep >> Backup::EmptyData [GOOD] >> Backup::SnapshotData >> Backup::SnapshotData [GOOD] >> Backup::SnapshotLargeData >> TVersions::WreckHeadReverse [GOOD] >> TVersions::Wreck2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TPart::CutKeys_CutUtf8String [GOOD] Test command err: 00000.001 II| FAKE_ENV: Born at 2025-12-23T19:18:23.364359Z 00000.037 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.042 II| FAKE_ENV: Starting storage for BS group 0 00000.044 II| FAKE_ENV: Starting storage for BS group 1 00000.044 II| FAKE_ENV: Starting storage for BS group 2 00000.044 II| FAKE_ENV: Starting storage for BS group 3 00000.060 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.064 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.068 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 ... waiting for NKikimr::NMemory::TEvConsumerLimit 00000.068 II| TABLET_SAUSAGECACHE: Limit memory consumer with 8MiB 00000.068 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for NKikimr::NMemory::TEvConsumerLimit (done) 00000.073 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.073 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.075 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.075 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.077 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.077 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily 00000.077 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.077 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.077 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} release 4194304b of static, Memory{0 dyn 0} 00000.077 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.077 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.077 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{2, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.077 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{3, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{4, redo 1041b 
alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{5, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{6, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.078 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{7, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{8, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.079 DD| TABLET_EXECUTOR: 
Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{9, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{10, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.079 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{11, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{12, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{13, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.080 DD| TABLET_EXECUTOR: 
Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{14, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.080 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memor ... {[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{11} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 10 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{3} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccccd} | 1 1 41b {ccccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{3} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 43b {ccccccd} | 1 1 43b {ccccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{3} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccccd} | 1 1 40b {cccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{3} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 42b {cccccd} | 1 1 42b {cccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 39b {ccccd} | 1 1 39b {ccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{3} Label{3 rev 3, 
105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccd} | 1 1 41b {ccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{3} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 38b {cccd} | 1 1 38b {cccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{3} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{3} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{3} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{3} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 71 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{3} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{3} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 71 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 280b 2r} data 904b + FlatIndex{3} Label{3 rev 3, 401b} 3 rec | Page Row Bytes (String) | 0 0 140b {____________________________________________________________________________________________________cccddd} | 1 1 140b 
{____________________________________________________________________________________________________cd} | 1 1 140b {____________________________________________________________________________________________________cddddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 280 ErasedRowCount: 0} Label{13 rev 1, 204b} | PageId: 0 RowCount: 1 DataSize: 140 ErasedRowCount: 0 | > {____________________________________________________________________________________________________cd} | PageId: 1 RowCount: 2 DataSize: 280 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (Utf8) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 98b 2r} data 350b + FlatIndex{3} Label{3 rev 3, 124b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x89\x91} | 1 1 49b {abc\xF0\x9F\x89\x91\xF0\x9F\x89\x91\xF0\x9F\x89\x91} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 98 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xF0\x9F\x89\x91} | PageId: 1 RowCount: 2 DataSize: 98 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 95b 2r} data 342b + FlatIndex{3} Label{3 rev 3, 120b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 46b {abc\xE2\x9A\xAB} | 1 1 46b {abc\xE2\x9A\xAB\xE2\x9A\xAB\xE2\x9A\xAB} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 95 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xE2\x9A\xAB} | PageId: 1 RowCount: 2 DataSize: 95 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 95b 2r} data 344b + FlatIndex{3} Label{3 rev 3, 121b} 3 rec | Page Row Bytes (Utf8) | 0 0 46b {abc\xE2\x9A\xAB\xE2\x9A\xAB\xE2\x9A\xAB} | 1 1 49b {abc\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 95 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 46 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x94} | PageId: 1 RowCount: 2 DataSize: 95 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 89b 2r} data 332b + FlatIndex{3} Label{3 rev 3, 115b} 3 rec | Page Row Bytes (Utf8) | 0 0 40b {abcxxx} | 1 1 49b {abc\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 89 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x94} | PageId: 1 RowCount: 2 DataSize: 89 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 89b 2r} data 326b + FlatIndex{3} Label{3 rev 3, 112b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 40b {abcx} | 1 1 40b {abcxxx} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 89 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abcx} | PageId: 1 RowCount: 2 DataSize: 89 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 98b 2r} data 350b + FlatIndex{3} Label{3 rev 3, 124b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x96} | 1 1 49b {abc\xF0\x9F\x98\x96\xF0\x9F\x98\x96\xF0\x9F\x98\x96} + BTreeIndex{PageId: 2 RowCount: 2 
DataSize: 98 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x96} | PageId: 1 RowCount: 2 DataSize: 98 ErasedRowCount: 0 |99.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> TBtreeIndexBuilder::NoNodes [GOOD] >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize [GOOD] >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey [GOOD] >> TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> TBtreeIndexTPart::NoNodes [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] >> TBtreeIndexTPart::FewNodes [GOOD] >> TBtreeIndexTPart::Erases [GOOD] >> TBtreeIndexTPart::Groups [GOOD] >> TBtreeIndexTPart::History >> TBtreeIndexTPart::History [GOOD] >> TBtreeIndexTPart::External >> TBtreeIndexTPart::External [GOOD] >> TChargeBTreeIndex::NoNodes >> TChargeBTreeIndex::NoNodes [GOOD] >> TChargeBTreeIndex::NoNodes_Groups >> TFlatExecutorLeases::BasicsInitialLeaseSleep [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> TPart::State [GOOD] >> TPart::Trivials [GOOD] >> TPart::WreckPart >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::OneNode >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> TPart::WreckPart [GOOD] >> TPart::WreckPartColumnGroups >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout [GOOD] >> TFlatTableDatetime::TestDate >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular >> TChargeBTreeIndex::OneNode_Groups [GOOD] >> TChargeBTreeIndex::OneNode_History >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 >> TPart::WreckPartColumnGroups [GOOD] >> TPart::PageFailEnvColumnGroups >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> TChargeBTreeIndex::OneNode_History [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History >> 
TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default >> TPart::PageFailEnvColumnGroups [GOOD] >> TPart::Versions [GOOD] >> TPartBtreeIndexIteration::NoNodes >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True >> TPartBtreeIndexIteration::NoNodes [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> TPartBtreeIndexIteration::NoNodes_History >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False >> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> TPartBtreeIndexIteration::OneNode >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> TChargeBTreeIndex::FewNodes [GOOD] >> TChargeBTreeIndex::FewNodes_Groups >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff >> TPartBtreeIndexIteration::OneNode_Groups [GOOD] >> TPartBtreeIndexIteration::OneNode_History >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_History >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices >> TChargeBTreeIndex::FewNodes_History [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan >> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows >> TChargeBTreeIndex::FewNodes_Sticky [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_public_api.py::TestDocApiTables::test_create_table >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> 
TPartBtreeIndexIteration::OneNode_Groups_Slices |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Follower::FollowerPromoteToLeaderWhileLoadingPages >> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices >> TFlatTableExecutor_Follower::FollowerPromoteToLeaderWhileLoadingPages [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex >> TVersions::Wreck2Reverse [GOOD] >> TVersions::Wreck1 >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [FAIL] >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> TCompaction::OneMemtable [GOOD] >> TCompaction::ManyParts >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky >> TCompaction::ManyParts [GOOD] >> TCompaction::BootAbort >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults [GOOD] >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> 
test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] >> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::FullScan >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> TExecutorDb::FullScan [GOOD] >> TExecutorDb::CoordinatorSimulation >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> 
TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 2050b 40r} data 2167b + FlatIndex{1} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 0 0 2050b {0, 1} | 0 39 2050b {5, 7} + BTreeIndex{Empty, PageId: 0 RowCount: 40 DataSize: 2050 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{0} Label{04 rev 1, 2050b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : 
xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{10} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b {0, 1} | 3 39 620b {5, 7} + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row 
Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 
36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{21} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 20 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERo ... 
owOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 
122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{29} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 7 12 122b {1, 8} | 8 14 122b {2, NULL} | 9 16 122b {2, 4} | 11 18 122b {2, 7} | 12 20 122b {2, 10} | 13 22 122b {3, 3} | 15 24 122b {3, 6} | 16 26 122b {3, 8} | 17 28 122b {4, NULL} | 19 30 122b {4, 4} | 20 32 122b {4, 7} | 21 34 122b {4, 10} | 24 36 122b {5, 3} | 25 38 122b {5, 6} | 25 39 122b {5, 7} + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 
3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 
32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } |99.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TVersions::Wreck1 [GOOD] >> TVersions::Wreck1Reverse >> test_vdisks.py::TestTinyVDisks::test_disabled_enabled >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex >> Backup::SnapshotLargeData [GOOD] >> Backup::SnapshotSchema >> Backup::SnapshotSchema [GOOD] >> Backup::ChangelogData >> Backup::ChangelogData [GOOD] >> Backup::ChangelogLargeData >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage >> TExecutorDb::EncodedPage [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> NProto::LargeGlobId [GOOD] >> Redo::ABI_008 >> Redo::ABI_008 [GOOD] >> Self::Literals [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD] >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD] >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactCommittedLongTx >> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD] >> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD] >> TFlatTableExecutor_LongTx::CompactedTxIdReuse >> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD] >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD] Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b} | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | > {0, a, false, 0} | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | > {1, b, true, 10} | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | > {2, c, false, 20} | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | > {3, d, true, 30} | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | > {4, e, false, 40} | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | > {5, f, true, 50} | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | > {6, g, false, 60} | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | > {7, h, true, 70} | PageId: 10008 
RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | > {9, j, true, 90} | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 + BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b} | + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, a, false, 0} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, b, true, 10} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > {2, c, false, 20} | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, d, true, 30} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, e, false, 40} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > {5, f, true, 50} | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, g, false, 60} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, h, true, 70} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, j, true, 90} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, k, false, 100} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, l, true, 110} | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, m, false, 120} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, n, true, 130} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > {14, o, false, 140} | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, p, true, 150} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, q, false, 160} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > {17, r, true, 170} | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 
2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, s, false, 180} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, t, true, 190} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, x, NULL, NULL} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, xx, NULL, NULL} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > {2, xxx, NULL, NULL} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, xxxx, NULL, NULL} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, xxxxx, NULL, NULL} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > {5, xxxxxx, NULL, NULL} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, xxxxxxx, NULL, NULL} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, xxxxxxxx, NULL, NULL} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > {8, xxxxxxxxx, NULL, NULL} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, xxxxxxxxxx, NULL, NULL} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > {14, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > {17, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > {20, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > {21, xxxxxxxxxx.., NULL, NULL} | | + 
BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > {22, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > {23, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > {24, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > {25, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > {26, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > {27, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > {28, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > {29, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > {30, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > {31, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > {32, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > {33, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > {34, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > {35, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > {36, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > {37, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > {38, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > {39, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > {40, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > {41, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > {42, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > {43, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > {44, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > {45, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10046 RowCount: 5781 DataSize: 
48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > {46, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 ... n{3 on 2, Compact{1.2.6, eph 1}} 00000.194 II| TABLET_EXECUTOR: Leader{1:2:7} started compaction 3 00000.194 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 1}} begin on TSubset{head 0, 0m 1p 0c} 00000.194 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00000.194 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{5 on 2, Compact{1.2.7, eph 2}} 00000.194 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 5 00000.195 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.7, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00000.226 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.7, eph 2}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.226 II| OPS_COMPACT: Compact{1.2.7, eph 2} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.261 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 1}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.261 II| OPS_COMPACT: Compact{1.2.6, eph 1} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.388 II| TABLET_EXECUTOR: Leader{1:2:9} Compact 5 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 7, product {1 parts epoch 3} done 00000.391 II| TABLET_EXECUTOR: Leader{1:2:10} Compact 3 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 6, product {1 parts epoch 0} done 00000.392 II| TABLET_EXECUTOR: Leader{1:2:11} starting compaction 00000.392 II| TABLET_EXECUTOR: Leader{1:2:12} starting Scan{7 on 2, Compact{1.2.11, eph 3}} 00000.392 II| TABLET_EXECUTOR: Leader{1:2:12} started compaction 7 00000.392 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.11, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00000.414 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.11, eph 3}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.415 II| OPS_COMPACT: Compact{1.2.11, eph 3} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.420 II| TABLET_EXECUTOR: Leader{1:2:12} starting compaction 00000.420 II| TABLET_EXECUTOR: Leader{1:2:13} starting Scan{9 on 2, Compact{1.2.12, eph 2}} 00000.420 II| TABLET_EXECUTOR: Leader{1:2:13} started compaction 9 00000.420 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.12, eph 2}} begin on TSubset{head 0, 0m 1p 0c} 00000.421 II| TABLET_EXECUTOR: Leader{1:2:13} starting compaction 00000.421 II| TABLET_EXECUTOR: Leader{1:2:14} starting Scan{11 on 2, Compact{1.2.13, eph 1}} 00000.421 II| TABLET_EXECUTOR: Leader{1:2:14} started compaction 11 00000.421 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.13, eph 1}} begin on TSubset{head 0, 0m 1p 0c} 00000.475 II| TABLET_EXECUTOR: Leader{1:2:14} Compact 7 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 11, product {1 parts epoch 4} done 00000.482 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.13, eph 1}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 
0 ~2p 00000.482 II| OPS_COMPACT: Compact{1.2.13, eph 1} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.486 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.12, eph 2}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.486 II| OPS_COMPACT: Compact{1.2.12, eph 2} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.493 II| TABLET_EXECUTOR: Leader{1:2:15} Compact 11 on TGenCompactionParams{2: gen 2 epoch 0, 1 parts} step 13, product {1 parts epoch 0} done 00000.494 II| TABLET_EXECUTOR: Leader{1:2:16} Compact 9 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 12, product {1 parts epoch 0} done 00000.495 II| TABLET_EXECUTOR: Leader{1:2:17} starting compaction 00000.496 II| TABLET_EXECUTOR: Leader{1:2:18} starting Scan{13 on 2, Compact{1.2.17, eph 3}} 00000.496 II| TABLET_EXECUTOR: Leader{1:2:18} started compaction 13 00000.496 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.17, eph 3}} begin on TSubset{head 0, 0m 1p 0c} 00000.496 II| TABLET_EXECUTOR: Leader{1:2:18} starting compaction 00000.496 II| TABLET_EXECUTOR: Leader{1:2:19} starting Scan{15 on 2, Compact{1.2.18, eph 2}} 00000.496 II| TABLET_EXECUTOR: Leader{1:2:19} started compaction 15 00000.496 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.18, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00000.563 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.17, eph 3}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.563 II| OPS_COMPACT: Compact{1.2.17, eph 3} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.615 II| TABLET_EXECUTOR: Leader{1:2:19} Compact 13 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 17, product {1 parts epoch 0} done 00000.615 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.18, eph 2}} end=Done, 2r seen, TFwd{fetch=19.1MiB,saved=19.1MiB,usage=19.1MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=5}, trace 0 of 0 ~3p 00000.616 II| OPS_COMPACT: Compact{1.2.18, eph 2} end=Done, 6 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (20000244 20000016 0)b }, ecr=1.000 00000.627 II| TABLET_EXECUTOR: Leader{1:2:20} Compact 15 on TGenCompactionParams{2: gen 2 epoch 0, 2 parts} step 18, product {1 parts epoch 0} done 00000.628 II| TABLET_EXECUTOR: Leader{1:2:21} starting compaction 00000.628 II| TABLET_EXECUTOR: Leader{1:2:22} starting Scan{17 on 2, Compact{1.2.21, eph 3}} 00000.628 II| TABLET_EXECUTOR: Leader{1:2:22} started compaction 17 00000.628 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.21, eph 3}} begin on TSubset{head 0, 0m 2p 0c} 00000.687 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.21, eph 3}} end=Done, 2r seen, TFwd{fetch=19.1MiB,saved=19.1MiB,usage=19.1MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=4}, trace 0 of 0 ~3p 00000.688 II| OPS_COMPACT: Compact{1.2.21, eph 3} end=Done, 6 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (20000244 20000016 0)b }, ecr=1.000 00000.696 II| TABLET_EXECUTOR: Leader{1:2:22} Compact 17 on TGenCompactionParams{2: gen 2 epoch 0, 2 parts} step 21, product {1 parts epoch 0} 
done 00000.709 II| TABLET_EXECUTOR: Leader{1:2:23} got result TEvResult{0 pages [1:2:18:2:12288:140:0] fail RACE}, type 1 00000.733 II| TABLET_EXECUTOR: Leader{1:2:23} suiciding, Waste{2:0, 20001011b +(44, 90121851b), 22 trc, -90121851b acc} 00000.783 II| FAKE_ENV: Model starts soft shutdown on level 8 of 8, left 2 actors 00000.783 NN| TABLET_SAUSAGECACHE: Poison cache serviced 27 reqs hit {6 10000223b} miss {21 120000552b} in-memory miss {0 0b} 00000.783 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.783 II| FAKE_ENV: DS.2 gone, left {110001012b, 29}, put {110001012b, 29} 00000.821 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.821 II| FAKE_ENV: DS.0 gone, left {3608b, 22}, put {3628b, 23} 00000.821 II| FAKE_ENV: DS.1 gone, left {122950b, 32}, put {122950b, 32} 00000.829 II| FAKE_ENV: All BS storage groups are stopped 00000.829 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.829 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 83}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:19:38.520722Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: TNanny initiates TDummy tablet 72057594037927937 birth 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.012 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.012 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.075 II| TABLET_EXECUTOR: Leader{1:2:3} starting compaction 00000.075 II| TABLET_EXECUTOR: Leader{1:2:4} starting Scan{1 on 2, Compact{1.2.3, eph 1}} 00000.075 II| TABLET_EXECUTOR: Leader{1:2:4} started compaction 1 00000.075 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.3, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.087 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.3, eph 1}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.087 II| OPS_COMPACT: Compact{1.2.3, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (39360 0 0)b }, ecr=0.004 00000.088 II| TABLET_EXECUTOR: Leader{1:2:4} Compact 1 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 3, product {1 parts epoch 2} done 00000.152 II| TABLET_EXECUTOR: Leader{1:2:6} starting compaction 00000.152 II| TABLET_EXECUTOR: Leader{1:2:7} starting Scan{3 on 2, Compact{1.2.6, eph 2}} 00000.153 II| TABLET_EXECUTOR: Leader{1:2:7} started compaction 3 00000.153 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 2}} begin on TSubset{head 3, 1m 1p 0c} 00000.211 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 2}} end=Done, 2r seen, TFwd{fetch=38.3KiB,saved=38.3KiB,usage=38.3KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.212 II| OPS_COMPACT: Compact{1.2.6, eph 2} end=Done, 2 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (78660 0 0)b }, ecr=0.004 00000.213 II| TABLET_EXECUTOR: Leader{1:2:7} Compact 3 on TGenCompactionParams{2: gen 0 epoch +inf, 1 parts} step 6, product {1 parts epoch 3} done 00000.283 II| TABLET_EXECUTOR: Leader{1:2:9} starting compaction 00000.283 II| TABLET_EXECUTOR: Leader{1:2:10} starting Scan{5 on 2, Compact{1.2.9, eph 3}} 00000.283 II| 
TABLET_EXECUTOR: Leader{1:2:10} started compaction 5 00000.283 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.9, eph 3}} begin on TSubset{head 4, 1m 1p 0c} 00000.360 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.9, eph 3}} end=Done, 2r seen, TFwd{fetch=76.8KiB,saved=76.8KiB,usage=76.8KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.361 II| OPS_COMPACT: Compact{1.2.9, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (78660 0 0)b }, ecr=0.004 00000.418 II| TABLET_EXECUTOR: Leader{1:2:10} Compact 5 on TGenCompactionParams{2: gen 0 epoch +inf, 1 parts} step 9, product {1 parts epoch 4} done 00000.420 II| TABLET_EXECUTOR: Leader{1:2:11} suiciding, Waste{2:0, 79404b +(7, 236400b), 10 trc, -236400b acc} 00000.426 II| FAKE_ENV: Model starts soft shutdown on level 8 of 8, left 2 actors 00000.426 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {4 117917b} miss {0 0b} in-memory miss {0 0b} 00000.426 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.426 II| FAKE_ENV: DS.1 gone, left {316163b, 14}, put {316163b, 14} 00000.431 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.431 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.431 II| FAKE_ENV: DS.0 gone, left {1032b, 10}, put {1052b, 11} 00000.431 II| FAKE_ENV: All BS storage groups are stopped 00000.431 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.431 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 40}, stopped |99.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:35.609571Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: TNanny initiates TDummy tablet 72057594037927937 birth 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.008 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.008 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.109 II| TABLET_EXECUTOR: Leader{1:2:297} starting compaction 00000.109 II| TABLET_EXECUTOR: Leader{1:2:298} starting Scan{1 on 2, Compact{1.2.297, eph 1}} 00000.109 II| TABLET_EXECUTOR: Leader{1:2:298} started compaction 1 00000.109 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.297, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.112 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.297, eph 1}} end=Done, 113r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 9 of 14 ~1p 00000.112 II| OPS_COMPACT: Compact{1.2.297, eph 1} end=Done, 14 blobs 86r (max 113), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 9 +10, (186690 13981 167512)b }, ecr=1.000 00000.124 II| TABLET_EXECUTOR: Leader{1:2:299} Compact 1 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 297, product {1 parts epoch 2} done 00000.125 II| TABLET_EXECUTOR: LSnap{1:2, on 2:301, 5924b, wait} done, Waste{2:0, 816197b +(72, 53548b), 300 trc} 00000.148 II| TABLET_EXECUTOR: Leader{1:2:340} starting compaction 00000.148 II| TABLET_EXECUTOR: Leader{1:2:341} starting Scan{3 on 
3, Compact{1.2.340, eph 1}} 00000.148 II| TABLET_EXECUTOR: Leader{1:2:341} started compaction 3 00000.148 II| TABLET_OPS_HOST: Scan{3 on 3, Compact{1.2.340, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.150 II| TABLET_OPS_HOST: Scan{3 on 3, Compact{1.2.340, eph 1}} end=Done, 108r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 15 of 17 ~1p 00000.150 II| OPS_COMPACT: Compact{1.2.340, eph 1} end=Done, 9 blobs 82r (max 108), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 15 +5, (135649 26952 173980)b }, ecr=1.000 00000.162 II| TABLET_EXECUTOR: Leader{1:2:341} Compact 3 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 340, product {1 parts epoch 2} done 00000.280 II| TABLET_EXECUTOR: LSnap{1:2, on 2:601, 7848b, wait} done, Waste{2:0, 1480710b +(76, 28314b), 300 trc} 00000.308 II| TABLET_EXECUTOR: Leader{1:2:660} starting compaction 00000.308 II| TABLET_EXECUTOR: Leader{1:2:661} starting Scan{5 on 2, Compact{1.2.660, eph 2}} 00000.308 II| TABLET_EXECUTOR: Leader{1:2:661} started compaction 5 00000.308 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.660, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00000.311 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.660, eph 2}} end=Done, 115r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 13 of 14 ~1p 00000.311 II| OPS_COMPACT: Compact{1.2.660, eph 2} end=Done, 9 blobs 115r (max 115), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 13 +5, (188812 13541 158956)b }, ecr=1.000 00000.324 II| TABLET_EXECUTOR: Leader{1:2:662} Compact 5 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 660, product {1 parts epoch 3} done 00000.325 II| TABLET_EXECUTOR: Leader{1:2:663} starting compaction 00000.325 II| TABLET_EXECUTOR: Leader{1:2:664} starting Scan{7 on 2, Compact{1.2.663, eph 2}} 00000.325 II| TABLET_EXECUTOR: Leader{1:2:664} started compaction 7 00000.325 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.663, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00000.330 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.663, eph 2}} end=Done, 168r seen, TFwd{fetch=359KiB,saved=359KiB,usage=359KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=9}, trace 29 of 37 ~3p 00000.331 II| OPS_COMPACT: Compact{1.2.663, eph 2} end=Done, 4 blobs 133r (max 201), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 29 +0, (285394 20542 253440)b }, ecr=1.000 00000.332 II| TABLET_EXECUTOR: Leader{1:2:665} Compact 7 on TGenCompactionParams{2: gen 1 epoch 0, 2 parts} step 663, product {1 parts epoch 0} done 00000.360 II| TABLET_EXECUTOR: Leader{1:2:704} starting compaction 00000.360 II| TABLET_EXECUTOR: Leader{1:2:705} starting Scan{9 on 3, Compact{1.2.704, eph 2}} 00000.360 II| TABLET_EXECUTOR: Leader{1:2:705} started compaction 9 00000.360 II| TABLET_OPS_HOST: Scan{9 on 3, Compact{1.2.704, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00000.363 II| TABLET_OPS_HOST: Scan{9 on 3, Compact{1.2.704, eph 2}} end=Done, 103r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 11 of 11 ~1p 00000.363 II| OPS_COMPACT: Compact{1.2.704, eph 2} end=Done, 9 blobs 103r (max 103), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 11 +5, (160838 26791 135833)b }, ecr=1.000 00000.374 II| TABLET_EXECUTOR: Leader{1:2:706} Compact 9 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 704, product {1 parts epoch 3} done 
00000.375 II| TABLET_EXECUTOR: Leader{1:2:708} starting compaction 00000.376 II| TABLET_EXECUTOR: Leader{1:2:709} starting Scan{11 on 3, Compact{1.2.708, eph 2}} 00000.376 II| TABLET_EXECUTOR: Leader{1:2:709} started compaction 11 00000.376 II| TABLET_OPS_HOST: Scan{11 on 3, Compact{1.2.708, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00000.379 II| TABLET_OPS_HOST: Scan{11 on 3, Compact{1.2.708, eph 2}} end=Done, 157r seen, TFwd{fetch=288KiB,saved=288KiB,usage=282KiB,after=6.7KiB,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=7}, trace 28 of 36 ~3p 00000.380 II| OPS_COMPACT: Compact{1.2.708, eph 2} end=Done, 4 blobs 134r (max 185), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 28 +0, (244683 46885 241789)b }, ecr=1.000 00000.381 II| TABLET_EXECUTOR: Leader{1:2:709} Compact 11 on TGenCompactionParams{3: gen 1 epoch 0, 2 parts} step 708, product {1 parts epoch 0} done 00000.382 II| TABLET_EXECUTOR: Leader{1:2:710} got result TEvResult{0 pages [1:2:704:1:12289:0:0] fail RACE}, type 1 00000.492 II| TABLET_EXECUTOR: LSnap{1:2, on 2:901, 10142b, wait} done, Waste{2:0, 2169849b +(183, 845616b), 300 trc} 00000.533 II| TABLET_EXECUTOR: Leader{1:2:997} starting compaction 00000.533 II| TABLET_EXECUTOR: Leader{1:2:998} starting Scan{13 on 3, Compact{1.2.997, eph 3}} 00000.533 II| TABLET_EXECUTOR: Leader{1:2:998} started compaction 13 00000.533 II| TABLET_OPS_HOST: Scan{13 on 3, Compact{1.2.997, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00000.536 II| TABLET_OPS_HOST: Scan{13 on 3, Compact{1.2.997, eph 3}} end=Done, 113r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 7 of 11 ~1p 00000.536 II| OPS_COMPACT: Compact{1.2.997, eph 3} end=Done, 11 blobs 113r (max 113), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 7 +7, (177438 47549 122089)b }, ecr=1.000 00000.550 II| TABLET_EXECUTOR: Leader{1:2:999} Compact 13 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 997, product {1 parts epoch 4} done 00000.574 II| TABLET_EXECUTOR: Leader{1:2:1027} starting compaction 00000.574 II| TABLET_EXECUTOR: Leader{1:2:1028} starting Scan{15 on 2, Compact{1.2.1027, eph 3}} 00000.574 II| TABLET_EXECUTOR: Leader{1:2:1028} started compaction 15 00000.574 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.1027, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00000.576 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.1027, eph 3}} end=Done, 108r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 12 of 16 ~1p 00000.576 II| OPS_COMPACT: Compact{1.2.1027, eph 3} end=Done, 16 blobs 108r (max 108), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 12 +12, (131616 33353 212915)b }, ecr=1.000 00000.578 II| TABLET_EXECUTOR: Leader{1:2:1029} Compact 15 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 1027, product {1 parts epoch 4} done 00000.686 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1201, 11533b, wait} done, Waste{2:0, 3280233b +(154, 88785b), 300 trc} 00000.733 II| TABLET_EXECUTOR: Leader{1:2:1293} starting compaction 00000.733 II| TABLET_EXECUTOR: Leader{1:2:1294} starting Scan{17 on 2, Compact{1.2.1293, eph 4}} 00000.733 II| TABLET_EXECUTOR: Leader{1:2:1294} started compaction 17 00000.733 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.1293, eph 4}} begin on TSubset{head 5, 1m 0p 0c} 00000.736 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.1293, eph 4}} end=Done, 111r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio 
Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 14 of 19 ~1p 00000.736 II| OPS_COMPACT: Compact{1.2.1293, eph 4} end=Done, 11 blobs 111r (max 111), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 14 +7, (167374 47177 180251)b }, ecr=1.000 00000.746 II| TABLET_EXECUTOR: Leader{1:2:1295} Compact 17 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 1293, product {1 parts epoch 5} done 00000.747 II| TABLET_EXECUTOR: Leader{1:2:1296} starting compaction 00000.747 II| TABLET_EXECUTOR: Leader{1:2:1297} starting Scan{19 on 2, Compact{1.2.1296, eph 4}} 00000.747 II| TABLET_EXECUTOR: Leader{1:2:1297} started compaction 19 00000.747 II| TABLET_OPS_HOST: Scan{19 on 2, Compact{1.2.1296, eph 4}} begin on TSubset{head 0, 0m 2p 0c} 00000.751 II| TABLET_OPS_HOST: Scan{19 on 2, Compact{1.2.1296, eph 4}} end=Done, 182r seen, TFwd{fetch=290KiB,saved=290KiB,usage=264KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=6}, trace 36 of 45 ~3p 00000.751 II| OPS_COMPACT: Compact{1.2.1296, eph 4} end=Done, 4 blobs 182r (max 219), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 36 +0, (250425 53902 309967)b }, ecr=1.000 00000.753 II| TABLET_EXECUTOR: Leader{1:2:1300} Compact 19 on TGenCompactionParams{2: gen 1 epoch 0, 2 parts} step 1296, product {1 parts epoch 0} done 00000.814 II| TABLET_EXECUTOR: Leader{1:2:1377} starting compaction 00000.814 II| TABLET_EXECUTOR: Leader{1:2:1378} starting Scan{21 on 3, Compact{1.2.1377, eph 4}} 00000.814 II| TABLET_EXECUTOR: Leader{1:2:1378} started compaction 21 00000.814 II| TABLET_OPS_HOST: Scan{21 on 3, Compact{1.2.1377, eph 4}} begin on TSubset{head 5, 1m 0p 0c} 00000.816 II| TABLET_OPS_HOST: Scan{21 on 3, Compact{1.2.1377, eph 4}} end=Done, 109r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 12 of 14 ~1p 00000.817 II| OPS_COMPACT: Compact{1.2.1377, eph 4} end=Done, 12 blobs 109r (max 109), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 12 +8, (183779 53533 165560)b }, ecr=1.000 00000.825 II| TABLET_EXECUTOR: Leader{1:2:1379} Compact 21 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 1377, product {1 parts epoch 5} done 00000.826 II| TABLET_EXECUTOR: Leader{1:2:1380} starting compaction 00000.826 II| TABLET_EXECUTOR: Leader{1:2:1381} starting Scan{23 on 3, Compact{1.2.1380, eph 4}} 00000.826 II| TABLET_EXECUTOR: Leader{1:2:1381} started compaction 23 00000.826 II| TABLET_OPS_HOST: Scan{23 on 3, Compact{1.2.1380, eph 4}} begin on TSubset{head 0, 0m 2p 0c} 00000.830 II| TABLET_OPS_HOST: Scan{23 on 3, Compact{1.2.1380, eph 4}} end=Done, 173r seen, TFwd{fetch=339KiB,saved=339KiB,usage=319KiB,after=6.65KiB,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=7}, trace 29 of 34 ~3p 00000.830 II| OPS_COMPACT: Compact{1.2.1380, eph 4} end=Done, 4 blobs 173r (max 222), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 29 +0, (268861 67130 237120)b }, ecr=1.000 00000.832 II| TABLET_EXECUTOR: Leader{1:2:1383} Compact 23 on TGenCompactionParams{3: gen 1 epoch 0, 2 parts} step ... 
DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [59:30:2062]) to queue queue_compaction_gen0 00000.011 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.390625 (insert task gen0-table-101-tablet-1 (1 by [59:30:2062])) 00000.012 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.012 II| TABLET_EXECUTOR: Leader{1:2:5} starting compaction 00000.012 II| TABLET_EXECUTOR: Leader{1:2:6} starting Scan{1 on 101, Compact{1.2.5, eph 1}} 00000.012 II| TABLET_EXECUTOR: Leader{1:2:6} started compaction 1 00000.012 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.021 II| TABLET_EXECUTOR: Leader{1:2:6} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 5, product {tx status + 1 parts epoch 2} done 00000.021 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.021 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.021 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.021 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [59:30:2062]) (release resources {1, 0}) 00000.021 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.390625 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [59:30:2062])) 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 8 for step 5 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 3 for step 6 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ...waiting until compacted 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} release 4194304b of static, Memory{0 dyn 0} ...hasTxData = 1 ...compacting 00000.022 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Mem, forced state None, forced mode Mem 00000.022 DD| RESOURCE_BROKER: Submitted new compaction_gen0 task gen0-table-101-tablet-1 (2 by [59:30:2062]) priority=5 resources={1, 0} 00000.022 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [59:30:2062]) to queue queue_compaction_gen0 00000.022 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (2 by [59:30:2062]) from queue queue_compaction_gen0 00000.022 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (2 by [59:30:2062]) to queue queue_compaction_gen0 00000.022 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.371094 (insert task gen0-table-101-tablet-1 (2 by [59:30:2062])) 00000.022 DD| TABLET_EXECUTOR: TGenCompactionStrategy 
PrepareCompaction for 1: task 2, edge 9223372036854775807/0, generation 0 00000.022 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00000.023 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{3 on 101, Compact{1.2.7, eph 1}} 00000.023 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 3 00000.023 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 3 generation 0 00000.023 II| TABLET_EXECUTOR: Leader{1:2:8} Compact 3 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 7, product {0 parts epoch 2} done 00000.023 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 3, generation 0 00000.023 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.023 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (2 by [59:30:2062]) (release resources {1, 0}) 00000.023 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.371094 to 0.000000 (remove task gen0-table-101-tablet-1 (2 by [59:30:2062])) 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 8 for step 7 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 3 for step 8 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:9} switch applied on followers, step 8 ...waiting until compacted 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} hope 1 -> done Change{6, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} release 4194304b of static, Memory{0 dyn 0} ...hasTxData = 0 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{6, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} release 4194304b of static, Memory{0 dyn 0} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.026 II| TABLET_EXECUTOR: Leader{1:2:10} suiciding, Waste{2:0, 478b +(3, 191b), 9 trc, -191b acc} 00000.028 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.028 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 258 bytes, 258 total, blobs: { [1:2:2:1:8192:84:0], [1:2:6:1:32768:124:0], [1:2:8:1:32768:50:0] } 00000.029 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 384 bytes, 384 total, blobs: { [1:2:5:1:12288:158:0], [1:2:3:1:24576:78:0], [1:2:4:1:24576:65:0], [1:2:9:1:24576:83:0] } 00000.029 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.029 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.029 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.029 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.029 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 239b, wait} done, Waste{2:0, 478b +(3, 191b), 9 trc} 00000.030 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 ... 
checking rows 00000.030 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.030 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.030 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.030 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} pin 0 (0 b) load 1 (55 b) 00000.030 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.030 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} postponed, loading 1 pages, 55 bytes, newly pinned 0 pages, 0 bytes 00000.031 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:5:1:12288:158:0] ok OK}, type 1 00000.031 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} activated 00000.031 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.031 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 8388608b of static, Memory{0 dyn 0} 00000.031 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.031 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 478b +(0, 0b), 1 trc, -191b acc} 00000.031 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 91b} miss {0 0b} in-memory miss {0 0b} 00000.031 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.031 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {796b, 12} 00000.032 II| FAKE_ENV: DS.1 gone, left {717b, 5}, put {1117b, 11} 00000.032 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.032 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.032 II| FAKE_ENV: All BS storage groups are stopped 00000.032 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.032 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 126}, stopped |99.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> Backup::ChangelogLargeData [GOOD] >> Backup::ChangelogManyCommits >> Backup::ChangelogManyCommits [GOOD] >> Backup::ChangelogSchema >> Backup::ChangelogSchema [GOOD] >> Backup::ChangelogSchemaAndData >> Backup::ChangelogSchemaAndData [GOOD] >> Backup::ChangelogSchemaNewColumn >> Backup::ChangelogSchemaNewColumn [GOOD] >> Backup::ChangelogTornWrite [GOOD] 
>> Backup::ExcludeTablet >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_pdisk_slot_size_in_units >> Backup::ExcludeTablet [GOOD] >> Backup::RecoveryModeKeepsData [GOOD] >> Backup::NoBackupTable >> Backup::NoBackupTable [GOOD] >> Backup::NoBackupColumn >> Backup::NoBackupColumn [GOOD] >> Backup::NewSnapshotChangelogSize >> Backup::NewSnapshotChangelogSize [GOOD] >> Bloom::Conf [GOOD] >> Bloom::Hashes >> Bloom::Hashes [GOOD] >> Bloom::Rater >> Bloom::Rater [GOOD] >> Bloom::Dipping >> Bloom::Dipping [GOOD] >> Bloom::Basics [GOOD] >> Bloom::Stairs >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single >> BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History >> BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsBTreeIndex::Mixed >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Single >> BuildStatsFlatIndex::Single [GOOD] >> BuildStatsFlatIndex::Single_Slices >> BuildStatsFlatIndex::Single_Slices [GOOD] >> BuildStatsFlatIndex::Single_History >> BuildStatsFlatIndex::Single_History [GOOD] >> BuildStatsFlatIndex::Single_History_Slices >> BuildStatsFlatIndex::Single_History_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups >> BuildStatsFlatIndex::Single_Groups [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices >> BuildStatsFlatIndex::Single_Groups_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups_History >> BuildStatsFlatIndex::Single_Groups_History [GOOD] >> BuildStatsFlatIndex::Single_Groups_History_Slices >> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsFlatIndex::Mixed >> BuildStatsFlatIndex::Mixed [GOOD] >> BuildStatsFlatIndex::Mixed_Groups >> BuildStatsFlatIndex::Mixed_Groups [GOOD] >> BuildStatsFlatIndex::Mixed_Groups_History >> BuildStatsFlatIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Serial >> BuildStatsFlatIndex::Serial [GOOD] >> BuildStatsFlatIndex::Serial_Groups >> BuildStatsFlatIndex::Serial_Groups [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History >> BuildStatsFlatIndex::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Single |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_pdisk_slot_size_in_units [GOOD] >> BuildStatsHistogram::Single [GOOD] >> BuildStatsHistogram::Single_Slices |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> BuildStatsHistogram::Single_Slices [GOOD] >> BuildStatsHistogram::Single_History |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_pdisk_slot_size_in_units [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TVersions::Wreck0Reverse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: 2025-12-23T19:18:31.290859Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... waiting for NKikimr::NSharedCache::TEvRequest 2025-12-23T19:18:31.293942Z node 1 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:1] 2025-12-23T19:18:31.293982Z node 1 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [1:5:2052] 2025-12-23T19:18:31.294046Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [1:5:2052] cookie 1 class Online from cache [ ] already requested [ ] to request [ 1 2 3 ] 2025-12-23T19:18:31.294103Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 366B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #1 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #1 (done) Checking fetches#1 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] ... 
waiting for results #1 2025-12-23T19:18:31.294894Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 1 2 3 ] 2025-12-23T19:18:31.294967Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [1:5:2052] class Online pages [ 1 2 3 ] cookie 1 2025-12-23T19:18:31.295022Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 366B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #1 (done) Checking results#1 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ 1 2 3 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ 1 2 3 ] 2025-12-23T19:18:31.353016Z node 2 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... waiting for NKikimr::NSharedCache::TEvRequest 2025-12-23T19:18:31.353520Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:1] 2025-12-23T19:18:31.353555Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [2:5:2052] 2025-12-23T19:18:31.353605Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [2:5:2052] cookie 1 class Online from cache [ ] already requested [ ] to request [ 1 2 3 ] 2025-12-23T19:18:31.353670Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 366B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2025-12-23T19:18:31.353753Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:2] 2025-12-23T19:18:31.353779Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:2] owner [2:5:2052] 2025-12-23T19:18:31.353812Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:2] owner [2:5:2052] cookie 2 class Online from cache [ ] already requested [ ] to request [ 4 5 ] 2025-12-23T19:18:31.353857Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 610B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest 2025-12-23T19:18:31.353912Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [2:6:2053] 2025-12-23T19:18:31.353976Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [2:6:2053] cookie 3 class Online from cache [ ] already requested [ ] to request [ 5 6 ] 2025-12-23T19:18:31.354061Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 854B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... 
blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2025-12-23T19:18:31.354168Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:2] owner [2:6:2053] 2025-12-23T19:18:31.354203Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:2] owner [2:6:2053] cookie 4 class Online from cache [ ] already requested [ ] to request [ 6 7 ] 2025-12-23T19:18:31.354248Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 1.07KiB EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #4 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #4 (done) Checking fetches#4 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 5 6 ] PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 6 7 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 5 6 ] PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 6 7 ] ... waiting for results #4 2025-12-23T19:18:31.354610Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status ERROR pages [ 1 2 3 ] 2025-12-23T19:18:31.354634Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1076: Drop page collection [1:0:256:0:0:0:1] error ERROR 2025-12-23T19:18:31.354660Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1020: Send page collection error [1:0:256:0:0:0:1] owner [2:5:2052] class Online error ERROR cookie 1 2025-12-23T19:18:31.354689Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1020: Send page collection error [1:0:256:0:0:0:1] owner [2:6:2053] class Online error ERROR cookie 3 2025-12-23T19:18:31.354733Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 732B EvictedInMemoryBytes: 0B ... waiting for results #4 (done) Checking results#4 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ ] PageCollection: [1:0:256:0:0:0:1] Cookie: 3 Pages: [ ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ ] PageCollection: [1:0:256:0:0:0:1] Cookie: 3 Pages: [ ] 2025-12-23T19:18:31.354945Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 5 6 ] 2025-12-23T19:18:31.354975Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 488B EvictedInMemoryBytes: 0B Checking results#4 Expected: Actual: ... 
waiting for results #4 2025-12-23T19:18:31.365407Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:2] status OK pages [ 6 7 ] 2025-12-23T19:18:31.365473Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:2] owner [2:6:2053] class Online pages [ 6 7 ] cookie 4 2025-12-23T19:18:31.365523Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 244B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B 2025-12-23T19:18:31.365552Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:2] status OK pages [ 4 5 ] 2025-12-23T19:18:31.365572Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:2] owner [2:5:2052] class Online pages [ 4 5 ] cookie 2 2025-12-23T19:18:31.365606Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 488B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #4 (done) Checking results#4 Expected: PageCollection: [1:0:256:0:0:0:2] Cookie: 2 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 4 Pages: [ 6 7 ] Actual: PageCollection: [1:0:256:0:0:0:2] Cookie: 2 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 4 Pages: [ 6 7 ] 2025-12-23T19:18:31.423209Z node 3 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... waiting for NKikimr::NSharedCache::TEvRequest 2025-12-23T19:18:31.423665Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:1] 2025-12-23T19:18:31.423720Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [3:5:2052] 2025-12-23T19:18:31.423787Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [3:5:2052] cookie 1 class AsyncLoad from cache [ ] already requested [ ] to request [ 1 2 3 4 5 ] 2025-12-23T19:18:31.423828Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:573: Request page collection [1:0:256:0:0:0:1] async queue pages [ 1 2 ] 2025-12-23T19:18:31.423898Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2025-12-23T19:18:31.423988Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:2] 2025-12-23T19:18:31.424014Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:2] owner [3:6:2053] 2025-12-23T19:18:31.424069Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:2] owner [3:6:2053] cookie 2 class AsyncLoad from cache [ ] already requested [ ] to request [ 1 2 3 ] 2025-12-23T19:18:31.424107Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B ... 
waiting for NKikimr::NSharedCache::TEvRequest (done) Checking fetches#2 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 1 2 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 1 2 ] ... waiting for fetches #2 2025-12-23T19:18:31.424248Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 1 2 ] 2025-12-23T19:18:31.424281Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:573: Request page collection [1:0:256:0:0:0:1] async queue pages [ 3 4 ] 2025-12-23T19:18:31.424384Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 244B Passive: 0B LoadInFly: 244B EvictedI ... 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.020 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.021 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} in-memory miss {0 0b} 00000.021 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.021 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.021 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.021 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.021 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.021 II| FAKE_ENV: All BS storage groups are stopped 00000.021 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.021 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:34.928880Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.005 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A cookie 0 00000.050 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... 
unblocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A 00000.051 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.052 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs hit {18 513007b} miss {0 0b} in-memory miss {0 0b} 00000.052 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.052 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {2095b, 23} 00000.052 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {210604b, 21} 00000.052 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {205178b, 4} 00000.052 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {102690b, 4} 00000.052 II| FAKE_ENV: All BS storage groups are stopped 00000.052 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.052 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:34.986058Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.005 II| FAKE_ENV: Starting storage for BS group 2 00000.005 II| FAKE_ENV: Starting storage for BS group 3 00000.030 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.031 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 307329b} miss {0 0b} in-memory miss {0 0b} 00000.031 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.031 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1828b, 23} 00000.031 II| FAKE_ENV: DS.1 gone, left {1247b, 3}, put {311467b, 22} 00000.031 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.031 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.031 II| FAKE_ENV: All BS storage groups are stopped 00000.031 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.031 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:35.022177Z 00000.004 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.005 II| FAKE_ENV: Starting storage for BS group 2 00000.005 II| FAKE_ENV: Starting storage for BS group 3 00000.026 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 5 actors 00000.027 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {8 307836b} miss {0 0b} in-memory miss {0 0b} 00000.028 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.028 II| FAKE_ENV: DS.0 gone, left {57b, 2}, put {1436b, 31} 00000.028 II| FAKE_ENV: DS.1 gone, left {629b, 3}, put {310476b, 16} 00000.028 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.028 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.028 II| FAKE_ENV: All BS storage groups are stopped 00000.028 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.028 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:35.055295Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.026 II| FAKE_ENV: Model starts hard 
shutdown on level 7 of 8, left 3 actors 00000.027 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 194646b} miss {0 0b} in-memory miss {0 0b} 00000.027 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.027 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1766b, 27} 00000.027 II| FAKE_ENV: DS.1 gone, left {732b, 6}, put {197813b, 24} 00000.027 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.027 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.027 II| FAKE_ENV: All BS storage groups are stopped 00000.027 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.027 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:35.087533Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.010 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.011 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.011 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.011 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {326b, 7} 00000.011 II| FAKE_ENV: DS.1 gone, left {418b, 4}, put {453b, 5} 00000.011 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.011 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.011 II| FAKE_ENV: All BS storage groups are stopped 00000.011 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.011 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:35.104014Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A cookie 0 00000.041 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... 
unblocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A 00000.042 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.043 NN| TABLET_SAUSAGECACHE: Poison cache serviced 6 reqs hit {8 410030b} miss {0 0b} in-memory miss {0 0b} 00000.043 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.043 II| FAKE_ENV: DS.1 gone, left {504b, 4}, put {310786b, 20} 00000.043 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.043 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.043 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1490b, 23} 00000.043 II| FAKE_ENV: All BS storage groups are stopped 00000.043 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 15.00s 00000.043 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:35.152318Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.016 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.017 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} in-memory miss {0 0b} 00000.017 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.017 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.017 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.017 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.017 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.017 II| FAKE_ENV: All BS storage groups are stopped 00000.017 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.017 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:35.177926Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbageResult from FAKE_ENV_A to FLAT_EXECUTOR cookie 0 00000.035 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbageResult from FAKE_ENV_A to FLAT_EXECUTOR ... waiting for NKikimr::TEvBlobStorage::TEvCollectGarbageResult ... 
waiting for NKikimr::TEvBlobStorage::TEvCollectGarbageResult (done) 00000.046 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.047 NN| TABLET_SAUSAGECACHE: Poison cache serviced 5 reqs hit {8 205278b} miss {0 0b} in-memory miss {0 0b} 00000.047 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.047 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1353b, 17} 00000.047 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {105547b, 14} 00000.047 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {102560b, 2} 00000.047 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {116b, 2} 00000.047 II| FAKE_ENV: All BS storage groups are stopped 00000.047 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.047 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:35.233897Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.005 II| FAKE_ENV: Starting storage for BS group 2 00000.005 II| FAKE_ENV: Starting storage for BS group 3 00000.224 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled 00000.235 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.236 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.236 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.236 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {501b, 9} 00000.236 II| FAKE_ENV: DS.1 gone, left {425b, 4}, put {460b, 5} 00000.236 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.236 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.236 II| FAKE_ENV: All BS storage groups are stopped 00000.236 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 17.71s 00000.236 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped |99.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_group_size_in_units >> BuildStatsHistogram::Single_History [GOOD] >> BuildStatsHistogram::Single_History_Slices |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> BuildStatsHistogram::Single_History_Slices [GOOD] >> BuildStatsHistogram::Five_Five_Mixed |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_group_size_in_units [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> BuildStatsHistogram::Five_Five_Crossed |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTopicReaderTests::TestRun_ReadOneMessage >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_1_Level >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_group_size_in_units [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark >> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext 
[GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random [GOOD] >> TS3FIFOCache::InsertUntouched [GOOD] >> TS3FIFOCache::EnsureLimits [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential >> TScreen::Sequential [GOOD] >> TScreen::Random |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [FAIL] >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> TSharedPageCache::Limits_Config [GOOD] >> TSharedPageCache::S3FIFO >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited >> TSharedPageCache::S3FIFO [GOOD] >> TSharedPageCache::BigCache_BTreeIndex >> TSharedPageCache::BigCache_BTreeIndex [GOOD] >> TSharedPageCache::BigCache_FlatIndex >> TSharedPageCache::BigCache_FlatIndex [GOOD] >> TSharedPageCache::MiddleCache_BTreeIndex |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TSharedPageCache::MiddleCache_BTreeIndex [GOOD] >> TSharedPageCache::MiddleCache_FlatIndex >> TSharedPageCache::MiddleCache_FlatIndex [GOOD] >> TSharedPageCache::ZeroCache_BTreeIndex >> TSharedPageCache::ZeroCache_BTreeIndex [GOOD] >> TSharedPageCache::ZeroCache_FlatIndex >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] >> TSharedPageCache::ZeroCache_FlatIndex [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Basics >> TSharedPageCache::TryKeepInMemoryMode_Basics [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Enabling >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Enabling [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Disabling >> test_vdisks.py::TestTinyVDisks::test_enabled_disabled >> TSharedPageCache::TryKeepInMemoryMode_Disabling [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_AfterCompaction |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TSharedPageCache::TryKeepInMemoryMode_AfterCompaction [GOOD] >> TSharedPageCache_Actor::Attach_Basics [GOOD] >> TSharedPageCache_Actor::Attach_Request >> TSharedPageCache_Actor::Attach_Request [GOOD] >> TSharedPageCache_Actor::Detach_Basics >> TSharedPageCache_Actor::Detach_Basics [GOOD] >> TSharedPageCache_Actor::Detach_Cached [GOOD] >> TSharedPageCache_Actor::Detach_Expired >> TSharedPageCache_Actor::Detach_Expired [GOOD] >> TSharedPageCache_Actor::Detach_InFly >> TSharedPageCache_Actor::Detach_InFly [GOOD] >> TSharedPageCache_Actor::Detach_Queued [GOOD] >> TSharedPageCache_Actor::InMemory_Basics 
>> TSharedPageCache_Actor::InMemory_Basics [GOOD] >> TSharedPageCache_Actor::InMemory_NotEnoughMemory >> TSharedPageCache_Actor::InMemory_NotEnoughMemory [GOOD] >> TSharedPageCache_Actor::InMemory_Enabling >> TSharedPageCache_Actor::InMemory_Enabling [GOOD] >> TSharedPageCache_Actor::InMemory_Enabling_AllRequested >> TSharedPageCache_Actor::InMemory_Enabling_AllRequested [GOOD] >> TSharedPageCache_Actor::InMemory_Disabling >> TSharedPageCache_Actor::InMemory_Disabling [GOOD] >> TSharedPageCache_Actor::InMemory_Detach >> TSharedPageCache_Actor::InMemory_Detach [GOOD] >> TSharedPageCache_Actor::InMemory_MoveEvictedToInMemory >> TSharedPageCache_Actor::InMemory_MoveEvictedToInMemory [GOOD] >> TSharedPageCache_Actor::InMemory_MoveEvictedToRegular >> TSharedPageCache_Actor::InMemory_MoveEvictedToRegular [GOOD] >> TSharedPageCache_Actor::InMemory_AttachRepeated >> TSharedPageCache_Actor::InMemory_AttachRepeated [GOOD] >> TSharedPageCache_Actor::GC_Manual >> TSharedPageCache_Actor::GC_Manual [GOOD] >> TSharedPageCache_Actor::GC_Scheduled >> TSharedPageCache_Actor::GC_Scheduled [GOOD] >> TSharedPageCache_Actor::Evict_Active >> TSharedPageCache_Actor::Evict_Active [GOOD] >> TSharedPageCache_Actor::Evict_Passive >> TSharedPageCache_Actor::Evict_Passive [GOOD] >> BuildStatsHistogram::Many_Mixed [GOOD] >> BuildStatsHistogram::Many_Serial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TSharedPageCache_Actor::Evict_Passive [GOOD] Test command err: 0.29132 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:20:58.351994Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.007 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.008 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} queued, type NKikimr::NSharedCache::TTxInitSchema 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 500b annex 0, ~{ } -{ }, 0 gb} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.009 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.009 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.009 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.009 NN| TABLET_SAUSAGECACHE: Update config MemoryLimit: 8388608 00000.009 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.010 DD| 
TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{2, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{3, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.011 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{4, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{5, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{6, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{7, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, 
NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{8, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{9, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{10, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{11, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{12, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.020 DD| 
TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{13, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{14, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:16} commited cookie 1 for step 15 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{15, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:17} commited cookie 1 for step 16 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{16, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:18} commited cookie 1 for step 17 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{17, r ... :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #5 (done) Checking results#5 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 5 Pages: [ 13 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 5 Pages: [ 13 ] ... 
waiting for NKikimr::NSharedCache::TEvRequest 2025-12-23T19:21:07.360350Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 6 class Online from cache [ ] already requested [ ] to request [ 14 ] 2025-12-23T19:21:07.360386Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 13 ] owner [33:5:2052] 2025-12-23T19:21:07.360412Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #6 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #6 (done) Checking fetches#6 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 14 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 14 ] ... waiting for results #6 2025-12-23T19:21:07.360520Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 14 ] 2025-12-23T19:21:07.360551Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 14 ] cookie 6 2025-12-23T19:21:07.360596Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #6 (done) Checking results#6 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 6 Pages: [ 14 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 6 Pages: [ 14 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2025-12-23T19:21:07.360673Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 7 class Online from cache [ ] already requested [ ] to request [ 15 ] 2025-12-23T19:21:07.360727Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 14 ] owner [33:5:2052] 2025-12-23T19:21:07.360749Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #7 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #7 (done) Checking fetches#7 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 15 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 15 ] ... waiting for results #7 2025-12-23T19:21:07.360847Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 15 ] 2025-12-23T19:21:07.360874Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 15 ] cookie 7 2025-12-23T19:21:07.360915Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #7 (done) Checking results#7 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 7 Pages: [ 15 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 7 Pages: [ 15 ] ... 
waiting for NKikimr::NSharedCache::TEvRequest 2025-12-23T19:21:07.360997Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 8 class Online from cache [ ] already requested [ ] to request [ 16 ] 2025-12-23T19:21:07.361051Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 15 ] owner [33:5:2052] 2025-12-23T19:21:07.361075Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #8 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #8 (done) Checking fetches#8 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 16 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 16 ] ... waiting for results #8 2025-12-23T19:21:07.361183Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 16 ] 2025-12-23T19:21:07.361204Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 16 ] cookie 8 2025-12-23T19:21:07.361227Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #8 (done) Checking results#8 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 8 Pages: [ 16 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 8 Pages: [ 16 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2025-12-23T19:21:07.361345Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 9 class Online from cache [ ] already requested [ ] to request [ 17 ] 2025-12-23T19:21:07.361387Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 16 ] owner [33:5:2052] 2025-12-23T19:21:07.361410Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #9 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #9 (done) Checking fetches#9 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 17 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 17 ] ... waiting for results #9 2025-12-23T19:21:07.361492Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 17 ] 2025-12-23T19:21:07.361509Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 17 ] cookie 9 2025-12-23T19:21:07.361531Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #9 (done) Checking results#9 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 9 Pages: [ 17 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 9 Pages: [ 17 ] ... 
waiting for NKikimr::NSharedCache::TEvRequest 2025-12-23T19:21:07.361617Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 10 class Online from cache [ ] already requested [ ] to request [ 18 ] 2025-12-23T19:21:07.361647Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 17 ] owner [33:5:2052] 2025-12-23T19:21:07.361668Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #10 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #10 (done) Checking fetches#10 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] ... waiting for results #10 2025-12-23T19:21:07.361748Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 18 ] 2025-12-23T19:21:07.361766Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 18 ] cookie 10 2025-12-23T19:21:07.361789Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #10 (done) Checking results#10 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2025-12-23T19:21:07.361887Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 11 class Online from cache [ ] already requested [ ] to request [ 19 ] 2025-12-23T19:21:07.361944Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 18 ] owner [33:5:2052] 2025-12-23T19:21:07.361975Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #11 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #11 (done) Checking fetches#11 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 19 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 19 ] ... waiting for results #11 2025-12-23T19:21:07.362084Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 19 ] 2025-12-23T19:21:07.362108Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 19 ] cookie 11 2025-12-23T19:21:07.362132Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for results #11 (done) Checking results#11 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 11 Pages: [ 19 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 11 Pages: [ 19 ] ... waiting for NActors::TEvents::TEvWakeup 2025-12-23T19:21:07.362223Z node 33 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:865: Wakeup DoGCManual 2025-12-23T19:21:07.362259Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NActors::TEvents::TEvWakeup (done) ... waiting for NActors::TEvents::TEvWakeup 2025-12-23T19:21:07.362343Z node 33 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:865: Wakeup DoGCManual 2025-12-23T19:21:07.362390Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 1 2 3 ] owner [33:5:2052] 2025-12-23T19:21:07.362440Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NActors::TEvents::TEvWakeup (done) |99.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> BuildStatsHistogram::Many_Serial [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-12-23T19:20:48.662718Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7587135529920106367:2079];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:20:48.662824Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:20:48.691291Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-12-23T19:20:48.691906Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7587135528570243685:2078];send_to=[0:7307199536658146131:7762515]; 2025-12-23T19:20:48.692924Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-12-23T19:20:48.700837Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath for PDisk # /home/runner/.ya/build/build_root/8ptz/005f05/r3tmp/tmpWDMElS/pdisk_1.dat 2025-12-23T19:20:48.856295Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:20:48.863308Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-12-23T19:20:48.884107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:20:48.884184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:20:48.885857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-12-23T19:20:48.885944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-12-23T19:20:48.906995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:20:48.912194Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-12-23T19:20:48.913371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-12-23T19:20:48.962099Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6789, node 1 2025-12-23T19:20:49.009152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/8ptz/005f05/r3tmp/yandexH3qql1.tmp 2025-12-23T19:20:49.009177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/8ptz/005f05/r3tmp/yandexH3qql1.tmp 2025-12-23T19:20:49.009370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/8ptz/005f05/r3tmp/yandexH3qql1.tmp 2025-12-23T19:20:49.009454Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2025-12-23T19:20:49.043798Z INFO: TTestServer started on Port 30638 GrpcPort 6789 2025-12-23T19:20:49.094716Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-12-23T19:20:49.130440Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:30638 PQClient connected to localhost:6789 === TenantModeEnabled() = 0 === Init PQ - start server on port 6789 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 1766517648980 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 7205759... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-12-23T19:20:49.339744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-12-23T19:20:49.339881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T19:20:49.340060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-12-23T19:20:49.340083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5552: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-12-23T19:20:49.340202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-12-23T19:20:49.340277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-12-23T19:20:49.342175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 
PathId: 1, at schemeshard: 72057594046644480 2025-12-23T19:20:49.342293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-12-23T19:20:49.342445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T19:20:49.342479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-12-23T19:20:49.342491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-12-23T19:20:49.342500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-12-23T19:20:49.343447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:20:49.343469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-12-23T19:20:49.343496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-12-23T19:20:49.343980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T19:20:49.344008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-12-23T19:20:49.344020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2025-12-23T19:20:49.345415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T19:20:49.345444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-12-23T19:20:49.345462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-12-23T19:20:49.345478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-12-23T19:20:49.348554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-12-23T19:20:49.349624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 
2025-12-23T19:20:49.349735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-12-23T19:20:49.351129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1766517649393, transactions count in step: 1, at schemeshard: 72057594046644480 2025-12-23T19:20:49.351228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1766517649393 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-12-23T19:20:49.351258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-12-23T19:20:49.351521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl ... TabletId: 72075186224037897 Generation: 1, pipe: [5:7587135660108962490:2569] 2025-12-23T19:21:19.710832Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/user_5_1_16283602025828564303_v1:1 with generation 1 2025-12-23T19:21:19.712966Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 4 WriteTimestampMS: 1766517679596 CreateTimestampMS: 1766517679595 SizeLag: 280 WriteTimestampEstimateMS: 1766517679698 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-12-23T19:21:19.713008Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 2025-12-23T19:21:19.713082Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 sending to client partition status 2025-12-23T19:21:19.713785Z :INFO: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-12-23T19:21:19.714336Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-12-23T19:21:19.714454Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:551: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-12-23T19:21:19.714500Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1024: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-12-23T19:21:19.714526Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:970: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2025-12-23T19:21:19.714592Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2343: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1766517679596, sizeLag# 280 2025-12-23T19:21:19.714612Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2354: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1TEvPartitionReady. Aval parts: 1 2025-12-23T19:21:19.714649Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2277: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 performing read request: guid# c0d20ca5-1e3b9ad5-62783c83-4ab2f2c, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 336, partitionsAsked# 1, maxTimeLag# 0ms 2025-12-23T19:21:19.714767Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1396: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 336 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid c0d20ca5-1e3b9ad5-62783c83-4ab2f2c 2025-12-23T19:21:19.716100Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1766517679596 CreateTimestampMS: 1766517679595 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1766517679641 CreateTimestampMS: 1766517679595 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1766517679641 CreateTimestampMS: 1766517679595 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 4 WriteTimestampMS: 1766517679641 CreateTimestampMS: 1766517679595 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551408 RealReadOffset: 3 WaitQuotaTimeMs: 0 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2025-12-23T19:21:19.716267Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2025-12-23T19:21:19.716307Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid c0d20ca5-1e3b9ad5-62783c83-4ab2f2c has messages 1 2025-12-23T19:21:19.716377Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 read done: guid# c0d20ca5-1e3b9ad5-62783c83-4ab2f2c, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 412 2025-12-23T19:21:19.716407Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 response to read: guid# c0d20ca5-1e3b9ad5-62783c83-4ab2f2c 2025-12-23T19:21:19.716586Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 Process answer. Aval parts: 0 2025-12-23T19:21:19.716842Z :DEBUG: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] [] Got ReadResponse, serverBytesSize = 412, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428388 2025-12-23T19:21:19.716968Z :DEBUG: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428388 2025-12-23T19:21:19.717253Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-3) 2025-12-23T19:21:19.717315Z :DEBUG: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] [] Returning serverBytesSize = 412 to budget 2025-12-23T19:21:19.717351Z :DEBUG: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] [] In ContinueReadingDataImpl, ReadSizeBudget = 412, ReadSizeServerDelta = 52428388 2025-12-23T19:21:19.717602Z :DEBUG: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-12-23T19:21:19.717742Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-12-23T19:21:19.717799Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-12-23T19:21:19.717828Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-12-23T19:21:19.717852Z :DEBUG: [] Take Data. Partition 0. Read: {1, 2} (3-3) 2025-12-23T19:21:19.717893Z :DEBUG: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] [] The application data is transferred to the client. 
Number of messages 4, size 32 bytes 2025-12-23T19:21:19.717869Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 grpc read done: success# 1, data# { read_request { bytes_size: 412 } } 2025-12-23T19:21:19.717940Z :DEBUG: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] [] Returning serverBytesSize = 0 to budget 2025-12-23T19:21:19.717985Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 got read request: guid# 6e2ac04f-6fe97da4-613e9793-504948bc 2025-12-23T19:21:19.718046Z :DEBUG: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] [] Requesting status for partition stream id: 1 2025-12-23T19:21:19.718406Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 grpc read done: success# 1, data# { partition_session_status_request { partition_session_id: 1 } } 2025-12-23T19:21:19.718547Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 sending to client partition status 2025-12-23T19:21:19.818202Z :INFO: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] Closing read session. Close timeout: 0.000000s 2025-12-23T19:21:19.818287Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:3:0 2025-12-23T19:21:19.818339Z :INFO: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 119 BytesRead: 32 MessagesRead: 4 BytesReadCompressed: 32 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-12-23T19:21:19.818436Z :NOTICE: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-12-23T19:21:19.818498Z :DEBUG: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] [] Abort session to cluster 2025-12-23T19:21:19.818941Z :NOTICE: [] [] [30d2a29c-bcf2bfcb-3b35e220-1d94937a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-12-23T19:21:19.820008Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 grpc read done: success# 0, data# { } 2025-12-23T19:21:19.820042Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 grpc read failed 2025-12-23T19:21:19.820084Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 grpc closed 2025-12-23T19:21:19.820121Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_5_1_16283602025828564303_v1 is DEAD 2025-12-23T19:21:19.821345Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_5_1_16283602025828564303_v1 2025-12-23T19:21:19.821402Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037898][rt3.dc1--topic1] pipe [5:7587135660108962487:2566] disconnected. 2025-12-23T19:21:19.821435Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037898][rt3.dc1--topic1] pipe [5:7587135660108962487:2566] disconnected; active server actors: 1 2025-12-23T19:21:19.821451Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037898][rt3.dc1--topic1] pipe [5:7587135660108962487:2566] client user disconnected session shared/user_5_1_16283602025828564303_v1 |99.8%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest |99.8%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:52.114593Z ...starting tablet 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.013 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpSBAlPJ/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.014 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpSBAlPJ/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...restarting tablet 00000.017 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpSBAlPJ/dummy/1/backup_19700101000000Z_g3_s2/snapshot 00000.018 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpSBAlPJ/dummy/1/backup_19700101000000Z_g3_s2/changelog.json ...restarting tablet again 00000.021 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpSBAlPJ/dummy/1/backup_19700101000000Z_g4_s2/snapshot 00000.021 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpSBAlPJ/dummy/1/backup_19700101000000Z_g4_s2/changelog.json 00000.022 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.022 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.022 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.022 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {186b, 6} 00000.022 II| FAKE_ENV: DS.1 gone, left {105b, 3}, put {105b, 3} 00000.022 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.022 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.022 II| FAKE_ENV: All BS storage groups are stopped 00000.022 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.023 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 21}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:52.141753Z ...starting tablet 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.007 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmp0h1u9A/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.010 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmp0h1u9A/dummy/1/backup_19700101000000Z_g2_s2/changelog.json 00000.115 C1| TABLET_EXECUTOR: Tablet 1 unhandled exception yexception: ydb/core/tablet_flat/flat_executor.cpp:5171: Backup snapshot failed: Failed to create snapshot dir 
/home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmp0h1u9A/dummy/1/backup_19700101000000Z_g2_s2/snapshot: (Error 13: Permission denied) util/folder/path.cpp:424: could not create directory /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmp0h1u9A/dummy/1 ??+0 (0x1238147D) __cxa_throw+221 (0x1238129D) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&)+1049 (0x187E4509) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+1443 (0x187698D3) NActors::IActor::Receive(TAutoPtr&)+744 (0x13AB0338) ??+0 (0x1238147D) __cxa_rethrow_primary_exception+340 (0x123816C4) std::rethrow_exception(std::exception_ptr)+28 (0x123C343C) NActors::IActorExceptionHandler::OnUnhandledException(std::exception_ptr const&)+183 (0x111E0507) ...waiting tablet death 00000.116 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.116 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.116 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.116 II| FAKE_ENV: DS.0 gone, left {62b, 2}, put {62b, 2} 00000.116 II| FAKE_ENV: DS.1 gone, left {35b, 1}, put {35b, 1} 00000.116 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.116 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.116 II| FAKE_ENV: All BS storage groups are stopped 00000.116 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.116 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 1 Error 0 Left 17}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:52.261906Z ...starting tablet 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.007 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpukbL49/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.008 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpukbL49/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...initing schema 00000.010 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...restarting tablet 00000.017 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpukbL49/dummy/1/backup_19700101000000Z_g3_s2/snapshot 00000.023 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpukbL49/dummy/1/backup_19700101000000Z_g3_s2/changelog.json 00000.024 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.024 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot ...restarting dummy tablet in recovery mode ...restoring backup ...restarting tablet in normal mode 00000.058 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpukbL49/dummy/1/backup_19700101000000Z_g5_s2/snapshot 00000.061 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpukbL49/dummy/1/backup_19700101000000Z_g5_s2/changelog.json 00000.062 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.062 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.065 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 
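A note on the unhandled-exception block above: the local DB backup path gives up because the snapshot directory cannot be created — util/folder/path.cpp reports "(Error 13: Permission denied)", i.e. a plain EACCES from the filesystem, and the executor re-raises it through OnUnhandledException, which matches the "Crit 1" in that instance's final log counters. As a rough illustration only (this is not YDB source; the read-only parent directory is an assumed precondition, presumably arranged by the test), the same class of error can be reproduced with a plain mkdir call:

    // Illustrative sketch, not YDB code: creating a directory under a read-only
    // parent yields the "(Error 13: Permission denied)" text seen in the log above.
    #include <sys/stat.h>
    #include <cerrno>
    #include <cstdio>
    #include <cstring>

    int main() {
        // Assumption for the example: /tmp/ro_parent exists and was chmod'ed to 0555.
        const char* dir = "/tmp/ro_parent/snapshot";
        if (mkdir(dir, 0755) != 0) {
            // errno == EACCES (13) when the parent forbids writes; ENOENT if it is missing.
            std::printf("could not create directory %s: (Error %d: %s)\n",
                        dir, errno, std::strerror(errno));
            return 1;
        }
        return 0;
    }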
00000.065 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.066 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.066 II| FAKE_ENV: DS.0 gone, left {68b, 3}, put {440b, 14} 00000.066 II| FAKE_ENV: DS.1 gone, left {395b, 2}, put {860b, 6} 00000.066 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.066 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.066 II| FAKE_ENV: All BS storage groups are stopped 00000.066 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.001s 00000.066 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 26}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:52.341123Z ...starting tablet 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.008 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpj7wQEg/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.009 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpj7wQEg/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...initing schema 00000.010 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing two columns 00000.011 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.012 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing two columns simultaneously 00000.013 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...erasing row 00000.013 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.013 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...replacing row 00000.014 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.014 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing different values in one column 00000.015 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.015 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing composite primary key 00000.016 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...restarting tablet 00000.022 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpj7wQEg/dummy/1/backup_19700101000000Z_g3_s2/snapshot 00000.025 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpj7wQEg/dummy/1/backup_19700101000000Z_g3_s2/changelog.json 00000.025 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.026 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot ...restarting dummy tablet in recovery mode ...restoring backup ...restarting tablet in normal mode 00000.062 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpj7wQEg/dummy/1/backup_19700101000000Z_g5_s2/changelog.json 00000.062 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpj7wQEg/dummy/1/backup_19700101000000Z_g5_s2/snapshot 00000.066 DD| 
LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.067 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.075 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.075 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.076 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.076 II| FAKE_ENV: DS.0 gone, left {68b, 3}, put {1268b, 26} 00000.076 II| FAKE_ENV: DS.1 gone, left {668b, 4}, put {2057b, 18} 00000.076 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.076 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.076 II| FAKE_ENV: All BS storage groups are stopped 00000.076 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.001s 00000.076 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 36}, stopped 00000.000 II| FAKE_ENV: Born at 2025-12-23T19:18:52.422688Z ...starting tablet 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.008 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpzTZED4/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.008 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/8ptz/00620a/r3tmp/tmpzTZED4/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...initing schema 00000.010 DD| LOCAL_DB_BACKUP: Handle NKikimr::NT ... 140, NULL) (311209, NULL) (311281, NULL) (311344, NULL) (311416, NULL) [0:0:935:0:0:0:0] 100 rows, 100 pages, 4 levels: (311479, NULL) (311542, NULL) (311614, NULL) (311683, NULL) (311755, NULL) [0:0:936:0:0:0:0] 100 rows, 100 pages, 4 levels: (311821, NULL) (311890, NULL) (311956, NULL) (312034, NULL) (312100, NULL) [0:0:937:0:0:0:0] 100 rows, 100 pages, 4 levels: (312172, NULL) (312232, NULL) (312301, NULL) (312370, NULL) (312439, NULL) [0:0:938:0:0:0:0] 100 rows, 100 pages, 4 levels: (312508, NULL) (312571, NULL) (312637, NULL) (312700, NULL) (312760, NULL) [0:0:939:0:0:0:0] 100 rows, 100 pages, 4 levels: (312835, NULL) (312904, NULL) (312970, NULL) (313030, NULL) (313102, NULL) [0:0:940:0:0:0:0] 100 rows, 100 pages, 4 levels: (313174, NULL) (313240, NULL) (313300, NULL) (313366, NULL) (313429, NULL) [0:0:941:0:0:0:0] 100 rows, 100 pages, 4 levels: (313498, NULL) (313573, NULL) (313639, NULL) (313699, NULL) (313768, NULL) [0:0:942:0:0:0:0] 100 rows, 100 pages, 4 levels: (313828, NULL) (313891, NULL) (313957, NULL) (314023, NULL) (314086, NULL) [0:0:943:0:0:0:0] 100 rows, 100 pages, 4 levels: (314149, NULL) (314212, NULL) (314275, NULL) (314338, NULL) (314401, NULL) [0:0:944:0:0:0:0] 100 rows, 100 pages, 4 levels: (314464, NULL) (314530, NULL) (314590, NULL) (314656, NULL) (314719, NULL) [0:0:945:0:0:0:0] 100 rows, 100 pages, 4 levels: (314788, NULL) (314854, NULL) (314920, NULL) (314983, NULL) (315046, NULL) [0:0:946:0:0:0:0] 100 rows, 100 pages, 4 levels: (315109, NULL) (315178, NULL) (315238, NULL) (315304, NULL) (315370, NULL) [0:0:947:0:0:0:0] 100 rows, 100 pages, 4 levels: (315433, NULL) (315496, NULL) (315565, NULL) (315631, NULL) (315697, NULL) [0:0:948:0:0:0:0] 100 rows, 100 pages, 4 levels: (315766, NULL) (315826, NULL) (315889, NULL) (315952, NULL) (316024, NULL) [0:0:949:0:0:0:0] 100 rows, 100 pages, 4 
levels: (316087, NULL) (316156, NULL) (316222, NULL) (316288, NULL) (316357, NULL) [0:0:950:0:0:0:0] 100 rows, 100 pages, 4 levels: (316432, NULL) (316498, NULL) (316564, NULL) (316636, NULL) (316705, NULL) [0:0:951:0:0:0:0] 100 rows, 100 pages, 4 levels: (316768, NULL) (316831, NULL) (316891, NULL) (316951, NULL) (317011, NULL) [0:0:952:0:0:0:0] 100 rows, 100 pages, 4 levels: (317080, NULL) (317143, NULL) (317218, NULL) (317287, NULL) (317356, NULL) [0:0:953:0:0:0:0] 100 rows, 100 pages, 4 levels: (317422, NULL) (317497, NULL) (317563, NULL) (317632, NULL) (317701, NULL) [0:0:954:0:0:0:0] 100 rows, 100 pages, 4 levels: (317764, NULL) (317824, NULL) (317887, NULL) (317953, NULL) (318019, NULL) [0:0:955:0:0:0:0] 100 rows, 100 pages, 4 levels: (318088, NULL) (318166, NULL) (318235, NULL) (318304, NULL) (318370, NULL) [0:0:956:0:0:0:0] 100 rows, 100 pages, 4 levels: (318442, NULL) (318511, NULL) (318574, NULL) (318640, NULL) (318703, NULL) [0:0:957:0:0:0:0] 100 rows, 100 pages, 4 levels: (318772, NULL) (318838, NULL) (318898, NULL) (318970, NULL) (319036, NULL) [0:0:958:0:0:0:0] 100 rows, 100 pages, 4 levels: (319099, NULL) (319162, NULL) (319225, NULL) (319294, NULL) (319360, NULL) [0:0:959:0:0:0:0] 100 rows, 100 pages, 4 levels: (319423, NULL) (319492, NULL) (319555, NULL) (319621, NULL) (319687, NULL) [0:0:960:0:0:0:0] 100 rows, 100 pages, 4 levels: (319753, NULL) (319828, NULL) (319900, NULL) (319963, NULL) (320035, NULL) [0:0:961:0:0:0:0] 100 rows, 100 pages, 4 levels: (320104, NULL) (320164, NULL) (320233, NULL) (320299, NULL) (320365, NULL) [0:0:962:0:0:0:0] 100 rows, 100 pages, 4 levels: (320428, NULL) (320500, NULL) (320569, NULL) (320629, NULL) (320698, NULL) [0:0:963:0:0:0:0] 100 rows, 100 pages, 4 levels: (320764, NULL) (320833, NULL) (320893, NULL) (320959, NULL) (321019, NULL) [0:0:964:0:0:0:0] 100 rows, 100 pages, 4 levels: (321085, NULL) (321151, NULL) (321214, NULL) (321277, NULL) (321352, NULL) [0:0:965:0:0:0:0] 100 rows, 100 pages, 4 levels: (321421, NULL) (321493, NULL) (321562, NULL) (321631, NULL) (321691, NULL) [0:0:966:0:0:0:0] 100 rows, 100 pages, 4 levels: (321757, NULL) (321823, NULL) (321886, NULL) (321949, NULL) (322009, NULL) [0:0:967:0:0:0:0] 100 rows, 100 pages, 4 levels: (322081, NULL) (322159, NULL) (322225, NULL) (322294, NULL) (322363, NULL) [0:0:968:0:0:0:0] 100 rows, 100 pages, 4 levels: (322429, NULL) (322498, NULL) (322564, NULL) (322642, NULL) (322711, NULL) [0:0:969:0:0:0:0] 100 rows, 100 pages, 4 levels: (322783, NULL) (322846, NULL) (322915, NULL) (322978, NULL) (323041, NULL) [0:0:970:0:0:0:0] 100 rows, 100 pages, 4 levels: (323104, NULL) (323164, NULL) (323230, NULL) (323305, NULL) (323368, NULL) [0:0:971:0:0:0:0] 100 rows, 100 pages, 4 levels: (323434, NULL) (323506, NULL) (323569, NULL) (323632, NULL) (323707, NULL) [0:0:972:0:0:0:0] 100 rows, 100 pages, 4 levels: (323776, NULL) (323851, NULL) (323917, NULL) (323986, NULL) (324052, NULL) [0:0:973:0:0:0:0] 100 rows, 100 pages, 4 levels: (324115, NULL) (324184, NULL) (324256, NULL) (324316, NULL) (324379, NULL) [0:0:974:0:0:0:0] 100 rows, 100 pages, 4 levels: (324442, NULL) (324502, NULL) (324568, NULL) (324631, NULL) (324703, NULL) [0:0:975:0:0:0:0] 100 rows, 100 pages, 4 levels: (324769, NULL) (324838, NULL) (324904, NULL) (324973, NULL) (325033, NULL) [0:0:976:0:0:0:0] 100 rows, 100 pages, 4 levels: (325105, NULL) (325174, NULL) (325234, NULL) (325297, NULL) (325363, NULL) [0:0:977:0:0:0:0] 100 rows, 100 pages, 4 levels: (325438, NULL) (325504, NULL) (325570, NULL) (325630, NULL) (325699, NULL) 
[0:0:978:0:0:0:0] 100 rows, 100 pages, 4 levels: (325771, NULL) (325834, NULL) (325900, NULL) (325966, NULL) (326032, NULL) [0:0:979:0:0:0:0] 100 rows, 100 pages, 4 levels: (326101, NULL) (326170, NULL) (326233, NULL) (326296, NULL) (326359, NULL) [0:0:980:0:0:0:0] 100 rows, 100 pages, 4 levels: (326434, NULL) (326497, NULL) (326563, NULL) (326632, NULL) (326701, NULL) [0:0:981:0:0:0:0] 100 rows, 100 pages, 4 levels: (326773, NULL) (326836, NULL) (326905, NULL) (326965, NULL) (327025, NULL) [0:0:982:0:0:0:0] 100 rows, 100 pages, 4 levels: (327097, NULL) (327169, NULL) (327232, NULL) (327301, NULL) (327364, NULL) [0:0:983:0:0:0:0] 100 rows, 100 pages, 4 levels: (327430, NULL) (327496, NULL) (327559, NULL) (327622, NULL) (327682, NULL) [0:0:984:0:0:0:0] 100 rows, 100 pages, 4 levels: (327742, NULL) (327811, NULL) (327871, NULL) (327934, NULL) (327997, NULL) [0:0:985:0:0:0:0] 100 rows, 100 pages, 4 levels: (328072, NULL) (328138, NULL) (328222, NULL) (328291, NULL) (328363, NULL) [0:0:986:0:0:0:0] 100 rows, 100 pages, 4 levels: (328432, NULL) (328501, NULL) (328573, NULL) (328648, NULL) (328717, NULL) [0:0:987:0:0:0:0] 100 rows, 100 pages, 4 levels: (328783, NULL) (328849, NULL) (328915, NULL) (328978, NULL) (329044, NULL) [0:0:988:0:0:0:0] 100 rows, 100 pages, 4 levels: (329119, NULL) (329185, NULL) (329248, NULL) (329317, NULL) (329383, NULL) [0:0:989:0:0:0:0] 100 rows, 100 pages, 4 levels: (329455, NULL) (329518, NULL) (329590, NULL) (329662, NULL) (329722, NULL) [0:0:990:0:0:0:0] 100 rows, 100 pages, 4 levels: (329782, NULL) (329854, NULL) (329917, NULL) (329983, NULL) (330049, NULL) [0:0:991:0:0:0:0] 100 rows, 100 pages, 4 levels: (330118, NULL) (330187, NULL) (330253, NULL) (330322, NULL) (330382, NULL) [0:0:992:0:0:0:0] 100 rows, 100 pages, 4 levels: (330454, NULL) (330520, NULL) (330595, NULL) (330673, NULL) (330739, NULL) [0:0:993:0:0:0:0] 100 rows, 100 pages, 4 levels: (330808, NULL) (330874, NULL) (330940, NULL) (331003, NULL) (331072, NULL) [0:0:994:0:0:0:0] 100 rows, 100 pages, 4 levels: (331132, NULL) (331204, NULL) (331276, NULL) (331342, NULL) (331405, NULL) [0:0:995:0:0:0:0] 100 rows, 100 pages, 4 levels: (331465, NULL) (331540, NULL) (331615, NULL) (331684, NULL) (331753, NULL) [0:0:996:0:0:0:0] 100 rows, 100 pages, 4 levels: (331816, NULL) (331891, NULL) (331960, NULL) (332026, NULL) (332086, NULL) [0:0:997:0:0:0:0] 100 rows, 100 pages, 4 levels: (332152, NULL) (332215, NULL) (332284, NULL) (332350, NULL) (332419, NULL) [0:0:998:0:0:0:0] 100 rows, 100 pages, 4 levels: (332491, NULL) (332557, NULL) (332623, NULL) (332686, NULL) (332752, NULL) [0:0:999:0:0:0:0] 100 rows, 100 pages, 4 levels: (332818, NULL) (332884, NULL) (332944, NULL) (333013, NULL) (333073, NULL) [0:0:1000:0:0:0:0] 100 rows, 100 pages, 4 levels: (333148, NULL) (333214, NULL) (333274, NULL) (333340, NULL) (333403, NULL) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) 
key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.9%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_vdisks.py::TestTinyVDisks::test_disabled [GOOD] |99.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test 
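Reading the BuildStatsHistogram::Many_Serial output further up: the parts listed there hold roughly 100,000 rows in total (the 90% bucket of the Flat check sits at value = 90000, actual 90200), so each 10% bucket targets about 10,000 rows. The per-bucket "error" appears to be the deviation of the estimated cumulative count from the actual one, taken relative to that total and rounded toward zero — an interpretation inferred from the printed numbers, not from the test source:

    (10000 - 11800) / 100000 = -1.8%   -> printed as "-1% error"
    (60000 - 60800) / 100000 = -0.8%   -> printed as "0% error"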
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_workload.py::TestYdbWorkload::test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_kafka_streams.py::TestYdbTopicWorkload::test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_workload.py::TestYdbWorkload::test >> test_workload.py::TestYdbWorkload::test[row-local] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-None] >> test_vdisks.py::TestTinyVDisks::test_disabled_enabled [GOOD] >> test_workload.py::TestYdbWorkload::test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled_enabled [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_workload.py::TestYdbWorkload::test[row] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_workload.py::TestYdbWorkload::test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.9%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-None] [GOOD] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_workload_topic.py::TestYdbTopicWorkload::test >> test_vdisks.py::TestTinyVDisks::test_enabled [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-test_scv] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.9%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} >> test_workload.py::TestYdbBackupWorkload::test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [FAIL] >> test_workload.py::TestYdbWorkload::test [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] >> Transfer_ColumnTable::KeyColumnFirst |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/viewer/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/viewer/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/viewer/tests/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> Transfer_ColumnTable::KeyColumnFirst [GOOD] >> Transfer_ColumnTable::KeyColumnLast >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> Transfer_ColumnTable::KeyColumnLast [GOOD] >> Transfer_ColumnTable::ComplexKey >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-test_scv] [GOOD] >> Transfer_ColumnTable::ComplexKey [GOOD] >> Transfer_ColumnTable::NullableColumn >> test_vdisks.py::TestTinyVDisks::test_enabled_disabled [GOOD] >> ConsistentIndexRead::InteractiveTx |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled_disabled [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.9%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_workload.py::TestYdbWorkload::test[row] [GOOD] |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} >> Transfer_ColumnTable::NullableColumn [GOOD] >> Transfer_ColumnTable::WriteNullToKeyColumn |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/show_create/view/tests/py3test >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-test_scv] [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/show_create/view/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/show_create/view/tests/py3test >> Backup::UuidValue >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-None] >> Transfer_ColumnTable::WriteNullToKeyColumn [GOOD] >> Transfer_ColumnTable::WriteNullToColumn >> Backup::UuidValue [GOOD] >> test_workload.py::TestYdbWorkload::test[row-local] [GOOD] >> Transfer_ColumnTable::WriteNullToColumn [GOOD] >> Transfer_ColumnTable::Upsert_DifferentBatch >> test_workload.py::TestYdbWorkload::test[column] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/unittest >> Backup::UuidValue [GOOD] Test command err: Found S3 object: "ProducerUuidValueBackup/data_00.csv" Found S3 object: "ProducerUuidValueBackup/data_00.csv.sha256" Found S3 object: "ProducerUuidValueBackup/metadata.json" Found S3 object: "ProducerUuidValueBackup/metadata.json.sha256" Found S3 object: "ProducerUuidValueBackup/permissions.pb" Found S3 object: "ProducerUuidValueBackup/permissions.pb.sha256" Found S3 object: "ProducerUuidValueBackup/scheme.pb" Found S3 object: "ProducerUuidValueBackup/scheme.pb.sha256" |99.9%| [TM] {RESULT} ydb/tests/functional/backup/unittest |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/backup/unittest >> test_workload.py::TestYdbWorkload::test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [FAIL] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> Transfer_ColumnTable::Upsert_DifferentBatch [GOOD] >> Transfer_ColumnTable::Upsert_OneBatch >> test_workload_topic.py::TestYdbTopicWorkload::test >> test_workload.py::TestYdbWorkload::test [GOOD] >> test_workload.py::TestYdbTestShardWorkload::test >> Transfer_ColumnTable::Upsert_OneBatch [GOOD] >> Transfer_ColumnTable::ColumnType_Date >> test_workload.py::TestYdbWorkload::test[row-remote] >> Transfer_ColumnTable::ColumnType_Date [GOOD] >> Transfer_ColumnTable::ColumnType_Double |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/cdc/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/cdc/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/cdc/tests/py3test >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-None] [GOOD] >> test_kafka_streams.py::TestYdbTopicWorkload::test [FAIL] >> test_workload.py::TestYdbWorkload::test [GOOD] >> test_workload.py::TestYdbWorkload::test >> Transfer_ColumnTable::ColumnType_Double [GOOD] >> Transfer_ColumnTable::ColumnType_Int8 |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kafka/tests/py3test >> test_kafka_streams.py::TestYdbTopicWorkload::test [FAIL] |99.9%| [TM] {RESULT} ydb/tests/stress/kafka/tests/py3test |99.9%| [TM] 
{BAZEL_UPLOAD} ydb/tests/stress/kafka/tests/py3test >> Transfer_ColumnTable::ColumnType_Int8 [GOOD] >> Transfer_ColumnTable::ColumnType_Int16 >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-test_sct] >> test_workload.py::TestYdbKvWorkload::test[row] >> Transfer_ColumnTable::ColumnType_Int16 [GOOD] >> Transfer_ColumnTable::ColumnType_Int32 >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/ctas/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/ctas/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/ctas/tests/py3test >> Transfer_ColumnTable::ColumnType_Int32 [GOOD] >> Transfer_ColumnTable::ColumnType_Int64 >> test_workload.py::TestYdbBackupWorkload::test [GOOD] >> Transfer_ColumnTable::ColumnType_Int64 [GOOD] >> Transfer_ColumnTable::ColumnType_Utf8_LongValue |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/backup/tests/py3test >> test_workload.py::TestYdbBackupWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/backup/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/backup/tests/py3test >> Transfer_RowTable::KeyColumnFirst >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-test_sct] [GOOD] >> Transfer_ColumnTable::ColumnType_Utf8_LongValue [GOOD] >> Transfer_ColumnTable::MessageField_Attributes >> Transfer_RowTable::KeyColumnFirst [GOOD] >> Transfer_RowTable::KeyColumnLast >> test_workload.py::TestYdbWorkload::test[column] [GOOD] >> Transfer_ColumnTable::MessageField_Attributes [GOOD] >> Transfer_ColumnTable::MessageField_CreateTimestamp >> Transfer_RowTable::KeyColumnLast [GOOD] >> Transfer_RowTable::ComplexKey |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/show_create/table/tests/py3test >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-test_sct] [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/show_create/table/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/show_create/table/tests/py3test >> Transfer_RowTable::ComplexKey [GOOD] >> Transfer_RowTable::NullableColumn >> S3PathStyleBackup::DisableVirtualAddressing >> test_encryption.py::TestEncryption::test_simple_encryption >> Transfer_RowTable::NullableColumn [GOOD] >> Transfer_RowTable::WriteNullToKeyColumn >> Transfer_ColumnTable::MessageField_CreateTimestamp [GOOD] >> Transfer_ColumnTable::MessageField_Partition >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] >> Transfer_RowTable::WriteNullToKeyColumn [GOOD] >> Transfer_RowTable::WriteNullToColumn >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] >> Transfer_RowTable::WriteNullToColumn [GOOD] >> Transfer_RowTable::Upsert_DifferentBatch |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD, SKIPPED} ydb/tests/stress/simple_queue/tests/py3test >> Transfer_ColumnTable::MessageField_Partition [GOOD] >> Transfer_ColumnTable::MessageField_SeqNo |99.9%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/s3_path_style/unittest >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] 
|99.9%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/backup/s3_path_style/unittest >> test_workload.py::TestYdbWorkload::test[row-remote] [GOOD] >> Transfer_RowTable::Upsert_DifferentBatch [GOOD] >> Transfer_RowTable::Upsert_OneBatch >> Transfer_RowTable::Upsert_OneBatch [GOOD] >> Transfer_RowTable::ColumnType_Bool |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/topic/tests/py3test >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/topic/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/topic/tests/py3test >> test_workload.py::TestYdbWorkload::test[column-local] >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] >> Transfer_RowTable::ColumnType_Bool [GOOD] >> Transfer_RowTable::ColumnType_Date >> test_workload.py::TestYdbWorkload::test >> Transfer_RowTable::ColumnType_Date [GOOD] >> Transfer_RowTable::ColumnType_Double >> Transfer_ColumnTable::MessageField_SeqNo [GOOD] >> Transfer_ColumnTable::MessageField_ProducerId >> Transfer_RowTable::ColumnType_Double [GOOD] >> Transfer_RowTable::ColumnType_Int8 >> KqpQuerySession::NoLocalAttach >> Transfer::BaseScenario_Local >> Transfer_RowTable::ColumnType_Int8 [GOOD] >> Transfer_RowTable::ColumnType_Int16 |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/topic_kafka/tests/py3test >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/topic_kafka/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/topic_kafka/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer_ColumnTable::MessageField_ProducerId [GOOD] >> Transfer_ColumnTable::MessageField_MessageGroupId ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] 2025-12-23 19:25:49,588 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-12-23 19:25:50,608 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 1918509 1.1G 1.2G 1.1G ydb-tests-functional-serverless --basetemp /home/runner/.ya/build/build_root/8ptz/004e23/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 2032641 1.6G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/8ptz/004e23/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 2034392 1.4G 1.5G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/8ptz/004e23/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 2034408 1.4G 1.5G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/8ptz/004e23/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 2034612 1.5G 1.5G 1.0G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/8ptz/004e23/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in 
from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 591, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/functional/serverless/test_serverless.py", line 534, in test_database_with_column_disk_quotas rows = [BulkUpsertRow((now + datetime.timedelta(microseconds=dt))) for dt in range(i * bulk_size, (i + 1) * bulk_size)] File "ydb/tests/functional/serverless/test_serverless.py", line 519, in __init__ self.value_string = ''.join(random.choices(string.ascii_lowercase, k=1024)) File "library/python/pytest/plugins/ya.py", line 345, in _graceful_shutdown stack = traceback.format_stack() Thread 0x00007b242b26a640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b242c574640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b2429f60640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b242e387640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b2432288640 (most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/daemon.py", line 198 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 279 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 693 in __stop_node File 
"ydb/tests/library/harness/kikimr_runner.py", line 707 in stop_node File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b242ba6b640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b242eb88640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b243f569640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b2442686640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b244a423640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in 
_bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007f247a8e7940 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock File "contrib/tools/python3/Lib/threading.py", line 1149 in join File "ydb/tests/library/harness/kikimr_runner.py", line 718 in stop File "ydb/tests/library/fixtures/__init__.py", line 59 in ydb_cluster File "contrib/python/pytest/py3/_pytest/fixtures.py", line 926 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1042 in finish File "contrib/python/pytest/py3/_pytest/runner.py", line 543 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 109 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 308 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 320 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...runner/.ya/build/build_root/8ptz/004e23/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/8ptz/004e23', '--source-root', '/home/runner/.ya/build/build_root/8ptz/004e23/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/8ptz/004e23/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10658668958/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '3', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, 
timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...runner/.ya/build/build_root/8ptz/004e23/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/8ptz/004e23', '--source-root', '/home/runner/.ya/build/build_root/8ptz/004e23/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/8ptz/004e23/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10658668958/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '3', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout",), {}) 2025-12-23 19:26:21,853 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-12-23 19:26:21,855 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.9%| [TM] {BAZEL_UPLOAD, SKIPPED} ydb/tests/functional/serverless/py3test >> Transfer_RowTable::ColumnType_Int16 [GOOD] >> Transfer_RowTable::ColumnType_Int32 >> test_workload.py::TestYdbTestShardWorkload::test [GOOD] >> test_workload.py::TestYdbWorkload::test >> Transfer::BaseScenario_Local [GOOD] >> Transfer::BaseScenario_Remote >> Transfer_RowTable::ColumnType_Int32 [GOOD] >> Transfer_RowTable::ColumnType_Int64 >> Transfer_ColumnTable::MessageField_MessageGroupId [GOOD] >> Transfer_ColumnTable::MessageField_WriteTimestamp >> Transfer_RowTable::ColumnType_Int64 [GOOD] >> Transfer_RowTable::ColumnType_Utf8_LongValue >> test_workload.py::TestYdbKvWorkload::test[row] [GOOD] >> Transfer_ColumnTable::MessageField_WriteTimestamp [FAIL] >> Transfer_ColumnTable::ProcessingJsonMessage >> Transfer::BaseScenario_Remote [GOOD] >> Transfer::CreateTransfer_TargetNotFound |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/s3_backups/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/s3_backups/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/s3_backups/tests/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] 2025-12-23 19:26:21,391 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-12-23 19:26:22,834 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 
secs timeout. Process tree before termination: pid rss ref pdirt 1925437 841M 831M 762M ydb-tests-functional-serverless --basetemp /home/runner/.ya/build/build_root/8ptz/004dea/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 2016808 1.7G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/8ptz/004dea/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 2018068 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/8ptz/004dea/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 2018116 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/8ptz/004dea/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 2018382 1.6G 1.6G 1.1G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/8ptz/004dea/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", 
line 342, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 591, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/functional/serverless/test_serverless.py", line 200, in test_turn_on_serverless_storage_billing time.sleep(15) File "library/python/pytest/plugins/ya.py", line 345, in _graceful_shutdown stack = traceback.format_stack() Thread 0x00007b472b5ef640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b47238aa640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b47253b5640 (most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/daemon.py", line 198 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 279 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 693 in __stop_node File "ydb/tests/library/harness/kikimr_runner.py", line 707 in stop_node File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b4731efa640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b4728fdb640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File 
"contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b4734e96640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007f4765460940 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock File "contrib/tools/python3/Lib/threading.py", line 1149 in join File "ydb/tests/library/harness/kikimr_runner.py", line 718 in stop File "ydb/tests/library/fixtures/__init__.py", line 59 in ydb_cluster File "contrib/python/pytest/py3/_pytest/fixtures.py", line 926 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1042 in finish File "contrib/python/pytest/py3/_pytest/runner.py", line 543 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 109 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 308 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 320 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...runner/.ya/build/build_root/8ptz/004dea/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/8ptz/004dea', '--source-root', '/home/runner/.ya/build/build_root/8ptz/004dea/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/8ptz/004dea/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10658668958/test_tool', '--ya-version', '2', 
'--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '9', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...runner/.ya/build/build_root/8ptz/004dea/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/8ptz/004dea', '--source-root', '/home/runner/.ya/build/build_root/8ptz/004dea/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/8ptz/004dea/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10658668958/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '9', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout",), {}) 2025-12-23 19:26:53,980 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-12-23 19:26:53,981 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer::CreateTransfer_TargetNotFound [GOOD] >> Transfer::ConnectionString_BadChar >> Transfer_RowTable::ColumnType_Utf8_LongValue [GOOD] >> Transfer_RowTable::ColumnType_Uuid >> Transfer::ConnectionString_BadChar [GOOD] >> Transfer::ConnectionString_BadDNSName >> KqpQuerySession::NoLocalAttach [GOOD] >> Transfer::ConnectionString_BadDNSName [GOOD] >> Transfer::Create_WithPermission |99.9%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/stress/testshard_workload/tests/py3test >> test_workload.py::TestYdbTestShardWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/testshard_workload/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/testshard_workload/tests/py3test >> Transfer_RowTable::ColumnType_Uuid [GOOD] >> Transfer_RowTable::MessageField_Attributes >> Transfer::Create_WithPermission [GOOD] >> Transfer::Create_WithoutTablePermission >> Replication::Types |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_session/unittest >> KqpQuerySession::NoLocalAttach [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_session/unittest |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/kqp_query_session/unittest >> Transfer::Create_WithoutTablePermission [GOOD] >> Transfer::Create_WithoutAlterTopicPermission_AndGrant >> test_workload.py::TestYdbKvWorkload::test[column] >> Transfer_ColumnTable::ProcessingJsonMessage [GOOD] >> Transfer_ColumnTable::ProcessingCDCMessage >> Transfer_RowTable::MessageField_Attributes [GOOD] >> Transfer_RowTable::MessageField_CreateTimestamp >> Replication::Types [GOOD] >> Replication::PauseAndResumeReplication >> test_scheme_board_workload.py::TestSchemeBoard::test_scheme_board >> Transfer_RowTable::MessageField_CreateTimestamp [GOOD] >> Transfer_RowTable::MessageField_Partition >> test_workload.py::TestYdbMixedWorkload::test[row] >> Transfer_ColumnTable::ProcessingCDCMessage [GOOD] >> Transfer_ColumnTable::ProcessingTargetTable >> Transfer::Create_WithoutAlterTopicPermission_AndGrant [GOOD] >> Transfer::LocalTopic_WithPermission >> Transfer_RowTable::MessageField_Partition [GOOD] >> Transfer_RowTable::MessageField_SeqNo >> Replication::PauseAndResumeReplication [GOOD] >> Replication::TopicAutopartitioning >> test_workload.py::TestDeltaProtocol::test >> KqpQueryService::ReplyPartLimitProxyNode >> KqpQueryService::ReplyPartLimitProxyNode [GOOD] >> NodeIdDescribe::HasDistribution >> Transfer::LocalTopic_WithPermission [GOOD] >> Transfer::LocalTopic_BigMessage >> Transfer_RowTable::MessageField_SeqNo [GOOD] >> Transfer_RowTable::MessageField_ProducerId >> Transfer_ColumnTable::ProcessingTargetTable [GOOD] >> Transfer_ColumnTable::ProcessingTargetTableOtherType >> Transfer_RowTable::MessageField_ProducerId [GOOD] >> Transfer_RowTable::MessageField_MessageGroupId >> Transfer_RowTable::MessageField_MessageGroupId [GOOD] >> Transfer_RowTable::MessageField_WriteTimestamp |99.9%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} >> Transfer::LocalTopic_BigMessage [FAIL] >> Transfer::AlterLambda >> Transfer_RowTable::MessageField_WriteTimestamp [GOOD] >> Transfer_RowTable::ProcessingJsonMessage >> test_workload.py::TestYdbWorkload::test[column-local] [GOOD] >> Transfer_ColumnTable::ProcessingTargetTableOtherType [GOOD] >> Transfer_ColumnTable::DropColumn >> Transfer::AlterLambda [GOOD] >> Transfer::EnsureError >> Transfer_RowTable::ProcessingJsonMessage [GOOD] >> Transfer_RowTable::ProcessingCDCMessage |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} >> Transfer::EnsureError [GOOD] >> Transfer::CheckCommittedOffset_Local >> test_workload.py::TestYdbWorkload::test[column-remote] >> Transfer_RowTable::ProcessingCDCMessage [GOOD] >> Transfer_RowTable::ProcessingTargetTable >> Transfer::CheckCommittedOffset_Local [GOOD] >> Transfer::CheckCommittedOffset_Remote >> Transfer_ColumnTable::DropColumn [GOOD] >> Transfer_ColumnTable::BigBatchSize_Remote >> Transfer::CheckCommittedOffset_Remote [GOOD] >> Transfer::DropTransfer >> Transfer_RowTable::ProcessingTargetTable [GOOD] >> Transfer_RowTable::ProcessingTargetTableOtherType >> test_workload.py::TestYdbWorkload::test [FAIL] >> Transfer::DropTransfer [GOOD] >> Transfer::CreateAndDropConsumer >> Transfer::CreateAndDropConsumer [GOOD] >> Transfer::DescribeError_OnLambdaCompilation >> Transfer_RowTable::ProcessingTargetTableOtherType [GOOD] >> Transfer_RowTable::DropColumn >> Transfer::DescribeError_OnLambdaCompilation [GOOD] >> Transfer::PausedAfterError >> Transfer::PausedAfterError [GOOD] >> Transfer::DescribeTransferWithErrorTopicNotFound >> Transfer::DescribeTransferWithErrorTopicNotFound [GOOD] >> Transfer::CustomConsumer >> Transfer_RowTable::DropColumn [GOOD] >> Transfer_RowTable::TableWithSyncIndex >> Transfer::CustomConsumer [GOOD] >> Transfer::CustomConsumer_NotExists_Remote >> Transfer_RowTable::TableWithSyncIndex [GOOD] >> Transfer_RowTable::TableWithAsyncIndex >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] >> Transfer::CustomConsumer_NotExists_Remote [GOOD] >> Transfer::CustomConsumer_NotExists_Local >> NodeIdDescribe::HasDistribution [GOOD] >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] >> Transfer_RowTable::TableWithAsyncIndex [GOOD] >> Transfer_ColumnTable::BigBatchSize_Remote [GOOD] >> Transfer_ColumnTable::BigBatchSize_Local >> Transfer::CustomConsumer_NotExists_Local [GOOD] >> Transfer::CustomFlushInterval |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [FAIL] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/olap_workload/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_svc/unittest >> NodeIdDescribe::HasDistribution [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/kqp_query_svc/unittest |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_svc/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/transfer/ut/row_table/unittest >> Transfer_RowTable::TableWithAsyncIndex [GOOD] Test command err: DDL: CREATE TABLE `Table_7902614204565545749` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_7902614204565545749` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_7902614204565545749` FROM `Topic_7902614204565545749` TO `Table_7902614204565545749` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_7902614204565545749` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_7902614204565545749` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_7902614204565545749` ORDER BY `Key`, `Message` 
Attempt=17 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_7902614204565545749` ORDER BY `Key`, `Message` Attempt=16 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_7902614204565545749` ORDER BY `Key`, `Message` Attempt=15 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_7902614204565545749` ORDER BY `Key`, `Message` Attempt=14 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_7902614204565545749` ORDER BY `Key`, `Message` Attempt=13 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_7902614204565545749` ORDER BY `Key`, `Message` Attempt=12 count=1 DDL: DROP TRANSFER `Transfer_7902614204565545749`; DDL: DROP TABLE `Table_7902614204565545749` DDL: DROP TOPIC `Topic_7902614204565545749` DDL: CREATE TABLE `Table_4231230568640857704` ( Message Utf8 NOT NULL, Key Uint64 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_4231230568640857704` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_4231230568640857704` FROM `Topic_4231230568640857704` TO `Table_4231230568640857704` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_4231230568640857704` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_4231230568640857704` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_4231230568640857704` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_4231230568640857704`; DDL: DROP TABLE `Table_4231230568640857704` DDL: DROP TOPIC `Topic_4231230568640857704` DDL: CREATE TABLE `Table_9454827689839758545` ( Key1 Uint64 NOT NULL, Key3 Uint64 NOT NULL, Value1 Utf8, Key2 Uint64 NOT NULL, Value2 Utf8, Key4 Uint64 NOT NULL, ___Value3 Utf8, PRIMARY KEY (Key3, Key2, Key1, Key4) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_9454827689839758545` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key1:Unwrap(CAST(1 AS Uint64)), Key2:Unwrap(CAST(2 AS Uint64)), Value2:CAST("value-2" AS Utf8), Key4:Unwrap(CAST(4 AS Uint64)), Key3:Unwrap(CAST(3 AS Uint64)), Value1:CAST("value-1" AS Utf8), ___Value3:CAST("value-3" AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_9454827689839758545` FROM `Topic_9454827689839758545` TO `Table_9454827689839758545` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_9454827689839758545` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=19 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_9454827689839758545` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=18 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_9454827689839758545` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_9454827689839758545`; DDL: DROP TABLE `Table_9454827689839758545` DDL: DROP TOPIC `Topic_9454827689839758545` DDL: CREATE TABLE `Table_15012665130098482334` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC 
`Topic_15012665130098482334` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_15012665130098482334` FROM `Topic_15012665130098482334` TO `Table_15012665130098482334` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_15012665130098482334` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_15012665130098482334` ORDER BY `Key`, `Message` Attempt=18 count=1 DDL: DROP TRANSFER `Transfer_15012665130098482334`; DDL: DROP TABLE `Table_15012665130098482334` DDL: DROP TOPIC `Topic_15012665130098482334` DDL: CREATE TABLE `Table_8466065325397418578` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_8466065325397418578` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:Unwrap(Nothing(Uint64?), "The value of the 'Key' column must be non-NULL"), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_8466065325397418578` FROM `Topic_8466065325397418578` TO `Table_8466065325397418578` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(102): generated.sql:4:29: Failed to unwrap empty optional: The value of the 'Key' column must be non-NULL } >>>>> EXPECTED: The value of the 'Key' column must be non-NULL DDL: DROP TRANSFER `Transfer_8466065325397418578`; DDL: DROP TABLE `Table_8466065325397418578` DDL: DROP TOPIC `Topic_8466065325397418578` DDL: CREATE TABLE `Table_10963036965539089296` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_10963036965539089296` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:Unwrap(Nothing(Utf8?), "The value of the 'Message' column must be non-NULL") |> ]; }; ; CREATE TRANSFER `Transfer_10963036965539089296` FROM `Topic_10963036965539089296` TO `Table_10963036965539089296` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(106): generated.sql:5:33: Failed to unwrap empty optional: The value of the 'Message' column must be non-NULL } >>>>> EXPECTED: The value of the 'Message' column must be non-NULL DDL: DROP TRANSFER `Transfer_10963036965539089296`; DDL: DROP TABLE `Table_10963036965539089296` DDL: DROP TOPIC `Topic_10963036965539089296` DDL: CREATE TABLE `Table_5541365185868018760` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_5541365185868018760` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:1, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_5541365185868018760` FROM `Topic_5541365185868018760` TO `Table_5541365185868018760` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_5541365185868018760` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_5541365185868018760` ORDER BY `Message` Attempt=18 count=1 >>>>> Query: SELECT `Message` FROM `Table_5541365185868018760` ORDER BY `Message` Attempt=19 count=1 DDL: DROP TRANSFER `Transfer_5541365185868018760`; DDL: DROP TABLE `Table_5541365185868018760` DDL: CREATE TABLE `Table_9078275021616527882` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_9078275021616527882` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { ... rstName`, `LastName`, `Salary` Attempt=18 count=0 >>>>> Query: SELECT `Id`, `FirstName`, `LastName`, `Salary` FROM `Table_5193129895238781845` ORDER BY `Id`, `FirstName`, `LastName`, `Salary` Attempt=17 count=0 >>>>> Query: SELECT `Id`, `FirstName`, `LastName`, `Salary` FROM `Table_5193129895238781845` ORDER BY `Id`, `FirstName`, `LastName`, `Salary` Attempt=16 count=1 DDL: DROP TRANSFER `Transfer_5193129895238781845`; DDL: DROP TABLE `Table_5193129895238781845` DDL: DROP TOPIC `Topic_5193129895238781845` DDL: CREATE TABLE `SourceTable_2656634110270094362` ( object_id Utf8 NOT NULL, timestamp Datetime NOT NULL, operation Utf8, PRIMARY KEY (object_id, timestamp) ) WITH ( STORE = ROW ) DDL: ALTER TABLE `SourceTable_2656634110270094362` ADD CHANGEFEED `cdc_2656634110270094362` WITH ( MODE = 'UPDATES', FORMAT = 'JSON' ) DDL: CREATE TABLE `Table_2656634110270094362` ( timestamp Datetime NOT NULL, object_id Utf8 NOT NULL, operation Utf8, PRIMARY KEY (timestamp, object_id) ) WITH ( STORE = ROW ) DDL: $l = ($x) -> { $d = CAST($x._data AS JSON); return [ <| timestamp: Unwrap(DateTime::MakeDatetime(DateTime::ParseIso8601(CAST(Yson::ConvertToString($d.key[1]) AS Utf8)))), object_id: Unwrap(CAST(Yson::ConvertToString($d.key[0]) AS Utf8)), operation: CAST(Yson::ConvertToString($d.update.operation) AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_2656634110270094362` FROM `SourceTable_2656634110270094362/cdc_2656634110270094362` TO `Table_2656634110270094362` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: INSERT INTO `SourceTable_2656634110270094362` (`object_id`, `timestamp`, `operation`) VALUES ('id_1', Datetime('2019-01-01T15:30:00Z'), 'value_1'); >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_2656634110270094362` ORDER BY `operation`, `object_id`, `timestamp` Attempt=19 count=0 >>>>> Query: SELECT `operation`, 
`object_id`, `timestamp` FROM `Table_2656634110270094362` ORDER BY `operation`, `object_id`, `timestamp` Attempt=18 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_2656634110270094362` ORDER BY `operation`, `object_id`, `timestamp` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_2656634110270094362`; DDL: DROP TABLE `Table_2656634110270094362` DDL: DROP TABLE `SourceTable_2656634110270094362` DDL: CREATE TABLE `Table_13515965920367967705` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TABLE `Table_13515965920367967705_1` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TABLE `Table_13515965920367967705_2` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_13515965920367967705` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: $x._offset, Message:CAST($x._data AS Utf8) |>, <| __ydb_table: "Table_13515965920367967705_1", Key: $x._offset, Message:CAST($x._data || "_1" AS Utf8) |>, <| __ydb_table: "Table_13515965920367967705_2", Key: $x._offset, Message:CAST($x._data || "_2" AS Utf8) |>, ]; }; ; CREATE TRANSFER `Transfer_13515965920367967705` FROM `Topic_13515965920367967705` TO `Table_13515965920367967705` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608, DIRECTORY = '/local' ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_13515965920367967705` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_13515965920367967705` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_13515965920367967705` ORDER BY `Key`, `Message` Attempt=17 count=1 >>>>> Query: SELECT `Key`, `Message` FROM `Table_13515965920367967705_1` ORDER BY `Key`, `Message` Attempt=19 count=1 >>>>> Query: SELECT `Key`, `Message` FROM `Table_13515965920367967705_2` ORDER BY `Key`, `Message` Attempt=19 count=1 DDL: DROP TRANSFER `Transfer_13515965920367967705`; DDL: DROP TABLE `Table_13515965920367967705` DDL: DROP TOPIC `Topic_13515965920367967705` DDL: CREATE TABLE `Table_4877188604351122256` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TABLE `Table_4877188604351122256_1` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_4877188604351122256` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: $x._offset, Message:CAST($x._data AS Utf8) |>, <| __ydb_table: "Table_4877188604351122256_1", Key: $x._offset, Message:CAST($x._data || "_1" AS Utf8) |>, ]; }; ; CREATE TRANSFER `Transfer_4877188604351122256` FROM `Topic_4877188604351122256` TO `Table_4877188604351122256` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608, DIRECTORY = '/local' ); >>>>> ACTUAL: {
: Error: Error in target #1: {
: Error: Bulk upsert to table '/local/Table_4877188604351122256_1' Only the OLTP table is supported } } >>>>> EXPECTED: Error: Bulk upsert to table '/local/Table_ DDL: DROP TRANSFER `Transfer_4877188604351122256`; DDL: DROP TABLE `Table_4877188604351122256` DDL: DROP TOPIC `Topic_4877188604351122256` DDL: CREATE TABLE `Table_16121398117810567407` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_16121398117810567407` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_16121398117810567407` FROM `Topic_16121398117810567407` TO `Table_16121398117810567407` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_16121398117810567407` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_16121398117810567407` ORDER BY `Message` Attempt=18 count=1 DDL: ALTER TABLE Table_16121398117810567407 DROP COLUMN Message >>>>> ACTUAL: {
: Error: Error in target #1: {
: Error: Bulk upsert to table 'local/Table_16121398117810567407' Unknown column: Message } } >>>>> EXPECTED: Unknown column: Message DDL: CREATE TABLE `Table_15055579135858717909` ( Key Uint64 NOT NULL, Message Utf8, INDEX `title_index` GLOBAL SYNC ON (`Message`), PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_15055579135858717909` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_15055579135858717909` FROM `Topic_15055579135858717909` TO `Table_15055579135858717909` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: {
: Error: Bulk upsert to table 'local/Table_15055579135858717909' Only async-indexed tables are supported by BulkUpsert } } >>>>> EXPECTED: Only async-indexed tables are supported by BulkUpsert DDL: DROP TRANSFER `Transfer_15055579135858717909`; DDL: DROP TABLE `Table_15055579135858717909` DDL: DROP TOPIC `Topic_15055579135858717909` DDL: CREATE TABLE `Table_5129586999379434301` ( Key Uint64 NOT NULL, Message Utf8, INDEX `title_index` GLOBAL ASYNC ON (`Message`), PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_5129586999379434301` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_5129586999379434301` FROM `Topic_5129586999379434301` TO `Table_5129586999379434301` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:9622/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_5129586999379434301` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_5129586999379434301` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_5129586999379434301` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_5129586999379434301`; DDL: DROP TABLE `Table_5129586999379434301` DDL: DROP TOPIC `Topic_5129586999379434301` |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/transfer/ut/row_table/unittest |99.9%| [TM] {RESULT} ydb/core/transfer/ut/row_table/unittest |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/kv/tests/py3test >> Transfer::CustomFlushInterval [GOOD] >> Transfer::AlterFlushInterval |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/encryption/py3test |99.9%| [TM] {RESULT} ydb/tests/functional/encryption/py3test >> Transfer::AlterFlushInterval [GOOD] >> Transfer::AlterBatchSize >> Transfer::AlterBatchSize [GOOD] >> Transfer::CreateTransferSourceNotExists >> Transfer::CreateTransferSourceNotExists [GOOD] >> Transfer::CreateTransferSourceNotExists_LocalTopic >> Transfer::CreateTransferSourceNotExists_LocalTopic [GOOD] >> Transfer::CreateTransferSourceDirNotExists >> Transfer_ColumnTable::BigBatchSize_Local [GOOD] >> Transfer::CreateTransferSourceDirNotExists [GOOD] >> Transfer::CreateTransferSourceDirNotExists_LocalTopic >> Transfer::CreateTransferSourceDirNotExists_LocalTopic [GOOD] >> Transfer::TransferSourceDropped >> Transfer::TransferSourceDropped [GOOD] >> Transfer::TransferSourceDropped_LocalTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/transfer/ut/column_table/unittest >> Transfer_ColumnTable::BigBatchSize_Local [GOOD] Test command err: DDL: CREATE TABLE `Table_10173287438333208036` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_10173287438333208036` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_10173287438333208036` FROM `Topic_10173287438333208036` TO `Table_10173287438333208036` USING $l WITH ( CONNECTION_STRING = 
'grpc://localhost:24591/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_10173287438333208036` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_10173287438333208036` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_10173287438333208036` ORDER BY `Key`, `Message` Attempt=17 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_10173287438333208036` ORDER BY `Key`, `Message` Attempt=16 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_10173287438333208036` ORDER BY `Key`, `Message` Attempt=15 count=1 DDL: DROP TRANSFER `Transfer_10173287438333208036`; DDL: DROP TABLE `Table_10173287438333208036` DDL: DROP TOPIC `Topic_10173287438333208036` DDL: CREATE TABLE `Table_16246478898607376541` ( Message Utf8 NOT NULL, Key Uint64 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_16246478898607376541` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_16246478898607376541` FROM `Topic_16246478898607376541` TO `Table_16246478898607376541` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:24591/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_16246478898607376541` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_16246478898607376541` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_16246478898607376541` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_16246478898607376541`; DDL: DROP TABLE `Table_16246478898607376541` DDL: DROP TOPIC `Topic_16246478898607376541` DDL: CREATE TABLE `Table_11828486843268933569` ( Key1 Uint64 NOT NULL, Key3 Uint64 NOT NULL, Value1 Utf8, Key2 Uint64 NOT NULL, Value2 Utf8, Key4 Uint64 NOT NULL, ___Value3 Utf8, PRIMARY KEY (Key3, Key2, Key1, Key4) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_11828486843268933569` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key1:Unwrap(CAST(1 AS Uint64)), Key2:Unwrap(CAST(2 AS Uint64)), Value2:CAST("value-2" AS Utf8), Key4:Unwrap(CAST(4 AS Uint64)), Key3:Unwrap(CAST(3 AS Uint64)), Value1:CAST("value-1" AS Utf8), ___Value3:CAST("value-3" AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_11828486843268933569` FROM `Topic_11828486843268933569` TO `Table_11828486843268933569` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:24591/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_11828486843268933569` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=19 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_11828486843268933569` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=18 count=1 DDL: DROP TRANSFER `Transfer_11828486843268933569`; DDL: DROP TABLE `Table_11828486843268933569` DDL: DROP TOPIC `Topic_11828486843268933569` DDL: CREATE TABLE `Table_8774842511758006454` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_8774842511758006454` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) 
-> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_8774842511758006454` FROM `Topic_8774842511758006454` TO `Table_8774842511758006454` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:24591/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_8774842511758006454` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_8774842511758006454` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_8774842511758006454` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_8774842511758006454`; DDL: DROP TABLE `Table_8774842511758006454` DDL: DROP TOPIC `Topic_8774842511758006454` DDL: CREATE TABLE `Table_15446398542396114175` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_15446398542396114175` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:Unwrap(Nothing(Uint64?), "The value of the 'Key' column must be non-NULL"), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_15446398542396114175` FROM `Topic_15446398542396114175` TO `Table_15446398542396114175` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:24591/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(102): generated.sql:4:29: Failed to unwrap empty optional: The value of the 'Key' column must be non-NULL } >>>>> EXPECTED: The value of the 'Key' column must be non-NULL DDL: DROP TRANSFER `Transfer_15446398542396114175`; DDL: DROP TABLE `Table_15446398542396114175` DDL: DROP TOPIC `Topic_15446398542396114175` DDL: CREATE TABLE `Table_1297399569123692105` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_1297399569123692105` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:Unwrap(Nothing(Utf8?), "The value of the 'Message' column must be non-NULL") |> ]; }; ; CREATE TRANSFER `Transfer_1297399569123692105` FROM `Topic_1297399569123692105` TO `Table_1297399569123692105` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:24591/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(106): generated.sql:5:33: Failed to unwrap empty optional: The value of the 'Message' column must be non-NULL } >>>>> EXPECTED: The value of the 'Message' column must be non-NULL DDL: DROP TRANSFER `Transfer_1297399569123692105`; DDL: DROP TABLE `Table_1297399569123692105` DDL: DROP TOPIC `Topic_1297399569123692105` DDL: CREATE TABLE `Table_6163323771875321937` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_6163323771875321937` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:1, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_6163323771875321937` FROM `Topic_6163323771875321937` TO `Table_6163323771875321937` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:24591/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_6163323771875321937` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_6163323771875321937` ORDER BY `Message` Attempt=18 count=1 >>>>> Query: SELECT `Message` FROM `Table_6163323771875321937` ORDER BY `Message` Attempt=19 count=1 DDL: DROP TRANSFER `Transfer_6163323771875321937`; DDL: DROP TABLE `Table_6163323771875321937` DDL: CREATE TABLE `Table_5427098173649299954` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_5427098173649299954` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:1, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_5427098173649299954` FROM `Topic_5427098173649299954` TO `Table_5427098173649299954` USING $l WITH ( CONNECTION_ ... 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------' |>; }; return ListMap($lines, $m); }; ; CREATE TRANSFER `Transfer_475788880512999128` FROM `Topic_475788880512999128` TO `Table_475788880512999128` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 1073741824 ); >>>>> Query: SELECT `offset`, `line` FROM `Table_475788880512999128` ORDER BY `offset`, `line` Attempt=19 count=1802 DDL: DROP TRANSFER `Transfer_475788880512999128`; DDL: DROP TABLE `Table_475788880512999128` DDL: DROP TOPIC `Topic_475788880512999128` |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/transfer/ut/column_table/unittest |99.9%| [TM] {RESULT} ydb/core/transfer/ut/column_table/unittest >> Transfer::TransferSourceDropped_LocalTopic [GOOD] >> Transfer::CreateTransferSourceIsNotTopic >> Transfer::CreateTransferSourceIsNotTopic [GOOD] >> Transfer::CreateTransferSourceIsNotTopic_LocalTopic >> Transfer::CreateTransferSourceIsNotTopic_LocalTopic [GOOD] >> Transfer::CreateTransferTargetIsNotTable >> Transfer::CreateTransferTargetIsNotTable [GOOD] >> Transfer::CreateTransferTargetNotExists >> Transfer::CreateTransferTargetNotExists [GOOD] >> Transfer::PauseAndResumeTransfer >> test_workload.py::TestDeltaProtocol::test [GOOD] >> Transfer::PauseAndResumeTransfer [GOOD] >> Transfer::TargetTableWithoutDirectory |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/node_broker/tests/py3test >> test_workload.py::TestDeltaProtocol::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/node_broker/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/node_broker/tests/py3test >> Transfer::TargetTableWithoutDirectory [GOOD] >> Transfer::TargetTableWriteOutsideDirectory >> test_workload.py::TestYdbWorkload::test[column-remote] [GOOD] >> test_workload.py::TestYdbMixedWorkload::test[row] [GOOD] >> Transfer::TargetTableWriteOutsideDirectory [GOOD] >> Transfer::TargetTableWriteInsideDirectory >> Transfer::TargetTableWriteInsideDirectory [GOOD] >> Transfer::AlterTargetDirectory >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/transfer/tests/py3test >> test_workload.py::TestYdbWorkload::test[column-remote] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/transfer/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/transfer/tests/py3test >> Transfer::AlterTargetDirectory [GOOD] >> Transfer::WriteToNotExists >> Transfer::WriteToNotExists [GOOD] >> Transfer::WriteToNotTable >> test_workload.py::TestYdbMixedWorkload::test[column] >> Transfer::WriteToNotTable [GOOD] >> Transfer::AlterLambdaOnWork >> Transfer::AlterLambdaOnWork [GOOD] >> Transfer::CreateAndAlterTransferInDirectory |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/oltp_workload/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} 
ydb/tests/stress/oltp_workload/tests/py3test >> Transfer::CreateAndAlterTransferInDirectory [GOOD] >> Transfer::Alter_WithSecret >> Transfer::Alter_WithSecret [GOOD] >> Transfer::MessageField_Key >> Transfer::MessageField_Key [GOOD] >> Transfer::MessageField_Key_Empty >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer::MessageField_Key_Empty [GOOD] >> Transfer::ErrorInMultiLine >> Transfer::ErrorInMultiLine [GOOD] >> Transfer::ReadFromCDC_Remote >> Transfer::ReadFromCDC_Remote [GOOD] >> Transfer::ReadFromCDC_Local >> Transfer::ReadFromCDC_Local [GOOD] >> Transfer::MessageField_CreateTimestamp_Remote >> Transfer::MessageField_CreateTimestamp_Remote [GOOD] >> Transfer::MessageField_CreateTimestamp_Local >> Transfer::MessageField_CreateTimestamp_Local [GOOD] >> Transfer::MessageField_WriteTimestamp_Remote >> Transfer::MessageField_WriteTimestamp_Remote [GOOD] >> Transfer::MessageField_WriteTimestamp_Local >> Transfer::MessageField_WriteTimestamp_Local [GOOD] >> Transfer::MessageField_Attributes_Remote >> test_scheme_board_workload.py::TestSchemeBoard::test_scheme_board [GOOD] >> Transfer::MessageField_Attributes_Remote [GOOD] >> Transfer::MessageField_Attributes_Local >> Transfer::MessageField_Attributes_Local [GOOD] >> Transfer::MessageField_Partition_Remote >> Transfer::MessageField_Partition_Remote [GOOD] >> Transfer::MessageField_Partition_Local >> Transfer::MessageField_Partition_Local [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/transfer/ut/functional/unittest >> Transfer::MessageField_Partition_Local [GOOD] Test command err: DDL: CREATE TABLE `Table_4887039286249907778` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_4887039286249907778` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_4887039286249907778` FROM `Topic_4887039286249907778` TO `Table_4887039286249907778` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_4887039286249907778` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_4887039286249907778` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_4887039286249907778` ORDER BY `Message` Attempt=17 count=0 >>>>> Query: SELECT `Message` FROM `Table_4887039286249907778` ORDER BY `Message` Attempt=16 count=0 >>>>> Query: SELECT `Message` FROM `Table_4887039286249907778` ORDER BY `Message` Attempt=15 count=0 >>>>> Query: SELECT `Message` FROM `Table_4887039286249907778` ORDER BY `Message` Attempt=14 count=0 >>>>> Query: SELECT `Message` FROM `Table_4887039286249907778` ORDER BY `Message` Attempt=13 count=1 DDL: DROP TRANSFER `Transfer_4887039286249907778`; DDL: DROP TABLE `Table_4887039286249907778` DDL: DROP TOPIC `Topic_4887039286249907778` DDL: CREATE TABLE `Table_8671132338725400724` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_8671132338725400724` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_8671132338725400724` FROM `Topic_8671132338725400724` TO `Table_8671132338725400724` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:25312/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_8671132338725400724` ORDER BY 
`Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_8671132338725400724` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_8671132338725400724` ORDER BY `Message` Attempt=17 count=0 >>>>> Query: SELECT `Message` FROM `Table_8671132338725400724` ORDER BY `Message` Attempt=16 count=0 >>>>> Query: SELECT `Message` FROM `Table_8671132338725400724` ORDER BY `Message` Attempt=15 count=0 >>>>> Query: SELECT `Message` FROM `Table_8671132338725400724` ORDER BY `Message` Attempt=14 count=0 >>>>> Query: SELECT `Message` FROM `Table_8671132338725400724` ORDER BY `Message` Attempt=13 count=1 DDL: DROP TRANSFER `Transfer_8671132338725400724`; DDL: DROP TABLE `Table_8671132338725400724` DDL: DROP TOPIC `Topic_8671132338725400724` DDL: CREATE TOPIC `Topic_7808982243221596220` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64) |> ]; }; ; CREATE TRANSFER `Transfer_7808982243221596220` FROM `Topic_7808982243221596220` TO `Table_7808982243221596220` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:25312/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: [ {
: Error: Executing ESchemeOpCreateTransfer, code: 2003 subissue: {
: Error: Path does not exist, code: 2003 } } {
: Error: Query invalidated on scheme/internal error during Scheme execution, code: 2019 } ] >>>>> EXPECTED: Path does not exist DDL: DROP TOPIC `Topic_7808982243221596220` DDL: CREATE TABLE `Table_12795040735695712511` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_12795040735695712511` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: 1, Message:CAST("Message-1" AS Utf8) |> ]; }; CREATE TRANSFER Transfer_12795040735695712511 FROM Topic_12795040735695712511 TO Table_12795040735695712511 USING $l WITH ( CONNECTION_STRING = "grp§c://localhost:2135/?database=/Root" ) >>>>> ACTUAL: {
: Error: Discovery error: /Root/Topic_12795040735695712511: TRANSPORT_UNAVAILABLE ([ {
: Error: GRpc error: (14): DNS resolution failed for grp§c://localhost:2135: C-ares status is not ARES_SUCCESS qtype=A name=grp§c://localhost:2135 is_balancer=0: Misformatted domain name } {
: Error: Grpc error response on endpoint grp§c://localhost:2135 } ]) } >>>>> EXPECTED: DNS resolution failed for grp§c://localhost:2135 DDL: DROP TRANSFER `Transfer_12795040735695712511`; DDL: DROP TABLE `Table_12795040735695712511` DDL: DROP TOPIC `Topic_12795040735695712511` DDL: CREATE TABLE `Table_10494357316234243876` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_10494357316234243876` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: 1, Message:CAST("Message-1" AS Utf8) |> ]; }; CREATE TRANSFER Transfer_10494357316234243876 FROM Topic_10494357316234243876 TO Table_10494357316234243876 USING $l WITH ( CONNECTION_STRING = "grpc://domain-not-exists-localhost.com.moc:2135/?database=/Root" ) >>>>> ACTUAL: {
: Error: Discovery error: /Root/Topic_10494357316234243876: TRANSPORT_UNAVAILABLE ([ {
: Error: GRpc error: (14): DNS resolution failed for domain-not-exists-localhost.com.moc:2135: C-ares status is not ARES_SUCCESS qtype=AAAA name=domain-not-exists-localhost.com.moc is_balancer=0: Domain name not found } {
: Error: Grpc error response on endpoint domain-not-exists-localhost.com.moc:2135 } ]) } >>>>> EXPECTED: Grpc error response on endpoint domain-not-exists-localhost.com.moc:2135 DDL: DROP TRANSFER `Transfer_10494357316234243876`; DDL: DROP TABLE `Table_10494357316234243876` DDL: DROP TOPIC `Topic_10494357316234243876` DDL: CREATE USER u59983 DDL: GRANT 'ydb.granular.create_table', 'ydb.granular.create_queue' ON `/local` TO `u59983@builtin` DDL: CREATE TABLE `Table_16945587636996535924` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: GRANT 'ydb.generic.write', 'ydb.generic.read' ON `/local/Table_16945587636996535924` TO `u59983@builtin` DDL: CREATE TOPIC `Topic_16945587636996535924` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: GRANT ALL ON `/local/Topic_16945587636996535924` TO `u59983@builtin` DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_16945587636996535924` FROM `Topic_16945587636996535924` TO `Table_16945587636996535924` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:25312/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); DDL: DROP TOPIC `Topic_16945587636996535924` DDL: DROP TRANSFER `Transfer_16945587636996535924`; DDL: CREATE USER u20214 DDL: GRANT 'ydb.granular.create_table', 'ydb.granular.create_queue' ON `/local` TO `u20214@builtin` DDL: CREATE TABLE `Table_15561986482397736236` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: GRANT 'ydb.generic.read' ON `/local/Table_15561986482397736236` TO `u20214@builtin` DDL: CREATE TOPIC `Topic_15561986482397736236` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: GRANT ALL ON `/local/Topic_15561986482397736236` TO `u20214@builtin` DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_15561986482397736236` FROM `Topic_15561986482397736236` TO `Table_15561986482397736236` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:25312/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Executing ESchemeOpCreateTransfer, code: 2018 subissue: {
: Error: Access denied for scheme request, code: 2018 subissue: {
: Error: Access denied. } } } >>>>> EXPECTED: Access denied for scheme request DDL: DROP TOPIC `Topic_15561986482397736236` DDL: CREATE USER u56811 DDL: CREATE TABLE `Table_14123859622726800632` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_14123859622726800632` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: GRANT 'ydb.generic.read' ON `/local/Topic_14123859622726800632` TO `u56811@builtin` DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data ... = 8388608 ); >>>>> Query: INSERT INTO `Table_7348353632217971465_in` (Key, Message) VALUES ( 7, '13' ) >>>>> Query: SELECT `Message` FROM `Table_7348353632217971465` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_7348353632217971465` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_7348353632217971465` ORDER BY `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_7348353632217971465`; DDL: DROP TABLE `Table_7348353632217971465` DDL: CREATE TABLE `Table_394743616141543831` ( Offset Uint64 NOT NULL, CreateTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_394743616141543831` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), CreateTimestamp:$x._create_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_394743616141543831` FROM `Topic_394743616141543831` TO `Table_394743616141543831` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:25312/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `CreateTimestamp` FROM `Table_394743616141543831` ORDER BY `CreateTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_394743616141543831` ORDER BY `CreateTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_394743616141543831` ORDER BY `CreateTimestamp` Attempt=17 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_394743616141543831` ORDER BY `CreateTimestamp` Attempt=16 count=1 DDL: DROP TRANSFER `Transfer_394743616141543831`; DDL: DROP TABLE `Table_394743616141543831` DDL: DROP TOPIC `Topic_394743616141543831` DDL: CREATE TABLE `Table_17547467386192440880` ( Offset Uint64 NOT NULL, CreateTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_17547467386192440880` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), CreateTimestamp:$x._create_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_17547467386192440880` FROM `Topic_17547467386192440880` TO `Table_17547467386192440880` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `CreateTimestamp` FROM `Table_17547467386192440880` ORDER BY `CreateTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_17547467386192440880` ORDER BY `CreateTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_17547467386192440880` ORDER BY `CreateTimestamp` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_17547467386192440880`; DDL: DROP TABLE `Table_17547467386192440880` DDL: DROP TOPIC `Topic_17547467386192440880` DDL: CREATE TABLE `Table_1419395867924847112` ( Offset Uint64 NOT NULL, WriteTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_1419395867924847112` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS 
Uint64), WriteTimestamp:$x._write_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_1419395867924847112` FROM `Topic_1419395867924847112` TO `Table_1419395867924847112` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:25312/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `WriteTimestamp` FROM `Table_1419395867924847112` ORDER BY `WriteTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM `Table_1419395867924847112` ORDER BY `WriteTimestamp` Attempt=18 count=1 DDL: DROP TRANSFER `Transfer_1419395867924847112`; DDL: DROP TABLE `Table_1419395867924847112` DDL: DROP TOPIC `Topic_1419395867924847112` DDL: CREATE TABLE `Table_10050179602728811917` ( Offset Uint64 NOT NULL, WriteTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_10050179602728811917` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), WriteTimestamp:$x._write_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_10050179602728811917` FROM `Topic_10050179602728811917` TO `Table_10050179602728811917` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `WriteTimestamp` FROM `Table_10050179602728811917` ORDER BY `WriteTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM `Table_10050179602728811917` ORDER BY `WriteTimestamp` Attempt=18 count=1 DDL: DROP TRANSFER `Transfer_10050179602728811917`; DDL: DROP TABLE `Table_10050179602728811917` DDL: DROP TOPIC `Topic_10050179602728811917` DDL: CREATE TABLE `Table_17885732026900814053` ( Offset Uint64 NOT NULL, Value Utf8, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_17885732026900814053` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), Value:CAST($x._attributes['attribute_key'] AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_17885732026900814053` FROM `Topic_17885732026900814053` TO `Table_17885732026900814053` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:25312/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Value` FROM `Table_17885732026900814053` ORDER BY `Value` Attempt=19 count=0 >>>>> Query: SELECT `Value` FROM `Table_17885732026900814053` ORDER BY `Value` Attempt=18 count=0 >>>>> Query: SELECT `Value` FROM `Table_17885732026900814053` ORDER BY `Value` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_17885732026900814053`; DDL: DROP TABLE `Table_17885732026900814053` DDL: DROP TOPIC `Topic_17885732026900814053` DDL: CREATE TABLE `Table_3508188061351038439` ( Offset Uint64 NOT NULL, Value Utf8, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_3508188061351038439` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), Value:CAST($x._attributes['attribute_key'] AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_3508188061351038439` FROM `Topic_3508188061351038439` TO `Table_3508188061351038439` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Value` FROM `Table_3508188061351038439` ORDER BY `Value` Attempt=19 count=0 >>>>> Query: SELECT `Value` FROM `Table_3508188061351038439` ORDER BY `Value` Attempt=18 count=0 >>>>> Query: SELECT `Value` FROM `Table_3508188061351038439` ORDER BY `Value` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_3508188061351038439`; DDL: DROP TABLE `Table_3508188061351038439` DDL: DROP 
TOPIC `Topic_3508188061351038439` DDL: CREATE TABLE `Table_17660823459573312997` ( Partition Uint32 NOT NULL, Message Utf8, PRIMARY KEY (Partition) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_17660823459573312997` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Partition:CAST($x._partition AS Uint32), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_17660823459573312997` FROM `Topic_17660823459573312997` TO `Table_17660823459573312997` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:25312/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Partition`, `Message` FROM `Table_17660823459573312997` ORDER BY `Partition`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_17660823459573312997` ORDER BY `Partition`, `Message` Attempt=18 count=1 DDL: DROP TRANSFER `Transfer_17660823459573312997`; DDL: DROP TABLE `Table_17660823459573312997` DDL: DROP TOPIC `Topic_17660823459573312997` DDL: CREATE TABLE `Table_17004925563405735208` ( Partition Uint32 NOT NULL, Message Utf8, PRIMARY KEY (Partition) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_17004925563405735208` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Partition:CAST($x._partition AS Uint32), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_17004925563405735208` FROM `Topic_17004925563405735208` TO `Table_17004925563405735208` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Partition`, `Message` FROM `Table_17004925563405735208` ORDER BY `Partition`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_17004925563405735208` ORDER BY `Partition`, `Message` Attempt=18 count=1 DDL: DROP TRANSFER `Transfer_17004925563405735208`; DDL: DROP TABLE `Table_17004925563405735208` DDL: DROP TOPIC `Topic_17004925563405735208` |99.9%| [TM] {RESULT} ydb/core/transfer/ut/functional/unittest |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/transfer/ut/functional/unittest |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test >> test_scheme_board_workload.py::TestSchemeBoard::test_scheme_board [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/streaming/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test >> ConsistentIndexRead::InteractiveTx [GOOD] >> KqpExtTest::SecondaryIndexSelectUsingScripting >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/streaming/tests/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_indexes/unittest >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_indexes/unittest |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/kqp_indexes/unittest |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/streaming/tests/py3test >> Replication::TopicAutopartitioning [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/replication/unittest >> Replication::TopicAutopartitioning [GOOD] Test command err: DDL: CREATE TABLE `SourceTable_14654170032942546205` ( Key Uint32, Key2 Uuid, v01 Uuid, v02 Uuid NOT NULL, v03 Double, PRIMARY KEY (Key, Key2) ); >>>>> Query: UPSERT INTO 
`SourceTable_14654170032942546205` (Key,Key2,v01,v02,v03) VALUES ( 1, CAST("00078af5-0000-0000-6c0b-040000000000" as Uuid), CAST("00078af5-0000-0000-6c0b-040000000001" as Uuid), UNWRAP(CAST("00078af5-0000-0000-6c0b-040000000002" as Uuid)), CAST("311111111113.222222223" as Double) ); DDL: CREATE ASYNC REPLICATION `Replication_14654170032942546205` FOR `SourceTable_14654170032942546205` AS `Table_14654170032942546205` WITH ( CONNECTION_STRING = 'grpc://localhost:8022/?database=local' ); >>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_14654170032942546205` ORDER BY `Key2`, `v01`, `v02`, `v03` >>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_14654170032942546205]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Attempt=19 count=-1 >>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_14654170032942546205` ORDER BY `Key2`, `v01`, `v02`, `v03` Attempt=18 count=1 DDL: DROP ASYNC REPLICATION `Replication_14654170032942546205`; DDL: DROP TABLE `SourceTable_14654170032942546205` DDL: CREATE TABLE `SourceTable_632290913746210332` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ); DDL: CREATE ASYNC REPLICATION `Replication_632290913746210332` FOR `SourceTable_632290913746210332` AS `Table_632290913746210332` WITH ( CONNECTION_STRING = 'grpc://localhost:8022/?database=local' ); >>>>> Query: INSERT INTO `SourceTable_632290913746210332` (`Key`, `Message`) VALUES (1, 'Message-1'); >>>>> Query: SELECT `Message` FROM `Table_632290913746210332` ORDER BY `Message` >>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_632290913746210332]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Attempt=19 count=-1 >>>>> Query: SELECT `Message` FROM `Table_632290913746210332` ORDER BY `Message` Attempt=18 count=1 State: Paused DDL: ALTER ASYNC REPLICATION `Replication_632290913746210332` SET ( STATE = "Paused" ); >>>>> Query: INSERT INTO `SourceTable_632290913746210332` (`Key`, `Message`) VALUES (2, 'Message-2'); >>>>> Query: SELECT `Message` FROM `Table_632290913746210332` ORDER BY `Message` Attempt=19 count=1 State: StandBy DDL: ALTER ASYNC REPLICATION `Replication_632290913746210332` SET ( STATE = "StandBy" ); >>>>> Query: SELECT `Message` FROM `Table_632290913746210332` ORDER BY `Message` Attempt=19 count=1 >>>>> Query: SELECT `Message` FROM `Table_632290913746210332` ORDER BY `Message` Attempt=18 count=2 DDL: ALTER ASYNC REPLICATION `Replication_632290913746210332` SET ( STATE = "Paused" ); DDL: ALTER ASYNC REPLICATION `Replication_632290913746210332` SET ( STATE = "StandBy" ); DDL: DROP ASYNC REPLICATION `Replication_632290913746210332`; DDL: DROP TABLE `SourceTable_632290913746210332` DDL: CREATE TABLE `SourceTable_4110460276970326713` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( AUTO_PARTITIONING_BY_SIZE = ENABLED, AUTO_PARTITIONING_PARTITION_SIZE_MB = 1, AUTO_PARTITIONING_BY_LOAD = ENABLED, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1, AUTO_PARTITIONING_MAX_PARTITIONS_COUNT = 10, UNIFORM_PARTITIONS = 1 ); DDL: CREATE ASYNC REPLICATION `Replication_4110460276970326713` FOR `SourceTable_4110460276970326713` AS `Table_4110460276970326713` WITH ( CONNECTION_STRING = 'grpc://localhost:8022/?database=local' ); Insert much data to trigger split >>>>> Query: UPSERT INTO `SourceTable_4110460276970326713` (Key, Message) VALUES (0, 
'Message-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa ... 
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'), (4999, 'Message-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'); Wait for partition split and data replication Verify data replicated from all partitions >>>>> Query: SELECT `Message` FROM `Table_4110460276970326713` ORDER BY `Message` Attempt=19 count=5000 DDL: DROP ASYNC REPLICATION `Replication_4110460276970326713`; DDL: DROP TABLE `SourceTable_4110460276970326713` |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/replication/unittest |99.9%| [TM] {RESULT} ydb/tests/functional/replication/unittest >> test_workload.py::TestYdbMixedWorkload::test[column] [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/mixedpy/tests/py3test >> test_workload.py::TestYdbMixedWorkload::test[column] [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/mixedpy/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/mixedpy/tests/py3test |99.9%| CLEANING BUILD ROOT Number of suites skipped by size: 196 ydb/core/viewer/tests [size:medium] ------ sole chunk ran 51 tests (total:89.81s - setup:0.01s test:89.31s canon:0.24s) [fail] test.py::TestViewer::test_viewer_nodes_all [default-linux-x86_64-release-asan] (0.19s) Test results differ from canonical: test_result['/Root']['Nodes'][0]['Peers'][0]['ScopeId']['X2']: - 6 + 42 test_result['/Root']['Nodes'][0]['Peers'][1]['ScopeId']['X2']: - 7 + 43 test_result['/Root/dedicated_db']['Nodes'][0]['Peers'][1]['ScopeId']['X2']: - 7 + 43 test_result['/Root/dedicated_db']['Nodes'][0]['ReversePeers'][0]['ScopeId']['X2']: - 6 + 42 test_result['/Root/dedicated_db']['Nodes'][0]['ReversePeers'][1]['ScopeId']['X2']: - 6 + 42 test_result['/Root/serverless_db']['Nodes'][0]['Peers'][1]['ScopeId']['X2']: - 6 + 42 test_result['/Root/serverless_db']['Nodes'][0]['ReversePeers'][0]['ScopeId']['X2']: - 7 + 43 test_result['/Root/serverless_db']['Nodes'][0]['ReversePeers'][1]['ScopeId']['X2']: - 7 + 43 test_result['/Root/shared_db']['Nodes'][0]['Peers'][1]['ScopeId']['X2']: - 6 + 42 test_result['/Root/shared_db']['Nodes'][0]['ReversePeers'][0]['ScopeId']['X2']: - 7 + 43 test_result['/Root/shared_db']['Nodes'][0]['ReversePeers'][1]['ScopeId']['X2']: - 7 + 43 test_result['no-database']['Nodes'][0]['Peers'][0]['ScopeId']['X2']: - 6 + 42 test_result['no-database']['Nodes'][0]['Peers'][1]['ScopeId']['X2']: - 7 + 43 test_result['no-database']['Nodes'][1]['Peers'][1]['ScopeId']['X2']: - 6 + 42 test_result['no-database']['Nodes'][1]['ReversePeers'][0]['ScopeId']['X2']: - 7 + 43 test_result['no-database']['Nodes'][1]['ReversePeers'][1]['ScopeId']['X2']: - 7 + 43 test_result['no-database']['Nodes'][2]['Peers'][1]['ScopeId']['X2']: - 7 + 
43 test_result['no-database']['Nodes'][2]['ReversePeers'][0]['ScopeId']['X2']: - 6 + 42 test_result['no-database']['Nodes'][2]['ReversePeers'][1]['ScopeId']['X2']: - 6 + 42 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_viewer_nodes_all.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff [fail] test.py::TestViewer::test_viewer_storage_nodes_all [default-linux-x86_64-release-asan] (0.20s) Test results differ from canonical: test_result['/Root']['Nodes'][0]['Peers'][0]['ScopeId']['X2']: - 6 + 42 test_result['/Root']['Nodes'][0]['Peers'][1]['ScopeId']['X2']: - 7 + 43 test_result['/Root/dedicated_db']['Nodes'][0]['Peers'][0]['ScopeId']['X2']: - 6 + 42 test_result['/Root/dedicated_db']['Nodes'][0]['Peers'][1]['ScopeId']['X2']: - 7 + 43 test_result['/Root/serverless_db']['Nodes'][0]['Peers'][0]['ScopeId']['X2']: - 6 + 42 test_result['/Root/serverless_db']['Nodes'][0]['Peers'][1]['ScopeId']['X2']: - 7 + 43 test_result['/Root/shared_db']['Nodes'][0]['Peers'][0]['ScopeId']['X2']: - 6 + 42 test_result['/Root/shared_db']['Nodes'][0]['Peers'][1]['ScopeId']['X2']: - 7 + 43 test_result['no-database']['Nodes'][0]['Peers'][0]['ScopeId']['X2']: - 6 + 42 test_result['no-database']['Nodes'][0]['Peers'][1]['ScopeId']['X2']: - 7 + 43 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_viewer_storage_nodes_all.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff [fail] test.py::TestViewer::test_viewer_describe [default-linux-x86_64-release-asan] (0.19s) Test results differ from canonical: test_result['/Root']['PathDescription']['Children'][1]['Name']: - dedicated_db + .sys test_result['/Root']['PathDescription']['Children'][1]['Owner']: - root + metadata@system test_result['/Root']['PathDescription']['Children'][1]['PathType']: value 1 differs from canonical 10 test_result['/Root']['PathDescription']['Children'][1]['ChildrenExist']: extra key with value True test_result['/Root']['PathDescription']['Children'][2]['Name']: - serverless_db + dedicated_db test_result['/Root']['PathDescription']['Children'][2]['PathState']: value 2 differs from canonical 3 test_result['/Root']['PathDescription']['Children'][3]['Name']: - shared_db + serverless_db test_result['/Root']['PathDescription']['Children'][3]['PathState']: value 3 differs from canonical 2 test_result['/Root']['PathDescription']['Children'][4]['CreateStep']: - 0 + not-zero-number-text test_result['/Root']['PathDescription']['Children'][4]['CreateTxId']: - 0 + not-zero-number-text test_result['/Root']['PathDescription']['Children'][4]['Name']: - .sys + shared_db test_result['/Root']['PathDescription']['Children'][4]['PathType']: value 10 differs from canonical 1 test_result['/Root']['PathDescription']['Children'][4]['ACL']: extra key with value '', test_result['/Root']['PathDescription']['Children'][4]['Owner']: extra key with value 'root', test_result['/Root']['PathDescription']['Children'][4]['ParentPathId']: extra key with value '1', test_result['/Root']['PathDescription']['Children'][4]['PathId']: extra key with value 'not-zero-number-text', test_result['/Root']['PathDescription']['Children'][4]['PathState']: extra key with value 2 test_result['/Root']['PathDescription']['DomainDescription']['PathsInside']: - 7 + 43 
test_result['/Root']['PathDescription']['Self']['PathVersion']: - 16 + 19 test_result['/Root/dedicated_db']['PathDescription']['Children'][1]['ChildrenExist']: value True differs from canonical False test_result['/Root/dedicated_db']['PathDescription']['Children'][1]['Name']: - table1 + .sys test_result['/Root/dedicated_db']['PathDescription']['Children'][1]['Owner']: - root@builtin + metadata@system test_result['/Root/dedicated_db']['PathDescription']['Children'][1]['PathState']: value 2 differs from canonical 3 test_result['/Root/dedicated_db']['PathDescription']['Children'][1]['PathType']: value 1 differs from canonical 2 test_result['/Root/dedicated_db']['PathDescription']['Children'][2]['CreateStep']: - 0 + ..[snippet truncated].. cription']['Children'][1]['PathState']: extra key with value 3 test_result['/Root/serverless_db']['PathDescription']['DomainDescription']['PathId_Depricated']: - 8 + 44 test_result['/Root/serverless_db']['PathDescription']['DomainDescription']['PathsInside']: - 1 + 31 test_result['/Root/serverless_db']['PathDescription']['Self']['PathVersion']: - 6 + 9 test_result['/Root/shared_db']['PathDescription']['Children'][1]['ChildrenExist']: value True differs from canonical False test_result['/Root/shared_db']['PathDescription']['Children'][1]['Name']: - table1 + .sys test_result['/Root/shared_db']['PathDescription']['Children'][1]['Owner']: - root@builtin + metadata@system test_result['/Root/shared_db']['PathDescription']['Children'][1]['PathState']: value 2 differs from canonical 3 test_result['/Root/shared_db']['PathDescription']['Children'][1]['PathType']: value 1 differs from canonical 2 test_result['/Root/shared_db']['PathDescription']['Children'][2]['CreateStep']: - 0 + not-zero-number-text test_result['/Root/shared_db']['PathDescription']['Children'][2]['CreateTxId']: - 0 + not-zero-number-text test_result['/Root/shared_db']['PathDescription']['Children'][2]['Name']: - .sys + table1 test_result['/Root/shared_db']['PathDescription']['Children'][2]['PathType']: value 2 differs from canonical 1 test_result['/Root/shared_db']['PathDescription']['Children'][2]['ACL']: extra key with value '', test_result['/Root/shared_db']['PathDescription']['Children'][2]['ChildrenExist']: extra key with value False, test_result['/Root/shared_db']['PathDescription']['Children'][2]['Owner']: extra key with value 'root@builtin', test_result['/Root/shared_db']['PathDescription']['Children'][2]['ParentPathId']: extra key with value '1', test_result['/Root/shared_db']['PathDescription']['Children'][2]['PathId']: extra key with value 'not-zero-number-text', test_result['/Root/shared_db']['PathDescription']['Children'][2]['PathState']: extra key with value 3 test_result['/Root/shared_db']['PathDescription']['DomainDescription']['PathId_Depricated']: - 7 + 43 test_result['/Root/shared_db']['PathDescription']['DomainDescription']['PathsInside']: - 5 + 35 test_result['/Root/shared_db']['PathDescription']['Self']['PathVersion']: - 10 + 13 see test diff /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.TestViewer.test_viewer_describe/test.TestViewer.test_viewer_describe.diff Diff: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.TestViewer.test_viewer_describe/test.TestViewer.test_viewer_describe.diff Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_viewer_describe.log 
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff [fail] test.py::TestViewer::test_viewer_tenantinfo [default-linux-x86_64-release-asan] (0.04s) Test results differ from canonical: test_result['TenantInfo'][0]['GrpcRequestBytes']: extra key with value '364', test_result['TenantInfo'][0]['GrpcResponseBytes']: extra key with value '47' test_result['TenantInfo'][1]['Id']: - 6 + 42 test_result['TenantInfo'][2]['Id']: - 8 + 44 test_result['TenantInfo'][2]['ResourceId']: - 7 + 43 test_result['TenantInfo'][3]['Id']: - 7 + 43 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_viewer_tenantinfo.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff [fail] test.py::TestViewer::test_viewer_tenantinfo_db [default-linux-x86_64-release-asan] (0.16s) Test results differ from canonical: test_result['/Root']['TenantInfo'][0]['GrpcRequestBytes']: extra key with value '364', test_result['/Root']['TenantInfo'][0]['GrpcResponseBytes']: extra key with value '47' test_result['/Root/dedicated_db']['TenantInfo'][0]['Id']: - 6 + 42 test_result['/Root/serverless_db']['TenantInfo'][0]['Id']: - 8 + 44 test_result['/Root/serverless_db']['TenantInfo'][0]['ResourceId']: - 7 + 43 test_result['/Root/shared_db']['TenantInfo'][0]['Id']: - 7 + 43 test_result['no-database']['TenantInfo'][0]['AliveNodes']: value 1 is missing test_result['no-database']['TenantInfo'][0]['CoresTotal']: value 'not-zero-number' is missing test_result['no-database']['TenantInfo'][0]['CoresUsed']: value 'not-zero-number' is missing test_result['no-database']['TenantInfo'][0]['MemoryLimit']: value 'not-zero-number-text' is missing test_result['no-database']['TenantInfo'][0]['Metrics']: value {} is missing test_result['no-database']['TenantInfo'][0]['NetworkUtilization']: value 'number' is missing test_result['no-database']['TenantInfo'][0]['NetworkWriteThroughput']: value 'text' is missing test_result['no-database']['TenantInfo'][0]['NodeIds']: value [1] is missing test_result['no-database']['TenantInfo'][0]['Overall']: value 'Green' is missing test_result['no-database']['TenantInfo'][0]['PoolStats']: value [{'Name': 'System', 'Threads': 2, 'Usage': 'number'}, {'Name'... 
is missing test_result['no-database']['TenantInfo'][1]['Id']: - 6 + 42 test_result['no-database']['TenantInfo'][2]['Id']: - 8 + 44 test_result['no-database']['TenantInfo'][2]['ResourceId']: - 7 + 43 test_result['no-database']['TenantInfo'][3]['Id']: - 7 + 43 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_viewer_tenantinfo_db.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff [fail] test.py::TestViewer::test_viewer_autocomplete [default-linux-x86_64-release-asan] (0.10s) Test results differ from canonical: test_result['/Root']['Result']['Entities'][0]['Name']: - .metadata + .sys test_result['/Root']['Result']['Entities'][1]['Name']: - shared_db + .metadata test_result['/Root']['Result']['Entities'][1]['Type']: - ext_sub_domain + dir test_result['/Root']['Result']['Entities'][2]['Name']: - dedicated_db + shared_db test_result['/Root']['Result']['Entities'][3]['Name']: - serverless_db + dedicated_db test_result['/Root']['Result']['Entities'][4]: extra value {'Name': 'serverless_db', 'Type': 'ext_sub_domain'} test_result['/Root']['Result']['Total']: value 5 differs from canonical 4 test_result['/Root/dedicated_db']['Result']['Entities'][0]['Name']: - table1 + .sys test_result['/Root/dedicated_db']['Result']['Entities'][0]['Type']: - table + dir test_result['/Root/dedicated_db']['Result']['Entities'][1]['Name']: - .metadata + table1 test_result['/Root/dedicated_db']['Result']['Entities'][1]['Type']: - dir + table test_result['/Root/dedicated_db']['Result']['Entities'][2]: extra value {'Name': '.metadata', 'Type': 'dir'} test_result['/Root/dedicated_db']['Result']['Total']: value 3 differs from canonical 2 test_result['/Root/serverless_db']['Result']['Entities'][0]['Name']: - table1 + .sys test_result['/Root/serverless_db']['Result']['Entities'][0]['Type']: - table + dir test_result['/Root/serverless_db']['Result']['Entities'][1]: extra value {'Name': 'table1', 'Type': 'table'} test_result['/Root/serverless_db']['Result']['Total']: value 2 differs from canonical 1 test_result['/Root/shared_db']['Result']['Entities'][0]['Name']: - table1 + .sys test_result['/Root/shared_db']['Result']['Entities'][0]['Type']: - table + dir test_result['/Root/shared_db']['Result']['Entities'][1]['Name']: - .metadata + table1 test_result['/Root/shared_db']['Result']['Entities'][1]['Type']: - dir + table test_result['/Root/shared_db']['Result']['Entities'][2]: extra value {'Name': '.metadata', 'Type': 'dir'} test_result['/Root/shared_db']['Result']['Total']: value 3 differs from canonical 2 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_viewer_autocomplete.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff [fail] test.py::TestViewer::test_scheme_directory [default-linux-x86_64-release-asan] (0.31s) Test results differ from canonical: test_result['1-get']['children'][1]['name']: - table1 + .sys test_result['1-get']['children'][1]['owner']: - root@builtin + metadata@system test_result['1-get']['children'][1]['type']: - TABLE + DIRECTORY test_result['1-get']['children'][2]['name']: - topic1 + table1 test_result['1-get']['children'][2]['type']: - TOPIC + TABLE test_result['1-get']['children'][3]['created_at']['plan_step']: extra key with value 'not-zero-number-text', 
test_result['1-get']['children'][3]['created_at']['tx_id']: extra key with value 'not-zero-number-text' test_result['1-get']['children'][3]['name']: - .sys + topic1 test_result['1-get']['children'][3]['type']: - DIRECTORY + TOPIC test_result['1-get']['children'][3]['owner']: extra key with value 'root@builtin' test_result['3-get']['children'][1]['name']: - table1 + .sys test_result['3-get']['children'][1]['owner']: - root@builtin + metadata@system test_result['3-get']['children'][1]['type']: - TABLE + DIRECTORY test_result['3-get']['children'][2]['name']: - test_dir + table1 test_result['3-get']['children'][2]['type']: - DIRECTORY + TABLE test_result['3-get']['children'][3]['name']: - topic1 + test_dir test_result['3-get']['children'][3]['type']: - TOPIC + DIRECTORY test_result['3-get']['children'][4]['created_at']['plan_step']: extra key with value 'not-zero-number-text', test_result['3-get']['children'][4]['created_at']['tx_id']: extra key with value 'not-zero-number-text' test_result['3-get']['children'][4]['name']: - .sys + topic1 test_result['3-get']['children'][4]['type']: - DIRECTORY + TOPIC test_result['3-get']['children'][4]['owner']: extra key with value 'root@builtin' test_result['5-get']['children'][1]['name']: - table1 + .sys test_result['5-get']['children'][1]['owner']: - root@builtin + metadata@system test_result['5-get']['children'][1]['type']: - TABLE + DIRECTORY test_result['5-get']['children'][2]['name']: - topic1 + table1 test_result['5-get']['children'][2]['type']: - TOPIC + TABLE test_result['5-get']['children'][3]['created_at']['plan_step']: extra key with value 'not-zero-number-text', test_result['5-get']['children'][3]['created_at']['tx_id']: extra key with value 'not-zero-number-text' test_result['5-get']['children'][3]['name']: - .sys + topic1 test_result['5-get']['children'][3]['type']: - DIRECTORY + TOPIC test_result['5-get']['children'][3]['owner']: extra key with value 'root@builtin' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_scheme_directory.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff ------ FAIL: 44 - GOOD, 7 - FAIL ydb/core/viewer/tests ydb/tests/functional/statistics [size:medium] nchunks:10 ------ [test_restarts.py 0/10] chunk ran 1 test (total:141.04s - test:140.99s) [fail] test_restarts.py::test_basic [default-linux-x86_64-release-asan] (135.39s) ydb/tests/functional/statistics/test_restarts.py:95: in test_basic assert_that(wait_for(get_base_stats_response, timeout_seconds=5), E AssertionError: base stats available after restart Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/statistics/test-results/py3test/testing_out_stuff/test_restarts.py.test_basic.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/statistics/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/functional/statistics ydb/core/persqueue/public/describer/ut ------ sole chunk ran 4 tests (total:76.13s - test:60.10s) Test was terminated by signal 3 Chunk exceeded 60s timeout and was killed List of the tests involved in the launch: TDescriberTests::TopicNotExists (good) duration: 16.26s TDescriberTests::TopicNotTopic (good) duration: 15.99s TDescriberTests::CDC (good) duration: 14.61s TDescriberTests::TopicExists (good) duration: 11.36s Killed by timeout (60 s) Log: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/public/describer/ut/test-results/unittest/testing_out_stuff/stderr ------ TIMEOUT: 4 - GOOD ydb/core/persqueue/public/describer/ut ydb/core/kqp/ut/query [size:medium] nchunks:50 ------ [0/50] chunk ran 5 tests (total:93.97s - test:93.88s) [fail] KqpAnalyze::AnalyzeTable+ColumnStore [default-linux-x86_64-release-asan] (33.37s) assertion failed at ydb/core/statistics/ut_common/ut_common.cpp:472, std::shared_ptr NKikimr::NStat::ExtractCountMin(TTestActorRuntime &, const TPathId &, ui64): (rsp.Success) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 Construct at /-S/util/generic/string.h:212:17 operator() at /-S/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpAnalyze.AnalyzeTable.ColumnStore.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpAnalyze.AnalyzeTable.ColumnStore.out ------ [40/50] chunk ran 4 tests (total:342.43s - setup:0.01s test:342.39s) [fail] KqpStats::DeferredEffects+UseSink [default-linux-x86_64-release-asan] (311.58s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:239, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseDeferredEffects::Execute_(NUnitTest::TTestContext &) [UseSink = true]: (result.GetStatus() == EStatus::SUCCESS) failed: (TIMEOUT != SUCCESS)
: Error: Query did not complete within specified timeout 300000ms, session id ydb://session/3?node_id=2&id=NjYxNDdiYjUtMTk2OWEzY2UtYWRmZmNlYTgtM2I4ZDMyYTQ= , with diff: (TIM|SUCC)E(OUT|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1BE1F58B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1C31846B 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:239: Execute_ @ 0x1B9B4D67 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: operator() @ 0x1B9A61B7 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1B9A61B7 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19:1) &> @ 0x1B9A61B7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1B9A61B7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1B9A61B7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1C3510D9 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1C3510D9 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1C3510D9 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1C31F147 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:19: Execute @ 0x1B9A54AE 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1C3208FF 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1C34AF3C 15. ??:0: ?? @ 0x7F1430A35D8F 16. ??:0: ?? @ 0x7F1430A35E3F 17. ??:0: ?? @ 0x18A8F028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.DeferredEffects.UseSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.DeferredEffects.UseSink.out ------ FAIL: 232 - GOOD, 2 - FAIL ydb/core/kqp/ut/query ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [2/50] chunk ran 4 tests (total:225.60s - test:225.39s) [crashed] KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [default-linux-x86_64-release-asan] (17.89s) Test crashed (return code: 100) ==1271004==ERROR: LeakSanitizer: detected memory leaks Indirect leak of 27200 byte(s) in 1 object(s) allocated from: #0 0x00001b748f1d in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021b8314b in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021b8314b in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021b8314b in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021b8314b in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021b8314b in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:807:25 #6 0x000021b8314b in vector /-S/contrib/libs/cxxsupp/libcxx/include/vector:461:7 #7 0x000021b8314b in make_unique >, unsigned long &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #8 0x000021b8314b in grpc_core::Server::ChannelData::InitTransport(grpc_core::RefCountedPtr, grpc_core::RefCountedPtr, unsigned long, grpc_transport*, long) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1155:9 #9 0x000021b829bd in 
grpc_core::Server::SetupTransport(grpc_transport*, grpc_pollset*, grpc_core::ChannelArgs const&, grpc_core::RefCountedPtr co ..[snippet truncated].. #17 0x0000213a51d8 in cq_next(grpc_completion_queue*, gpr_timespec, void*) /-S/contrib/libs/grpc/src/core/lib/surface/completion_queue.cc:1036:29 #18 0x000021bede6e in grpc::CompletionQueue::AsyncNextInternal(void**, bool*, gpr_timespec) /-S/contrib/libs/grpc/src/cpp/common/completion_queue_cc.cc:166:15 #19 0x00002248c13f in Next /-S/contrib/libs/grpc/include/grpcpp/completion_queue.h:182:13 #20 0x00002248c13f in NYdbGrpc::Dev::PullEvents(grpc::CompletionQueue*) /-S/ydb/public/sdk/cpp/src/library/grpc/client/grpc_client_low.cpp:212:18 #21 0x00001ba7a85e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #22 0x00001ba7a85e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #23 0x00001ba7a85e in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13 #24 0x00001ba7adbc in Execute /-S/util/thread/factory.h:15:13 #25 0x00001ba7adbc in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41 #26 0x00001ba6eda4 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:245:20 #27 0x00001b70ca36 in asan_thread_start(void*) /-S/contrib/libs/clang20-rt/lib/asan/asan_interceptors.cpp:239:28 SUMMARY: AddressSanitizer: 428360 byte(s) leaked in 5250 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpLocksTricky.TestNoLocksIssueInteractiveTx-withSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpLocksTricky.TestNoLocksIssueInteractiveTx-withSink.out ------ [29/50] chunk ran 3 tests (total:418.32s - setup:0.01s test:418.06s) [crashed] KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables [default-linux-x86_64-release-asan] (156.86s) Test crashed (return code: 100) ==1429175==ERROR: LeakSanitizer: detected memory leaks Indirect leak of 54400 byte(s) in 2 object(s) allocated from: #0 0x00001b748f1d in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021b8314b in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021b8314b in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021b8314b in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021b8314b in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021b8314b in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:807:25 #6 0x000021b8314b in vector /-S/contrib/libs/cxxsupp/libcxx/include/vector:461:7 #7 0x000021b8314b in make_unique >, unsigned long &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #8 0x000021b8314b in grpc_core::Server::ChannelData::InitTransport(grpc_core::RefCountedPtr, grpc_core::RefCountedPtr, unsigned long, grpc_transport*, long) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1155:9 #9 0x000021b829bd in grpc_core::Server::SetupTransport(grpc_transport*, grpc_pollset*, grpc_core::ChannelArgs const&, grpc_core::RefCountedPtr co ..[snippet truncated].. 
/grpc_server.cpp:265:49) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #23 0x000021191b46 in __call<(lambda at /-S/ydb/library/grpc/server/grpc_server.cpp:265:49) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #24 0x000021191b46 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #25 0x000021191b46 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #26 0x00001ba7a85e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #27 0x00001ba7a85e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #28 0x00001ba7a85e in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13 #29 0x00001ba7adbc in Execute /-S/util/thread/factory.h:15:13 #30 0x00001ba7adbc in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41 #31 0x00001ba6eda4 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:245:20 #32 0x00001b70ca36 in asan_thread_start(void*) /-S/contrib/libs/clang20-rt/lib/asan/asan_interceptors.cpp:239:28 SUMMARY: AddressSanitizer: 857212 byte(s) leaked in 10514 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.ConflictWrite.IsOlap.FillTables.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.ConflictWrite.IsOlap.FillTables.out ------ [32/50] chunk ran 3 tests (total:229.89s - test:229.43s) [crashed] KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation [default-linux-x86_64-release-asan] (35.04s) Test crashed (return code: 100) ==1270447==ERROR: LeakSanitizer: detected memory leaks Indirect leak of 27200 byte(s) in 1 object(s) allocated from: #0 0x00001b748f1d in operator new(unsigned long) /-S/contrib/libs/clang20-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x000021b8314b in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:271:10 #2 0x000021b8314b in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:295:10 #3 0x000021b8314b in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:103:32 #4 0x000021b8314b in __allocate_at_least > /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocate_at_least.h:41:19 #5 0x000021b8314b in __vallocate /-S/contrib/libs/cxxsupp/libcxx/include/vector:807:25 #6 0x000021b8314b in vector /-S/contrib/libs/cxxsupp/libcxx/include/vector:461:7 #7 0x000021b8314b in make_unique >, unsigned long &> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:642:30 #8 0x000021b8314b in grpc_core::Server::ChannelData::InitTransport(grpc_core::RefCountedPtr, grpc_core::RefCountedPtr, unsigned long, grpc_transport*, long) /-S/contrib/libs/grpc/src/core/lib/surface/server.cc:1155:9 #9 0x000021b829bd in grpc_core::Server::SetupTransport(grpc_transport*, grpc_pollset*, grpc_core::ChannelArgs const&, grpc_core::RefCountedPtr co ..[snippet truncated].. 
r/grpc_server.cpp:265:49) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:25 #23 0x000021191b46 in __call<(lambda at /-S/ydb/library/grpc/server/grpc_server.cpp:265:49) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224:5 #24 0x000021191b46 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169:12 #25 0x000021191b46 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314:10 #26 0x00001ba7a85e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431:12 #27 0x00001ba7a85e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:10 #28 0x00001ba7a85e in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13 #29 0x00001ba7adbc in Execute /-S/util/thread/factory.h:15:13 #30 0x00001ba7adbc in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41 #31 0x00001ba6eda4 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:245:20 #32 0x00001b70ca36 in asan_thread_start(void*) /-S/contrib/libs/clang20-rt/lib/asan/asan_interceptors.cpp:239:28 SUMMARY: AddressSanitizer: 428565 byte(s) leaked in 5232 allocation(s). Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TPragmaSettingOlap-IsSnapshotIsolation.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TPragmaSettingOlap-IsSnapshotIsolation.out ------ FAIL: 163 - GOOD, 3 - CRASHED ydb/core/kqp/ut/tx ------ [test_disk.py 0/10] chunk ran 1 test (total:59.32s - setup:0.01s test:59.04s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 14.6G (15314824K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1068051 54.9M 54.8M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1068357 40.4M 23.0M 10.2M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1068374 745M 746M 662M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --do 1069928 1.5G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1070683 1.5G 1.5G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1071683 1.5G 1.5G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1072677 1.5G 1.4G 985M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1073527 1.5G 1.5G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1073897 1.5G 1.5G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1074383 1.5G 1.4G 948M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1074610 1.5G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1075433 1.5G 1.4G 1002M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ------ [test_tablet.py 0/10] chunk ran 1 test (total:114.54s - test:114.34s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 14.5G (15219696K) used. 
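For reference on the REQUIREMENTS(ram:X) hint repeated in these memory-limit warnings: below is a minimal sketch of how such a bump usually looks in a test suite's ya.make. Only REQUIREMENTS(ram:X) comes from the warning text itself; the surrounding macros (PY3TEST, TEST_SRCS, SIZE, TIMEOUT, END) and all concrete values are assumed-typical Arcadia test macros, not copied from this repository or this run.

    # Sketch only: a py3test suite's ya.make with a raised RAM requirement.
    # REQUIREMENTS(ram:X) is the knob the warning above refers to; everything
    # else is an assumed-typical skeleton, not taken from the failing suites.
    PY3TEST()

    TEST_SRCS(
        test_tablet.py    # illustrative module name
    )

    SIZE(MEDIUM)          # size class used for scheduling and default limits
    TIMEOUT(600)          # per-chunk timeout in seconds (illustrative value)
    REQUIREMENTS(ram:16)  # request a larger RAM budget than the 8.0G reported above (value illustrative)

    END()

The 60 s kill of ydb/core/persqueue/public/describer/ut earlier in this report is the same kind of per-chunk limit, typically adjusted through the SIZE()/TIMEOUT() pair rather than through REQUIREMENTS().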
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1652559 54.9M 54.7M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1652960 39.9M 22.5M 9.7M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1652979 740M 734M 660M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --do 1656376 1.7G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1656895 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1676552 1.7G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_st 1657803 1.7G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1676508 1.7G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_st 1658829 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1676568 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_st 1660226 1.6G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff 1676700 1.6G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_st Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ydb/core/tx/schemeshard/ut_base [size:medium] nchunks:10 ------ [0/10] chunk ran 17 tests (total:72.99s - setup:0.01s test:72.70s) [fail] TSchemeShardCountersTest::PathsCounterDecrementsOnFail [default-linux-x86_64-release-asan] (2.34s) equal assertion failed at ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp:712, auto NSchemeShardUT_Private::NLs::PathsInsideDomain(ui64)::(anonymous class)::operator()(const NKikimrScheme::TEvDescribeSchemeResult &) const: curCount == count paths inside domain count mismatch, domain with id 1 has count 36 but expected 0 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 
NTestSuiteTSchemeShardCountersTest::TTestCasePathsCounterDecrementsOnFail::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp:31:9 operator() at /-S/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_base/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_base/test-results/unittest/testing_out_stuff/TSchemeShardCountersTest.PathsCounterDecrementsOnFail.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_base/test-results/unittest/testing_out_stuff/TSchemeShardCountersTest.PathsCounterDecrementsOnFail.out ------ FAIL: 167 - GOOD, 1 - FAIL ydb/core/tx/schemeshard/ut_base ydb/core/kqp/ut/view [size:medium] ------ sole chunk ran 23 tests (total:218.52s - test:218.45s) [fail] TCreateAndDropViewTest::DropViewIfExists [default-linux-x86_64-release-asan] (8.98s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropViewIfExists.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropViewIfExists.out ------ FAIL: 22 - GOOD, 1 - FAIL ydb/core/kqp/ut/view ------ sole chunk ran 1 test (total:123.00s - test:122.72s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 15.6G (16320664K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1798491 54.9M 54.9M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1798737 40.7M 23.3M 10.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1798774 1.1G 1.1G 1.0G └─ functional-sqs-merge_split_common_table-std --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini 1802247 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1802802 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1803439 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1804129 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1804708 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1805208 1.8G 1.8G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1805907 1.7G 1.8G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1806623 1.7G 1.7G 1.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff/stderr ydb/core/tx/schemeshard/ut_index_build [size:medium] nchunks:10 ------ [7/10] chunk ran 6 tests (total:181.82s - test:181.30s) [fail] VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false [default-linux-x86_64-release-asan] (10.30s) assertion failed at ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:954, void NTestSuiteVectorIndexBuildTest::Metering_ServerLessDB_Restarts(NUnitTest::TTestContext &) [doRestarts = false]: (meteringBlocker[0]->Get()->MeteringJson == expectedBill.ToString()) failed: ("{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-32-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n" != 
"{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-2-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n") , with diff: "{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-(3|)2-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n" TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 void NTestSuiteVectorIndexBuildTest::Metering_ServerLessDB_Restarts(NUnitTest::TTestContext&) at /-S/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:0:13 operator() at /-S/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/testing_out_stuff/VectorIndexBuildTest.Metering_ServerLessDB_Restarts-doRestarts-false.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/testing_out_stuff/VectorIndexBuildTest.Metering_ServerLessDB_Restarts-doRestarts-false.out [fail] VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-true [default-linux-x86_64-release-asan] (10.78s) assertion failed at ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:779, void NTestSuiteVectorIndexBuildTest::Metering_ServerLessDB(NUnitTest::TTestContext &) [smallScanBuffer = true]: (meteringBlocker[0]->Get()->MeteringJson == expectedBill.ToString()) failed: ("{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-32-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n" != "{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-2-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n") , with diff: 
"{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-(3|)2-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n" TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 void NTestSuiteVectorIndexBuildTest::Metering_ServerLessDB(NUnitTest::TTestContext&) at /-S/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:0:13 operator() at /-S/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/testing_out_stuff/VectorIndexBuildTest.Metering_ServerLessDB-smallScanBuffer-true.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/testing_out_stuff/VectorIndexBuildTest.Metering_ServerLessDB-smallScanBuffer-true.out [fail] VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-false [default-linux-x86_64-release-asan] (10.22s) assertion failed at ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:779, void NTestSuiteVectorIndexBuildTest::Metering_ServerLessDB(NUnitTest::TTestContext &) [smallScanBuffer = false]: (meteringBlocker[0]->Get()->MeteringJson == expectedBill.ToString()) failed: ("{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-32-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n" != "{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-2-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n") , with diff: "{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-(3|)2-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n" TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 void 
NTestSuiteVectorIndexBuildTest::Metering_ServerLessDB(NUnitTest::TTestContext&) at /-S/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:0:13 operator() at /-S/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/testing_out_stuff/VectorIndexBuildTest.Metering_ServerLessDB-smallScanBuffer-false.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/testing_out_stuff/VectorIndexBuildTest.Metering_ServerLessDB-smallScanBuffer-false.out ------ [8/10] chunk ran 6 tests (total:91.76s - setup:0.01s test:91.11s) [fail] VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-true [default-linux-x86_64-release-asan] (8.88s) assertion failed at ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:954, void NTestSuiteVectorIndexBuildTest::Metering_ServerLessDB_Restarts(NUnitTest::TTestContext &) [doRestarts = true]: (meteringBlocker[0]->Get()->MeteringJson == expectedBill.ToString()) failed: ("{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-32-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n" != "{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-2-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n") , with diff: "{\"usage\":{\"start\":0,\"quantity\":130,\"finish\":10,\"unit\":\"request_unit\",\"type\":\"delta\"},\"tags\":{},\"id\":\"109-72075186233409549-(3|)2-0-0-0-0-4-800-84-7200\",\"cloud_id\":\"CLOUD_ID_VAL\",\"source_wt\":10,\"source_id\":\"sless-docapi-ydb-ss\",\"resource_id\":\"DATABASE_ID_VAL\",\"schema\":\"ydb.serverless.requests.v1\",\"folder_id\":\"FOLDER_ID_VAL\",\"version\":\"1.0.0\"}\n" TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 void NTestSuiteVectorIndexBuildTest::Metering_ServerLessDB_Restarts(NUnitTest::TTestContext&) at /-S/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:0:13 operator() at /-S/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/testing_out_stuff/VectorIndexBuildTest.Metering_ServerLessDB_Restarts-doRestarts-true.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/testing_out_stuff/VectorIndexBuildTest.Metering_ServerLessDB_Restarts-doRestarts-true.out ------ FAIL: 55 - GOOD, 4 - FAIL ydb/core/tx/schemeshard/ut_index_build ydb/core/tx/schemeshard/ut_export [size:medium] nchunks:11 ------ [2/11] chunk ran 6 tests (total:45.22s - recipes:0.44s test:44.22s recipes:0.48s) [fail] TExportToS3Tests::Checksums [default-linux-x86_64-release-asan] (1.28s) assertion failed at ydb/core/tx/schemeshard/ut_export/ut_export.cpp:2445, virtual void NTestSuiteTExportToS3Tests::TTestCaseChecksums::Execute_(NUnitTest::TTestContext &): (*metadataChecksum == "7f26737c8c9e7abe6541c08c5e41681bd3574b305075aeffd243d57a0e169780 metadata.json") failed: ("a5a7ca9bce00ac9d7e5b48a30a46f139592845cad0664b3fda92af32583b7d52 metadata.json" != 7f26737c8c9e7abe6541c08c5e41681bd3574b305075aeffd243d57a0e169780 metadata.json) , with diff: ("a5a|7f2673)7c(a|8c)9(bc|)e(00|7)a(c9d7|b)e(|6)5(b|)4(|1c0)8(a30a|c5e)4(|1)6(f|8)1(|bd)3(9|)5(928|7)4(|b30507)5(c|)a(|eff)d(066|2)4(b|)3(f|)d(|57)a(|0e16)9(2af325|7)8(3b7d52|0) metadata.json("|) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NTestSuiteTExportToS3Tests::TTestCaseChecksums::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/tx/schemeshard/ut_export/ut_export.cpp:2445:9 operator() at /-S/ydb/core/tx/schemeshard/ut_export/ut_export.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_export/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_export/test-results/unittest/testing_out_stuff/TExportToS3Tests.Checksums.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_export/test-results/unittest/testing_out_stuff/TExportToS3Tests.Checksums.out ------ [3/11] chunk ran 6 tests (total:29.09s - recipes:0.48s test:28.08s recipes:0.44s) [fail] TExportToS3Tests::EnableChecksumsPersistance [default-linux-x86_64-release-asan] (1.90s) assertion failed at ydb/core/tx/schemeshard/ut_export/ut_export.cpp:2512, virtual void NTestSuiteTExportToS3Tests::TTestCaseEnableChecksumsPersistance::Execute_(NUnitTest::TTestContext &): (*metadataChecksum == "7f26737c8c9e7abe6541c08c5e41681bd3574b305075aeffd243d57a0e169780 metadata.json") failed: ("a5a7ca9bce00ac9d7e5b48a30a46f139592845cad0664b3fda92af32583b7d52 metadata.json" != 7f26737c8c9e7abe6541c08c5e41681bd3574b305075aeffd243d57a0e169780 metadata.json) , with diff: ("a5a|7f2673)7c(a|8c)9(bc|)e(00|7)a(c9d7|b)e(|6)5(b|)4(|1c0)8(a30a|c5e)4(|1)6(f|8)1(|bd)3(9|)5(928|7)4(|b30507)5(c|)a(|eff)d(066|2)4(b|)3(f|)d(|57)a(|0e16)9(2af325|7)8(3b7d52|0) metadata.json("|) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NTestSuiteTExportToS3Tests::TTestCaseEnableChecksumsPersistance::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/tx/schemeshard/ut_export/ut_export.cpp:0:9 operator() at /-S/ydb/core/tx/schemeshard/ut_export/ut_export.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_export/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_export/test-results/unittest/testing_out_stuff/TExportToS3Tests.EnableChecksumsPersistance.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_export/test-results/unittest/testing_out_stuff/TExportToS3Tests.EnableChecksumsPersistance.out ------ FAIL: 62 - GOOD, 2 - FAIL ydb/core/tx/schemeshard/ut_export ydb/core/tx/tx_proxy/ut_schemereq [size:medium] nchunks:10 ------ [0/10] chunk ran 30 tests (total:267.99s - setup:0.01s test:267.81s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [default-linux-x86_64-release-asan] (13.72s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14080 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [default-linux-x86_64-release-asan] (8.96s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4135 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [default-linux-x86_64-release-asan] (10.42s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8449 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [default-linux-x86_64-release-asan] (11.27s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12942 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [default-linux-x86_64-release-asan] (11.76s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:7754 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [default-linux-x86_64-release-asan] (9.23s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14413 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [default-linux-x86_64-release-asan] (13.31s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14836 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [default-linux-x86_64-release-asan] (9.84s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:21817 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [default-linux-x86_64-release-asan] (9.46s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27702 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [default-linux-x86_64-release-asan] (11.33s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:31104
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:637:13
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16
UnRef at /-S/util/generic/ptr.h:637:13
NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7.out
------ [1/10] chunk ran 30 tests (total:264.60s - test:264.47s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [default-linux-x86_64-release-asan] (10.54s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:10261 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [default-linux-x86_64-release-asan] (9.44s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:61904 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [default-linux-x86_64-release-asan] (9.85s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12468 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [default-linux-x86_64-release-asan] (9.45s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25772 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [default-linux-x86_64-release-asan] (9.75s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:13283 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [default-linux-x86_64-release-asan] (12.47s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27939 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [default-linux-x86_64-release-asan] (10.33s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14095
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:637:13
UnRef at /-S/util/generic/ptr.h:637:13
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16
UnRef at /-S/util/generic/ptr.h:637:13
NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70.out
------ [2/10] chunk ran 30 tests (total:264.24s - test:264.07s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [default-linux-x86_64-release-asan] (8.99s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12625 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [default-linux-x86_64-release-asan] (8.55s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:30184 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [default-linux-x86_64-release-asan] (8.33s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22925 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [default-linux-x86_64-release-asan] (9.44s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:2664 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [default-linux-x86_64-release-asan] (11.81s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:11500 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [default-linux-x86_64-release-asan] (10.45s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:11873 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [default-linux-x86_64-release-asan] (9.22s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:32186 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [default-linux-x86_64-release-asan] (9.29s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8604 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [default-linux-x86_64-release-asan] (9.09s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24325 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [default-linux-x86_64-release-asan] (11.12s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:62658 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [default-linux-x86_64-release-asan] (10.55s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26262
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:637:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16
UnRef at /-S/util/generic/ptr.h:637:13
NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15.out
------ [3/10] chunk ran 30 tests (total:265.93s - test:265.79s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [default-linux-x86_64-release-asan] (9.49s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27875 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [default-linux-x86_64-release-asan] (11.70s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22272 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [default-linux-x86_64-release-asan] (9.42s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8244 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [default-linux-x86_64-release-asan] (10.78s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:11388 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [default-linux-x86_64-release-asan] (10.58s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:15040 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [default-linux-x86_64-release-asan] (10.07s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:6095 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [default-linux-x86_64-release-asan] (10.40s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16237 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [default-linux-x86_64-release-asan] (7.73s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:17064 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [default-linux-x86_64-release-asan] (9.86s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:13802 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [default-linux-x86_64-release-asan] (8.63s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14878 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [default-linux-x86_64-release-asan] (9.87s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:1770 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [default-linux-x86_64-release-asan] (8.91s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:17339 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71.out
------ [4/10] chunk ran 30 tests (total:268.23s - setup:0.02s test:268.01s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [default-linux-x86_64-release-asan] (11.28s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5407 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [default-linux-x86_64-release-asan] (10.36s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:11538 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [default-linux-x86_64-release-asan] (10.93s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12798 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [default-linux-x86_64-release-asan] (10.52s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26279 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [default-linux-x86_64-release-asan] (9.56s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:1077 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [default-linux-x86_64-release-asan] (8.52s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:32074 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [default-linux-x86_64-release-asan] (11.25s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22333 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [default-linux-x86_64-release-asan] (11.21s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5828 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41.out
------ [5/10] chunk ran 30 tests (total:264.31s - setup:0.01s test:264.06s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [default-linux-x86_64-release-asan] (9.73s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14650 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [default-linux-x86_64-release-asan] (9.01s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:17173 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [default-linux-x86_64-release-asan] (8.53s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:17349 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [default-linux-x86_64-release-asan] (7.93s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4152 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [default-linux-x86_64-release-asan] (8.65s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:30768 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [default-linux-x86_64-release-asan] (10.33s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:21248 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [default-linux-x86_64-release-asan] (9.17s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24868 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56.out
------ [6/10] chunk ran 30 tests (total:264.69s - test:264.50s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [default-linux-x86_64-release-asan] (8.72s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:10592 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [default-linux-x86_64-release-asan] (10.12s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24287 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [default-linux-x86_64-release-asan] (8.78s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:3135 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [default-linux-x86_64-release-asan] (9.87s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20016 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [default-linux-x86_64-release-asan] (9.33s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:18745 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [default-linux-x86_64-release-asan] (11.87s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25989 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [default-linux-x86_64-release-asan] (13.41s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:21572 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69.out
------ [7/10] chunk ran 30 tests (total:264.72s - test:264.58s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [default-linux-x86_64-release-asan] (11.89s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:62377 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [default-linux-x86_64-release-asan] (9.52s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:30433 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [default-linux-x86_64-release-asan] (13.31s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:30760 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [default-linux-x86_64-release-asan] (9.04s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29896 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [default-linux-x86_64-release-asan] (10.36s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20812 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [default-linux-x86_64-release-asan] (9.51s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:18200 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [default-linux-x86_64-release-asan] (14.04s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5270 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [default-linux-x86_64-release-asan] (11.98s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22986 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [default-linux-x86_64-release-asan] (8.58s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12014 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9.out ------ [8/10] chunk ran 30 tests (total:265.43s - test:265.22s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [default-linux-x86_64-release-asan] (10.13s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:21798 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [default-linux-x86_64-release-asan] (8.82s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:28866 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [default-linux-x86_64-release-asan] (12.78s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:10998 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [default-linux-x86_64-release-asan] (9.55s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5133 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [default-linux-x86_64-release-asan] (9.52s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12458 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [default-linux-x86_64-release-asan] (12.60s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22325 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [default-linux-x86_64-release-asan] (9.90s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:10524 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [default-linux-x86_64-release-asan] (11.90s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:9354 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66.out ------ [9/10] chunk ran 30 tests (total:323.51s - test:322.41s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [default-linux-x86_64-release-asan] (8.10s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4877 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [default-linux-x86_64-release-asan] (15.51s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20161 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [default-linux-x86_64-release-asan] (13.62s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25775 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [default-linux-x86_64-release-asan] (16.06s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:61773 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [default-linux-x86_64-release-asan] (13.87s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14687 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAdministerTenant-DomainLoginOnly.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAdministerTenant-DomainLoginOnly.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [default-linux-x86_64-release-asan] (13.73s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:62554 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [default-linux-x86_64-release-asan] (12.84s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:28903 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [default-linux-x86_64-release-asan] (8.44s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:2927 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [default-linux-x86_64-release-asan] (14.75s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:64041 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [default-linux-x86_64-release-asan] (15.90s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:1771 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [default-linux-x86_64-release-asan] (13.16s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:3455 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck.out ------ FAIL: 210 - GOOD, 90 - FAIL ydb/core/tx/tx_proxy/ut_schemereq ydb/core/transfer/ut/functional [size:medium] ------ sole chunk ran 58 tests (total:352.09s - recipes:25.83s test:323.12s recipes:2.89s) [fail] Transfer::LocalTopic_BigMessage [default-linux-x86_64-release-asan] (27.12s) assertion failed at ydb/core/transfer/ut/common/utils.h:836, void NReplicationTest::MainTestCase::CheckResult(const std::string &, const TExpectations &): (false) Unable to wait transfer result TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NReplicationTest::MainTestCase::CheckResult(std::__y1::basic_string, std::__y1::allocator> const&, TVector>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>, std::__y1::allocator>, std::__y1::shared_ptr>, std::__y1::allocator>, std::__y1::shared_ptr>>>>> const&) at /-S/ydb/core/transfer/ut/common/utils.h:0:9 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/vector:546:18 operator() at /-S/ydb/core/transfer/ut/functional/transfer_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/transfer/ut/functional/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/transfer/ut/functional/test-results/unittest/testing_out_stuff/Transfer.LocalTopic_BigMessage.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/transfer/ut/functional/test-results/unittest/testing_out_stuff/Transfer.LocalTopic_BigMessage.out
------ FAIL: 57 - GOOD, 1 - FAIL ydb/core/transfer/ut/functional
------ [1/10] chunk ran 1 test (total:208.22s - test:208.11s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 15.3G (15994324K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make (see the ya.make sketch at the end of this section)
     pid     rss    ref   pdirt
 1098388   54.8M  54.6M    7.2M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 1098458   41.9M  24.0M   11.5M  └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
 1098463   58.6M  58.4M   32.6M     └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testi
 1098833    7.6G   7.6G    7.4G        └─ ydb-core-blobstorage-ut_blobstorage-ut_balancing --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancin
 1146054    7.6G   7.6G    7.4G           └─ ydb-core-blobstorage-ut_blobstorage-ut_balancing --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balan
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/testing_out_stuff/stderr
ydb/tests/fq/streaming_optimize [size:medium] nchunks:8
------ [test_sql_negative.py 0/4] chunk ran 1 test (total:24.32s - recipes:0.67s test:23.04s recipes:0.55s)
[fail] test_sql_negative.py::test[watermarks-bad_column-default.txt] [default-linux-x86_64-release-asan] (20.26s)
ydb/tests/fq/streaming_optimize/test_sql_negative.py:48: in test
    result = fq_run.yql_exec(check_error=False, action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_apvi0fpi/fq_config.conf
--result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_apvi0fpi/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_apvi0fpi/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_apvi0fpi/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_apvi0fpi/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_apvi0fpi/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_apvi0fpi/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_apvi0fpi/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_apvi0fpi/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_apvi0fpi/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b05682b8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b056823bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b05682e840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b05681e0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b05681e0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b05681e0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b0568337cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b0568337cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b05682b7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b05682b7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b056823bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b05681e036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b05681e036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
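Every streaming_optimize failure in this report has the same shape: fqrun exits with code 100, and the test then fails inside verify_sanitize_errors() even though yql_exec calls execute() with check_exit_code=False. A simplified sketch of that control flow, reconstructed only from the traceback above (it is not the real library/python/testing/yatest_common implementation, and SanitizedProcess is a hypothetical stand-in):

import subprocess


class ExecutionError(Exception):
    """Raised when the child process is treated as failed."""


class SanitizedProcess:
    def __init__(self, cmd):
        self._proc = subprocess.run(cmd, capture_output=True, text=True)

    def wait(self, check_exit_code):
        self._finalise(check_exit_code)

    def _finalise(self, check_exit_code):
        # check_exit_code=False suppresses only the exit-code assertion ...
        if check_exit_code and self._proc.returncode != 0:
            raise ExecutionError("non-zero exit code")
        # ... while sanitizer output is verified unconditionally, which is why
        # these tests still fail despite passing check_exit_code=False.
        self.verify_sanitize_errors()

    def verify_sanitize_errors(self):
        if "SUMMARY: AddressSanitizer" in (self._proc.stderr or ""):
            raise ExecutionError(self._proc.stderr)


def execute(cmd, check_exit_code=True):
    res = SanitizedProcess(cmd)
    res.wait(check_exit_code)
    return res

Under that reading, unblocking these tests means fixing (or suppressing) the leak reported in the AddressSanitizer SUMMARY lines, not tweaking check_exit_code.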
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_negative.py.test.watermarks-bad_column-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_negative.py 1/4] chunk ran 1 test (total:22.59s - recipes:0.73s test:21.26s recipes:0.54s) [fail] test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [default-linux-x86_64-release-asan] (18.62s) ydb/tests/fq/streaming_optimize/test_sql_negative.py:48: in test result = fq_run.yql_exec(check_error=False, action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y62r99zt/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y62r99zt/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y62r99zt/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y62r99zt/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y62r99zt/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y62r99zt/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y62r99zt/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y62r99zt/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y62r99zt/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y62r99zt/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7ba3855d8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7ba38555bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7ba38560840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7ba385500e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7ba385500e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7ba385500d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7ba385657cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7ba385657cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7ba3855d7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7ba3855d7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7ba38555bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7ba38550036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7ba38550036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_negative.py.test.watermarks-bad_pushdown-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 0/4] chunk ran 8 tests (total:158.07s - recipes:0.65s test:156.72s recipes:0.56s) [fail] test_sql_streaming.py::test[hop-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (20.13s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eqtjadc5/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eqtjadc5/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eqtjadc5/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eqtjadc5/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eqtjadc5/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eqtjadc5/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eqtjadc5/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eqtjadc5/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eqtjadc5/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_eqtjadc5/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b1f8a9f8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b1f8a97bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b1f8aa2840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b1f8a920e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b1f8a920e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b1f8a920d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b1f8aa77cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b1f8aa77cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b1f8a9f7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b1f8a9f7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b1f8a97bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b1f8a92036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b1f8a92036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
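To examine one of these failures in isolation, the full fqrun command line is printed in each ExecutionError and can be replayed outside the test harness. A hedged local-debugging sketch, not part of the suite: the paths are placeholders for the concrete testing_out_stuff locations printed above, and disabling leak detection is a local choice to separate the explain result from the LeakSanitizer report (whether that also changes the exit code 100 depends on how the sanitizer exit code is configured):

import os
import subprocess

FQRUN = "/path/to/tmp/out/ydb/tests/tools/fqrun/fqrun"      # placeholder
RUN_DIR = "/path/to/testing_out_stuff/yql/test_fqrun_xxxx"  # placeholder

cmd = [
    FQRUN,
    "--exclude-linked-udfs",
    "--action=explain",
    f"--cfg={RUN_DIR}/fq_config.conf",
    f"--query={RUN_DIR}/query_0.sql",
    f"--result-file={RUN_DIR}/results.txt",
    f"--ast-file={RUN_DIR}/ast.txt",
    f"--plan-file={RUN_DIR}/plan.json",
    f"--log-file={RUN_DIR}/log.txt",
    "--result-format=full-proto",
    "--canonical-output",
    # --udfs-dir and the four --emulate-pq arguments should be copied exactly
    # as printed in the ExecutionError message for the test being replayed.
    f"--emulate-pq=test_topic_input@{RUN_DIR}/topic_0.txt",
    f"--emulate-pq=test_topic_input2@{RUN_DIR}/topic_1.txt",
    f"--emulate-pq=test_topic_output@{RUN_DIR}/topic_2.txt",
    f"--emulate-pq=test_topic_output2@{RUN_DIR}/topic_3.txt",
]

env = dict(os.environ, ASAN_OPTIONS="detect_leaks=0")  # local debugging only
proc = subprocess.run(cmd, env=env, capture_output=True, text=True)
print("exit code:", proc.returncode)
print(proc.stderr[-2000:])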
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (19.04s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ioxag999/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ioxag999/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ioxag999/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ioxag999/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ioxag999/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ioxag999/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ioxag999/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ioxag999/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ioxag999/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ioxag999/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b624b068960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b624afebb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b624b09840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b624af90e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b624af90e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b624af90d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b624b0e7cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b624b0e7cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b624b067977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b624b067977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b624afebb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b624af9036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b624af9036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (17.38s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xgz97re/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xgz97re/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xgz97re/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xgz97re/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xgz97re/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xgz97re/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xgz97re/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xgz97re/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xgz97re/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3xgz97re/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b3342658960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b33425dbb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b334268840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b3342580e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b3342580e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b3342580d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b33426d7cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b33426d7cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b3342657977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b3342657977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b33425dbb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b334258036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b334258036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (17.31s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wmh382bu/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wmh382bu/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wmh382bu/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wmh382bu/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wmh382bu/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wmh382bu/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wmh382bu/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wmh382bu/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wmh382bu/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_wmh382bu/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b5b57998960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b5b5791bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b5b579c840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b5b578c0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b5b578c0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b5b578c0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b5b57a17cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b5b57a17cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b5b57997977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b5b57997977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b5b5791bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b5b578c036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b5b578c036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453653 byte(s) leaked in 8614 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (20.01s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1nhtrbwq/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1nhtrbwq/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1nhtrbwq/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1nhtrbwq/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1nhtrbwq/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1nhtrbwq/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1nhtrbwq/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1nhtrbwq/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1nhtrbwq/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_1nhtrbwq/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b91a4528960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b91a44abb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b91a455840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b91a4450e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b91a4450e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b91a4450d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b91a45a7cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b91a45a7cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b91a4527977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b91a4527977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b91a44abb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b91a445036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b91a445036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (19.01s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3_ocvksu/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3_ocvksu/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3_ocvksu/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3_ocvksu/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3_ocvksu/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3_ocvksu/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3_ocvksu/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3_ocvksu/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3_ocvksu/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_3_ocvksu/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b75e3fe8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b75e3f6bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b75e401840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b75e3f10e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b75e3f10e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b75e3f10d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b75e4067cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b75e4067cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b75e3fe7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b75e3fe7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b75e3f6bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b75e3f1036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b75e3f1036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (19.42s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9ske33_8/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9ske33_8/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9ske33_8/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9ske33_8/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9ske33_8/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9ske33_8/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9ske33_8/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9ske33_8/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9ske33_8/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9ske33_8/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b1ac9f58960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b1ac9edbb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b1ac9f8840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b1ac9e80e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b1ac9e80e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b1ac9e80d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b1ac9fd7cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b1ac9fd7cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b1ac9f57977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b1ac9f57977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b1ac9edbb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b1ac9e8036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b1ac9e8036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (20.06s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qpxk_r1w/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qpxk_r1w/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qpxk_r1w/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qpxk_r1w/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qpxk_r1w/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qpxk_r1w/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qpxk_r1w/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qpxk_r1w/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qpxk_r1w/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qpxk_r1w/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7be5d8e48960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7be5d8dcbb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7be5d8e7840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7be5d8d70e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7be5d8d70e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7be5d8d70d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7be5d8ec7cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7be5d8ec7cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7be5d8e47977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7be5d8e47977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7be5d8dcbb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7be5d8d7036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7be5d8d7036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453558 byte(s) leaked in 8612 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 1/4] chunk ran 8 tests (total:165.12s - recipes:1.23s test:163.10s recipes:0.64s) [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (25.91s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sb8ca05x/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sb8ca05x/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sb8ca05x/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sb8ca05x/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sb8ca05x/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sb8ca05x/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sb8ca05x/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sb8ca05x/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sb8ca05x/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_sb8ca05x/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b0bf1348960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b0bf12cbb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b0bf137840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b0bf1270e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b0bf1270e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b0bf1270d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b0bf13c7cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b0bf13c7cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b0bf1347977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b0bf1347977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b0bf12cbb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b0bf127036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b0bf127036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
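The AddressSanitizer summaries across these chunks are nearly identical (453840 bytes in 8618 allocations, with small variants of 453653/8614 and 453558/8612), and every backtrace ends in the Python import machinery, which suggests one shared import-time leak rather than a per-query problem. A small stdlib-only helper (the directory is a placeholder for the testing_out_stuff path printed in the log) can group the SUMMARY lines from the per-test logs to confirm that:

from collections import Counter
from pathlib import Path

# Placeholder for .../ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff
LOGS_DIR = Path("/path/to/testing_out_stuff")

summaries = Counter()
for log_file in LOGS_DIR.rglob("*.log"):
    for line in log_file.read_text(errors="replace").splitlines():
        if "SUMMARY: AddressSanitizer" in line:
            # Keep only the part after "SUMMARY: " so identical reports collapse.
            summaries[line.split("SUMMARY: ", 1)[1].strip()] += 1

for summary, count in summaries.most_common():
    print(f"{count:3d}  {summary}")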
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindow-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (18.90s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t1kqo1zl/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t1kqo1zl/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t1kqo1zl/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t1kqo1zl/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t1kqo1zl/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t1kqo1zl/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t1kqo1zl/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t1kqo1zl/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t1kqo1zl/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t1kqo1zl/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b9118df8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b9118d7bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b9118e2840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b9118d20e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b9118d20e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b9118d20d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b9118e77cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b9118e77cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b9118df7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b9118df7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b9118d7bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b9118d2036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b9118d2036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (17.02s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf56jsea/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf56jsea/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf56jsea/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf56jsea/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf56jsea/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf56jsea/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf56jsea/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf56jsea/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf56jsea/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf56jsea/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bc8a5308960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bc8a528bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bc8a533840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bc8a5230e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bc8a5230e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bc8a5230d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bc8a5387cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bc8a5387cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bc8a5307977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bc8a5307977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bc8a528bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bc8a523036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bc8a523036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
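Every failure in this chunk has the same shape: the test calls fq_run.yql_exec(action="explain"), fqrun exits with code 100, and yatest's verify_sanitize_errors() turns the captured AddressSanitizer leak report into an ExecutionError. For triage it can help to collapse that repetition. The sketch below is a hypothetical helper, not part of the YDB test harness: it scans a saved copy of this log, pairs each "[fail]" header with the sanitizer summary that follows it, and prints the leaked bytes per test case.

    # Hypothetical triage helper for a saved copy of this log (illustration only,
    # not part of the ya/yatest tooling). Pairs "[fail] ..." headers with the
    # AddressSanitizer summary that follows each of them.
    import re
    import sys
    from collections import OrderedDict

    FAIL_RE = re.compile(r"\[fail\] (test_sql_streaming\.py::test\[[^\]]+\])")
    LEAK_RE = re.compile(r"SUMMARY: AddressSanitizer: (\d+) byte\(s\) leaked in (\d+) allocation\(s\)")

    def summarize(log_text):
        """Map each failed test case to its (leaked_bytes, allocations) pair."""
        leaks = OrderedDict()
        current = None
        combined = re.compile(FAIL_RE.pattern + "|" + LEAK_RE.pattern)
        for match in combined.finditer(log_text):
            if match.group(1):            # a "[fail] ..." header
                current = match.group(1)
            elif current is not None:     # the ASan summary belonging to that failure
                leaks[current] = (int(match.group(2)), int(match.group(3)))
                current = None
        return leaks

    if __name__ == "__main__":
        text = open(sys.argv[1], encoding="utf-8", errors="replace").read()
        for test, (leaked, allocs) in summarize(text).items():
            print("%8d byte(s) in %d allocation(s)  %s" % (leaked, allocs, test))

Run against this log it would show that the leak totals are essentially identical across all the hopping_window and pq cases (around 453.8 KB in ~8618 allocations), which suggests one shared leak at interpreter/import level rather than per-test regressions.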
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (17.27s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t96gyncs/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t96gyncs/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t96gyncs/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t96gyncs/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t96gyncs/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t96gyncs/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t96gyncs/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t96gyncs/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t96gyncs/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_t96gyncs/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b807a3a8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b807a32bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b807a3d840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b807a2d0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b807a2d0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b807a2d0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b807a427cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b807a427cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b807a3a7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b807a3a7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b807a32bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b807a2d036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b807a2d036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (20.39s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vba8h8gb/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vba8h8gb/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vba8h8gb/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vba8h8gb/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vba8h8gb/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vba8h8gb/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vba8h8gb/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vba8h8gb/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vba8h8gb/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vba8h8gb/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b720ffc8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b720ff4bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b720fff840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b720fef0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b720fef0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b720fef0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b7210047cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b7210047cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b720ffc7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b720ffc7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b720ff4bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b720fef036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b720fef036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453606 byte(s) leaked in 8613 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (18.67s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9nmm_p0p/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9nmm_p0p/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9nmm_p0p/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9nmm_p0p/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9nmm_p0p/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9nmm_p0p/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9nmm_p0p/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9nmm_p0p/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9nmm_p0p/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_9nmm_p0p/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b74245b8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b742453bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b74245e840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b74244e0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b74244e0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b74244e0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b7424637cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b7424637cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b74245b7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b74245b7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b742453bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b74244e036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b74244e036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (19.51s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kyk2zbup/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kyk2zbup/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kyk2zbup/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kyk2zbup/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kyk2zbup/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kyk2zbup/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kyk2zbup/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kyk2zbup/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kyk2zbup/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kyk2zbup/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b12a7ff8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b12a7f7bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b12a802840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b12a7f20e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b12a7f20e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b12a7f20d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b12a8077cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b12a8077cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b12a7ff7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b12a7ff7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b12a7f7bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b12a7f2036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b12a7f2036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453784 byte(s) leaked in 8617 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopic-default.txt] [default-linux-x86_64-release-asan] (18.64s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vreank1l/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vreank1l/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vreank1l/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vreank1l/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vreank1l/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vreank1l/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vreank1l/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vreank1l/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vreank1l/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vreank1l/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b4169a88960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b4169a0bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b4169ab840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b41699b0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b41699b0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b41699b0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b4169b07cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b4169b07cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b4169a87977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b4169a87977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b4169a0bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b41699b036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b41699b036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 2/4] chunk ran 8 tests (total:154.23s - recipes:0.61s test:152.87s recipes:0.60s) [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] [default-linux-x86_64-release-asan] (20.95s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0uu6up44/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0uu6up44/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0uu6up44/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0uu6up44/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0uu6up44/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0uu6up44/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0uu6up44/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0uu6up44/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0uu6up44/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_0uu6up44/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b2c1bd18960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b2c1bc9bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b2c1bd4840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b2c1bc40e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b2c1bc40e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b2c1bc40d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b2c1bd97cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b2c1bd97cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b2c1bd17977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b2c1bd17977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b2c1bc9bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b2c1bc4036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b2c1bc4036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadata-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (18.80s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_70jf4vob/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_70jf4vob/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_70jf4vob/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_70jf4vob/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_70jf4vob/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_70jf4vob/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_70jf4vob/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_70jf4vob/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_70jf4vob/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_70jf4vob/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bd8e8478960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bd8e83fbb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bd8e84a840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bd8e83a0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bd8e83a0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bd8e83a0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bd8e84f7cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bd8e84f7cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bd8e8477977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bd8e8477977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bd8e83fbb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bd8e83a036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bd8e83a036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadataInsideFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (17.17s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yz0cn35d/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yz0cn35d/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yz0cn35d/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yz0cn35d/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yz0cn35d/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yz0cn35d/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yz0cn35d/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yz0cn35d/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yz0cn35d/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_yz0cn35d/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b62d7be8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b62d7b6bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b62d7c1840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b62d7b10e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b62d7b10e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b62d7b10d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b62d7c67cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b62d7c67cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b62d7be7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b62d7be7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b62d7b6bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b62d7b1036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b62d7b1036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadataNestedDeep-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (16.41s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mbmgf2_3/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mbmgf2_3/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mbmgf2_3/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mbmgf2_3/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mbmgf2_3/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mbmgf2_3/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mbmgf2_3/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mbmgf2_3/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mbmgf2_3/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_mbmgf2_3/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bd3adc28960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bd3adbabb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bd3adc5840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bd3adb50e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bd3adb50e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bd3adb50d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bd3adca7cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bd3adca7cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bd3adc27977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bd3adc27977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bd3adbabb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bd3adb5036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bd3adb5036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadataWithFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (20.54s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_btk_dacr/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_btk_dacr/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_btk_dacr/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_btk_dacr/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_btk_dacr/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_btk_dacr/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_btk_dacr/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_btk_dacr/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_btk_dacr/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_btk_dacr/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b33a4518960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b33a449bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b33a454840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b33a4440e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b33a4440e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b33a4440d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b33a4597cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b33a4597cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b33a4517977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b33a4517977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b33a449bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b33a444036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b33a444036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (17.23s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8e6rlqli/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8e6rlqli/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8e6rlqli/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8e6rlqli/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8e6rlqli/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8e6rlqli/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8e6rlqli/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8e6rlqli/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8e6rlqli/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_8e6rlqli/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b5f83db8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b5f83d3bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b5f83de840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b5f83ce0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b5f83ce0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b5f83ce0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b5f83e37cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b5f83e37cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b5f83db7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b5f83db7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b5f83d3bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b5f83ce036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b5f83ce036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (18.29s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q73_mahy/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q73_mahy/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q73_mahy/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q73_mahy/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q73_mahy/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q73_mahy/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q73_mahy/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q73_mahy/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q73_mahy/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_q73_mahy/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bd490c68960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bd490bebb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bd490c9840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bd490b90e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bd490b90e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bd490b90d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bd490ce7cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bd490ce7cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bd490c67977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bd490c67977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bd490bebb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bd490b9036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bd490b9036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadWriteSameTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (19.18s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uogkjgc/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uogkjgc/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uogkjgc/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uogkjgc/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uogkjgc/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uogkjgc/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uogkjgc/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uogkjgc/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uogkjgc/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uogkjgc/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b1e69688960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b1e6960bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b1e696b840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b1e695b0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b1e695b0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b1e695b0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b1e69707cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b1e69707cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b1e69687977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b1e69687977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b1e6960bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b1e695b036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b1e695b036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadWriteTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:144.31s - recipes:1.37s test:142.21s recipes:0.58s) [fail] test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (25.78s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u7sbsl1q/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u7sbsl1q/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u7sbsl1q/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u7sbsl1q/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u7sbsl1q/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u7sbsl1q/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u7sbsl1q/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u7sbsl1q/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u7sbsl1q/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_u7sbsl1q/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bd7aa2c8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bd7aa24bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bd7aa2f840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bd7aa1f0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bd7aa1f0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bd7aa1f0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bd7aa347cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bd7aa347cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bd7aa2c7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bd7aa2c7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bd7aa24bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bd7aa1f036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bd7aa1f036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadWriteTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (19.78s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2m9d2un2/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2m9d2un2/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2m9d2un2/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2m9d2un2/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2m9d2un2/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2m9d2un2/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2m9d2un2/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2m9d2un2/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2m9d2un2/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_2m9d2un2/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bcee1298960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bcee121bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bcee12c840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bcee11c0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bcee11c0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bcee11c0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bcee1317cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bcee1317cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bcee1297977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bcee1297977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bcee121bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bcee11c036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bcee11c036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-WriteTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (17.10s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i1yjz1dt/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i1yjz1dt/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i1yjz1dt/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i1yjz1dt/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i1yjz1dt/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i1yjz1dt/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i1yjz1dt/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i1yjz1dt/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i1yjz1dt/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_i1yjz1dt/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b071f378960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b071f2fbb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b071f3a840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b071f2a0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b071f2a0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b071f2a0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b071f3f7cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b071f3f7cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b071f377977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b071f377977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b071f2fbb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b071f2a036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b071f2a036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.solomon-ReadTopicGroupWriteToSolomon-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks-default.txt] [default-linux-x86_64-release-asan] (16.64s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_if5melm2/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_if5melm2/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_if5melm2/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_if5melm2/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_if5melm2/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_if5melm2/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_if5melm2/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_if5melm2/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_if5melm2/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_if5melm2/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7be6f5c88960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7be6f5c0bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7be6f5cb840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7be6f5bb0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7be6f5bb0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7be6f5bb0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7be6f5d07cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7be6f5d07cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7be6f5c87977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7be6f5c87977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7be6f5c0bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7be6f5bb036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7be6f5bb036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] [default-linux-x86_64-release-asan] (20.53s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_whbuktom/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_whbuktom/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_whbuktom/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_whbuktom/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_whbuktom/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_whbuktom/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_whbuktom/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_whbuktom/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_whbuktom/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_whbuktom/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bb7691d8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bb76915bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bb76920840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bb769100e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bb769100e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bb769100d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bb769257cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bb769257cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bb7691d7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bb7691d7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bb76915bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bb76910036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bb76910036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453697 byte(s) leaked in 8615 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks_adjust-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] [default-linux-x86_64-release-asan] (17.65s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1j4x__l/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1j4x__l/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1j4x__l/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1j4x__l/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1j4x__l/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1j4x__l/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1j4x__l/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1j4x__l/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1j4x__l/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l1j4x__l/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b8f553e8960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b8f5536bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b8f5541840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b8f55310e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b8f55310e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b8f55310d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b8f55467cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b8f55467cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b8f553e7977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b8f553e7977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b8f5536bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b8f5531036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b8f5531036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453741 byte(s) leaked in 8616 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks_as-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [default-linux-x86_64-release-asan] (18.34s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gxchmh5m/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gxchmh5m/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gxchmh5m/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gxchmh5m/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gxchmh5m/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gxchmh5m/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gxchmh5m/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gxchmh5m/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gxchmh5m/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gxchmh5m/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b3564198960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b356411bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b35641c840d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b35640c0e9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b35640c0e9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b35640c0d08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b3564217cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b3564217cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b3564197977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b3564197977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b356411bb35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b35640c036c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b35640c036c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
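Every [fail] entry above for ydb/tests/fq/streaming_optimize has the same shape: test_sql_streaming.py calls fq_run.yql_exec(action="explain"), which launches the fqrun binary via yatest.common.process.execute(..., check_exit_code=False); fqrun exits with code 100 after AddressSanitizer reports roughly 450 KB leaked in ~8.6 K allocations, and, judging by the traceback, the harness turns that into ExecutionError through verify_sanitize_errors() rather than through the exit-code check. Below is a minimal sketch of running the same kind of check outside the harness, using only the Python standard library; the FQRUN path and the shortened --cfg/--query/--emulate-pq file names are placeholders, not the real testing_out_stuff paths from the commands above.

    import os
    import subprocess

    # Assumption: FQRUN points at a locally built fqrun binary; the flags mirror
    # the failing command lines above, with per-test paths replaced by stand-ins.
    fqrun = os.environ.get("FQRUN", "./fqrun")
    cmd = [
        fqrun,
        "--exclude-linked-udfs",
        "--action=explain",
        "--cfg=fq_config.conf",
        "--query=query_0.sql",
        "--emulate-pq=test_topic_input@topic_0.txt",
    ]

    proc = subprocess.run(cmd, capture_output=True, text=True)
    print("exit code:", proc.returncode)  # the CI runs above exit with code 100

    # A scan equivalent in spirit to what the harness does before raising
    # ExecutionError, even though the test passed check_exit_code=False.
    for line in proc.stderr.splitlines():
        if line.startswith("SUMMARY: AddressSanitizer"):
            print(line)

If the sanitizer summary is the only failure signal, this standalone run makes it easy to capture the full leak report from stderr instead of the truncated "E ...mpl" excerpts in the report above.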
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks_drop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ FAIL: 33 - FAIL ydb/tests/fq/streaming_optimize ydb/tests/stress/kafka/tests [size:medium] ------ sole chunk ran 1 test (total:176.33s - test:176.24s) [fail] test_kafka_streams.py::TestYdbTopicWorkload::test [default-linux-x86_64-release-asan] (170.59s) ydb/tests/stress/kafka/tests/test_kafka_streams.py:24: in test yatest.common.execute([ library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:422: in _finalise raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kafka/kafka_streams_test --endpoint grpc://localhost:14020 --database /Root --bootstrap http://localhost:21334 --source-path test-topic --target-path target-topic --consumer workload-consumer-0 --num-workers 2 --duration 120' has failed with code 255. E Errors: E ...erBatch.completeFutureAndFireCallbacks(ProducerBatch.java:273) E at org.apache.kafka.clients.producer.internals.ProducerBatch.abort(ProducerBatch.java:161) E at org.apache.kafka.clients.producer.internals.RecordAccumulator.abortUndrainedBatches(RecordAccumulator.java:1116) E at org.apache.kafka.clients.producer.internals.Sender.maybeSendAndPollTransactionalRequest(Sender.java:439) E at org.apache.kafka.clients.producer.internals.Sender.runOnce(Sender.java:316) E at org.apache.kafka.clients.producer.internals.Sender.run(Sender.java:243) E at java.base/java.lang.Thread.run(Thread.java:840) E Caused by: org.apache.kafka.common.errors.UnknownServerException: Unexpected termination E Traceback (most recent call last): E File "contrib/tools/python3/Lib/runpy.py", line 198, in _run_module_as_main E return _run_code(code, main_globals, None, E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/tools/python3/Lib/runpy.py", line 88, in _run_code E exec(code, run_globals) E File "ydb/tests/stress/kafka/__main__.py", line 27, in E workload.loop() E File "ydb/tests/stress/kafka/workload/__init__.py", line 125, in loop E assert totalMessCountTest == totalMessCountTarget, f"Source and target {self.target_topic_path}-{i} topics total messages count are not equal:" + \ E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E AssertionError: Source and target target-topic-0 topics total messages count are not equal:{totalMessCountTest} and {totalMessCountTarget} respectively. 
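Aside from the count mismatch itself, the AssertionError text in this kafka workload failure is only half-interpolated: the literal {totalMessCountTest} and {totalMessCountTarget} placeholders suggest that, in ydb/tests/stress/kafka/workload/__init__.py around line 125, the string literal concatenated onto the f-string is missing its own f prefix, so the actual message counts never reach the log. A minimal sketch of the apparent bug and the likely fix, using local stand-ins for the workload's attributes (the example counts are assumptions):

    # Stand-ins for the workload's state; names are copied from the traceback.
    totalMessCountTest, totalMessCountTarget = 100, 97
    target_topic_path, i = "target-topic", 0

    # What the workload apparently builds today: the first literal is an
    # f-string, the continuation is plain, so its {placeholders} survive
    # verbatim into the AssertionError message.
    broken = (f"Source and target {target_topic_path}-{i} topics total messages count are not equal:"
              + "{totalMessCountTest} and {totalMessCountTarget} respectively.")

    # Likely intended form: make the continuation an f-string as well, so the
    # real counts appear in the failure output.
    fixed = (f"Source and target {target_topic_path}-{i} topics total messages count are not equal:"
             + f"{totalMessCountTest} and {totalMessCountTarget} respectively.")

    print(broken)  # ...not equal:{totalMessCountTest} and {totalMessCountTarget} respectively.
    print(fixed)   # ...not equal:100 and 97 respectively.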
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kafka/tests/test-results/py3test/testing_out_stuff/test_kafka_streams.py.TestYdbTopicWorkload.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kafka/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/stress/kafka/tests ydb/core/cms/ut_sentinel_unstable [size:medium] ------ sole chunk ran 1 test (total:624.56s - test:600.02s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TSentinelUnstableTests::BSControllerCantChangeStatus (timeout) duration: 609.11s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/stderr [timeout] TSentinelUnstableTests::BSControllerCantChangeStatus [default-linux-x86_64-release-asan] (609.11s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/TSentinelUnstableTests.BSControllerCantChangeStatus.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/TSentinelUnstableTests.BSControllerCantChangeStatus.out ------ TIMEOUT: 1 - TIMEOUT ydb/core/cms/ut_sentinel_unstable ydb/core/health_check/ut [size:medium] nchunks:10 ------ [1/10] chunk ran 7 tests (total:388.65s - test:388.59s) [crashed] THealthCheckTest::BridgeTimeDifference [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) ==1534811==ERROR: AddressSanitizer: heap-use-after-free on address 0x7b878ee6cca8 at pc 0x000022dc0d2e bp 0x7b575d263390 sp 0x7b575d263388 READ of size 1 at 0x7b878ee6cca8 thread T510 (ydb-core-health) VERIFY failed (2025-12-23T19:00:16.561682Z): must not release when count is not zero: 1 library/cpp/messagebus/use_count_checker.cpp:11 ~TUseCountChecker(): requirement count == 0 failed Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/health_check/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/health_check/ut/test-results/unittest/testing_out_stuff/THealthCheckTest.BridgeTimeDifference.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/health_check/ut/test-results/unittest/testing_out_stuff/THealthCheckTest.BridgeTimeDifference.out ------ FAIL: 68 - GOOD, 1 - CRASHED ydb/core/health_check/ut ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut [size:medium] nchunks:10 ------ [1/10] chunk ran 14 tests (total:248.31s - test:247.92s) [fail] LocalPartition::WithoutPartitionWithSplit [default-linux-x86_64-release-asan] (42.00s) (NThreading::TFutureException) library/cpp/threading/future/core/future-inl.h:58: wait timeout Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/testing_out_stuff Stderr: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/testing_out_stuff/LocalPartition.WithoutPartitionWithSplit.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/testing_out_stuff/LocalPartition.WithoutPartitionWithSplit.out ------ FAIL: 135 - GOOD, 1 - FAIL ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut ydb/tests/functional/sqs/cloud [size:medium] nchunks:40 ------ [36/40] chunk ran 2 tests (total:108.17s - setup:0.02s test:108.08s) [fail] test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [default-linux-x86_64-release-asan] (80.02s) ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:829: in test_yc_events_processor assert len(lines) >= 2, "Got only %s event lines after all attempts" % len(lines) E AssertionError: Got only 0 event lines after all attempts E assert 0 >= 2 E + where 0 = len([]) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff ------ FAIL: 79 - GOOD, 1 - FAIL ydb/tests/functional/sqs/cloud ydb/core/kqp/ut/pg [size:medium] nchunks:10 ------ [8/10] chunk ran 11 tests (total:311.85s - setup:0.01s test:311.77s) [fail] KqpPg::TempTablesDrop [default-linux-x86_64-release-asan] (8.84s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesDrop.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesDrop.out [fail] KqpPg::TempTablesSessionsIsolation [default-linux-x86_64-release-asan] (8.95s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesSessionsIsolation.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesSessionsIsolation.out [fail] KqpPg::TempTablesWithCache [default-linux-x86_64-release-asan] (8.59s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesWithCache.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/KqpPg.TempTablesWithCache.out ------ [9/10] chunk ran 11 tests (total:266.58s - setup:0.01s test:266.51s) [fail] PgCatalog::CheckSetConfig [default-linux-x86_64-release-asan] (8.59s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/PgCatalog.CheckSetConfig.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/pg/test-results/unittest/testing_out_stuff/PgCatalog.CheckSetConfig.out ------ FAIL: 110 - GOOD, 4 - FAIL ydb/core/kqp/ut/pg ydb/tests/stress/olap_workload/tests [size:medium] ------ sole chunk ran 1 test (total:185.54s - test:184.46s) [fail] test_workload.py::TestYdbWorkload::test [default-linux-x86_64-release-asan] (173.00s) ydb/tests/stress/olap_workload/tests/test_workload.py:27: in test yatest.common.execute([ library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:422: in _finalise raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/olap_workload --endpoint grpc://localhost:26937 --database /Root --duration 120' has failed with code 1. E Errors: E ...c E for next_opt in opt_generator: E ^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/retries.py", line 98, in retry_operation_impl E result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) E ^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/query/pool.py", line 250, in wrapped_callee E return [result_set for result_set in it] E ^^ E File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ E return self._next() E ^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next E res = self.wrapper(next(self.it)) E ^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/query/session.py", line 390, in E lambda resp: base.wrap_execute_query_response( E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/query/base.py", line 211, in decorator E return func(rpc_state, response_pb, session_state, *args, **kwargs) E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/query/base.py", line 229, in wrap_execute_query_response E issues._process_response(response_pb) E File "contrib/python/ydb/py3/ydb/issues.py", line 237, in _process_response E raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Timeout: message: "Query compilation timed out." 
severity: 1 (server_code: 400090) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbWorkload.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/stress/olap_workload/tests ydb/core/tx/schemeshard/ut_extsubdomain [size:medium] nchunks:10 ------ [8/10] chunk ran 7 tests (total:23.62s - test:23.57s) [fail] TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [default-linux-x86_64-release-asan] (1.51s) equal assertion failed at ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp:712, auto NSchemeShardUT_Private::NLs::PathsInsideDomain(ui64)::(anonymous class)::operator()(const NKikimrScheme::TEvDescribeSchemeResult &) const: curCount == count paths inside domain count mismatch, domain with id 1 has count 36 but expected 0 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 void NTestSuiteTSchemeShardExtSubDomainTest::DropWithDeadTenantHive(NUnitTest::TTestContext&) at /-S/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp:1595:9 operator() at /-S/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/testing_out_stuff/TSchemeShardExtSubDomainTest.DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/testing_out_stuff/TSchemeShardExtSubDomainTest.DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true.out [fail] TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false [default-linux-x86_64-release-asan] (1.51s) equal assertion failed at ydb/core/tx/schemeshard/ut_helpers/ls_checks.cpp:712, auto NSchemeShardUT_Private::NLs::PathsInsideDomain(ui64)::(anonymous class)::operator()(const NKikimrScheme::TEvDescribeSchemeResult &) const: curCount == count paths inside domain count mismatch, domain with id 1 has count 36 but expected 0 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 void NTestSuiteTSchemeShardExtSubDomainTest::DropWithDeadTenantHive(NUnitTest::TTestContext&) at /-S/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp:1595:9 operator() at /-S/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/testing_out_stuff/TSchemeShardExtSubDomainTest.DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/testing_out_stuff/TSchemeShardExtSubDomainTest.DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false.out ------ FAIL: 74 - GOOD, 2 - FAIL ydb/core/tx/schemeshard/ut_extsubdomain ydb/core/tx/scheme_board/ut_cache [size:medium] nchunks:10 ------ [7/10] chunk ran 2 tests (total:16.59s - test:16.56s) [fail] TCacheTest::SystemViews [default-linux-x86_64-release-asan] (0.96s) assertion failed at ydb/core/tx/scheme_board/cache_ut.cpp:494, void NKikimr::NSchemeBoard::TCacheTest::SystemViews(): (tableId.SysViewInfo == ".sys") failed: ((empty) != .sys) , with diff: ((empt|.s)y()|s) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NKikimr::NSchemeBoard::TCacheTest::SystemViews() at /-S/ydb/core/tx/scheme_board/cache_ut.cpp:494:5 NKikimr::NSchemeBoard::TCacheTest::Execute()::TSystemViewsCaller::X(NKikimr::NSchemeBoard::TCacheTest*, NUnitTest::TTestContext&) at /-S/ydb/core/tx/scheme_board/cache_ut.cpp:243:5 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/testing_out_stuff/TCacheTest.SystemViews.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/testing_out_stuff/TCacheTest.SystemViews.out ------ FAIL: 21 - GOOD, 1 - FAIL ydb/core/tx/scheme_board/ut_cache ydb/tests/olap/scenario [size:medium] nchunks:10 ------ [0/10] chunk ran 2 tests (total:552.05s - test:551.83s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 19.6G (20574420K) used. 
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
1638374 54.8M 53.6M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1638566 40.6M 23.2M 10.6M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1638604 1.1G 1.1G 986M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte
1643263 2.1G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1643558 2.1G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1644039 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1644642 2.0G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1645230 2.2G 2.2G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1646410 2.0G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1647663 2.0G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1648816 2.1G 2.1G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1650265 2.1G 2.1G 1.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr
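The REQUIREMENTS(ram:X) note above refers to the test module's ya.make. A minimal sketch of what such a bump could look like for this suite, assuming the module is declared as a PY3TEST() block in ydb/tests/olap/scenario/ya.make and that 20 GiB is a sensible ceiling for this chunk (the surrounding macros and the value ram:20 are illustrative assumptions, not taken from this report):

    PY3TEST()
        # Hypothetical fragment: raise the per-chunk RAM requirement so the 8.0G
        # limit reported above no longer triggers the memory-limit warning.
        SIZE(MEDIUM)
        REQUIREMENTS(ram:20)
    END()

Whether raising the requirement or shrinking the suite's footprint is the right fix depends on the suite; the fragment only illustrates the knob the warning points at, and the same hint is printed by the later chunks that overrun their limits.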
------ [1/10] chunk ran 2 tests (total:210.72s - test:209.49s)
[fail] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-release-asan] (162.94s)
ydb/tests/olap/scenario/conftest.py:126: in test raise errors[0]
ydb/tests/olap/scenario/conftest.py:85: in worker self._test_suffix(local_ctx, suffix, codes, idx)
ydb/tests/olap/scenario/conftest.py:136: in _test_suffix ctx.executable(self, ctx)
ydb/tests/olap/scenario/test_alter_tiering.py:361: in scenario_many_tables threads.start_and_wait_all()
ydb/tests/olap/common/thread_helper.py:49: in start_and_wait_all self.join_all()
ydb/tests/olap/common/thread_helper.py:45: in join_all thread.join(timeout=timeout)
ydb/tests/olap/common/thread_helper.py:18: in join raise self.exc
ydb/tests/olap/common/thread_helper.py:11: in run self.ret = self._target(*self._args, **self._kwargs)
ydb/tests/olap/scenario/test_alter_tiering.py:218: in _loop_bulk_upsert sth.bulk_upsert(
ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:647: in bulk_upsert self._bulk_upsert_impl(tablename, data_generator, expected_status)
ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:390: in _bulk_upsert_impl self._run_with_expected_status(
ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:366: in _run_with_expected_status pytest.fail(f'Unexpected status: must be in {repr(expected_status)}, but get {repr(error or status)}')
E Failed: Unexpected status: must be in {}, but get BadRequest('message: "Bulk upsert to table \\\'/Root/olap_yatests/TestAlterTiering/many_tables/store/table0\\\' " severity: 1 (server_code: 400010)')
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
------ [2/10] chunk ran 2 tests (total:422.46s - test:422.26s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 21.7G (22793736K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
1639096 54.8M 52.1M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1639326 40.4M 22.6M 10.1M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
1639373 991M 986M 893M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte
1643164 2.6G 2.5G 2.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1643457 2.3G 2.2G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1643949 2.4G 2.3G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1644545 2.3G 2.3G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1645102 2.3G 2.3G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1646269 2.3G 2.3G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1647183 2.3G 2.3G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1648043 2.3G 2.3G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
1649064 2.3G 2.3G 1.7G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch
Log:
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [3/10] chunk ran 2 tests (total:241.04s - test:240.31s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 16.2G (16963172K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1639663 54.8M 53.7M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1640025 40.7M 22.4M 10.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1640034 1.2G 1.1G 1.1G └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte 1643163 1.7G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1643461 1.7G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1643904 1.7G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1644531 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1645073 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1646188 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1647125 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1648014 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1649104 1.7G 1.7G 1.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [4/10] chunk ran 2 tests (total:150.26s - test:150.09s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 17.2G (18073660K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1642961 54.8M 54.3M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1643133 40.2M 22.9M 10.0M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1643169 712M 682M 618M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte 1649538 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1650783 2.0G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1651882 1.9G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1652713 1.9G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1653461 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1653977 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1654551 2.0G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1655145 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1655699 1.9G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [5/10] chunk ran 8 tests (total:131.24s - test:131.05s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 17.2G (18038568K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1646690 54.9M 54.8M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1647163 40.5M 23.2M 10.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1647259 749M 714M 651M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte 1653921 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1654420 1.7G 1.8G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1654908 1.8G 1.8G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1655258 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1655891 1.9G 1.9G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1656377 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1656894 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1657562 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch 1658225 1.8G 1.8G 1.3G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ch Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ FAIL: 17 - GOOD, 1 - FAIL ydb/tests/olap/scenario ydb/tests/functional/audit [size:medium] nchunks:500 ------ [test_canonical_records.py 0/100] chunk ran 1 test (total:343.87s - test:343.60s canon:0.21s) [fail] test_canonical_records.py::test_create_drop_and_alter_database [default-linux-x86_64-release-asan] (338.80s) Test results differ from canonical: test_result[1]: files content differs: '--- /home/runner/actions_runner/_work/ydb/ydb/ydb/tests/functional/audit/canondata/test_canonical_records.test_create_drop_and_alter_database/audit_log.1.json 2025-12-23 16:18:14.930476625 +0000 +++ /home/runner/.ya/build/build_root/8ptz/0055b3/canon_tmp0n2nwg0u/audit_log.1.json 2025-12-23 18:56:25.489664137 +0000 @@ -2,2 +2,32 @@ {"component": "schemeshard", "database": "/Root", "detailed_status": "StatusAccepted", "operation": "ALTER DATABASE", "paths": 
"[/Root/Database]", "remote_address": "", "sanitized_token": "**** (B6C6F477)", "status": "SUCCESS", "subject": "root@builtin", "tx_id": ""} +{"component": "schemeshard", "database": "/Root/Database", "detailed_status": "StatusAccepted", "operation": "CREATE DIRECTORY", "paths": "[/Root/Database/.sys]", "remote_address": "{none}", "sanitized_token": "{none}", "status": "SUCCESS", "subject": "metadata@system", "tx_id": ""} +{"component": "schemeshard", "database": "/Root/Database", "detailed_stat...' see full diff /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_canonical_records.test_create_drop_and_alter_database/audit_log.1.json.diff Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_canonical_records.py.test_create_drop_and_alter_database.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff ------ [test_canonical_records.py 5/100] chunk ran 1 test (total:87.17s - test:87.13s) [fail] test_canonical_records.py::test_dstool_evict_vdisk_grpc [default-linux-x86_64-release-asan] (82.15s) ydb/tests/functional/audit/test_canonical_records.py:348: in test_dstool_evict_vdisk_grpc execute_dstool_grpc(ydb_cluster, TOKEN, ['vdisk', 'evict', '--vdisk-ids', vdisk_id, '--ignore-degraded-group-check', '--ignore-failure-model-group-check']) ydb/tests/functional/audit/helpers.py:60: in execute_dstool_grpc assert False, f'Command\n{full_cmd}\n finished with exit code {proc_result.exit_code}, stderr:\n\n{proc_result.std_err.decode("utf-8")}\n\nstdout:\n{proc_result.std_out.decode("utf-8")}' E AssertionError: Command E ['/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/apps/dstool/ydb-dstool', '--endpoint', 'grpc://localhost:25692', 'vdisk', 'evict', '--vdisk-ids', '[82000000:1:0:0:0]', '--ignore-degraded-group-check', '--ignore-failure-model-group-check'] E finished with exit code 1, stderr: E E error, add --verbose for more info E E E stdout: Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_canonical_records.py.test_dstool_evict_vdisk_grpc.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff ------ FAIL: 32 - GOOD, 2 - FAIL ydb/tests/functional/audit ydb/services/config/ut [size:medium] ------ sole chunk ran 4 tests (total:27.89s - test:27.82s) [crashed] ConfigGRPCService::CheckV1IsBlocked [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/config/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/config/ut/test-results/unittest/testing_out_stuff/ConfigGRPCService.CheckV1IsBlocked.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/config/ut/test-results/unittest/testing_out_stuff/ConfigGRPCService.CheckV1IsBlocked.out ------ FAIL: 3 - GOOD, 1 - CRASHED ydb/services/config/ut ------ sole chunk ran 2 tests (total:141.71s - setup:0.07s recipes:27.18s test:108.07s recipes:6.02s) Info: Test run has exceeded 16.0G (16777216K) memory limit with 16.8G (17624932K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 2049690 54.8M 52.2M 7.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 2049710 39.9M 22.3M 9.8M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 2066037 410M 0b 0b │ └─ ydb_recipe --build-root /home/runner/actions_runner/_work/ydb/ydb/tmp/out --source-root /home/runner/actions_runner/_work/ydb/ydb --gdb-path /home/runner/.ya/tools/v4/10 2050330 2.2G 2.2G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 2050665 2.1G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 2050768 2.1G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 2051203 2.2G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 2051582 2.1G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 2051740 2.0G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 2052195 2.1G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 2052564 2.0G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out_stuff/stderr ------ sole chunk ran 2 tests (total:544.21s - recipes:21.36s test:520.14s recipes:2.35s) Info: Test run has exceeded 16.0G (16777216K) memory limit with 16.7G (17530832K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 2012406 54.9M 54.5M 7.5M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 2012414 42.0M 24.5M 12.0M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 2016322 58.6M 49.1M 32.5M │ └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/test_tool.args 2016329 2.2G 2.2G 2.1G │ └─ ydb-tests-functional-kqp-kqp_indexes --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/ 2012516 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 2012590 1.9G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 2012910 1.9G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 2013313 1.9G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 2013637 1.9G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 2014044 1.9G 1.7G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 2014466 1.9G 1.6G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 2014919 1.9G 1.6G 1.3G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/stderr ------ [test_discovery.py] chunk ran 3 tests (total:188.88s - test:188.76s) Info: Test run has exceeded 10.0G (10485760K) memory limit with 14.9G (15615940K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1872293 54.8M 54.9M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1872336 40.3M 23.2M 10.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1872347 839M 842M 764M └─ ydb-tests-functional-api --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct 1905665 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1905963 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1906269 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1906581 1.6G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1913333 1.6G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuf 1907374 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1913338 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuf 1907545 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1913328 1.8G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuf 1911705 1.7G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1913354 1.7G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuf 1911978 1.6G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1912425 1.5G 1.6G 1.2G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/stderr ydb/core/statistics/aggregator/ut [size:medium] nchunks:60 ------ [4/60] chunk ran 1 test (total:606.99s - test:600.07s) Chunk 
exceeded 600s timeout and was killed List of the tests involved in the launch: AnalyzeColumnshard::AnalyzeMultiOperationId (timeout) duration: 605.32s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/stderr [timeout] AnalyzeColumnshard::AnalyzeMultiOperationId [default-linux-x86_64-release-asan] (605.32s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeMultiOperationId.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeMultiOperationId.out ------ TIMEOUT: 31 - GOOD, 1 - TIMEOUT ydb/core/statistics/aggregator/ut ydb/core/tx/tx_proxy/ut_ext_tenant [size:medium] nchunks:10 ------ [0/10] chunk ran 2 tests (total:22.15s - test:22.07s) [fail] TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [default-linux-x86_64-release-asan] (5.45s) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:518, void CreateTableInsideAndAlterDomainAndTable(TTestEnvWithPoolsSupport &): (ver.Version == 4) failed: (6 != 4) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 CreateTableInsideAndAlterDomainAndTable(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&) at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:13 ~TBaseTestEnv at /-S/ydb/core/tx/tx_proxy/proxy_ut_helpers.h:48:29 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true.out [fail] TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [default-linux-x86_64-release-asan] (6.26s) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:518, void CreateTableInsideAndAlterDomainAndTable(TTestEnvWithPoolsSupport &): (ver.Version == 4) failed: (7 != 4) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 CreateTableInsideAndAlterDomainAndTable(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&) at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:13 ~TBaseTestEnv at /-S/ydb/core/tx/tx_proxy/proxy_ut_helpers.h:48:29 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false.out ------ [1/10] chunk ran 2 tests (total:22.71s - setup:0.01s test:22.63s) [fail] TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [default-linux-x86_64-release-asan] (5.50s) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:129, void CreateTableInsideAndLs(TTestEnvWithPoolsSupport &): (ver.Version == (env.GetSettings().FeatureFlags.GetEnableAlterDatabaseCreateHiveFirst() ? 
5 : 4)) failed: (8 != 5) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 CreateTableInsideAndLs(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&) at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:129:9 ~TBaseTestEnv at /-S/ydb/core/tx/tx_proxy/proxy_ut_helpers.h:48:29 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true.out [fail] TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [default-linux-x86_64-release-asan] (6.48s) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:129, void CreateTableInsideAndLs(TTestEnvWithPoolsSupport &): (ver.Version == (env.GetSettings().FeatureFlags.GetEnableAlterDatabaseCreateHiveFirst() ? 5 : 4)) failed: (7 != 4) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 CreateTableInsideAndLs(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&) at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:129:9 ~TBaseTestEnv at /-S/ydb/core/tx/tx_proxy/proxy_ut_helpers.h:48:29 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false.out ------ [2/10] chunk ran 2 tests (total:26.55s - setup:0.01s test:26.44s) [fail] TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [default-linux-x86_64-release-asan] (6.67s) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:254, void CreateTableInsideThenStopTenantAndForceDeleteSubDomain(TTestEnvWithPoolsSupport &): (ver.Version == (env.GetSettings().FeatureFlags.GetEnableAlterDatabaseCreateHiveFirst() ? 5 : 4)) failed: (8 != 5) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 CreateTableInsideThenStopTenantAndForceDeleteSubDomain(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&) at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:254:13 ~TBaseTestEnv at /-S/ydb/core/tx/tx_proxy/proxy_ut_helpers.h:48:29 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true.out [fail] TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false [default-linux-x86_64-release-asan] (9.08s) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:254, void CreateTableInsideThenStopTenantAndForceDeleteSubDomain(TTestEnvWithPoolsSupport &): (ver.Version == (env.GetSettings().FeatureFlags.GetEnableAlterDatabaseCreateHiveFirst() ? 
5 : 4)) failed: (7 != 4) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 CreateTableInsideThenStopTenantAndForceDeleteSubDomain(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&) at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:254:13 ~TBaseTestEnv at /-S/ydb/core/tx/tx_proxy/proxy_ut_helpers.h:48:29 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false.out ------ [3/10] chunk ran 2 tests (total:24.23s - test:24.16s) [fail] TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [default-linux-x86_64-release-asan] (5.45s) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:209, void DeclareAndAlterPools(TTestEnvWithPoolsSupport &): (ver.PathId == 2) failed: (32 != 2) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 DeclareAndAlterPools(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&) at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:209:9 ~TBaseTestEnv at /-S/ydb/core/tx/tx_proxy/proxy_ut_helpers.h:48:29 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true.out [fail] TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false [default-linux-x86_64-release-asan] (6.66s) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:209, void DeclareAndAlterPools(TTestEnvWithPoolsSupport &): (ver.PathId == 2) failed: (32 != 2) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 DeclareAndAlterPools(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&) at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:209:9 ~TBaseTestEnv at /-S/ydb/core/tx/tx_proxy/proxy_ut_helpers.h:48:29 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false.out ------ [4/10] chunk ran 2 tests (total:22.83s - test:22.76s) [fail] TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [default-linux-x86_64-release-asan] (5.32s) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:96, void DeclareAndDefineWithNodes(TTestEnvWithPoolsSupport &): (ver.Version == (env.GetSettings().FeatureFlags.GetEnableAlterDatabaseCreateHiveFirst() ? 
5 : 4)) failed: (8 != 5) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 DeclareAndDefineWithNodes(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&) at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:96:9 ~TBaseTestEnv at /-S/ydb/core/tx/tx_proxy/proxy_ut_helpers.h:48:29 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true.out [fail] TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [default-linux-x86_64-release-asan] (6.18s) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:96, void DeclareAndDefineWithNodes(TTestEnvWithPoolsSupport &): (ver.Version == (env.GetSettings().FeatureFlags.GetEnableAlterDatabaseCreateHiveFirst() ? 5 : 4)) failed: (7 != 4) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 DeclareAndDefineWithNodes(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&) at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:96:9 ~TBaseTestEnv at /-S/ydb/core/tx/tx_proxy/proxy_ut_helpers.h:48:29 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false.out ------ [5/10] chunk ran 2 tests (total:22.42s - test:22.37s) [fail] TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [default-linux-x86_64-release-asan] (4.62s) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:71, void DeclareAndDefineWithoutNodes(TTestEnvWithPoolsSupport &): (ver.Version == 5) failed: (8 != 5) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 DeclareAndDefineWithoutNodes(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&) at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:71:9 ~TBaseTestEnv at /-S/ydb/core/tx/tx_proxy/proxy_ut_helpers.h:48:29 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false.out [fail] TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [default-linux-x86_64-release-asan] (4.54s) assertion failed at ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:71, void DeclareAndDefineWithoutNodes(TTestEnvWithPoolsSupport &): (ver.Version == 5) failed: (8 != 5) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 DeclareAndDefineWithoutNodes(NKikimr::NTxProxyUT::TTestEnvWithPoolsSupport&) at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:71:9 ~TBaseTestEnv at /-S/ydb/core/tx/tx_proxy/proxy_ut_helpers.h:48:29 operator() at /-S/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? 
at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/testing_out_stuff/TExtSubDomainTest.DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true.out ------ FAIL: 5 - GOOD, 12 - FAIL ydb/core/tx/tx_proxy/ut_ext_tenant ydb/library/yql/dq/runtime/ut [size:medium] nchunks:10 ------ [0/10] chunk ran 7 tests (total:605.98s - setup:0.01s test:600.07s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: Channels20::IcChannelEarlyFinish (timeout) duration: 581.26s Channels20::IcChannelLateBinding (good) duration: 12.89s Channels20::IcChannelAsyncRead (good) duration: 9.25s 4 tests were not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/runtime/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/stderr [timeout] Channels20::IcChannelEarlyFinish [default-linux-x86_64-release-asan] (581.26s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/Channels20.IcChannelEarlyFinish.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/Channels20.IcChannelEarlyFinish.out ------ [1/10] chunk ran 7 tests (total:605.10s - setup:0.01s test:600.11s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: Channels20::IcChannelTrivial (timeout) duration: 587.12s Channels20::LocalChannelBackPressure (good) duration: 8.29s Channels20::LocalChannelAsyncRead (good) duration: 7.43s 4 tests were not launched inside chunk. 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/runtime/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/stderr [timeout] Channels20::IcChannelTrivial [default-linux-x86_64-release-asan] (587.12s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/Channels20.IcChannelTrivial.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/runtime/ut/test-results/unittest/testing_out_stuff/Channels20.IcChannelTrivial.out ------ TIMEOUT: 55 - GOOD, 8 - NOT_LAUNCHED, 2 - TIMEOUT ydb/library/yql/dq/runtime/ut ydb/core/transfer/ut/column_table [size:medium] ------ sole chunk ran 29 tests (total:433.23s - recipes:22.56s test:404.12s recipes:5.10s) [fail] Transfer_ColumnTable::MessageField_WriteTimestamp [default-linux-x86_64-release-asan] (9.50s) assertion failed at ydb/core/transfer/ut/common/utils.h:80, virtual void NReplicationTest::Timestamp64Checker::Assert(const std::string &, const ::Ydb::Value &): (v == Expected) failed: (2025-12-23T19:26:51.462000Z != 2025-12-23T19:26:44.267902Z) Row 0 column 'WriteTimestamp': , with diff: 2025-12-23T19:26:(51|44).(4|2)6(|790)2(000|)Z TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NReplicationTest::Timestamp64Checker::Assert(std::__y1::basic_string, std::__y1::allocator> const&, Ydb::Value const&) at /-S/ydb/core/transfer/ut/common/utils.h:80:13 UnRef at /-S/util/generic/ptr.h:637:13 NReplicationTest::MainTestCase::Run(NReplicationTest::TConfig const&, NReplicationTest::MainTestCase::CreateTransferSettings) at /-S/ydb/core/transfer/ut/common/utils.h:0:9 MessageField_WriteTimestamp(std::__y1::basic_string, std::__y1::allocator> const&, bool) at /-S/ydb/core/transfer/ut/common/transfer_common.cpp:671:5 operator() at /-S/ydb/core/transfer/ut/column_table/transfer_columntable_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/transfer/ut/column_table/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/transfer/ut/column_table/test-results/unittest/testing_out_stuff/Transfer_ColumnTable.MessageField_WriteTimestamp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/transfer/ut/column_table/test-results/unittest/testing_out_stuff/Transfer_ColumnTable.MessageField_WriteTimestamp.out ------ FAIL: 28 - GOOD, 1 - FAIL ydb/core/transfer/ut/column_table ydb/core/load_test/ut [size:medium] nchunks:10 ------ [2/10] chunk ran 1 test (total:9.22s - test:9.18s) [fail] GroupWriteTest::SimpleRdma [default-linux-x86_64-release-asan] (4.11s) assertion failed at ydb/core/load_test/ut/group_test_ut.cpp:40, auto (anonymous namespace)::TTetsEnv::RunSingleLoadTest(const TString &, bool)::(anonymous class)::operator()(ui32, std::unique_ptr &) const: (!memReg.Empty()) unable to extract mem region from chunk TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 operator() at /-S/ydb/core/load_test/ut/group_test_ut.cpp:0:29 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990:3 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 operator-> at /-S/contrib/libs/cxxsupp/libcxx/include/__memory/unique_ptr.h:266:101 TIntrusivePtr at /-S/util/generic/ptr.h:523:11 Get at /-S/util/generic/ptr.h:592:16 operator() at /-S/ydb/core/load_test/ut/group_test_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/load_test/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/load_test/ut/test-results/unittest/testing_out_stuff/GroupWriteTest.SimpleRdma.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/load_test/ut/test-results/unittest/testing_out_stuff/GroupWriteTest.SimpleRdma.out ------ FAIL: 5 - GOOD, 1 - FAIL ydb/core/load_test/ut ydb/tests/datashard/ttl [size:medium] nchunks:36 ------ [10/36] chunk ran 2 tests (total:399.81s - setup:0.01s test:399.73s) [fail] test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [default-linux-x86_64-release-asan] (283.40s) ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 2 lines, table table_Datetime_1_UNIQUE_SYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ [11/36] chunk ran 2 tests (total:384.02s - setup:0.01s test:383.94s) [fail] test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] [default-linux-x86_64-release-asan] (273.37s) ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 5 lines, table table_Datetime_1__SYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ [6/36] chunk ran 2 tests (total:381.12s - test:380.86s) [fail] test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] [default-linux-x86_64-release-asan] (254.18s) ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) 
ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 6 lines, table table_Date_0__ASYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ [7/36] chunk ran 2 tests (total:400.66s - setup:0.01s test:400.57s) [fail] test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [default-linux-x86_64-release-asan] (277.93s) ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 2 lines, table table_Date_1_UNIQUE_SYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ [8/36] chunk ran 2 tests (total:395.53s - test:395.21s) [fail] test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] [default-linux-x86_64-release-asan] (281.66s) ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 4 lines, table table_Date_1__SYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ FAIL: 43 - GOOD, 5 - FAIL ydb/tests/datashard/ttl ydb/tests/functional/serverless [size:medium] nchunks:20 ------ [test_serverless.py 3/10] chunk ran 5 tests (total:647.14s - test:600.06s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] (good) duration: 200.92s 
test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] (good) duration: 170.04s test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] (good) duration: 163.26s test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] (timeout) duration: 84.31s test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] (good) duration: 10.46s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/stderr [timeout] test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [default-linux-x86_64-release-asan] (84.31s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_column_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 4/10] chunk ran 4 tests (total:440.95s - test:437.93s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [default-linux-x86_64-release-asan] (41.99s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? 
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false-enable_pool_encryption--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [default-linux-x86_64-release-asan] (105.54s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? 
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--true_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false-enable_pool_encryption--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [default-linux-x86_64-release-asan] (106.01s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. 
E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [default-linux-x86_64-release-asan] (175.94s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--true_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 9/10] chunk ran 4 tests (total:633.01s - test:600.10s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] (good) duration: 249.04s test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] (timeout) duration: 219.23s test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] (good) duration: 161.25s test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] test was not launched inside chunk. 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/stderr [timeout] test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [default-linux-x86_64-release-asan] (219.23s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_turn_on_serverless_storage_billing.enable_alter_database_create_hive_first--true-enable_pool_encryption--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ TIMEOUT: 37 - GOOD, 4 - FAIL, 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/functional/serverless ydb/tests/olap/column_family/compression [size:medium] nchunks:10 ------ [0/10] chunk ran 3 tests (total:406.73s - test:406.64s) [fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_10_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 10] [default-linux-x86_64-release-asan] (71.73s) ydb/tests/olap/column_family/compression/alter_compression.py:194: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8 E assert 1015691 == (8000000 // 8) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_10_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.10.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff ------ [1/10] chunk ran 3 tests (total:454.04s - setup:0.01s test:453.91s) [fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] [default-linux-x86_64-release-asan] (90.90s) ydb/tests/olap/column_family/compression/alter_compression.py:194: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8 E assert 1015686 == (8000000 // 8) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_14_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.14.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff ------ [2/10] chunk ran 3 tests (total:486.76s - setup:0.02s test:486.62s) [fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_15_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 15] [default-linux-x86_64-release-asan] (199.47s) ydb/tests/olap/column_family/compression/alter_compression.py:194: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8 E assert 1015704 == (8000000 // 8) Log: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_15_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.15.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff ------ [7/10] chunk ran 2 tests (total:304.35s - test:304.25s) [fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_6_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 6] [default-linux-x86_64-release-asan] (174.87s) ydb/tests/olap/column_family/compression/alter_compression.py:194: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8 E assert 1015340 == (8000000 // 8) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_6_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.6.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff ------ FAIL: 19 - GOOD, 4 - FAIL ydb/tests/olap/column_family/compression ydb/core/statistics/service/ut [size:medium] nchunks:60 ------ [15/60] chunk ran 1 test (total:610.83s - test:600.04s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ColumnStatistics::CountMinSketchServerlessStatistics (timeout) duration: 609.04s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/stderr [timeout] ColumnStatistics::CountMinSketchServerlessStatistics [default-linux-x86_64-release-asan] (609.04s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.CountMinSketchServerlessStatistics.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.CountMinSketchServerlessStatistics.out ------ [17/60] chunk ran 1 test (total:605.28s - test:600.10s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ColumnStatistics::EqWidthHistogram (timeout) duration: 603.69s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/stderr [timeout] ColumnStatistics::EqWidthHistogram [default-linux-x86_64-release-asan] (603.69s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.EqWidthHistogram.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.EqWidthHistogram.out ------ TIMEOUT: 24 - GOOD, 2 - TIMEOUT ydb/core/statistics/service/ut ydb/core/client/ut [size:medium] nchunks:60 ------ [10/60] chunk ran 2 tests (total:26.60s - recipes:1.30s test:24.29s recipes:0.92s) [fail] TFlatTest::LsPathId [default-linux-x86_64-release-asan] (5.72s) assertion failed at ydb/core/client/flat_ut.cpp:1060, void NKikimr::NFlatTests::NTestSuiteTFlatTest::TestLsUknownPathId(TFlatMsgBusClient &, ui64, ui64): (res->Record.HasPathDescription() == false) failed: (1 != 0) Unxepected description for pathId 2 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 NKikimr::NFlatTests::NTestSuiteTFlatTest::TestLsUknownPathId(NKikimr::NFlatTests::TFlatMsgBusClient&, unsigned long, unsigned long) at /-S/ydb/core/client/flat_ut.cpp:0:9 NKikimr::NFlatTests::NTestSuiteTFlatTest::TTestCaseLsPathId::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/client/flat_ut.cpp:1078:30 operator() at /-S/ydb/core/client/flat_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/client/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/client/ut/test-results/unittest/testing_out_stuff/TFlatTest.LsPathId.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/client/ut/test-results/unittest/testing_out_stuff/TFlatTest.LsPathId.out ------ FAIL: 122 - GOOD, 1 - FAIL ydb/core/client/ut Total 431 suites: 397 - GOOD 28 - FAIL 6 - TIMEOUT Total 10770 tests: 10557 - GOOD 184 - FAIL 9 - NOT_LAUNCHED 8 - TIMEOUT 7 - SKIPPED 5 - CRASHED Cache efficiency ratio is 81.61% (44717 of 54792). 
Local: 0 (0.00%), dist: 15580 (28.43%), by dynamic uids: 0 (0.00%), avoided: 29137 (53.18%) Dist cache download: count=5530, size=12.34 GiB, speed=99.8 MiB/s Disk usage for tools/sdk at least 31.02 MiB Additional disk space consumed for build cache 1.08 TiB Critical path: [ 44666 ms] [CC] [vr1RyWIiUYRSvUIU_C-TJA tool]: $(BUILD_ROOT)/ydb/core/protos/config.pb.cc [started: 0 (1766511674513), finished: 44666 (1766511719179)] [ 427 ms] [AR] [hqIawOPZitexJxct-Y24-g tool]: $(BUILD_ROOT)/ydb/core/protos/libydb-core-protos.a [started: 45709 (1766511720222), finished: 46136 (1766511720649)] [ 1701 ms] [LD] [UQt4tdgIA947R_rXm4G5kg tool]: $(BUILD_ROOT)/ydb/core/control/lib/generated/codegen/ydb-core-control-generated-codegen [started: 46332 (1766511720845), finished: 48033 (1766511722546)] [ 86 ms] [PR] [SP2NndjvAHm0PtJ0iIxtHw default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/core/control/lib/generated/control_board_proto.h [started: 50186 (1766511724699), finished: 50272 (1766511724785)] [195791 ms] [CC] [6wQSAQ9arsx_9oiPNSX1XA default-linux-x86_64 release asan]: $(SOURCE_ROOT)/ydb/core/viewer/json_handlers_viewer.cpp [started: 820282 (1766512494795), finished: 1016073 (1766512690586)] [ 280 ms] [AR] [Wgz6lfaeftGs56dy_gcHRw default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/core/viewer/libydb-core-viewer.a [started: 1016645 (1766512691158), finished: 1016925 (1766512691438)] [ 50822 ms] [LD] [gj8-jJm03ls77t8uvv5qyw default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/apps/ydbd/ydbd [started: 2685004 (1766514359517), finished: 2735826 (1766514410339)] [647815 ms] [TM] [test-15989500525604782193 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 5674570 (1766517349083), finished: 6322385 (1766517996898)] [ 58101 ms] [TA] [test-8763634788469210463]: $(BUILD_ROOT)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} [started: 6340297 (1766518014810), finished: 6398398 (1766518072911)] Time from start: 6938303 ms, time elapsed by graph 999689 ms, time diff 5938614 ms. 
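A quick cross-check of the cache statistics a few lines above (a stand-alone sketch, not part of ya make or the CI tooling; all numbers are copied verbatim from this log): the reported 81.61% efficiency is the dist-cache hits plus the avoided tasks, divided by the total result count.

# Sanity check of the cache-efficiency arithmetic reported in this log.
total_results = 54792      # "44717 of 54792"
dist_hits = 15580          # "dist: 15580 (28.43%)"
avoided = 29137            # "avoided: 29137 (53.18%)"

cache_hits = dist_hits + avoided                             # 44717
print(f"efficiency:    {cache_hits / total_results:.2%}")    # 81.61%
print(f"dist share:    {dist_hits / total_results:.2%}")     # 28.43%
print(f"avoided share: {avoided / total_results:.2%}")       # 53.18%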
The longest 10 tasks: [969548 ms] [prepare:$(bazel-store-put)] local [count: 9463, cps: 9.76, ave time 102.46 msec] [647815 ms] [TM] [test-15989500525604782193 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 1766517349083, finished: 1766517996898] [633673 ms] [TM] [test-2354410924194659733 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 1766517380917, finished: 1766518014590] [625144 ms] [TM] [test-15742103540170447172 asan default-linux-x86_64 release]: ydb/core/cms/ut_sentinel_unstable/unittest [started: 1766514365085, finished: 1766514990229] [624885 ms] [TM] [test-9008413732644352584 asan default-linux-x86_64 release]: ydb/tests/stress/streaming/tests/py3test [started: 1766517703632, finished: 1766518328517] [611749 ms] [TM] [test-7706241441225952158 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1766515933856, finished: 1766516545605] [607682 ms] [TM] [test-1448508080036679943 asan default-linux-x86_64 release]: ydb/core/statistics/aggregator/ut/unittest [started: 1766516118279, finished: 1766516725961] [607050 ms] [TM] [test-6967591913488045060 asan default-linux-x86_64 release]: ydb/library/yql/dq/runtime/ut/unittest [started: 1766514979453, finished: 1766515586503] [606053 ms] [TM] [test-17324489200105524974 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1766516213756, finished: 1766516819809] [605990 ms] [TM] [test-9548216044578935444 asan default-linux-x86_64 release]: ydb/library/yql/dq/runtime/ut/unittest [started: 1766514983277, finished: 1766515589267] Total time by type: [163706423 ms] [TM] [count: 4805, ave time 34070.02 msec] [160106208 ms] [CC] [count: 3475, ave time 46073.73 msec] [ 16987766 ms] [prepare:get from dist cache] [count: 15580, ave time 1090.36 msec] [ 12602108 ms] [LD] [count: 613, ave time 20558.09 msec] [ 1629203 ms] [prepare:put to dist cache] [count: 9994, ave time 163.02 msec] [ 1358963 ms] [prepare:bazel-store] [count: 3, ave time 452987.67 msec] [ 1313478 ms] [TS] [count: 259, ave time 5071.34 msec] [ 643327 ms] [prepare:put into local cache, clean build dir] [count: 15602, ave time 41.23 msec] [ 513699 ms] [TA] [count: 263, ave time 1953.23 msec] [ 290164 ms] [prepare:tools] [count: 22, ave time 13189.27 msec] [ 217556 ms] [PY] [count: 23, ave time 9458.96 msec] [ 212620 ms] [prepare:AC] [count: 4, ave time 53155.00 msec] [ 111752 ms] [AR] [count: 523, ave time 213.67 msec] [ 15751 ms] [PB] [count: 25, ave time 630.04 msec] [ 2540 ms] [EN] [count: 41, ave time 61.95 msec] [ 2059 ms] [BN] [count: 15, ave time 137.27 msec] [ 1012 ms] [PR] [count: 20, ave time 50.60 msec] [ 560 ms] [BI] [count: 2, ave time 280.00 msec] [ 343 ms] [PD] [count: 2, ave time 171.50 msec] [ 216 ms] [CF] [count: 2, ave time 108.00 msec] [ 208 ms] [CP] [count: 3, ave time 69.33 msec] [ 129 ms] [PK] [count: 1, ave time 129.00 msec] [ 123 ms] [prepare:resources] [count: 1, ave time 123.00 msec] [ 109 ms] [UN] [count: 2, ave time 54.50 msec] [ 109 ms] [SB] [count: 1, ave time 109.00 msec] [ 15 ms] [prepare:clean] [count: 3, ave time 5.00 msec] Total tasks times: Total failed tasks time - 0 ms (0.00%) Total tests tasks time - 165533600 ms (48.89%) Total run tasks time - 338594260 ms Configure time - 5.8 s Statistics overhead 2867 ms Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json Ok + echo 0
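For context on the repeated test_database_with_disk_quotas failures reported above: pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*') fails on the message match rather than on the exception type, because the server answers with "Disk space exhausted ... database is out of disk space" (issue_code 2033). Below is a minimal stand-alone sketch of that failure pattern, using plain pytest with a stand-in exception class instead of the real ydb client; the names and the message text are illustrative, not the actual YDB test code.

import pytest

class Unavailable(Exception):
    """Stand-in for ydb.issues.Unavailable (illustrative only)."""

def write_beyond_quota():
    # Wording currently returned by the server (issue_code 2033); it does not
    # contain the DATABASE_DISK_SPACE_QUOTA_EXCEEDED token the test expects.
    raise Unavailable(
        "Disk space exhausted. Table `/Root/quoted_serverless/.../dirA0/table`. "
        "Cannot perform writes: database is out of disk space"
    )

def test_quota_error_wording():
    # Same structure as the failing assertion in test_serverless.py:457:
    # the exception type matches, the regex does not, so pytest reports
    # "AssertionError: Regex pattern did not match."
    with pytest.raises(Unavailable, match=r".*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*"):
        write_beyond_quota()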